code (string, 4–4.48k chars) | docstring (string, 1–6.45k chars) | _id (string, 24 chars)
---|---|---|
class SearchResultsAnswer(Answer): <NEW_LINE> <INDENT> _validation = { '_type': {'required': True}, 'id': {'readonly': True}, 'web_search_url': {'readonly': True}, 'follow_up_queries': {'readonly': True}, 'total_estimated_matches': {'readonly': True}, 'is_family_friendly': {'readonly': True}, } <NEW_LINE> _attribute_map = { '_type': {'key': '_type', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'web_search_url': {'key': 'webSearchUrl', 'type': 'str'}, 'follow_up_queries': {'key': 'followUpQueries', 'type': '[Query]'}, 'total_estimated_matches': {'key': 'totalEstimatedMatches', 'type': 'long'}, 'is_family_friendly': {'key': 'isFamilyFriendly', 'type': 'bool'}, } <NEW_LINE> _subtype_map = { '_type': {'Videos': 'Videos'} } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(SearchResultsAnswer, self).__init__(**kwargs) <NEW_LINE> self.total_estimated_matches = None <NEW_LINE> self.is_family_friendly = None <NEW_LINE> self._type = 'SearchResultsAnswer' | SearchResultsAnswer.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: Videos
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:param _type: Required. Constant filled by server.
:type _type: str
:ivar id: A String identifier.
:vartype id: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar follow_up_queries:
:vartype follow_up_queries:
list[~azure.cognitiveservices.search.videosearch.models.Query]
:ivar total_estimated_matches: The estimated number of webpages that are
relevant to the query. Use this number along with the count and offset
query parameters to page the results.
:vartype total_estimated_matches: long
:ivar is_family_friendly:
:vartype is_family_friendly: bool | 6259905956b00c62f0fb3e86 |
class Cache(_BaseCache): <NEW_LINE> <INDENT> def __new__(cls, *args, **kwargs): <NEW_LINE> <INDENT> if config.CACHE_ENABLED: <NEW_LINE> <INDENT> new_cls = TimedCache <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_cls = NullCache <NEW_LINE> <DEDENT> instance = super(Cache, cls).__new__(new_cls) <NEW_LINE> instance.__init__(*args, **kwargs) <NEW_LINE> return instance | A factory class which returns an instance of a cache subclass.
If config.CACHE_ENABLED is False, the dummy inactive cache will be
returned. | 625990591b99ca4002290015
class UserAvatar(Model): <NEW_LINE> <INDENT> __core__ = False <NEW_LINE> AVATAR_TYPES = ( (0, 'letter_avatar'), (1, 'upload'), (2, 'gravatar'), ) <NEW_LINE> ALLOWED_SIZES = (20, 32, 48, 52, 64, 80, 96, 120) <NEW_LINE> user = FlexibleForeignKey('sentry.User', unique=True, related_name='avatar') <NEW_LINE> file = FlexibleForeignKey('sentry.File', unique=True, null=True, on_delete=models.SET_NULL) <NEW_LINE> ident = models.CharField(max_length=32, unique=True, db_index=True) <NEW_LINE> avatar_type = models.PositiveSmallIntegerField(default=0, choices=AVATAR_TYPES) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = 'sentry' <NEW_LINE> db_table = 'sentry_useravatar' <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> if not self.ident: <NEW_LINE> <INDENT> self.ident = uuid.uuid4().hex <NEW_LINE> <DEDENT> return super(UserAvatar, self).save(*args, **kwargs) <NEW_LINE> <DEDENT> def delete(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.file: <NEW_LINE> <INDENT> self.file.delete() <NEW_LINE> <DEDENT> return super(UserAvatar, self).delete(*args, **kwargs) <NEW_LINE> <DEDENT> def get_cache_key(self, size): <NEW_LINE> <INDENT> return 'avatar:%s:%s' % (self.user_id, size) <NEW_LINE> <DEDENT> def clear_cached_photos(self): <NEW_LINE> <INDENT> cache.delete_many([self.get_cache_key(x) for x in self.ALLOWED_SIZES]) <NEW_LINE> <DEDENT> def get_cached_photo(self, size): <NEW_LINE> <INDENT> if not self.file: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if size not in self.ALLOWED_SIZES: <NEW_LINE> <INDENT> size = min(self.ALLOWED_SIZES, key=lambda x: abs(x - size)) <NEW_LINE> <DEDENT> cache_key = self.get_cache_key(size) <NEW_LINE> photo = cache.get(cache_key) <NEW_LINE> if photo is None: <NEW_LINE> <INDENT> photo_file = self.file.getfile() <NEW_LINE> with Image.open(photo_file) as image: <NEW_LINE> <INDENT> image = image.resize((size, size)) <NEW_LINE> image_file = BytesIO() <NEW_LINE> image.save(image_file, 'PNG') <NEW_LINE> photo = image_file.getvalue() <NEW_LINE> cache.set(cache_key, photo) <NEW_LINE> <DEDENT> <DEDENT> return photo | A UserAvatar associates a User with their avatar photo File
and contains their preferences for avatar type. | 625990597cff6e4e811b6ffe |
class Cartesian(Space): <NEW_LINE> <INDENT> def __init__(self, *spaces): <NEW_LINE> <INDENT> def wrap(space): <NEW_LINE> <INDENT> if isinstance(space, Space): return space <NEW_LINE> if not islist(space): return Singular(space) <NEW_LINE> if len(space) == 1: return Singular(space[0]) <NEW_LINE> return Nominal(*space) <NEW_LINE> <DEDENT> self.spaces = [wrap(s) for s in spaces] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> prod = 1 <NEW_LINE> for space in self.spaces: <NEW_LINE> <INDENT> d = len(space) <NEW_LINE> if d in (0, None): return d <NEW_LINE> prod *= d <NEW_LINE> <DEDENT> return prod <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> n = len(self.spaces) <NEW_LINE> if not n: return <NEW_LINE> for v in self.subiter(n): yield v <NEW_LINE> <DEDENT> def subiter(self, dim): <NEW_LINE> <INDENT> space = self.spaces[dim-1] <NEW_LINE> if dim == 1: <NEW_LINE> <INDENT> for w in space: yield (w,) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for v in self.subiter(dim-1): <NEW_LINE> <INDENT> for w in space: yield v + (w,) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def cast(self, value): <NEW_LINE> <INDENT> dims = zip(self.spaces, value) <NEW_LINE> value = (d[0].cast(d[1]) for d in dims) <NEW_LINE> if None in value: return None <NEW_LINE> return value <NEW_LINE> <DEDENT> def move(self, start, step): <NEW_LINE> <INDENT> dims = zip(self.spaces, start, step) <NEW_LINE> stop = (d[0].move(d[1], d[2]) for d in dims) <NEW_LINE> if None in stop: return None <NEW_LINE> return stop | Cartesian product of multiple Spaces. A multidimensional set of allowed values for a given combination of knobs. Each multi-value is a tuple. | 6259905991af0d3eaad3b3e3 |
class Tokenizer(ABC): <NEW_LINE> <INDENT> languages = [] <NEW_LINE> def __init__(self, normalize: bool, lower: bool, language: str = "unk"): <NEW_LINE> <INDENT> if language not in self.languages: <NEW_LINE> <INDENT> raise NotImplementedError( "{} is not in {}".format(language, self.languages) ) <NEW_LINE> <DEDENT> self.tokenizer = None <NEW_LINE> self.lower = lower <NEW_LINE> self.normalize = normalize <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def vocab_size(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def encode(self, sentnece: str): <NEW_LINE> <INDENT> NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def decode(self, tokens: List[int]): <NEW_LINE> <INDENT> NotImplementedError() <NEW_LINE> <DEDENT> def _normalize(self, sentence: str): <NEW_LINE> <INDENT> sentence = normalize(sentence) <NEW_LINE> if self.lower: <NEW_LINE> <INDENT> sentence = sentence.lower() <NEW_LINE> <DEDENT> sentence = unicodedata.normalize("NFKC", sentence) <NEW_LINE> return sentence <NEW_LINE> <DEDENT> def tokenize(self, sentence: str): <NEW_LINE> <INDENT> return sentence.split() <NEW_LINE> <DEDENT> def tokenize_list(self, sentences: List[str]): <NEW_LINE> <INDENT> return [self.tokenize(sentence) for sentence in sentences] | Base Tokenizer
Attributes:
tokenizer: tokenizer e.g. MeCab, Sudachi | 62599059baa26c4b54d5085f |
class _DbfsHost(ParamType): <NEW_LINE> <INDENT> def convert(self, value, param, ctx): <NEW_LINE> <INDENT> if value.startswith("https://"): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fail("The host does not start with https://") | Used to validate the configured host | 6259905915baa7234946354d |
@collection( name='quality-metrics-fastqc', properties={ 'title': 'FastQC Quality Metrics', 'description': 'Listing of FastQC Quality Metrics', }) <NEW_LINE> class QualityMetricFastqc(QualityMetric): <NEW_LINE> <INDENT> item_type = 'quality_metric_fastqc' <NEW_LINE> schema = load_schema('encoded:schemas/quality_metric_fastqc.json') <NEW_LINE> embedded_list = QualityMetric.embedded_list | Subclass of quality metrics for fastq files. | 62599059097d151d1a2c2627 |
class Merge(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def merge_sort(array): <NEW_LINE> <INDENT> length = len(array) <NEW_LINE> if length < 2: <NEW_LINE> <INDENT> return array <NEW_LINE> <DEDENT> middle = length//2 <NEW_LINE> left = Merge.merge_sort(array[:middle]) <NEW_LINE> right = Merge.merge_sort(array[middle:]) <NEW_LINE> return Merge.merge(left, right) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def merge(left, right): <NEW_LINE> <INDENT> new = [] <NEW_LINE> left_index, right_index = 0, 0 <NEW_LINE> len_left, len_right = len(left), len(right) <NEW_LINE> while left_index < len_left and right_index < len_right: <NEW_LINE> <INDENT> if left[left_index] <= right[right_index]: <NEW_LINE> <INDENT> new.append(left[left_index]) <NEW_LINE> left_index += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new.append(right[right_index]) <NEW_LINE> right_index += 1 <NEW_LINE> <DEDENT> <DEDENT> new += left[left_index:] <NEW_LINE> new += right[right_index:] <NEW_LINE> return new | Contains various merge sort implementations.
http://en.wikipedia.org/wiki/Merge_sort | 625990590fa83653e46f64a1 |
class SimpleTrajectoryWriter(TrajectoryWriter): <NEW_LINE> <INDENT> header = "frame ID x y z\n" <NEW_LINE> row_template = Template("$frame $id $x $y $z\n") <NEW_LINE> def write_header(trajectory_file: pathlib.Path) -> None: <NEW_LINE> <INDENT> with trajectory_file.open("w") as f: <NEW_LINE> <INDENT> f.write(SimpleTrajectoryWriter.header) <NEW_LINE> <DEDENT> <DEDENT> def write_trajectory( trajectory_file: pathlib.Path, step: int, agents_per_level: Dict[Level, Agent], ) -> None: <NEW_LINE> <INDENT> with trajectory_file.open("a") as f: <NEW_LINE> <INDENT> for lvl, agents in agents_per_level.items(): <NEW_LINE> <INDENT> for agent in agents: <NEW_LINE> <INDENT> row = SimpleTrajectoryWriter.row_template.substitute( frame=step, id=agent.id, x=agent.pos.x.m, y=agent.pos.y.m, z=lvl.id, ) <NEW_LINE> f.write(row) | First simple trajectory writer, output is a csv with spaces as delimiter. First line contains the information,
what kind of data the specific column holds.
Note: The z-coordinate is currently just the level information! This needs to be replaced when we have a proper
mapping of (x,y)-position+level to the z position.
Output is structured as follows (example):
------
frame ID x y z
0 1 0 0 2
0 2 1 -1 0
1 1 0.1 -0.1 2
1 2 1 -1.1 0
.... | 62599059a79ad1619776b59b |
class QandAInline(admin.TabularInline): <NEW_LINE> <INDENT> model = QandA <NEW_LINE> extra = 2 | Set up a tabular list of questions and answers
for the admin interface | 6259905929b78933be26aba2 |
class Assign(Code): <NEW_LINE> <INDENT> def __init__(self, lhs, rhs): <NEW_LINE> <INDENT> self.lhs = lhs <NEW_LINE> self.rhs = rhs <NEW_LINE> super(Assign, self).__init__() | Assign object on right to object on left.
| 625990598da39b475be047a2 |
class Configuration(object): <NEW_LINE> <INDENT> def __init__(self, baseConfiguration): <NEW_LINE> <INDENT> self.baseUri = baseConfiguration.baseUri or "" <NEW_LINE> self.hostname = baseConfiguration.uriHostname or "" <NEW_LINE> self.basePath = baseConfiguration.uriPath or "/" <NEW_LINE> self.username = baseConfiguration.username <NEW_LINE> self.password = baseConfiguration.password | Defines a set of SFTP configuration parameters. | 625990593eb6a72ae038bc1b |
class AllMarketDataSchema(MarketDataSchema): <NEW_LINE> <INDENT> history = f.List(f.Nested(HistoryItemSchema()), required=True) <NEW_LINE> asks = f.List(f.Nested(OrderItemSchema()), required=True) <NEW_LINE> bids = f.List(f.Nested(OrderItemSchema()), required=True) | A list of open buy and sell market orders, and the purchase history | 6259905907d97122c4218260 |
class SugiyamaGraphWidget(RelationalVisualizationWidget): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> verbose_name = _("Sugiyama Graph") <NEW_LINE> verbose_name_plural = _("Sugiyama Graphs") | Widget which shows Sugiyama layered graph. | 625990597b25080760ed87bd |
class SearchVariantAnnotationSetsRunner(AbstractSearchRunner): <NEW_LINE> <INDENT> def __init__(self, args): <NEW_LINE> <INDENT> super(SearchVariantAnnotationSetsRunner, self).__init__(args) <NEW_LINE> self._variantSetId = args.variantSetId <NEW_LINE> <DEDENT> def _run(self, variantSetId): <NEW_LINE> <INDENT> iterator = self._client.searchVariantAnnotationSets( variantSetId=variantSetId) <NEW_LINE> self._output(iterator) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self._run(self._variantSetId) | Runner class for the variantannotationsets/search method. | 62599059b7558d5895464a09 |
class Meta(object): <NEW_LINE> <INDENT> model = User <NEW_LINE> exclude = [] | Meta options for the User Admin Form | 62599059baa26c4b54d50860 |
class IOPubChannel(ZMQSocketChannel): <NEW_LINE> <INDENT> def __init__(self, context, session, address): <NEW_LINE> <INDENT> super(IOPubChannel, self).__init__(context, session, address) <NEW_LINE> self.ioloop = ioloop.IOLoop() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.socket = self.context.socket(zmq.SUB) <NEW_LINE> self.socket.setsockopt(zmq.SUBSCRIBE,b'') <NEW_LINE> self.socket.setsockopt(zmq.IDENTITY, self.session.bsession) <NEW_LINE> self.socket.connect(self.address) <NEW_LINE> self.stream = zmqstream.ZMQStream(self.socket, self.ioloop) <NEW_LINE> self.stream.on_recv(self._handle_recv) <NEW_LINE> self._run_loop() <NEW_LINE> <DEDENT> def call_handlers(self, msg): <NEW_LINE> <INDENT> raise NotImplementedError('call_handlers must be defined in a subclass.') <NEW_LINE> <DEDENT> def flush(self, timeout=1.0): <NEW_LINE> <INDENT> stop_time = time.time() + timeout <NEW_LINE> for i in range(2): <NEW_LINE> <INDENT> self._flushed = False <NEW_LINE> self.ioloop.add_callback(self._flush) <NEW_LINE> while not self._flushed and time.time() < stop_time: <NEW_LINE> <INDENT> time.sleep(0.01) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _flush(self): <NEW_LINE> <INDENT> self.stream.flush() <NEW_LINE> self._flushed = True | The iopub channel which listens for messages that the kernel publishes.
This channel is where all output is published to frontends. | 625990590c0af96317c5783d |
class hReview(LocationAwareMicroformat): <NEW_LINE> <INDENT> ITEM_TYPE = ( ('product', _('Product')), ('business', _('Business')), ('event', _('Event')), ('person', _('Person')), ('place', _('Place')), ('website', _('Website')), ('url', _('URL')), ('book', _('Book')), ('film', _('Film')), ('music', _('Music')), ('software', _('Software')), ) <NEW_LINE> RATINGS = ( (1, _('1')), (2, _('2')), (3, _('3')), (4, _('4')), (5, _('5')), ) <NEW_LINE> summary = models.TextField( _("Summary"), blank=True, help_text=_('To serve as a title for the review.') ) <NEW_LINE> description = models.TextField( _('Description'), blank=True ) <NEW_LINE> rating = models.IntegerField( _('Rating'), choices=RATINGS, help_text=_('1 = worst, 5 = best') ) <NEW_LINE> dtreviewed = models.DateTimeField( _('Date of Review'), null=True, blank=True ) <NEW_LINE> reviewer = models.CharField( _('Reviewer Name'), max_length=256, default=_('Anonymous'), help_text=_('Defaults to "Anonymous" if not supplied') ) <NEW_LINE> type = models.CharField( _("Item Type"), max_length=8, choices=ITEM_TYPE, help_text=_('The kind of thing this review is for') ) <NEW_LINE> fn = models.CharField( _('Item Name'), max_length=256 ) <NEW_LINE> url = models.URLField( _('Item URL'), blank=True, verify_exists=True ) <NEW_LINE> tel = models.CharField( _('Telephone'), max_length=64, blank=True ) <NEW_LINE> photo = models.ImageField( upload_to='hreviewphoto', null=True, blank=True ) <NEW_LINE> dtstart = models.DateTimeField( _('Start'), null=True, blank=True ) <NEW_LINE> dtend = models.DateTimeField( _('End'), null=True, blank=True ) <NEW_LINE> all_day_event = models.BooleanField( _('All day event'), default=False ) <NEW_LINE> tz = models.CharField( _('Timezone'), max_length=8, blank=True, choices=TIMEZONE, help_text=_("Hour(s) from GMT") ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _('hReview') <NEW_LINE> verbose_name_plural = _('hReviews') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u"%s: %s/5"%(self.fn, self.get_rating_display()) | hReview is a simple, open, distributed format, suitable for embedding
reviews (of products, services, businesses, events, etc.) in HTML, XHTML,
Atom, RSS, and arbitrary XML. hReview is one of several microformats open
standards.
For more information see:
http://microformats.org/wiki/hreview
(I've omitted the "version" field as I'm assuming version 0.2 of the hReview
microformat specification [or later]. See the URL referenced above for more
information.) | 625990596e29344779b01c08 |
class UserSettingsForm(forms.Form): <NEW_LINE> <INDENT> username = forms.CharField(required=False, max_length=30) <NEW_LINE> email = forms.EmailField(required=False) <NEW_LINE> mobile_phone = forms.CharField(required=False, max_length=20) <NEW_LINE> desk_phone = forms.CharField(required=False, max_length=20) <NEW_LINE> first_name = forms.CharField(required=False, max_length=30) <NEW_LINE> last_name = forms.CharField(required=False, max_length=30) <NEW_LINE> organization = forms.CharField(required=False, max_length=30) <NEW_LINE> image_file = forms.FileField(required=False) | Form to set BuildingSpeak
user login info. | 625990594e4d5625663739c3 |
class SumIntegerRV(RandomVariable): <NEW_LINE> <INDENT> def __init__(self, mass_function, ndraw): <NEW_LINE> <INDENT> mass_function = np.array(mass_function) <NEW_LINE> mass_function /= mass_function.sum() <NEW_LINE> self._rv = Multinomial(mass_function) <NEW_LINE> self._mass_fn = mass_function <NEW_LINE> self._mass_function = dict(zip(*conv_integer_rv(self._mass_fn, ndraw))) <NEW_LINE> self._sample_space = self._mass_function.keys() <NEW_LINE> self._ndraw = ndraw <NEW_LINE> <DEDENT> def trial(self): <NEW_LINE> <INDENT> return np.sum(self._rv.sample(self._ndraw)) | Given a mass function on non-negative integers,
form the random variable that is the convolution
of this mass function `ndraw` times
The mass function specifies a random variable that
takes value i with probability proportional to mass_function[i]. | 6259905916aa5153ce401aa0
class GroupMembershipAuth(AmivTokenAuth): <NEW_LINE> <INDENT> def has_resource_write_permission(self, user_id): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def has_item_write_permission(self, user_id, item): <NEW_LINE> <INDENT> if user_id == str(get_id(item['user'])): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> collection = current_app.data.driver.db['groups'] <NEW_LINE> group = collection.find_one({'_id': get_id(item['group'])}, {'moderator': 1}) <NEW_LINE> return user_id == str(group.get('moderator')) <NEW_LINE> <DEDENT> <DEDENT> def create_user_lookup_filter(self, user_id): <NEW_LINE> <INDENT> group_collection = current_app.data.driver.db['groups'] <NEW_LINE> groups = group_collection.find({'moderator': ObjectId(user_id)}, {'_id': 1}) <NEW_LINE> moderated_groups = [group['_id'] for group in groups] <NEW_LINE> membership_collection = current_app.data.driver.db['groupmemberships'] <NEW_LINE> memberships = membership_collection.find({'user': ObjectId(user_id)}, {'group': 1}) <NEW_LINE> member_groups = [membership['group'] for membership in memberships] <NEW_LINE> return {'$or': [ {'group': {'$in': moderated_groups}}, {'group': {'$in': member_groups}} ]} | Auth for group memberships. | 62599059ac7a0e7691f73a9d |
@python_2_unicode_compatible <NEW_LINE> class Country(models.Model): <NEW_LINE> <INDENT> country = CountryField( db_index=True, unique=True, help_text=ugettext_lazy("Two character ISO country code.") ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{name} ({code})".format( name=str(self.country.name), code=str(self.country) ) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ['country'] | Representation of a country.
This is used to define country-based access rules.
There is a data migration that creates entries for
each country code.
.. no_pii: | 62599059379a373c97d9a5e1 |
class InputFiles: <NEW_LINE> <INDENT> def __init__(self, file1: BinaryIO, interleaved: bool = False): <NEW_LINE> <INDENT> self.file1 = file1 <NEW_LINE> self.interleaved = interleaved <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> return dnaio.open(self.file1, interleaved=self.interleaved, mode="r") | This is from cutadapt; it basically just creates a dnaio object | 625990598e7ae83300eea64a
class DatabaseExercise(models.Model): <NEW_LINE> <INDENT> name = models.CharField( max_length=128 ) <NEW_LINE> description = models.TextField( max_length=256, null=True, blank=True ) <NEW_LINE> image = models.ImageField( null=True, blank=True ) <NEW_LINE> is_active = models.BooleanField( verbose_name='activity', default=True ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f'{self.name}' | DatabaseExercise model stores information about all exercises used in
the training process. | 62599059460517430c432b30 |
class TrainingHistory: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.data = [] <NEW_LINE> <DEDENT> def add(self, epoch_result): <NEW_LINE> <INDENT> self.data.append(epoch_result) <NEW_LINE> <DEDENT> def frame(self): <NEW_LINE> <INDENT> return pd.DataFrame(self.data).set_index('epoch_idx') | Simple aggregator for the training history.
An output of training storing scalar metrics in a pandas dataframe. | 6259905915baa7234946354f |
class _PRO(object): <NEW_LINE> <INDENT> __slot__ = 'name', 'maxx', 'allocation', 'need', 'flag' <NEW_LINE> def __init__(self, name, maxx, allocation, need, flag=False): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.maxx = maxx <NEW_LINE> self.allocation = allocation <NEW_LINE> self.need = need <NEW_LINE> self.flag = flag <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.name | Internal node class, used to implement process independence | 625990592c8b7c6e89bd4dab
class LabelSmoothing(nn.Module): <NEW_LINE> <INDENT> def __init__(self, size, padding_idx, smoothing=0.0): <NEW_LINE> <INDENT> super(LabelSmoothing, self).__init__() <NEW_LINE> self.criterion = nn.KLDivLoss(size_average=False) <NEW_LINE> self.padding_idx = padding_idx <NEW_LINE> self.confidence = 1.0 - smoothing <NEW_LINE> self.smoothing = smoothing <NEW_LINE> self.size = size <NEW_LINE> self.true_dist = None <NEW_LINE> <DEDENT> def forward(self, x, target): <NEW_LINE> <INDENT> assert x.size(1) == self.size <NEW_LINE> true_dist = x.data.clone() <NEW_LINE> true_dist.fill_(self.smoothing / (self.size - 2)) <NEW_LINE> true_dist.scatter_(1, target.data.unsqueeze(1), self.confidence) <NEW_LINE> true_dist[:, self.padding_idx] = 0 <NEW_LINE> mask = torch.nonzero(target.data == self.padding_idx) <NEW_LINE> if mask.sum() and len(mask) > 0: <NEW_LINE> <INDENT> true_dist.index_fill_(0, mask.squeeze(), 0.0) <NEW_LINE> <DEDENT> self.true_dist = true_dist <NEW_LINE> return self.criterion(x, Variable(true_dist, requires_grad=False)) | Implement label smoothing. | 62599059a79ad1619776b59c |
class UserOauthToken(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField( User, on_delete=models.CASCADE, help_text='User for whom token is generated', ) <NEW_LINE> access_token = models.CharField( max_length=255, blank=False, help_text='Access Token of the user which is used to access the content' ) <NEW_LINE> refresh_token = models.CharField( max_length=255, blank=False, help_text='Refresh Token of the user used when Access Token expiers' ) <NEW_LINE> token_expiry = models.DateTimeField( help_text='Datetime when Access Token expires' ) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.user.username <NEW_LINE> <DEDENT> def is_token_expired(self): <NEW_LINE> <INDENT> return timezone.now() > (self.token_expiry - datetime.timedelta(minutes=5)) | Stores User's Oauth2 details | 62599059e5267d203ee6ce9f |
class AmE06(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> input = open("./Corpus/AmE06/AmE06.pkl", 'rb') <NEW_LINE> reader = load(input) <NEW_LINE> input.close() <NEW_LINE> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> filelist = [] <NEW_LINE> words = [] <NEW_LINE> for files in os.listdir("./Corpus/AmE06/"): <NEW_LINE> <INDENT> if files.endswith(".txt"): <NEW_LINE> <INDENT> filelist.append(files) <NEW_LINE> <DEDENT> <DEDENT> if(len(filelist)== 500): <NEW_LINE> <INDENT> for name in filelist: <NEW_LINE> <INDENT> f = open("./Corpus/AmE06/" + name) <NEW_LINE> lines = f.readlines() <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> tmp1 = nltk.sent_tokenize(line) <NEW_LINE> for lin in tmp1: <NEW_LINE> <INDENT> tmp = nltk.word_tokenize(lin) <NEW_LINE> for word in tmp: <NEW_LINE> <INDENT> for c in string.punctuation: <NEW_LINE> <INDENT> word= word.replace(c,"") <NEW_LINE> <DEDENT> words.append(word) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> f.close() <NEW_LINE> <DEDENT> a = open("./Corpus/AmE06/finalcorpa.txt", "wb") <NEW_LINE> for word in words: <NEW_LINE> <INDENT> if word not in ".,;!?\"": <NEW_LINE> <INDENT> a.write(word + '\n') <NEW_LINE> <DEDENT> <DEDENT> a.close() <NEW_LINE> reader = WordListCorpusReader('./Corpus/AmE06', ['finalcorpa.txt']) <NEW_LINE> output = open("./Corpus/AmE06/AmE06.pkl", 'wb') <NEW_LINE> dump(reader, output, -1) <NEW_LINE> output.close() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> reader = WordListCorpusReader('./Corpus/AmE06', ['finalcorpa.txt']) <NEW_LINE> output = open("./Corpus/AmE06/AmE06.pkl", 'wb') <NEW_LINE> dump(reader, output, -1) <NEW_LINE> output.close() <NEW_LINE> <DEDENT> <DEDENT> self.corpa = reader <NEW_LINE> <DEDENT> def getCorpa(self): <NEW_LINE> <INDENT> return self.corpa | Class for the AmE06 Wordlist corpora construction and encapsulation. | 62599059d7e4931a7ef3d63d |
class Parameter(object): <NEW_LINE> <INDENT> EQUALS_OPERATION = '=' <NEW_LINE> NOT_EQUALS_OPERATION = '!=' <NEW_LINE> GREATER_THAN_OPERATION = '>' <NEW_LINE> GREATER_THAN_OR_EQUALS_OPERATION = '>=' <NEW_LINE> LESS_THAN_OPERATION = '<' <NEW_LINE> LESS_THAN_OR_EQUALS_OPERATION = '<=' <NEW_LINE> CONTAINS_OPERATION = '=~' <NEW_LINE> NOT_CONTAINS_OPERATION = '!~' <NEW_LINE> def __init__(self, field, op, value): <NEW_LINE> <INDENT> self.field = field <NEW_LINE> self.op = op <NEW_LINE> self.value = value | A data-filtering parameter. | 62599059a17c0f6771d5d680 |
class RequestResult(object): <NEW_LINE> <INDENT> def __init__(self, request_id): <NEW_LINE> <INDENT> self.request_id = request_id <NEW_LINE> self.request_timeout_count = 60 <NEW_LINE> self.request_timeout = 5 <NEW_LINE> <DEDENT> def status(self, service): <NEW_LINE> <INDENT> for count in range(self.request_timeout_count): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> time.sleep(self.request_timeout) <NEW_LINE> return service.get_operation_status(self.request_id) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> status_exception = e <NEW_LINE> <DEDENT> <DEDENT> raise AzureRequestStatusError( '%s: %s' % ( type(status_exception).__name__, format(status_exception) ) ) <NEW_LINE> <DEDENT> def wait_for_request_completion(self, service): <NEW_LINE> <INDENT> count = 0 <NEW_LINE> result = self.status(service) <NEW_LINE> while result.status == 'InProgress': <NEW_LINE> <INDENT> count = count + 1 <NEW_LINE> if count > self.request_timeout_count: <NEW_LINE> <INDENT> raise AzureRequestTimeout( 'Operation %s timed out' % self.request_id ) <NEW_LINE> <DEDENT> time.sleep(self.request_timeout) <NEW_LINE> result = self.status(service) <NEW_LINE> <DEDENT> if result.status != 'Succeeded': <NEW_LINE> <INDENT> raise AzureRequestError( 'Operation %s failed. %s (%s)' % ( self.request_id, format(result.error.message), format(result.error.code) ) ) | operate on azure request ID and provide methods
to get status information as well as define operations
based on the request status | 6259905973bcbd0ca4bcb851 |
class View(object): <NEW_LINE> <INDENT> methods = None <NEW_LINE> decorators = () <NEW_LINE> def dispatch_request(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def as_view(cls, name, *class_args, **class_kwargs): <NEW_LINE> <INDENT> def view(*args, **kwargs): <NEW_LINE> <INDENT> self = view.view_class(*class_args, **class_kwargs) <NEW_LINE> return self.dispatch_request(*args, **kwargs) <NEW_LINE> <DEDENT> if cls.decorators: <NEW_LINE> <INDENT> view.__name__ = name <NEW_LINE> view.__module__ = cls.__module__ <NEW_LINE> for decorator in cls.decorators: <NEW_LINE> <INDENT> view = decorator(view) <NEW_LINE> <DEDENT> <DEDENT> view.view_class = cls <NEW_LINE> view.__name__ = name <NEW_LINE> view.__doc__ = cls.__doc__ <NEW_LINE> view.__module__ = cls.__module__ <NEW_LINE> view.methods = cls.methods <NEW_LINE> return view | Alternative way to use view functions. A subclass has to implement
:meth:`dispatch_request` which is called with the view arguments from
the URL routing system. If :attr:`methods` is provided the methods
do not have to be passed to the :meth:`~flask.Flask.add_url_rule`
method explicitly::
class MyView(View):
methods = ['GET']
def dispatch_request(self, name):
return 'Hello %s!' % name
api.add_url_rule('/hello/<name>', view_func=MyView.as_view('myview'))
When you want to decorate a pluggable view you will have to either do that
when the view function is created (by wrapping the return value of
:meth:`as_view`) or you can use the :attr:`decorators` attribute::
class SecretView(View):
methods = ['GET']
decorators = [superuser_required]
def dispatch_request(self):
...
The decorators stored in the decorators list are applied one after another
when the view function is created. Note that you can *not* use the class
based decorators since those would decorate the view class and not the
generated view function! | 625990594428ac0f6e659af9 |
class TestProbKill(object): <NEW_LINE> <INDENT> def setup(self): <NEW_LINE> <INDENT> self.copy_prob_kill = Carnivore.prob_kill <NEW_LINE> self.copy_params = Carnivore.params.copy() <NEW_LINE> self.carn = Carnivore(20, 13) <NEW_LINE> self.herb = Herbivore(20, 13) <NEW_LINE> <DEDENT> def teardown(self): <NEW_LINE> <INDENT> Carnivore.prob_kill = self.copy_prob_kill <NEW_LINE> Carnivore.params = self.copy_params <NEW_LINE> <DEDENT> def test_carn_kills_herb(self): <NEW_LINE> <INDENT> Carnivore.prob_kill = lambda _, __: 1 <NEW_LINE> nt.assert_true(self.carn.carn_kills_herb(self.herb), "Herbivore should die") <NEW_LINE> <DEDENT> def test_carn_kill_one(self): <NEW_LINE> <INDENT> self.carn.set_parameters({'DeltaPhiMax': 0.02}) <NEW_LINE> nt.assert_true(self.carn.carn_kills_herb(self.herb), "Herbivore should die") <NEW_LINE> <DEDENT> def test_carn_kill(self): <NEW_LINE> <INDENT> nt.assert_almost_equal(0.01983, self.carn.prob_kill(0.8), 5, "Returns wrong value for prob_kill") | Collects test that uses different parameters than default and allows
us to override methods. | 625990590c0af96317c5783e |
class DockerImage(object): <NEW_LINE> <INDENT> def __init__(self, repo, tag, client=None): <NEW_LINE> <INDENT> self.repo = repo <NEW_LINE> self.tag = tag <NEW_LINE> self.client = client <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<DockerImage(" + self.repo + "," + self.tag + ")>" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.repo + ":" + self.tag <NEW_LINE> <DEDENT> def exists(self, client=None): <NEW_LINE> <INDENT> if client is None: <NEW_LINE> <INDENT> client = self.client <NEW_LINE> <DEDENT> return str(self) in [img for sublist in client.images() for img in sublist['RepoTags']] <NEW_LINE> <DEDENT> def download(self, client=None): <NEW_LINE> <INDENT> if client is None: <NEW_LINE> <INDENT> client = self.client <NEW_LINE> <DEDENT> logging.debug("Downloading %s", self) <NEW_LINE> [logging.debug(line) for line in client.pull(repository=self.repo, tag=self.tag)] <NEW_LINE> if self.exists(): <NEW_LINE> <INDENT> logging.debug("Download complete") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.error("Something went wrong while downloading %s", self) <NEW_LINE> <DEDENT> <DEDENT> def build(self, dockerfile, client=None): <NEW_LINE> <INDENT> if client is None: <NEW_LINE> <INDENT> client = self.client <NEW_LINE> <DEDENT> [logging.debug(line) for line in client.build(path=dockerfile, rm=True, tag=str(self))] <NEW_LINE> <DEDENT> def addtag(self, repo, tag, client=None): <NEW_LINE> <INDENT> if client is None: <NEW_LINE> <INDENT> client = self.client <NEW_LINE> <DEDENT> client.tag(image=str(self), repository=repo, tag=tag) <NEW_LINE> return DockerImage(client, repo, tag) | Simple representation of a docker image as defined by the repo and tag fields | 6259905976e4537e8c3f0b4a |
class DivFunction(DiffFunction): <NEW_LINE> <INDENT> def __init__(self, f1, f2): <NEW_LINE> <INDENT> if f1.ndim != f2.ndim: <NEW_LINE> <INDENT> raise ValueError('functions dimension mismatch.') <NEW_LINE> <DEDENT> DiffFunction.__init__(self, _intersection(f1.input_ranges, f2.input_ranges), delta_list=None) <NEW_LINE> self._f1 = f1 <NEW_LINE> self._f2 = f2 <NEW_LINE> <DEDENT> def __call__(self, xi): <NEW_LINE> <INDENT> return self._f1(xi) / self._f2(xi) <NEW_LINE> <DEDENT> def deriv(self, xi, j): <NEW_LINE> <INDENT> f2_val = self._f2(xi) <NEW_LINE> return self._f1.deriv(xi, j) / f2_val - (self._f1(xi) * self._f2.deriv(xi, j) / (f2_val**2)) <NEW_LINE> <DEDENT> def jacobian(self, xi): <NEW_LINE> <INDENT> f1_val = self._f1(xi)[..., np.newaxis] <NEW_LINE> f2_val = self._f2(xi)[..., np.newaxis] <NEW_LINE> f1_jac = self._f1.jacobian(xi) <NEW_LINE> f2_jac = self._f2.jacobian(xi) <NEW_LINE> return f1_jac / f2_val - (f1_val * f2_jac) / (f2_val**2) | division of two DiffFunctions
Parameters
----------
f1 : DiffFunction
the first function.
f2 : DiffFunction
the second function. | 62599059b5575c28eb7137ab |
class DropdownToolButton(QToolButtonBase): <NEW_LINE> <INDENT> TOOLTIP = '' <NEW_LINE> def __init__(self, icon, parent=None): <NEW_LINE> <INDENT> super(DropdownToolButton, self).__init__(icon, parent) <NEW_LINE> self.setToolTip(self.TOOLTIP) <NEW_LINE> self.setPopupMode(QToolButtonBase.InstantPopup) <NEW_LINE> self.setStyleSheet("QToolButton::menu-indicator {image: none;}") <NEW_LINE> self.setup_menu() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def setup_menu(self): <NEW_LINE> <INDENT> menu = QMenu() <NEW_LINE> self.setMenu(menu) | A toolbutton with a dropdown menu. | 62599059379a373c97d9a5e3 |
class Configuration(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.username = getpass.getuser() <NEW_LINE> self.email = getpass.getuser()+'@localhost' | Class provides configuration parameters to all commands and sub commands | 6259905945492302aabfda96 |
class InvalidPluginFileMethodAlreadyExistError(Exception): <NEW_LINE> <INDENT> pass | Raised when two methods with the same name exist in a plugin file
| 62599059d6c5a102081e36df |
class Ubicacion(models.Model): <NEW_LINE> <INDENT> nombreCorto = models.CharField(max_length = 25) <NEW_LINE> nombreLargo = models.CharField(max_length = 50, blank = True) <NEW_LINE> idPincel = models.CharField(max_length = 25, unique = True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.nombreCorto | Represents a type of location within the school. | 625990593617ad0b5ee07709
class FlowField( HasPrivateTraits ): <NEW_LINE> <INDENT> digest = Property <NEW_LINE> def _get_digest( self ): <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> def v( self, xx): <NEW_LINE> <INDENT> v = array((0., 0., 0.)) <NEW_LINE> dv = array(((0., 0., 0.), (0., 0., 0.), (0., 0., 0.))) <NEW_LINE> return -v, -dv | An abstract base class for a spatial flow field. | 625990590fa83653e46f64a5 |
class FeatureNotSpecifiedError(FeatureError): <NEW_LINE> <INDENT> pass | Exception raised when a feature is unexpectedly not specified. | 6259905982261d6c527309aa |
class MultilineText(Element): <NEW_LINE> <INDENT> def __init__(self, text="", size=None, elements=None, normal_params=None): <NEW_LINE> <INDENT> Element.__init__(self, text, elements, normal_params) <NEW_LINE> self._size = size <NEW_LINE> self.visible = False <NEW_LINE> <DEDENT> def finish(self): <NEW_LINE> <INDENT> Element.finish(self) <NEW_LINE> if not self._size: <NEW_LINE> <INDENT> self._size = self.get_fus_rect() <NEW_LINE> <DEDENT> self.set_size(self._size) <NEW_LINE> for line in self.get_lines(STATE_NORMAL): <NEW_LINE> <INDENT> e = OneLineText(line) <NEW_LINE> e.finish() <NEW_LINE> e.set_writer(self.current_state.fusionner.title._writer) <NEW_LINE> self.add_elements([e]) <NEW_LINE> <DEDENT> self.format_txt() <NEW_LINE> <DEDENT> def build_elements(self): <NEW_LINE> <INDENT> for e in self._elements: <NEW_LINE> <INDENT> e.father = None <NEW_LINE> <DEDENT> self._elements = [] <NEW_LINE> self._blit_before = [] <NEW_LINE> self._blit_after = [] <NEW_LINE> self.set_size(self._size) <NEW_LINE> for line in self.get_lines(STATE_NORMAL): <NEW_LINE> <INDENT> e = OneLineText(line) <NEW_LINE> e.finish() <NEW_LINE> e.set_writer(self.current_state.fusionner.title._writer) <NEW_LINE> self.add_elements([e]) <NEW_LINE> <DEDENT> self.format_txt() <NEW_LINE> <DEDENT> def format_txt(self): <NEW_LINE> <INDENT> title = self._states[STATE_NORMAL].fusionner.title <NEW_LINE> (x, y) = title._pos <NEW_LINE> r = title.get_rect() <NEW_LINE> for i in self._elements: <NEW_LINE> <INDENT> (w, h) = i.get_fus_size() <NEW_LINE> if title._align is "left": <NEW_LINE> <INDENT> x = title._pos[0] <NEW_LINE> <DEDENT> elif title._align is "center": <NEW_LINE> <INDENT> x = (r.width - w) // 2 <NEW_LINE> <DEDENT> elif title._align is "right": <NEW_LINE> <INDENT> x = r.width - w <NEW_LINE> <DEDENT> i.set_topleft((x, y)) <NEW_LINE> y += title._space + h <NEW_LINE> <DEDENT> <DEDENT> def set_font_color(self, color, state=None, center_title=True): <NEW_LINE> <INDENT> Element.set_font_color(self, color, state, center_title) <NEW_LINE> self.build_elements() <NEW_LINE> <DEDENT> def set_font_size(self, size, state=None, center_title=True): <NEW_LINE> <INDENT> Element.set_font_size(self, size, state, center_title) <NEW_LINE> self.build_elements() <NEW_LINE> <DEDENT> def set_font(self, fontname, state=None, center_title=True): <NEW_LINE> <INDENT> Element.set_font(self, fontname, state, center_title) <NEW_LINE> self.set_hovered_states(self._states_hover) <NEW_LINE> <DEDENT> def set_font_effects(self, biu, state=None, center=True, preserve=False): <NEW_LINE> <INDENT> Element.set_font_effects(self, biu, state, center, preserve) <NEW_LINE> self.build_elements() | Simple text on multiple lines. | 62599059a219f33f346c7dc4 |
class OrderedCounter(Counter, OrderedDict): <NEW_LINE> <INDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s(%r)" % (self.__class__.__name__, OrderedDict(self)) <NEW_LINE> <DEDENT> def __reduce__(self): <NEW_LINE> <INDENT> return self.__class__, (OrderedDict(self),) | Counter that remembers the order elements are first encountered
Examples:
>>> OrderedCounter('abracadabra')
OrderedCounter(OrderedDict([('a', 5), ('b', 2), ('r', 2), ('c', 1), ('d', 1)])) | 625990599c8ee82313040c6a |
class InitialReissuanceTokenTest(BitcoinTestFramework): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.setup_clean_chain = True <NEW_LINE> self.num_nodes = 2 <NEW_LINE> self.node_args = [["-initialreissuancetokens=200000000", "-initialfreecoins=2000000000000000"], ["-initialreissuancetokens=200000000", "-initialfreecoins=2000000000000000"]] <NEW_LINE> <DEDENT> def setup_network(self, split=False): <NEW_LINE> <INDENT> self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, self.node_args) <NEW_LINE> connect_nodes_bi(self.nodes,0,1) <NEW_LINE> self.is_network_split = False <NEW_LINE> self.sync_all() <NEW_LINE> <DEDENT> def run_test(self): <NEW_LINE> <INDENT> self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 20000000, "", "", True) <NEW_LINE> self.nodes[0].generate(101) <NEW_LINE> self.sync_all() <NEW_LINE> walletinfo=self.nodes[0].getwalletinfo() <NEW_LINE> balance=walletinfo['balance'] <NEW_LINE> token="" <NEW_LINE> for i in balance: <NEW_LINE> <INDENT> token = i <NEW_LINE> if token != "bitcoin": <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2, "", "", False, token) <NEW_LINE> self.nodes[0].generate(101) <NEW_LINE> self.sync_all() <NEW_LINE> walletinfo1 = self.nodes[0].getwalletinfo() <NEW_LINE> assert_equal(walletinfo1["balance"]["bitcoin"], 20000000) <NEW_LINE> assert_equal(walletinfo1["balance"][token], 2) <NEW_LINE> self.nodes[0].reissueasset("bitcoin", 1234) <NEW_LINE> self.nodes[0].generate(101) <NEW_LINE> self.sync_all() <NEW_LINE> walletinfo1 = self.nodes[0].getwalletinfo() <NEW_LINE> assert_equal(walletinfo1["balance"]["bitcoin"], 20001234) <NEW_LINE> self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1, "", "", False) <NEW_LINE> self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1, "", "", False, token) <NEW_LINE> self.nodes[0].generate(101) <NEW_LINE> self.sync_all() <NEW_LINE> self.nodes[1].reissueasset("bitcoin", 1000) <NEW_LINE> self.nodes[1].generate(101) <NEW_LINE> self.sync_all() <NEW_LINE> walletinfo2 = self.nodes[1].getwalletinfo() <NEW_LINE> assert_equal(walletinfo2["balance"]["bitcoin"], 1001) | Test creation of initial reissuance token for default asset on chain set up | 625990598e71fb1e983bd089 |
class SpatialImage4D(SpatialImage3D): <NEW_LINE> <INDENT> def __init__(self, file_path, name, interp_order, output_pixdim, output_axcodes, loader): <NEW_LINE> <INDENT> SpatialImage3D.__init__(self, file_path=file_path, name=name, interp_order=interp_order, output_pixdim=output_pixdim, output_axcodes=output_axcodes, loader=loader) <NEW_LINE> <DEDENT> @property <NEW_LINE> def spatial_rank(self): <NEW_LINE> <INDENT> return int(np.sum([dim > 1 for dim in self.shape[:3]])) <NEW_LINE> <DEDENT> def get_data(self): <NEW_LINE> <INDENT> if len(self.file_path) == 1: <NEW_LINE> <INDENT> return SpatialImage3D._load_single_file( self, self.file_path[0], self.loader[0]) <NEW_LINE> <DEDENT> mod_list = [] <NEW_LINE> for mod in range(len(self.file_path)): <NEW_LINE> <INDENT> mod_3d = SpatialImage3D(file_path=(self.file_path[mod],), name=(self.name[mod],), interp_order=(self.interp_order[mod],), output_pixdim=(self.output_pixdim[mod],), output_axcodes=(self.output_axcodes[mod],), loader=(self.loader[mod],)) <NEW_LINE> mod_data_5d = mod_3d.get_data() <NEW_LINE> mod_list.append(mod_data_5d) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> image_data = np.concatenate(mod_list, axis=4) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> tf.logging.fatal( "multi-modal data shapes not consistent -- trying to " "concatenate {}.".format([mod.shape for mod in mod_list])) <NEW_LINE> raise <NEW_LINE> <DEDENT> return image_data | 4D image from a set of 3D volumes,
supports resampling and reorientation.
The 3D volumes are concatenated in the fifth dim (modality dim)
(4D image from a single file is currently not supported) | 625990597d847024c075d99c |
@Registers.agent <NEW_LINE> class CartpoleDqn(Agent): <NEW_LINE> <INDENT> def __init__(self, env, alg, agent_config, **kwargs): <NEW_LINE> <INDENT> super(CartpoleDqn, self).__init__(env, alg, agent_config, **kwargs) <NEW_LINE> self.epsilon = 1.0 <NEW_LINE> self.episode_count = agent_config.get("episode_count", 100000) <NEW_LINE> <DEDENT> def infer_action(self, state, use_explore): <NEW_LINE> <INDENT> if use_explore and random.random() < self.epsilon: <NEW_LINE> <INDENT> action = np.random.randint(0, self.alg.action_dim) <NEW_LINE> <DEDENT> elif use_explore: <NEW_LINE> <INDENT> send_data = message(state, cmd="predict") <NEW_LINE> self.send_explorer.send(send_data) <NEW_LINE> action = self.recv_explorer.recv() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> action = self.alg.predict(state) <NEW_LINE> <DEDENT> if use_explore: <NEW_LINE> <INDENT> self.epsilon -= 1.0 / self.episode_count <NEW_LINE> self.epsilon = max(0.01, self.epsilon) <NEW_LINE> <DEDENT> self.transition_data.update( {"cur_state": state, "action": action,} ) <NEW_LINE> return action <NEW_LINE> <DEDENT> def handle_env_feedback(self, next_raw_state, reward, done, info, use_explore): <NEW_LINE> <INDENT> self.transition_data.update({ "next_state": next_raw_state, "reward": np.sign(reward) if use_explore else reward, "done": done, "info": info }) <NEW_LINE> if use_explore: <NEW_LINE> <INDENT> train_data = {k: [v] for k, v in self.transition_data.items()} <NEW_LINE> train_data = message(train_data, agent_id=self.id) <NEW_LINE> self.send_explorer.send(train_data) <NEW_LINE> <DEDENT> return self.transition_data <NEW_LINE> <DEDENT> def sync_model(self): <NEW_LINE> <INDENT> return ("none") | Cartpole Agent with dqn algorithm. | 62599059cb5e8a47e493cc66 |
class TestStructure(unittest.TestCase): <NEW_LINE> <INDENT> def test_structure(self): <NEW_LINE> <INDENT> structure = foundations.data_structures.Structure(John="Doe", Jane="Doe") <NEW_LINE> self.assertIn("John", structure) <NEW_LINE> self.assertTrue(hasattr(structure, "John")) <NEW_LINE> setattr(structure, "John", "Nemo") <NEW_LINE> self.assertEqual(structure["John"], "Nemo") <NEW_LINE> structure["John"] = "Vador" <NEW_LINE> self.assertEqual(structure["John"], "Vador") <NEW_LINE> del (structure["John"]) <NEW_LINE> self.assertNotIn("John", structure) <NEW_LINE> self.assertFalse(hasattr(structure, "John")) <NEW_LINE> structure.John = "Doe" <NEW_LINE> self.assertIn("John", structure) <NEW_LINE> self.assertTrue(hasattr(structure, "John")) <NEW_LINE> del (structure.John) <NEW_LINE> self.assertNotIn("John", structure) <NEW_LINE> self.assertFalse(hasattr(structure, "John")) <NEW_LINE> structure = foundations.data_structures.Structure(John=None, Jane=None) <NEW_LINE> self.assertIsNone(structure.John) <NEW_LINE> self.assertIsNone(structure["John"]) <NEW_LINE> structure.update(**{"John": "Doe", "Jane": "Doe"}) <NEW_LINE> self.assertEqual(structure.John, "Doe") <NEW_LINE> self.assertEqual(structure["John"], "Doe") <NEW_LINE> <DEDENT> def test_structure_pickle(self): <NEW_LINE> <INDENT> structure = foundations.data_structures.Structure(John="Doe", Jane="Doe") <NEW_LINE> data = pickle.dumps(structure) <NEW_LINE> data = pickle.loads(data) <NEW_LINE> self.assertEqual(structure, data) <NEW_LINE> data = pickle.dumps(structure, pickle.HIGHEST_PROTOCOL) <NEW_LINE> data = pickle.loads(data) <NEW_LINE> self.assertEqual(structure, data) | Defines :class:`foundations.data_structures.Structure` class units tests methods. | 625990597b25080760ed87bf |
class Transactional(object): <NEW_LINE> <INDENT> def __init__(self, method): <NEW_LINE> <INDENT> self.method = method <NEW_LINE> <DEDENT> def __get__(self, obj, T): <NEW_LINE> <INDENT> def transaction(*args, **kwargs): <NEW_LINE> <INDENT> state = memento(obj) <NEW_LINE> try: <NEW_LINE> <INDENT> return self.method(obj, *args, **kwargs) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> state() <NEW_LINE> raise e <NEW_LINE> <DEDENT> <DEDENT> return transaction | Adds transactional semantics to methods. Methods decorated with
@Transactional will roll back to their entry state upon exceptions. | 6259905963b5f9789fe86732
class ItemSelectedEvent(Event): <NEW_LINE> <INDENT> def __init__(self, item): <NEW_LINE> <INDENT> self.__item = item <NEW_LINE> <DEDENT> @property <NEW_LINE> def item(self): <NEW_LINE> <INDENT> return self.__item | Item selected in the project tree. | 6259905976e4537e8c3f0b4c |
class MessagePrinter: <NEW_LINE> <INDENT> def __init__(self, dbg): <NEW_LINE> <INDENT> self.debugMode = dbg <NEW_LINE> self.verboseMode = False <NEW_LINE> self.prefix = '' <NEW_LINE> <DEDENT> def setVerbose(self): <NEW_LINE> <INDENT> self.verboseMode = True <NEW_LINE> <DEDENT> def setPrefix(self, prefix): <NEW_LINE> <INDENT> self.prefix = prefix <NEW_LINE> <DEDENT> def warning(self, *objs): <NEW_LINE> <INDENT> print("WARNING: ", self.prefix, *objs, file=sys.stderr) <NEW_LINE> <DEDENT> def error(self,*objs): <NEW_LINE> <INDENT> print("ERROR: ", self.prefix, *objs, file=sys.stderr) <NEW_LINE> exit(42) <NEW_LINE> <DEDENT> def debug(self, *objs): <NEW_LINE> <INDENT> if self.debugMode: <NEW_LINE> <INDENT> print("DEBUG: ", self.prefix, *objs, file=sys.stdout) <NEW_LINE> <DEDENT> <DEDENT> def debug_message(self, *objs): <NEW_LINE> <INDENT> self.debug(*objs) <NEW_LINE> <DEDENT> def verbose_message(self, *objs): <NEW_LINE> <INDENT> self.verbose(objs) <NEW_LINE> <DEDENT> def verbose(self, *objs): <NEW_LINE> <INDENT> if self.verboseMode: <NEW_LINE> <INDENT> print("VERBOSE: ", self.prefix, *objs, file=sys.stdout) | Simple printer for prefixed warning, error, debug, and verbose messages. | 62599059b5575c28eb7137ac
class Scale(Widget): <NEW_LINE> <INDENT> def __init__(self, master=None, cnf={}, **kw): <NEW_LINE> <INDENT> Widget.__init__(self, master, 'scale', cnf, kw) <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> value = self.tk.call(self._w, 'get') <NEW_LINE> try: <NEW_LINE> <INDENT> return self.tk.getint(value) <NEW_LINE> <DEDENT> except (ValueError, TclError): <NEW_LINE> <INDENT> return self.tk.getdouble(value) <NEW_LINE> <DEDENT> <DEDENT> def set(self, value): <NEW_LINE> <INDENT> self.tk.call(self._w, 'set', value) <NEW_LINE> <DEDENT> def coords(self, value=None): <NEW_LINE> <INDENT> return self._getints(self.tk.call(self._w, 'coords', value)) <NEW_LINE> <DEDENT> def identify(self, x, y): <NEW_LINE> <INDENT> return self.tk.call(self._w, 'identify', x, y) | Scale widget which can display a numerical scale. | 6259905945492302aabfda98 |
class RunningMeter(object): <NEW_LINE> <INDENT> def __init__(self, name, val=None, smooth=0.99): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._sm = smooth <NEW_LINE> self._val = val <NEW_LINE> <DEDENT> def __call__(self, value): <NEW_LINE> <INDENT> val = (value if self._val is None else value*(1-self._sm) + self._val*self._sm) <NEW_LINE> if not math.isnan(val): <NEW_LINE> <INDENT> self._val = val <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f'{self._name}: {self._val:.4f}' <NEW_LINE> <DEDENT> @property <NEW_LINE> def val(self): <NEW_LINE> <INDENT> if self._val is None: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return self._val <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name | Running meter of a scalar value
(useful for monitoring training loss) | 625990594a966d76dd5f04b2 |
class PosteriorTrace(SampleStream): <NEW_LINE> <INDENT> def __init__(self, generator=None, plot_every=1000, window=False, block=False, file='trace.pdf'): <NEW_LINE> <INDENT> self.__dict__.update(locals()) <NEW_LINE> self.posteriors = [] <NEW_LINE> self.priors = [] <NEW_LINE> self.likelihoods = [] <NEW_LINE> SampleStream.__init__(self, generator) <NEW_LINE> <DEDENT> def process(self, h): <NEW_LINE> <INDENT> self.posteriors.append( getattr(h, 'posterior_score') ) <NEW_LINE> self.priors.append( getattr(h, 'prior') ) <NEW_LINE> self.likelihoods.append( getattr(h, 'likelihood') ) <NEW_LINE> n = len(self.posteriors) <NEW_LINE> if n>0 and self.plot_every is not None and n % self.plot_every == 0: <NEW_LINE> <INDENT> self.plot() <NEW_LINE> <DEDENT> return h <NEW_LINE> <DEDENT> def plot(self): <NEW_LINE> <INDENT> import matplotlib.pyplot as plt <NEW_LINE> plt.clf() <NEW_LINE> plt.figure(2,figsize=(10,4)) <NEW_LINE> plt.subplot(1,2,1) <NEW_LINE> plt.plot(self.posteriors) <NEW_LINE> plt.xlabel('# steps') <NEW_LINE> plt.ylabel('posterior score') <NEW_LINE> plt.subplot(1,2,2) <NEW_LINE> plt.plot(self.priors, self.likelihoods) <NEW_LINE> plt.xlabel('prior') <NEW_LINE> plt.ylabel('likelihood') <NEW_LINE> if self.file is not None: <NEW_LINE> <INDENT> plt.savefig(self.file) <NEW_LINE> <DEDENT> if self.window: <NEW_LINE> <INDENT> plt.show(block=self.block) <NEW_LINE> <DEDENT> <DEDENT> def __exit__(self, t, value, traceback): <NEW_LINE> <INDENT> self.plot() <NEW_LINE> return SampleStream.__exit__(self, t, value, traceback) | A class for plotting/showing a posterior summary trace plot. | 62599059d6c5a102081e36e1 |
@method_decorator(user_passes_test(is_center), name="dispatch") <NEW_LINE> class AnnounTfmCreateView(CreateView): <NEW_LINE> <INDENT> model = AnnouncementsTfm <NEW_LINE> template_name = "announcements/announcements_form.html" <NEW_LINE> form_class = CreateAnnouncementsTfmForm <NEW_LINE> success_url = reverse_lazy("announ_tfms_list") <NEW_LINE> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> announ = AnnouncementsTfm.objects.filter(centers_id=self.request.user.userinfos.centers.id) <NEW_LINE> announ = announ.exclude(status=AnnouncementsTfm.STATUS_CLOSE) <NEW_LINE> if AnnouncementsTfm.objects.exclude(status=AnnouncementsTfm.STATUS_CLOSE): <NEW_LINE> <INDENT> messages.warning(self.request, "Ya tiene una convocatoria abierta en curso", 'warning') <NEW_LINE> return HttpResponseRedirect(reverse("announ_tfms_list")) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super().get(request, *args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super().get_context_data(**kwargs) <NEW_LINE> context['back_url'] = "announ_tfms_list" <NEW_LINE> return context <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> self.object = form.save(commit=False) <NEW_LINE> self.object.status = AnnouncementsTfm.STATUS_OPEN <NEW_LINE> self.object.centers = self.request.user.userinfos.centers <NEW_LINE> self.object.save() <NEW_LINE> return HttpResponseRedirect(self.get_success_url()) | Controller for updating the information of an
announcement.
Attributes
model(models.Model): Model to be created.
template_name(str): template where the view will be rendered.
form_class(forms.Modelform): form for creating the model.
success_url(str): redirect URL when everything has gone correctly. | 6259905932920d7e50bc7607
class TXTJSONFormatter(TXTFormatter): <NEW_LINE> <INDENT> ext = 'json' <NEW_LINE> def write(self, content, stream): <NEW_LINE> <INDENT> collection = [{'id': doc.id, 'text': ''.join(self._iter_text(doc))} for doc in content.units('document')] <NEW_LINE> json.dump(collection, stream) | Formatter for multiple plain-text documents embedded in JSON. | 62599059baa26c4b54d50865 |
class Players(OOBTree): <NEW_LINE> <INDENT> implements(interfaces.IPlayers) <NEW_LINE> def get_player(self, player_id): <NEW_LINE> <INDENT> return self.get(player_id) <NEW_LINE> <DEDENT> def create_player(self, name, details): <NEW_LINE> <INDENT> player_id = str(uuid.uuid4()) <NEW_LINE> self[player_id] = Player(name, details) <NEW_LINE> return { 'id': player_id } | Players container, which contains individual player data objects
| 625990597d847024c075d99e |
class NUMERIC(_NumericType, sqltypes.NUMERIC): <NEW_LINE> <INDENT> __visit_name__ = 'NUMERIC' <NEW_LINE> def __init__(self, precision=None, scale=None, asdecimal=True, **kw): <NEW_LINE> <INDENT> super(NUMERIC, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal, **kw) | MySQL NUMERIC type. | 6259905999cbb53fe68324a1 |
class Products(models.Model): <NEW_LINE> <INDENT> openfoodfats_id = models.BigIntegerField(null=True) <NEW_LINE> name_product = models.CharField(max_length=150, unique=True) <NEW_LINE> nutriscore_product = models.CharField(max_length=1) <NEW_LINE> store_product = models.CharField(max_length=100) <NEW_LINE> picture = models.URLField() <NEW_LINE> url_site = models.URLField() <NEW_LINE> fat = models.CharField(max_length=100, blank=True, null=True) <NEW_LINE> saturated_fat = models.CharField(max_length=100, blank=True, null=True) <NEW_LINE> sugars = models.CharField(max_length=100, blank=True, null=True) <NEW_LINE> salt = models.CharField(max_length=100, blank=True, null=True) <NEW_LINE> categorie = models.ForeignKey(Categories, on_delete=models.CASCADE) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name_product <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = "Produits" | Second class, used to load many products for each category
from the openfoodfacts site. | 62599059b7558d5895464a0c
class Attention(nn.Module): <NEW_LINE> <INDENT> def __init__(self, d_model=300): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.q_linear = nn.Linear(d_model, d_model) <NEW_LINE> self.v_linear = nn.Linear(d_model, d_model) <NEW_LINE> self.k_linear = nn.Linear(d_model, d_model) <NEW_LINE> self.out = nn.Linear(d_model, d_model) <NEW_LINE> self.d_k = d_model <NEW_LINE> <DEDENT> def forward(self, q, k, v, mask): <NEW_LINE> <INDENT> k = self.k_linear(k) <NEW_LINE> q = self.q_linear(q) <NEW_LINE> v = self.v_linear(v) <NEW_LINE> weights = torch.matmul(q, k.transpose(1, 2)) / math.sqrt(self.d_k) <NEW_LINE> mask = mask.unsqueeze(1) <NEW_LINE> weights = weights.masked_fill(mask == 0, -1e9) <NEW_LINE> normlized_weights = F.softmax(weights, dim=-1) <NEW_LINE> output = torch.matmul(normlized_weights, v) <NEW_LINE> output = self.out(output) <NEW_LINE> return output, normlized_weights | A single Attention block | 6259905907d97122c4218266
class WindowsFileLock(BaseFileLock): <NEW_LINE> <INDENT> def _acquire(self): <NEW_LINE> <INDENT> open_mode = os.O_RDWR | os.O_CREAT | os.O_TRUNC <NEW_LINE> try: <NEW_LINE> <INDENT> fd = os.open(self._lock_file, open_mode) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> msvcrt.locking(fd, msvcrt.LK_NBLCK, 1) <NEW_LINE> <DEDENT> except (IOError, OSError): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.close(fd) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> fd.close() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._lock_file_fd = fd <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def _release(self): <NEW_LINE> <INDENT> fd = self._lock_file_fd <NEW_LINE> self._lock_file_fd = None <NEW_LINE> msvcrt.locking(fd, msvcrt.LK_UNLCK, 1) <NEW_LINE> try: <NEW_LINE> <INDENT> os.close(fd) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> fd.close() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> os.remove(self._lock_file) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return None | Uses the :func:`msvcrt.locking` function to hard lock the lock file on
Windows systems. | 6259905923e79379d538dabe
class ModelOnProbation(models.Model): <NEW_LINE> <INDENT> last_encountered = models.DateTimeField(auto_now_add=True) <NEW_LINE> last_encountered_admin_field_entry = ('Scraping information', {'fields': ['last_encountered'], 'classes': ['collapse']}) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> <DEDENT> def was_scraped(self): <NEW_LINE> <INDENT> self.last_encountered = datetime.now() <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> if "was_scraped" in kwargs: <NEW_LINE> <INDENT> self.was_scraped() <NEW_LINE> del kwargs["was_scraped"] <NEW_LINE> <DEDENT> super(ModelOnProbation, self).save(*args, **kwargs) | An abstract model that includes a field representing the last time it
was encountered during scraping.
If this item is not encountered during a scraping pass, it should be
deleted (since it no longer exists). | 62599059fff4ab517ebcede6 |
class RootDirectory(Directory): <NEW_LINE> <INDENT> default_icon = 'server.png' <NEW_LINE> icon_map = [] <NEW_LINE> _rootdirs = {} <NEW_LINE> def __new__(cls, path, autoindex=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return RootDirectory._rootdirs[(path, autoindex)] <NEW_LINE> <DEDENT> except KeyError as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> return object.__new__(cls) <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, path, autoindex=None): <NEW_LINE> <INDENT> super(RootDirectory, self).__init__('.', autoindex=autoindex) <NEW_LINE> self.abspath = os.path.abspath(path) <NEW_LINE> self.rootdir = self <NEW_LINE> self._descendants = {} <NEW_LINE> RootDirectory._register_rootdir(self) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _register_rootdir(cls, rootdir): <NEW_LINE> <INDENT> cls._rootdirs[(rootdir.abspath, rootdir.autoindex)] = rootdir <NEW_LINE> <DEDENT> def _register_descendant(self, entry): <NEW_LINE> <INDENT> self._descendants[(entry.path, entry.autoindex)] = entry | This class wraps a root directory. | 6259905945492302aabfda9a |
class AdviceBar(base.BaseGadget): <NEW_LINE> <INDENT> short_description = 'Advice Bar' <NEW_LINE> description = 'Allows learners to receive advice from predefined tips.' <NEW_LINE> height_px = 300 <NEW_LINE> width_px = 100 <NEW_LINE> panel = 'bottom' <NEW_LINE> _dependency_ids = [] <NEW_LINE> _customization_arg_specs = [ { 'name': 'adviceObjects', 'description': 'Title and content for each tip.', 'schema': { 'type': 'list', 'validators': [{ 'id': 'has_length_at_least', 'min_value': 1, }, { 'id': 'has_length_at_most', 'max_value': 3, }], 'items': { 'type': 'dict', 'properties': [{ 'name': 'adviceTitle', 'description': 'Tip title (visible on advice bar)', 'schema': { 'type': 'unicode', 'validators': [{ 'id': 'is_nonempty', }] }, }, { 'name': 'adviceHtml', 'description': 'Advice content (visible upon click)', 'schema': { 'type': 'html', }, }] } }, 'default_value': [{ 'adviceTitle': 'Tip title', 'adviceHtml': '' }] } ] <NEW_LINE> _MAX_TIP_COUNT = 3 <NEW_LINE> _MIN_TIP_COUNT = 1 <NEW_LINE> def validate(self, customization_args): <NEW_LINE> <INDENT> tip_count = len(customization_args['adviceObjects']['value']) <NEW_LINE> if tip_count > self._MAX_TIP_COUNT: <NEW_LINE> <INDENT> raise utils.ValidationError( 'AdviceBars are limited to %d tips, found %d.' % ( self._MAX_TIP_COUNT, tip_count)) <NEW_LINE> <DEDENT> elif tip_count < self._MIN_TIP_COUNT: <NEW_LINE> <INDENT> raise utils.ValidationError( 'AdviceBar requires at least %d tips, found %s.' % ( self._MIN_TIP_COUNT, tip_count)) | Base gadget for providing an AdviceBar. | 625990597047854f46340982 |
class NamedObject: <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.name == other.name <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.name) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.name) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self.name) | Provides functionality for an object to be described by name. If this is
listed as a parent class, an object will use name in equality comparisons,
hash functions, and string outputs. | 625990597cff6e4e811b7006 |
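A short usage sketch of the name-based equality and hashing NamedObject provides; it assumes the class above is defined in scope.

a = NamedObject("alpha")
b = NamedObject("alpha")
c = NamedObject("beta")

assert a == b                   # equality compares names, not identities
assert a != c
assert hash(a) == hash(b)       # equal names hash alike...
assert len({a, b, c}) == 2      # ...so duplicates collapse in sets and dict keys
print(a, repr(c))               # alpha beta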
class LayerNorm(nn.Module): <NEW_LINE> <INDENT> def __init__(self, d): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.gamma = nn.Parameter(torch.ones(d), requires_grad=True) <NEW_LINE> self.beta = nn.Parameter(torch.zeros(d), requires_grad=True) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> mean = x.mean(-1, keepdim=True) <NEW_LINE> std = x.std(-1, keepdim=True) <NEW_LINE> return self.gamma * (x - mean) / (std + 1e-6) + self.beta | Applies layer normalization to last dimension
Args:
d: dimension of hidden units | 62599059627d3e7fe0e0844f |
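A minimal usage sketch for the LayerNorm module above; it assumes torch is installed, the class (which also needs torch.nn for nn.Parameter) is in scope, and the shapes are illustrative.

import torch

ln = LayerNorm(d=300)
x = torch.randn(2, 10, 300)
y = ln(x)

print(y.shape)                           # torch.Size([2, 10, 300])
print(y.mean(-1).abs().max().item())     # per-position means are close to 0 after normalization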
class TestBootFromVolumeIsolatedHostsFilter( test.TestCase, integrated_helpers.InstanceHelperMixin): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestBootFromVolumeIsolatedHostsFilter, self).setUp() <NEW_LINE> self.useFixture(nova_fixtures.RealPolicyFixture()) <NEW_LINE> self.useFixture(nova_fixtures.NeutronFixture(self)) <NEW_LINE> self.glance = self.useFixture(nova_fixtures.GlanceFixture(self)) <NEW_LINE> self.useFixture(nova_fixtures.CinderFixture(self)) <NEW_LINE> self.useFixture(func_fixtures.PlacementFixture()) <NEW_LINE> api_fixture = self.useFixture(nova_fixtures.OSAPIFixture( api_version='v2.1')) <NEW_LINE> self.api = api_fixture.admin_api <NEW_LINE> self.start_service('conductor') <NEW_LINE> enabled_filters = CONF.filter_scheduler.enabled_filters <NEW_LINE> enabled_filters.append('IsolatedHostsFilter') <NEW_LINE> self.flags( enabled_filters=enabled_filters, isolated_images=[self.glance.auto_disk_config_enabled_image['id']], isolated_hosts=['host1'], restrict_isolated_hosts_to_isolated_images=True, group='filter_scheduler') <NEW_LINE> self.start_service('scheduler') <NEW_LINE> for host in ('host1', 'host2'): <NEW_LINE> <INDENT> self.start_service('compute', host=host) <NEW_LINE> <DEDENT> <DEDENT> def test_boot_from_volume_with_isolated_image(self): <NEW_LINE> <INDENT> image_id = nova_fixtures.CinderFixture.IMAGE_BACKED_VOL <NEW_LINE> server_req_body = { 'server': { 'flavorRef': '1', 'name': 'test_boot_from_volume_with_isolated_image', 'networks': 'none', 'block_device_mapping_v2': [{ 'boot_index': 0, 'uuid': image_id, 'source_type': 'volume', 'destination_type': 'volume' }] } } <NEW_LINE> with utils.temporary_mutation(self.api, microversion='2.37'): <NEW_LINE> <INDENT> server = self.api.post_server(server_req_body) <NEW_LINE> <DEDENT> server = self._wait_for_state_change(server, 'ACTIVE') <NEW_LINE> self.assertEqual('host2', server['OS-EXT-SRV-ATTR:host']) | Regression test for bug #1746483
The IsolatedHostsFilter checks for images restricted to certain hosts via
config options. When creating a server from a root volume, the image is
in the volume (and it's related metadata from Cinder). When creating a
volume-backed server, the imageRef is not required.
The regression is that the RequestSpec.image.id field is not set and the
IsolatedHostsFilter blows up trying to load the image id. | 62599059097d151d1a2c262f |
class Pylearn2DatasetNoise(Dataset): <NEW_LINE> <INDENT> def __init__(self, dataset, batch_size, noise_dim, which_sources=[0,1], **kwargs): <NEW_LINE> <INDENT> self.pylearn2_dataset = dataset <NEW_LINE> self.sources = self.pylearn2_dataset.get_data_specs()[1] <NEW_LINE> self.sources = tuple([self.sources[i] for i in which_sources]) <NEW_LINE> self.sources = self.sources + tuple('eps') <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.noise_dim = noise_dim <NEW_LINE> self.which_sources = which_sources <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> num_examples = self.pylearn2_dataset.get_num_examples() <NEW_LINE> iterator = self.pylearn2_dataset.iterator( self.batch_size, num_examples/self.batch_size, mode='sequential', data_specs=self.pylearn2_dataset.get_data_specs(), return_tuple=True) <NEW_LINE> return iterator <NEW_LINE> <DEDENT> def get_data(self,state=None,request=None): <NEW_LINE> <INDENT> batch = next(state) <NEW_LINE> timelen = batch[0].shape[0] <NEW_LINE> batch = tuple([batch[i] for i in self.which_sources]) <NEW_LINE> eps = np.random.normal(0,1,size=(timelen, self.batch_size, self.noise_dim)).astype(floatX) <NEW_LINE> batch = batch + tuple(eps) <NEW_LINE> return (batch,) | Pylearn2DatasetNoise is the same as `Pylearn2Dataset` with an
extra batch of random numbers.
Parameters
----------
dataset: `pylearn2.dataset` object
Note that this expects the actual object, which will be
initialized inside.
batch_size: int
Batch size to be used by the `pylearn2.dataset` iterator.
noise_dim: int
Dimension of the noise batch | 625990591b99ca4002290019
class MatrixFactorizationConfig(object): <NEW_LINE> <INDENT> learning_rate = 2e-2 <NEW_LINE> epochs = 1000 <NEW_LINE> max_users_num = 943 <NEW_LINE> max_items_num = 1682 <NEW_LINE> decay_rate = 1.0 <NEW_LINE> data_file = os.path.join(BasicConfig.DATA_ROOT, 'mf/u.data') <NEW_LINE> checkpoints_dir = os.path.join(BasicConfig.CHECKPOINTS_ROOT, 'mf') <NEW_LINE> model_dir = os.path.join(checkpoints_dir, 'mf') | configuration for matrix factorization model | 62599059507cdc57c63a6368 |
class AdapterInterface(object): <NEW_LINE> <INDENT> def __init__(self, container): <NEW_LINE> <INDENT> self.settings = get_settings(container) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return '{}.{}'.format(self.__module__, type(self).__name__) <NEW_LINE> <DEDENT> def describe_adapter(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def configurator_factory(self, container): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def owssecurity_factory(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def owsregistry_factory(self, request): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def owsproxy_config(self, container): <NEW_LINE> <INDENT> raise NotImplementedError | Common interface allowing functionality overriding using an adapter implementation. | 62599059a17c0f6771d5d683 |
class CompanyName(db.Model): <NEW_LINE> <INDENT> id = db.Column(db.Integer, db.Sequence('company_name_id_seq'), primary_key=True) <NEW_LINE> company_id = db.Column(db.Integer, db.ForeignKey('company.id'), nullable=False) <NEW_LINE> company = db.relationship('Company', backref=db.backref('names', lazy=True)) <NEW_LINE> language = db.Column(db.String(80)) <NEW_LINE> name = db.Column(db.String(120), nullable=True) <NEW_LINE> isDefault = db.Column(db.Boolean, default=False) | Company name (multilingual) | 6259905924f1403a926863b0
class SetDescriptions(Resource): <NEW_LINE> <INDENT> def put(self): <NEW_LINE> <INDENT> roomId = request.form['roomId'] <NEW_LINE> description = request.form['description'] <NEW_LINE> try: <NEW_LINE> <INDENT> r = rocket.groups_set_description(room_id=roomId,description=description) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> r = None <NEW_LINE> <DEDENT> if not r: <NEW_LINE> <INDENT> return {'success':False,'message':'设置失败。'},401 <NEW_LINE> <DEDENT> return { 'success':True, 'description':r.json()['description'], },200 | Set the description of a group.
https://rocket.chat/docs/developer-guides/rest-api/groups/setdescription/ | 625990594428ac0f6e659aff
class TwentyTwenty(AbstractCMS): <NEW_LINE> <INDENT> left_legend = models.CharField(_(u'Left Legend'), max_length=200, blank=True) <NEW_LINE> left_image = models.ImageField(_(u'Left Image'), upload_to='product_onepage') <NEW_LINE> right_legend = models.CharField(_(u'Right Legend'), max_length=200, blank=True) <NEW_LINE> right_image = models.ImageField(_(u'Right Image'), upload_to='product_onepage') <NEW_LINE> description = HTMLField(_(u'Text')) <NEW_LINE> template = models.CharField(_('template'), choices=settings.ONEPAGE_TWENTYTWENTY_TEMPLATE_CHOICES, default=ONEPAGE_TWENTYTWENTY_DEFAULT_TEMPLATE, max_length=100, blank=False) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return 'TwentyTwenty %s' % (self.title) | TwentyTwenty content using the TwentyTwenty library, which merges two images
in a divided slider to visually compare them | 62599059379a373c97d9a5e8
class Database(object): <NEW_LINE> <INDENT> def __init__(self, url, **kwargs): <NEW_LINE> <INDENT> self._engine = create_engine(url, pool_pre_ping=True, pool_recycle=3600, **kwargs) <NEW_LINE> self._Session = sessionmaker(bind=self._engine, expire_on_commit=False) <NEW_LINE> <DEDENT> def get_engine(self): <NEW_LINE> <INDENT> return self._engine <NEW_LINE> <DEDENT> def create_schema(self): <NEW_LINE> <INDENT> BaseModel.metadata.create_all(self._engine) <NEW_LINE> <DEDENT> def create_metrics_schema(self): <NEW_LINE> <INDENT> BaseMetricsModel.metadata.create_all(self._engine) <NEW_LINE> <DEDENT> def make_session(self) -> Session: <NEW_LINE> <INDENT> return self._Session() <NEW_LINE> <DEDENT> @contextmanager <NEW_LINE> def session(self) -> Session: <NEW_LINE> <INDENT> sess = self.make_session() <NEW_LINE> try: <NEW_LINE> <INDENT> yield sess <NEW_LINE> sess.commit() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> sess.rollback() <NEW_LINE> raise <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> sess.close() <NEW_LINE> <DEDENT> <DEDENT> @backoff.on_exception(backoff.expo, DBAPIError, max_tries=RETRY_CONNECTION_LIMIT, giveup=lambda err: not getattr(err, 'connection_invalidated', False)) <NEW_LINE> def autoretry(self, func): <NEW_LINE> <INDENT> with self.session() as session: <NEW_LINE> <INDENT> return func(session) | Maintains state for accessing the database. | 625990593eb6a72ae038bc23 |
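A hedged usage sketch for the Database wrapper above; the SQLite URL and SomeModel are hypothetical stand-ins, and the class together with its SQLAlchemy base models is assumed to be importable.

db = Database("sqlite:///example.db")        # illustrative URL
db.create_schema()                           # create tables for BaseModel subclasses

with db.session() as session:                # commits on success, rolls back on error
    session.add(SomeModel(name="demo"))      # SomeModel is a hypothetical BaseModel subclass

rows = db.autoretry(lambda session: session.query(SomeModel).all())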
class SecurityPolicy(ParanoidSecurityPolicy): <NEW_LINE> <INDENT> classProvides(ISecurityPolicy) <NEW_LINE> implements(IInteraction) <NEW_LINE> def checkPermission(self, permission, object): <NEW_LINE> <INDENT> return checkPermission(permission, object) | Security policy that bridges between zope.security security mechanisms
and Zope 2's security policy.
Don't let the name of the base class fool you... This really just
delegates to Zope 2's security manager. | 625990596e29344779b01c10 |
class AssertionCredentials(OAuth2Credentials): <NEW_LINE> <INDENT> def __init__(self, assertion_type, user_agent, token_uri='https://accounts.google.com/o/oauth2/token', **kwargs): <NEW_LINE> <INDENT> super(AssertionCredentials, self).__init__( None, None, None, None, None, token_uri, user_agent) <NEW_LINE> self.assertion_type = assertion_type <NEW_LINE> <DEDENT> def _generate_refresh_request_body(self): <NEW_LINE> <INDENT> assertion = self._generate_assertion() <NEW_LINE> body = urllib.urlencode({ 'assertion_type': self.assertion_type, 'assertion': assertion, 'grant_type': "assertion", }) <NEW_LINE> return body <NEW_LINE> <DEDENT> def _generate_assertion(self): <NEW_LINE> <INDENT> _abstract() | Abstract Credentials object used for OAuth 2.0 assertion grants
This credential does not require a flow to instantiate because it represents
a two legged flow, and therefore has all of the required information to
generate and refresh its own access tokens. It must be subclassed to
generate the appropriate assertion string.
AssertionCredentials objects may be safely pickled and unpickled. | 6259905956ac1b37e63037c9 |
class ApiTaskStatus(Enum): <NEW_LINE> <INDENT> STARTING = 'starting' <NEW_LINE> PAUSING = 'pausing' <NEW_LINE> STOPPING = 'stopping' <NEW_LINE> DELETING = 'deleting' <NEW_LINE> DELETED = 'deleted' <NEW_LINE> COMPLETED = 'completed' <NEW_LINE> ERROR = 'error' | Enumeration of all possible (supported) API statuses | 62599059e64d504609df9eb1
class DynamicFormatMiddleware: <NEW_LINE> <INDENT> def _flatten_dict(self, obj, prefix=''): <NEW_LINE> <INDENT> encoded_dict = QueryDict('').copy() <NEW_LINE> if hasattr(obj, 'items'): <NEW_LINE> <INDENT> for key, value in obj.items(): <NEW_LINE> <INDENT> item_key = '%(prefix)s%(key)s' % { 'prefix': prefix, 'key': key } <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> for i, item in enumerate(value): <NEW_LINE> <INDENT> if isinstance(item, dict): <NEW_LINE> <INDENT> item_prefix = '%(key)s-%(index)d-' % { 'key': key, 'index': i } <NEW_LINE> encoded_dict.update(self._flatten_dict(item, prefix=item_prefix)) <NEW_LINE> id_value = item.get('id', None) <NEW_LINE> if id_value: <NEW_LINE> <INDENT> encoded_dict.update({ key: id_value }) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> encoded_dict.update({ key: item }) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> encoded_dict[item_key] = value.get('id', value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> encoded_dict[item_key] = unicode(value) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return encoded_dict <NEW_LINE> <DEDENT> def process_request(self, request): <NEW_LINE> <INDENT> content_type = request.META.get('CONTENT_TYPE', '') <NEW_LINE> if content_type != '' and content_type in ('application/json'): <NEW_LINE> <INDENT> content_length = 0 <NEW_LINE> if request.META.get('CONTENT_LENGTH', '') != '': <NEW_LINE> <INDENT> content_length = int(request.META.get('CONTENT_LENGTH', 0)) <NEW_LINE> <DEDENT> if content_length > 0: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> decoded_dict = simplejson.loads(request.raw_post_data) <NEW_LINE> request.POST = request.POST.copy() <NEW_LINE> request.POST = self._flatten_dict(decoded_dict) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return HttpResponse('Invalid JSON', status=400) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def process_response(self, request, response): <NEW_LINE> <INDENT> if isinstance(response, DynamicResponse): <NEW_LINE> <INDENT> return response.render_response(request, response) <NEW_LINE> <DEDENT> return response | Provides support for dynamic content negotiation, both in request and reponse. | 625990598e7ae83300eea651 |
class Strategy(object, metaclass=abc.ABCMeta): <NEW_LINE> <INDENT> def master_setup(self): <NEW_LINE> <INDENT> self.scheduled_functions = [] <NEW_LINE> self.setup() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get_signals(self, event): <NEW_LINE> <INDENT> raise NotImplementedError("Should implement get_signals()") <NEW_LINE> <DEDENT> def log_vars(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def setup(self): <NEW_LINE> <INDENT> raise NotImplementedError("Should implement setup()") <NEW_LINE> <DEDENT> def set_params(self, bars, events, portfolio, engine): <NEW_LINE> <INDENT> self.bars = bars <NEW_LINE> self.symbols = bars.symbols <NEW_LINE> self.events = events <NEW_LINE> self.portfolio = portfolio <NEW_LINE> self.engine = engine <NEW_LINE> <DEDENT> def schedule_function(self, function, sch_rule): <NEW_LINE> <INDENT> self.scheduled_functions.append(ScheduledFunction(function, sch_rule)) <NEW_LINE> <DEDENT> def execute_scheduled_functions(self, event): <NEW_LINE> <INDENT> if event.type != 'MARKET': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for func in self.scheduled_functions: <NEW_LINE> <INDENT> if func.on_date(self.bars.current_date(), self.bars.date_idx, self.bars.starts) == True: <NEW_LINE> <INDENT> func.function() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __place_order(self, symbol, amount, amount_type, target): <NEW_LINE> <INDENT> signal = SignalEvent(symbol, amount=amount, amount_type=amount_type, target=target) <NEW_LINE> self.events.put(signal) <NEW_LINE> <DEDENT> def order(self, symbol, shares): <NEW_LINE> <INDENT> self.__place_order(symbol, shares, 'SHARES', False) <NEW_LINE> <DEDENT> def order_target(self, symbol, shares): <NEW_LINE> <INDENT> self.__place_order(symbol, shares, 'SHARES', True) <NEW_LINE> <DEDENT> def order_value(self, symbol, value): <NEW_LINE> <INDENT> self.__place_order(symbol, value, 'VALUE', False) <NEW_LINE> <DEDENT> def order_target_value(self, symbol, value): <NEW_LINE> <INDENT> self.__place_order(symbol, value, 'VALUE', True) <NEW_LINE> <DEDENT> def order_percent(self, symbol, percentage): <NEW_LINE> <INDENT> self.__place_order(symbol, percentage, 'PERCENTAGE', False) <NEW_LINE> <DEDENT> def order_target_percent(self,symbol,percentage): <NEW_LINE> <INDENT> self.__place_order(symbol, percentage, 'PERCENTAGE', True) | Class defining a trading strategy.
...
Attributes
----------
scheduled_functions(list)
A list of scheduled functions for the strategy
Methods
-------
get_signals | 62599059a8ecb033258727db |
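To make the abstract interface concrete, here is a minimal, hypothetical Strategy subclass; the symbol choice and sizing are illustrative, and it assumes the engine calls set_params/master_setup before dispatching events, as the base class implies.

class BuyAndHold(Strategy):
    """Hypothetical strategy: put the whole portfolio into the first symbol, once."""

    def setup(self):
        self.invested = False

    def get_signals(self, event):
        if event.type == 'MARKET' and not self.invested:
            # target 100% of portfolio value in the first available symbol
            self.order_target_percent(self.symbols[0], 1.0)
            self.invested = True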
class EventLogModel(BaseModel): <NEW_LINE> <INDENT> start_time = pw.DateTimeField(default=datetime.now) <NEW_LINE> end_time = pw.DateTimeField(default=datetime.now) <NEW_LINE> category = pw.CharField() <NEW_LINE> subcommand = pw.CharField(null=True) <NEW_LINE> message = pw.CharField(null=True) <NEW_LINE> returncode = pw.IntegerField(default=-1) <NEW_LINE> params = JSONField(null=True) <NEW_LINE> profile = pw.CharField(null=True) <NEW_LINE> repo_url = pw.CharField(null=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> database = DB | Keep a log of background jobs. | 6259905955399d3f05627ae4 |
class ProfileHandlerAPI(SessionHandler): <NEW_LINE> <INDENT> def get(self, profile_id): <NEW_LINE> <INDENT> viewer = self.user_model <NEW_LINE> q = User.query(User.username == profile_id) <NEW_LINE> user = q.get() <NEW_LINE> user_json = {} <NEW_LINE> if user != None: <NEW_LINE> <INDENT> user_json['type'] = "user" <NEW_LINE> user_json['account_type'] = user.account_type <NEW_LINE> user_json['username'] = user.username <NEW_LINE> user_json['firstname'] = user.first_name <NEW_LINE> user_json['lastname'] = user.last_name <NEW_LINE> user_json['email'] = user.email_address <NEW_LINE> user_json['profession'] = user.profession <NEW_LINE> user_json['employer'] = user.employer <NEW_LINE> user_json['subscriptions'] = user.subscriptions <NEW_LINE> user_json['friend_count'] = user.friend_count <NEW_LINE> user_json['picture'] = "/img?user_id={}".format(user.key.urlsafe()) <NEW_LINE> <DEDENT> self.response.headers['Content-Type'] = 'application/json' <NEW_LINE> self.response.out.write(json.dumps(user_json)) | handler to display a profile page | 625990597047854f46340984 |
class Bonk(TBase): <NEW_LINE> <INDENT> def __init__(self, type=None, message=None,): <NEW_LINE> <INDENT> self.type = type <NEW_LINE> self.message = message <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- type
- message | 6259905991f36d47f2231972 |
class TestMatch(TestCase): <NEW_LINE> <INDENT> def test_show_hide(self): <NEW_LINE> <INDENT> vim = Mock() <NEW_LINE> arg = _MatchArg(Mark(1, 3), Mark(5, 9), 'sent') <NEW_LINE> match = _Match(arg, vim) <NEW_LINE> vim.add_match.side_effect = [12] <NEW_LINE> match.show(3) <NEW_LINE> vim.add_match.assert_called_once_with( Mark(1, 3), Mark(5, 9), 'CoqStcSent') <NEW_LINE> match.hide(3) <NEW_LINE> vim.del_match.assert_called_once_with(12) <NEW_LINE> <DEDENT> def test_redraw_shown(self): <NEW_LINE> <INDENT> vim = Mock() <NEW_LINE> arg = _MatchArg(Mark(1, 3), Mark(5, 9), 'sent') <NEW_LINE> match = _Match(arg, vim) <NEW_LINE> vim.add_match.side_effect = [12, 13] <NEW_LINE> match.show(3) <NEW_LINE> match.redraw(3) <NEW_LINE> vim.del_match.assert_called_once_with(12) <NEW_LINE> vim.add_match.assert_called_with(Mark(1, 3), Mark(5, 9), 'CoqStcSent') <NEW_LINE> <DEDENT> def test_redraw_not_shown(self): <NEW_LINE> <INDENT> vim = Mock() <NEW_LINE> arg = _MatchArg(Mark(1, 3), Mark(5, 9), 'sent') <NEW_LINE> match = _Match(arg, vim) <NEW_LINE> vim.add_match.side_effect = [12, 13] <NEW_LINE> match.redraw(3) <NEW_LINE> vim.del_match.assert_not_called() <NEW_LINE> vim.add_match.assert_not_called() <NEW_LINE> match.show(3) <NEW_LINE> vim.add_match.assert_called_with(Mark(1, 3), Mark(5, 9), 'CoqStcSent') | Test for call `_Match`. | 625990594a966d76dd5f04b6 |
class undoiter: <NEW_LINE> <INDENT> def __init__(self, base_iter): <NEW_LINE> <INDENT> self._itr = base_iter <NEW_LINE> self._undo_stack=[] <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> if self._undo_stack: <NEW_LINE> <INDENT> return self._undo_stack.pop() <NEW_LINE> <DEDENT> return self._itr.next() <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def undo(self,val): <NEW_LINE> <INDENT> self._undo_stack.append(val) | Undoable iterator class | 6259905999cbb53fe68324a4 |
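A brief usage sketch for undoiter; note that the class follows the old Python 2 iterator protocol (it calls the wrapped iterator's .next()), so this assumes Python 2 semantics or a base iterator that exposes .next().

it = undoiter(iter([1, 2, 3]))

first = it.next()     # -> 1
it.undo(first)        # push the value back onto the undo stack
print(it.next())      # -> 1 again, served from the undo stack
print(it.next())      # -> 2, from the underlying iterator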
class CTD_ANON_24 (pyxb.binding.basis.complexTypeDefinition): <NEW_LINE> <INDENT> _TypeDefinition = pyxb.binding.datatypes.decimal <NEW_LINE> _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE <NEW_LINE> _Abstract = False <NEW_LINE> _ExpandedName = None <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/Users/ethanwaldie/thesis/malmo/Schemas/MissionHandlers.xsd', 490, 10) <NEW_LINE> _ElementMap = {} <NEW_LINE> _AttributeMap = {} <NEW_LINE> __variance = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'variance'), 'variance', '__httpProjectMalmo_microsoft_com_CTD_ANON_24_variance', pyxb.binding.datatypes.decimal, unicode_default='0') <NEW_LINE> __variance._DeclarationLocation = pyxb.utils.utility.Location('/Users/ethanwaldie/thesis/malmo/Schemas/MissionHandlers.xsd', 493, 16) <NEW_LINE> __variance._UseLocation = pyxb.utils.utility.Location('/Users/ethanwaldie/thesis/malmo/Schemas/MissionHandlers.xsd', 493, 16) <NEW_LINE> variance = property(__variance.value, __variance.set, None, None) <NEW_LINE> _ElementMap.update({ }) <NEW_LINE> _AttributeMap.update({ __variance.name() : __variance }) | Complex type [anonymous] with content type SIMPLE | 62599059009cb60464d02af9 |
class Credential: <NEW_LINE> <INDENT> credential_list=[] <NEW_LINE> def __init__(self,account_name,password): <NEW_LINE> <INDENT> self.account_name = account_name <NEW_LINE> self.password = password <NEW_LINE> <DEDENT> credential_list=[] <NEW_LINE> def save_credential(self): <NEW_LINE> <INDENT> Credential.credential_list.append(self) <NEW_LINE> <DEDENT> def delete_credential(self): <NEW_LINE> <INDENT> Credential.credential_list.remove(self) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def find_by_name(cls,name): <NEW_LINE> <INDENT> for credential in cls.credential_list: <NEW_LINE> <INDENT> if credential.account_name == name: <NEW_LINE> <INDENT> return credential <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def display_credentials(cls): <NEW_LINE> <INDENT> return cls.credential_list <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def credential_exist(cls,name): <NEW_LINE> <INDENT> for credential in cls.credential_list: <NEW_LINE> <INDENT> if credential.account_name == name: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False | class that generates new instance for credentials | 62599059d99f1b3c44d06c66 |
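A short usage sketch for the Credential class above; the account name and password are made up.

cred = Credential("twitter", "s3cret")
cred.save_credential()

assert Credential.credential_exist("twitter")
found = Credential.find_by_name("twitter")
print(found.password)                         # s3cret

cred.delete_credential()
assert not Credential.credential_exist("twitter")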
class PlaceTypeForm(ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = PlaceType <NEW_LINE> exclude = ('slug', ) <NEW_LINE> widgets = { 'label':TextInput(attrs={'class':'input-medium search-query'}) } | PlaceType model form | 62599059a17c0f6771d5d684 |
class LowLevelClient: <NEW_LINE> <INDENT> def __init__(self, access_token, service_url): <NEW_LINE> <INDENT> self.accessToken = access_token <NEW_LINE> if service_url is None: <NEW_LINE> <INDENT> self.serviceUrl = TAGLIATELLE_URL <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.serviceUrl = service_url <NEW_LINE> <DEDENT> <DEDENT> def get_tags(self, key="", resource_uri=""): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> query_params = {} <NEW_LINE> if resource_uri is not None: <NEW_LINE> <INDENT> query_params['resourceUri'] = resource_uri <NEW_LINE> <DEDENT> if key is not None: <NEW_LINE> <INDENT> query_params['key'] = key <NEW_LINE> <DEDENT> req = Request(self.serviceUrl + "/v0/tags?" + urlencode(query_params)) <NEW_LINE> req.add_header('Authorization', 'Bearer ' + self.accessToken) <NEW_LINE> response = urlopen(req) <NEW_LINE> data = json.load(response) <NEW_LINE> results = [] <NEW_LINE> if "results" in data: <NEW_LINE> <INDENT> for tag in data["results"]: <NEW_LINE> <INDENT> results.append(TagResponse(tag.get("key"), tag.get("value"), tag.get("resourceUri"), tag.get("id"), tag.get("createdAt"), tag.get("createdBy"), tag.get("modifiedAt"), tag.get("modifiedBy"), tag.get("_links"))) <NEW_LINE> <DEDENT> return TagBulkResponse(data.get('count'), data.get('total'), data.get('offset'), results) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return TagBulkResponse(0, 0, 0, []) <NEW_LINE> <DEDENT> <DEDENT> except HTTPError as e: <NEW_LINE> <INDENT> print(e.read()) <NEW_LINE> <DEDENT> <DEDENT> def post_tag(self, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> req = Request(self.serviceUrl + "/v0/tags") <NEW_LINE> req.add_header("Authorization", "Bearer " + self.accessToken) <NEW_LINE> req.add_header("Content-Type", "application/json") <NEW_LINE> req.get_method = lambda: 'POST' <NEW_LINE> req.data = json.dumps(request) <NEW_LINE> urlopen(req) <NEW_LINE> <DEDENT> except HTTPError as e: <NEW_LINE> <INDENT> print(e.read()) <NEW_LINE> <DEDENT> <DEDENT> def put_tag(self, id, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> req = Request(self.serviceUrl + "/v0/tags/" + id) <NEW_LINE> req.add_header("Authorization", "Bearer " + self.accessToken) <NEW_LINE> req.add_header("Content-Type", "application/json") <NEW_LINE> req.get_method = lambda: 'PUT' <NEW_LINE> req.data = json.dumps(request) <NEW_LINE> urlopen(req) <NEW_LINE> <DEDENT> except HTTPError as e: <NEW_LINE> <INDENT> print(e.read()) <NEW_LINE> <DEDENT> <DEDENT> def delete_tag(self, id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> req = Request(self.serviceUrl + "/v0/tags/" + id) <NEW_LINE> req.add_header("Authorization", "Bearer " + self.accessToken) <NEW_LINE> req.get_method = lambda: 'DELETE' <NEW_LINE> urlopen(req) <NEW_LINE> <DEDENT> except HTTPError as e: <NEW_LINE> <INDENT> print(e.read()) | This class wraps all the low level operations with Tagliatelle API | 6259905945492302aabfda9d |
class DelayedEvent(_messages.Message): <NEW_LINE> <INDENT> cause = _messages.StringField(1) <NEW_LINE> metrics = _messages.StringField(2, repeated=True) | An event generated whenever a resource limitation or transient error
delays execution of a pipeline that was otherwise ready to run.
Fields:
cause: A textual description of the cause of the delay. The string can
change without notice because it is often generated by another service
(such as Compute Engine).
metrics: If the delay was caused by a resource shortage, this field lists
the Compute Engine metrics that are preventing this operation from
running (for example, `CPUS` or `INSTANCES`). If the particular metric
is not known, a single `UNKNOWN` metric will be present. | 625990590a50d4780f7068a1 |
class Limits(object): <NEW_LINE> <INDENT> def __init__(self, max_text_features=80, max_css_features=120, max_links=2500): <NEW_LINE> <INDENT> self.max_text_features = max_text_features <NEW_LINE> self.max_css_features = max_css_features <NEW_LINE> self.max_links = max_links | Limits to avoid exploding memory consumption in some cases | 6259905921a7993f00c67533 |
class ExecutionError(Exception): <NEW_LINE> <INDENT> def __init__(self, msg): <NEW_LINE> <INDENT> self.message = self.msg = msg <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Execution Error: {0}".format(self.msg) | Exception for errors in the runtime module.
Args:
msg (str): A message describing the error. | 62599059fff4ab517ebcedea |
class Resize(object): <NEW_LINE> <INDENT> def __init__(self, size=256): <NEW_LINE> <INDENT> self.size = size <NEW_LINE> <DEDENT> def __call__(self, sample): <NEW_LINE> <INDENT> image_x, image_ir, image_depth, binary_mask, spoofing_label = sample['image_x'], sample['image_ir'], sample['image_depth'], sample['binary_mask'],sample['spoofing_label'] <NEW_LINE> image_x = cv2.resize(image_x, (self.size, self.size)) <NEW_LINE> image_ir = cv2.resize(image_ir, (self.size, self.size)) <NEW_LINE> image_depth = cv2.resize(image_depth, (self.size, self.size)) <NEW_LINE> return {'image_x': image_x,'image_ir': image_ir,'image_depth': image_depth, 'binary_mask': binary_mask, 'spoofing_label': spoofing_label} | Resize the images in a sample to (size, size).
Processes only one sample at a time. | 62599059435de62698e9d3ca
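A hedged usage sketch for the Resize transform above; the sample arrays and shapes are invented, and OpenCV (cv2) plus NumPy must be installed since the class relies on cv2.resize.

import numpy as np

sample = {
    'image_x': np.zeros((480, 640, 3), dtype=np.uint8),     # invented shapes
    'image_ir': np.zeros((480, 640, 3), dtype=np.uint8),
    'image_depth': np.zeros((480, 640, 3), dtype=np.uint8),
    'binary_mask': np.zeros((32, 32), dtype=np.float32),
    'spoofing_label': 1,
}

resize = Resize(size=256)
out = resize(sample)
print(out['image_x'].shape)    # (256, 256, 3)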
class IapProjectsIapTunnelZonesInstancesGetIamPolicyRequest(_messages.Message): <NEW_LINE> <INDENT> getIamPolicyRequest = _messages.MessageField('GetIamPolicyRequest', 1) <NEW_LINE> resource = _messages.StringField(2, required=True) | A IapProjectsIapTunnelZonesInstancesGetIamPolicyRequest object.
Fields:
getIamPolicyRequest: A GetIamPolicyRequest resource to be passed as the
request body.
resource: REQUIRED: The resource for which the policy is being requested.
See the operation documentation for the appropriate value for this
field. | 625990594e4d5625663739cd |
class BuildFeed(Command): <NEW_LINE> <INDENT> def get_options(self): <NEW_LINE> <INDENT> return [ Option('-v', '--validate', dest='validate', action="store_true", default=False), Option('-e', '--extract', dest='extract', action="store_true", default=False), Option('-u', '--upload', dest='upload', action="store_true", default=False) ] <NEW_LINE> <DEDENT> def run(self, validate=False, extract=False, upload=False): <NEW_LINE> <INDENT> logger.info("build feed task started") <NEW_LINE> from app.services.feed import Feed <NEW_LINE> TMP_FOLDER = current_app.config['TMP_FOLDER'] <NEW_LINE> if not os.path.isdir(TMP_FOLDER): <NEW_LINE> <INDENT> os.makedirs(TMP_FOLDER) <NEW_LINE> <DEDENT> feed = Feed(db=db.session) <NEW_LINE> feedFile = feed.build() <NEW_LINE> feed.saveTo(TMP_FOLDER) <NEW_LINE> if extract: <NEW_LINE> <INDENT> extract_zip(TMP_FOLDER + feed.filename, TMP_FOLDER + 'extracted/') <NEW_LINE> <DEDENT> if validate: <NEW_LINE> <INDENT> feed.validate() <NEW_LINE> <DEDENT> if upload: <NEW_LINE> <INDENT> s3service = S3(current_app.config['AWS_S3_BUCKET_NAME']) <NEW_LINE> s3service.config(current_app.config) <NEW_LINE> s3service.uploadFileObj(feed.filename, feedFile) <NEW_LINE> <DEDENT> return 'success' | Builds a feed | 62599059498bea3a75a590df |
class A: <NEW_LINE> <INDENT> def __setattr__(self, *args): <NEW_LINE> <INDENT> pass | Not an attrs class on purpose to prevent accidental resets that
would render the asserts meaningless. | 62599059b5575c28eb7137af |
class UserInfoResponse(Response): <NEW_LINE> <INDENT> def __init__(self, user, *args, **kwargs): <NEW_LINE> <INDENT> super(UserInfoResponse, self).__init__(*args, **kwargs) <NEW_LINE> self.user = user | Inherits from :class:`.Response`, adds :attr:`~UserInfoResponse.user`
attribute. | 6259905955399d3f05627ae6 |
class DiscoveryAwsKmsMasterKeyProvider(BaseKMSMasterKeyProvider): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(DiscoveryAwsKmsMasterKeyProvider, self).__init__(**kwargs) <NEW_LINE> self.vend_masterkey_on_decrypt = True <NEW_LINE> <DEDENT> def validate_config(self): <NEW_LINE> <INDENT> if self.config.key_ids: <NEW_LINE> <INDENT> raise ConfigMismatchError( "To explicitly identify which keys should be used, use a " "StrictAwsKmsMasterKeyProvider." ) <NEW_LINE> <DEDENT> if self.config.discovery_filter: <NEW_LINE> <INDENT> if not self.config.discovery_filter.account_ids or not self.config.discovery_filter.partition: <NEW_LINE> <INDENT> raise ConfigMismatchError( "When specifying a discovery filter you must include both account ids and " "partition" ) <NEW_LINE> <DEDENT> for account in self.config.discovery_filter.account_ids: <NEW_LINE> <INDENT> if not account: <NEW_LINE> <INDENT> raise ConfigMismatchError( "When specifying a discovery filter, account ids must be non-empty " "strings" ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if self.config.discovery_region: <NEW_LINE> <INDENT> raise ConfigMismatchError( "To enable MRK-aware discovery mode, use a MRKAwareDiscoveryAwsKmsMasterKeyProvider." ) | Discovery Master Key Provider for KMS. This can only be used for decryption. It is configured with an optional
Discovery Filter containing AWS account ids and partitions that should be trusted for decryption. If a ciphertext
was encrypted with an AWS KMS master key that matches an account and partition listed by this provider, decryption
will succeed. Otherwise, decryption will fail. If no Discovery Filter is configured, the provider will attempt
to decrypt any ciphertext created by an AWS KMS Master Key Provider.
>>> import aws_encryption_sdk
>>> kms_key_provider = aws_encryption_sdk.DiscoveryAwsKmsMasterKeyProvider(discovery_filter=DiscoveryFilter(
... account_ids=['2222222222222', '3333333333333']
... ))
.. note::
If no botocore_session is provided, the default botocore session will be used.
:param config: Configuration object (optional)
:type config: aws_encryption_sdk.key_providers.kms.KMSMasterKeyProviderConfig
:param botocore_session: botocore session object (optional)
:type botocore_session: botocore.session.Session
:param list key_ids: List of KMS CMK IDs with which to pre-populate provider (optional)
:param list region_names: List of regions for which to pre-populate clients (optional) | 6259905932920d7e50bc760c |
class Api(object): <NEW_LINE> <INDENT> def __init__(self, loop, port=8099, site=None): <NEW_LINE> <INDENT> loop = loop or asyncio.get_event_loop() <NEW_LINE> self.app = web.Application(loop=loop) <NEW_LINE> self.port = port <NEW_LINE> self.site = site <NEW_LINE> self.app.router.add_get('/', self.index) <NEW_LINE> self.app.router.add_get('/logs', self.sys_info) <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> web.run_app(self.app, host='0.0.0.0', port=self.port) <NEW_LINE> <DEDENT> async def index(self, request): <NEW_LINE> <INDENT> return web.json_response({ 'name': 'Sanic-JWT', 'api_version': 'V2'}) <NEW_LINE> <DEDENT> async def sys_info(self, request): <NEW_LINE> <INDENT> return web.json_response(self.site.get_info()) | Application Interface for RPS
| 62599059be8e80087fbc064a |
class MockDailyBarSpotReader(object): <NEW_LINE> <INDENT> def spot_price(self, sid, day, column): <NEW_LINE> <INDENT> return 100.0 | A BcolzDailyBarReader which returns a constant value for spot price. | 625990598a43f66fc4bf3754 |
class Mc(AutotoolsPackage): <NEW_LINE> <INDENT> homepage = "https://midnight-commander.org" <NEW_LINE> url = "http://ftp.midnight-commander.org/mc-4.8.20.tar.bz2" <NEW_LINE> version('4.8.20', 'dcfc7aa613c62291a0f71f6b698d8267') <NEW_LINE> depends_on('ncurses') <NEW_LINE> depends_on('pkgconfig', type='build') <NEW_LINE> depends_on('[email protected]:') <NEW_LINE> depends_on('[email protected]:') <NEW_LINE> def setup_environment(self, spack_env, run_env): <NEW_LINE> <INDENT> if 'darwin' in self.spec.architecture: <NEW_LINE> <INDENT> env['ac_cv_func_utimensat'] = 'no' <NEW_LINE> <DEDENT> <DEDENT> def configure_args(self): <NEW_LINE> <INDENT> args = [ '--disable-debug', '--disable-dependency-tracking', '--disable-silent-rules', '--without-x', '--with-screen=ncurses', '--enable-vfs-sftp' ] <NEW_LINE> return args | The GNU Midnight Commander is a visual file manager. | 625990598e71fb1e983bd090 |
class UShortStat(DoubleBufferedStat): <NEW_LINE> <INDENT> buffer_type = ctypes.c_uint16 | 16bit Double Buffered Unsigned Integer field | 62599059adb09d7d5dc0bb31 |