code: string, lengths 4 to 4.48k
docstring: string, lengths 1 to 6.45k
_id: string, length 24
@implementer(IStyle) <NEW_LINE> class TestStyle: <NEW_LINE> <INDENT> name = 'test' <NEW_LINE> priority = 10 <NEW_LINE> def apply(self, mailing_list): <NEW_LINE> <INDENT> mailing_list.preferred_language = 'en' <NEW_LINE> <DEDENT> def match(self, mailing_list, styles): <NEW_LINE> <INDENT> styles.append(self)
See `IStyle`.
6259907a796e427e53850176
class Stage(object): <NEW_LINE> <INDENT> def __init__(self, builder, state): <NEW_LINE> <INDENT> self.builder = builder <NEW_LINE> self.stage = builder.stage <NEW_LINE> self._state = state <NEW_LINE> self.active = [] <NEW_LINE> self.queued = [] <NEW_LINE> self.pending = [] <NEW_LINE> self.failed = [] <NEW_LINE> self.done = [] <NEW_LINE> self.status = { builder.ADDED: self.pending, builder.QUEUED: self.queued, builder.ACTIVE: self.active, builder.FAILED: self.failed, builder.DONE: self.done, } <NEW_LINE> self.ports = set() <NEW_LINE> builder.update.connect(self._update) <NEW_LINE> <DEDENT> def __getitem__(self, status): <NEW_LINE> <INDENT> return self.status[status] <NEW_LINE> <DEDENT> def _update(self, _builder, status, port): <NEW_LINE> <INDENT> from .builder import Builder <NEW_LINE> if status == Builder.ADDED: <NEW_LINE> <INDENT> assert port not in self.ports <NEW_LINE> assert port not in self.failed <NEW_LINE> assert port not in self.done <NEW_LINE> if self._state.stage_started(self, port): <NEW_LINE> <INDENT> bisect.insort(self.pending, port) <NEW_LINE> <DEDENT> self.ports.add(port) <NEW_LINE> <DEDENT> elif status == Builder.QUEUED: <NEW_LINE> <INDENT> self.pending.remove(port) <NEW_LINE> bisect.insort(self.queued, port) <NEW_LINE> <DEDENT> elif status == Builder.ACTIVE: <NEW_LINE> <INDENT> self.queued.remove(port) <NEW_LINE> self.active.append(port) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ports.remove(port) <NEW_LINE> if port in self.active: <NEW_LINE> <INDENT> self.active.remove(port) <NEW_LINE> <DEDENT> elif port in self.queued: <NEW_LINE> <INDENT> self.queued.remove(port) <NEW_LINE> <DEDENT> elif port in self.pending: <NEW_LINE> <INDENT> self.pending.remove(port) <NEW_LINE> <DEDENT> if self.stage in port.stages: <NEW_LINE> <INDENT> if status == Builder.FAILED: <NEW_LINE> <INDENT> self.failed.append(port) <NEW_LINE> <DEDENT> elif status == Builder.DONE: <NEW_LINE> <INDENT> self.done.append(port) <NEW_LINE> <DEDENT> <DEDENT> self._state.stage_finished(self, port) <NEW_LINE> <DEDENT> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> self.builder.update.disconnect(self._update)
Information about each stage of the build process.
6259907a7c178a314d78e8e9
class GroupMembersSelector(bb.Struct): <NEW_LINE> <INDENT> __slots__ = [ '_group_value', '_group_present', '_users_value', '_users_present', ] <NEW_LINE> _has_required_fields = True <NEW_LINE> def __init__(self, group=None, users=None): <NEW_LINE> <INDENT> self._group_value = None <NEW_LINE> self._group_present = False <NEW_LINE> self._users_value = None <NEW_LINE> self._users_present = False <NEW_LINE> if group is not None: <NEW_LINE> <INDENT> self.group = group <NEW_LINE> <DEDENT> if users is not None: <NEW_LINE> <INDENT> self.users = users <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def group(self): <NEW_LINE> <INDENT> if self._group_present: <NEW_LINE> <INDENT> return self._group_value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError("missing required field 'group'") <NEW_LINE> <DEDENT> <DEDENT> @group.setter <NEW_LINE> def group(self, val): <NEW_LINE> <INDENT> self._group_validator.validate_type_only(val) <NEW_LINE> self._group_value = val <NEW_LINE> self._group_present = True <NEW_LINE> <DEDENT> @group.deleter <NEW_LINE> def group(self): <NEW_LINE> <INDENT> self._group_value = None <NEW_LINE> self._group_present = False <NEW_LINE> <DEDENT> @property <NEW_LINE> def users(self): <NEW_LINE> <INDENT> if self._users_present: <NEW_LINE> <INDENT> return self._users_value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError("missing required field 'users'") <NEW_LINE> <DEDENT> <DEDENT> @users.setter <NEW_LINE> def users(self, val): <NEW_LINE> <INDENT> self._users_validator.validate_type_only(val) <NEW_LINE> self._users_value = val <NEW_LINE> self._users_present = True <NEW_LINE> <DEDENT> @users.deleter <NEW_LINE> def users(self): <NEW_LINE> <INDENT> self._users_value = None <NEW_LINE> self._users_present = False <NEW_LINE> <DEDENT> def _process_custom_annotations(self, annotation_type, field_path, processor): <NEW_LINE> <INDENT> super(GroupMembersSelector, self)._process_custom_annotations(annotation_type, field_path, processor) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'GroupMembersSelector(group={!r}, users={!r})'.format( self._group_value, self._users_value, )
Argument for selecting a group and a list of users. :ivar team.GroupMembersSelector.group: Specify a group. :ivar team.GroupMembersSelector.users: A list of users that are members of ``group``.
6259907aa8370b77170f1dca
class User: <NEW_LINE> <INDENT> userid: str <NEW_LINE> bio: str <NEW_LINE> follows: List[User] <NEW_LINE> tweets: List[Tweet] <NEW_LINE> def __init__(self, id_: str, bio: str) -> None: <NEW_LINE> <INDENT> self.userid = id_ <NEW_LINE> self.bio = bio <NEW_LINE> self.follows = [] <NEW_LINE> self.tweets = [] <NEW_LINE> <DEDENT> def tweet(self, message: str) -> None: <NEW_LINE> <INDENT> new_tweet = Tweet(self.userid, date.today(), message) <NEW_LINE> self.tweets.append(new_tweet) <NEW_LINE> <DEDENT> def follow(self, other: User) -> None: <NEW_LINE> <INDENT> self.follows.append(other) <NEW_LINE> <DEDENT> def verbosity(self, y: int) -> int: <NEW_LINE> <INDENT> cunt = 0 <NEW_LINE> for t in self.tweets: <NEW_LINE> <INDENT> if t.created_at.year == y: <NEW_LINE> <INDENT> cunt += len(t.content) <NEW_LINE> <DEDENT> <DEDENT> return cunt <NEW_LINE> <DEDENT> def hack(self) -> None: <NEW_LINE> <INDENT> for users in self.follows: <NEW_LINE> <INDENT> for t in users.tweets: <NEW_LINE> <INDENT> t.content = 'mwahahaha'
A Twitter user. === Attributes === userid: the userid of this Twitter user. bio: the bio of this Twitter user. follows: a list of the other users who this Twitter user follows. tweets: a list of the tweets that this user has made.
6259907a21bff66bcd724665
class UserIdentifierMissingError(Exception): <NEW_LINE> <INDENT> pass
Indicates that the identifier of a user is missing when the use of unique access token is enabled.
6259907a63b5f9789fe86b63
class SquareDistr(OpDistr): <NEW_LINE> <INDENT> def __init__(self, d): <NEW_LINE> <INDENT> super(SquareDistr, self).__init__([d]) <NEW_LINE> self.d = d <NEW_LINE> <DEDENT> def init_piecewise_pdf(self): <NEW_LINE> <INDENT> self.piecewise_pdf = self.d.get_piecewise_pdf().copySquareComposition() <NEW_LINE> <DEDENT> def pdf(self,x): <NEW_LINE> <INDENT> if x <= 0: <NEW_LINE> <INDENT> f = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> f = (self.d.pdf(-sqrt(x)) + self.d.pdf(sqrt(x))) /(2*sqrt(x)) <NEW_LINE> <DEDENT> return f <NEW_LINE> <DEDENT> def rand_op(self, n, cache): <NEW_LINE> <INDENT> r = self.d.rand(n, cache) <NEW_LINE> return r * r <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "#{0}**2".format(id(self.d)) <NEW_LINE> <DEDENT> def getName(self): <NEW_LINE> <INDENT> return "sqr({0})".format(self.d.getName())
Distribution of the square of a random variable.
6259907a7cff6e4e811b743c
class AzureTests(TestCase, IntegrationTestsV2): <NEW_LINE> <INDENT> provider_name = "azure" <NEW_LINE> domain = "full4ir.tk" <NEW_LINE> def _test_parameters_overrides(self): <NEW_LINE> <INDENT> return {"resource_group": "dns-test"} <NEW_LINE> <DEDENT> def _filter_headers(self): <NEW_LINE> <INDENT> return [("Authorization", "Bearer TOKEN")] <NEW_LINE> <DEDENT> def _filter_post_data_parameters(self): <NEW_LINE> <INDENT> return [("client_id", "CLIENT_ID"), ("client_secret", "CLIENT_SECRET")] <NEW_LINE> <DEDENT> def _filter_request(self, request): <NEW_LINE> <INDENT> request.uri = re.sub( r"/[\w-]+/oauth2/token", "/TENANT_ID/oauth2/token", request.uri ) <NEW_LINE> request.uri = re.sub( r"/subscriptions/[\w-]+/", "/subscriptions/SUBSCRIPTION_ID/", request.uri ) <NEW_LINE> return request <NEW_LINE> <DEDENT> def _filter_response(self, response): <NEW_LINE> <INDENT> response["body"]["string"] = re.sub( rb'"access_token":"[\w.-]+"', b'"access_token":"TOKEN"', response["body"]["string"], ) <NEW_LINE> response["body"]["string"] = re.sub( rb"\\/subscriptions\\/[\w-]+\\/", b"\\/subscriptions\\/SUBSCRIPTION_ID\\/", response["body"]["string"], ) <NEW_LINE> return response
TestCase for Azure DNS
6259907a4c3428357761bcb6
class WebServiceCommon(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def stub(cls, reactor): <NEW_LINE> <INDENT> return cls( reactor=reactor, title='[ShinySDR Test Server]', ws_endpoint_string='tcp:99999') <NEW_LINE> <DEDENT> def __init__(self, reactor, title, ws_endpoint_string): <NEW_LINE> <INDENT> self.reactor = reactor <NEW_LINE> self.title = unicode(title) <NEW_LINE> self.__ws_endpoint_string = ws_endpoint_string <NEW_LINE> <DEDENT> def make_websocket_url(self, request, path): <NEW_LINE> <INDENT> return endpoint_string_to_url(self.__ws_endpoint_string, hostname=request.getRequestHostname(), scheme=b'ws', path=path)
Ugly collection of stuff web resources need which is not noteworthy authority.
6259907aadb09d7d5dc0bf66
class _TRACK(object): <NEW_LINE> <INDENT> def __init__(self, id, prefix, help_tag, help_note): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.prefix = prefix <NEW_LINE> self.help_tag = help_tag <NEW_LINE> self.help_note = help_note <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.id <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.id == other.id <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.id)
An enum representing the release track of a command or command group.
6259907a4f88993c371f1220
class LowStateCountChecker(Checker): <NEW_LINE> <INDENT> THRESHOLD = 3 <NEW_LINE> def check(self, nex): <NEW_LINE> <INDENT> counts = {} <NEW_LINE> for taxon in nex.data.matrix: <NEW_LINE> <INDENT> counts[taxon] = len([ c for c in nex.data.matrix[taxon] if c not in self.EMPTY_STATES]) <NEW_LINE> <DEDENT> med = statistics.median(counts.values()) <NEW_LINE> sd = statistics.stdev(counts.values()) <NEW_LINE> sd_threshold = med - (self.THRESHOLD * sd) <NEW_LINE> for taxon in sorted(counts): <NEW_LINE> <INDENT> if counts[taxon] <= sd_threshold: <NEW_LINE> <INDENT> self.errors.append( "Taxon %s has a low state count (%d, median = %0.2f - %0.2f)" % ( taxon, counts[taxon], med, sd_threshold)) <NEW_LINE> <DEDENT> <DEDENT> return not self.has_errors
Checks for taxa with low character-state counts. Returns errors for any taxon whose count is more than 3 standard deviations below the median.
6259907a1b99ca4002290234
class Meals(models.Model): <NEW_LINE> <INDENT> name = models.CharField("Название", max_length=30, db_index=True, unique=True) <NEW_LINE> proteins = models.PositiveSmallIntegerField("Белки", validators=[validators.MaxValueValidator(10000)]) <NEW_LINE> fats = models.PositiveSmallIntegerField("Жиры", validators=[validators.MaxValueValidator(10000)]) <NEW_LINE> carbohydrates = models.PositiveSmallIntegerField("Углеводы", validators=[validators.MaxValueValidator(10000)]) <NEW_LINE> calories = models.PositiveSmallIntegerField(verbose_name="Калории") <NEW_LINE> price = models.DecimalField("Стоимость", max_digits=7, decimal_places=2, default=100, validators=[validators.MinValueValidator(0)]) <NEW_LINE> picture = models.ImageField("Изображение", null=True, blank=True) <NEW_LINE> allergens = models.ManyToManyField(Allergens, verbose_name='Аллергены') <NEW_LINE> category = models.ForeignKey(Category, verbose_name='Категория', on_delete=models.CASCADE) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Блюдо' <NEW_LINE> verbose_name_plural = 'Блюда'
Dishes
6259907a3617ad0b5ee07b4b
class CommentCacheTTL(BaseCacheTTL): <NEW_LINE> <INDENT> TTL = 30 * 60
Cache TTL for comment data, in seconds
6259907aad47b63b2c5a924d
class SeqAttnMatch(nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_size, identity=False): <NEW_LINE> <INDENT> super(SeqAttnMatch, self).__init__() <NEW_LINE> if not identity: <NEW_LINE> <INDENT> self.linear = nn.Linear(input_size, input_size) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.linear = None <NEW_LINE> <DEDENT> <DEDENT> def forward(self, x, y, y_mask): <NEW_LINE> <INDENT> if self.linear: <NEW_LINE> <INDENT> x_proj = self.linear(x.view(-1, x.size(2))).view(x.size()) <NEW_LINE> x_proj = F.relu(x_proj) <NEW_LINE> y_proj = self.linear(y.view(-1, y.size(2))).view(y.size()) <NEW_LINE> y_proj = F.relu(y_proj) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x_proj = x <NEW_LINE> y_proj = y <NEW_LINE> <DEDENT> scores = x_proj.bmm(y_proj.transpose(2, 1)) <NEW_LINE> y_mask = y_mask.unsqueeze(1).expand(scores.size()) <NEW_LINE> scores.data.masked_fill_(y_mask.data, -float('inf')) <NEW_LINE> alpha_flat = F.softmax(scores.view(-1, y.size(1)), dim=1) <NEW_LINE> alpha = alpha_flat.view(-1, x.size(1), y.size(1)) <NEW_LINE> matched_seq = alpha.bmm(y) <NEW_LINE> return matched_seq
Given sequences X and Y, match sequence Y to each element in X. * o_i = sum(alpha_j * y_j) for i in X * alpha_j = softmax(y_j * x_i)
6259907abe7bc26dc9252b54
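A minimal usage sketch for the SeqAttnMatch module above, assuming PyTorch is installed and the class's own imports (torch.nn as nn, torch.nn.functional as F) are in scope; the tensor shapes and padding layout are illustrative only.

import torch

attn = SeqAttnMatch(input_size=16)
x = torch.randn(2, 5, 16)                   # sequence X: (batch, len_x, dim)
y = torch.randn(2, 7, 16)                   # sequence Y: (batch, len_y, dim)
y_mask = torch.zeros(2, 7, dtype=torch.bool)
y_mask[:, 5:] = True                        # mark the last two Y positions as padding
matched = attn(x, y, y_mask)                # (2, 5, 16): attention-weighted sum of Y per X position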
class PAINSFilter(ParentFilter): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.filters_list = self.get_filters_list() <NEW_LINE> <DEDENT> def get_filters_list(self): <NEW_LINE> <INDENT> params_PAINS_A = FilterCatalogParams() <NEW_LINE> params_PAINS_A.AddCatalog(FilterCatalogParams.FilterCatalogs.PAINS_A) <NEW_LINE> params_PAINS_B = FilterCatalogParams() <NEW_LINE> params_PAINS_B.AddCatalog(FilterCatalogParams.FilterCatalogs.PAINS_B) <NEW_LINE> params_PAINS_C = FilterCatalogParams() <NEW_LINE> params_PAINS_C.AddCatalog(FilterCatalogParams.FilterCatalogs.PAINS_C) <NEW_LINE> params_PAINS = FilterCatalogParams() <NEW_LINE> params_PAINS.AddCatalog(FilterCatalogParams.FilterCatalogs.PAINS) <NEW_LINE> params_list = [params_PAINS_A, params_PAINS_B, params_PAINS_C, params_PAINS] <NEW_LINE> filters_list = [] <NEW_LINE> for param in params_list: <NEW_LINE> <INDENT> filter = FilterCatalog.FilterCatalog(param) <NEW_LINE> filters_list.append(filter) <NEW_LINE> <DEDENT> return filters_list <NEW_LINE> <DEDENT> def run_filter(self, mol): <NEW_LINE> <INDENT> for filters in self.filters_list: <NEW_LINE> <INDENT> if filters.HasMatch(mol) is True: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True
This will filter a ligand using a PAINS filter. The PAINS filter eliminates Pan Assay Interference Compounds using a substructure search. This includes PAINS_A, PAINS_B, and PAINS_C filtering. This script relies on the predefined FilterCatalog maintained by RDKit. If using the PAINS filter please cite: Baell JB, Holloway GA. New Substructure Filters for Removal of Pan Assay Interference Compounds (PAINS) from Screening Libraries and for Their Exclusion in Bioassays. J Med Chem 53 (2010) 2719-40. doi:10.1021/jm901137j. Inputs: :param class ParentFilter: a parent class to initialize from
6259907a7047854f46340db9
class PHP(CNF, object): <NEW_LINE> <INDENT> def __init__(self, nof_holes, kval=1, topv=0, verb=False): <NEW_LINE> <INDENT> super(PHP, self).__init__() <NEW_LINE> vpool = IDPool(start_from=topv + 1) <NEW_LINE> var = lambda i, j: vpool.id('v_{0}_{1}'.format(i, j)) <NEW_LINE> for i in range(1, kval * nof_holes + 2): <NEW_LINE> <INDENT> self.append([var(i, j) for j in range(1, nof_holes + 1)]) <NEW_LINE> <DEDENT> pigeons = range(1, kval * nof_holes + 2) <NEW_LINE> for j in range(1, nof_holes + 1): <NEW_LINE> <INDENT> for comb in itertools.combinations(pigeons, kval + 1): <NEW_LINE> <INDENT> self.append([-var(i, j) for i in comb]) <NEW_LINE> <DEDENT> <DEDENT> if verb: <NEW_LINE> <INDENT> head = 'c {0}PHP formula for'.format('' if kval == 1 else str(kval) + '-') <NEW_LINE> head += ' {0} pigeons and {1} holes'.format(kval * nof_holes + 1, nof_holes) <NEW_LINE> self.comments.append(head) <NEW_LINE> for i in range(1, kval * nof_holes + 2): <NEW_LINE> <INDENT> for j in range(1, nof_holes + 1): <NEW_LINE> <INDENT> self.comments.append('c (pigeon, hole) pair: ({0}, {1}); bool var: {2}'.format(i, j, var(i, j)))
Pigeonhole principle formula for (kval * nof_holes + 1) pigeons and nof_holes holes.
6259907abf627c535bcb2ecd
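A small sketch of how the PHP formula above could be checked with a SAT solver. It assumes the PySAT package is installed (the class itself relies on pysat.formula's CNF and IDPool plus itertools); the hole count is arbitrary.

from pysat.solvers import Glucose3

cnf = PHP(nof_holes=3)                      # 4 pigeons, 3 holes with the default kval=1
with Glucose3(bootstrap_with=cnf.clauses) as solver:
    print(solver.solve())                   # expected: False, the formula is unsatisfiable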
class EncoderCNNSmall(nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_dim, hidden_dim, num_objects, act_fn='sigmoid', act_fn_hid='relu'): <NEW_LINE> <INDENT> super(EncoderCNNSmall, self).__init__() <NEW_LINE> self.cnn1 = nn.Conv2d( input_dim, hidden_dim, (10, 10), stride=10) <NEW_LINE> self.cnn2 = nn.Conv2d(hidden_dim, num_objects, (1, 1), stride=1) <NEW_LINE> self.ln1 = nn.BatchNorm2d(hidden_dim) <NEW_LINE> self.act1 = utils.get_act_fn(act_fn_hid) <NEW_LINE> self.act2 = utils.get_act_fn(act_fn) <NEW_LINE> <DEDENT> def forward(self, obs): <NEW_LINE> <INDENT> h = self.act1(self.ln1(self.cnn1(obs))) <NEW_LINE> return self.act2(self.cnn2(h))
CNN encoder, maps observation to obj-specific feature maps.
6259907a7b180e01f3e49d64
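A shape-check sketch for EncoderCNNSmall, assuming `utils.get_act_fn` is the project's helper mapping an activation name to the corresponding function; a tiny stand-in is defined here only so the sketch is self-contained, and the observation size is illustrative.

import torch
import torch.nn as nn

class utils:                                # stand-in for the project's utils module (assumption)
    @staticmethod
    def get_act_fn(name):
        return {'relu': torch.relu, 'sigmoid': torch.sigmoid}[name]

enc = EncoderCNNSmall(input_dim=3, hidden_dim=32, num_objects=5)
obs = torch.rand(8, 3, 50, 50)              # batch of 50x50 RGB observations
feature_maps = enc(obs)                     # (8, 5, 5, 5): one 5x5 feature map per object slot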
class DownloadEmail(BaseSubstitution): <NEW_LINE> <INDENT> category = _(u'Download') <NEW_LINE> description = _(u'Download e-mail') <NEW_LINE> def safe_call(self): <NEW_LINE> <INDENT> return getattr(self.context, 'email', '')
Download email substitution
6259907a5fcc89381b266e5a
class itkScalarImageKmeansImageFilterID2IUC2(itkImageToImageFilterAPython.itkImageToImageFilterID2IUC2): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> ImageDimension = _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterID2IUC2_ImageDimension <NEW_LINE> InputHasNumericTraitsCheck = _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterID2IUC2_InputHasNumericTraitsCheck <NEW_LINE> def __New_orig__(): <NEW_LINE> <INDENT> return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterID2IUC2___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> def AddClassWithInitialMean(self, *args): <NEW_LINE> <INDENT> return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterID2IUC2_AddClassWithInitialMean(self, *args) <NEW_LINE> <DEDENT> def GetFinalMeans(self): <NEW_LINE> <INDENT> return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterID2IUC2_GetFinalMeans(self) <NEW_LINE> <DEDENT> def SetUseNonContiguousLabels(self, *args): <NEW_LINE> <INDENT> return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterID2IUC2_SetUseNonContiguousLabels(self, *args) <NEW_LINE> <DEDENT> def GetUseNonContiguousLabels(self): <NEW_LINE> <INDENT> return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterID2IUC2_GetUseNonContiguousLabels(self) <NEW_LINE> <DEDENT> def UseNonContiguousLabelsOn(self): <NEW_LINE> <INDENT> return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterID2IUC2_UseNonContiguousLabelsOn(self) <NEW_LINE> <DEDENT> def UseNonContiguousLabelsOff(self): <NEW_LINE> <INDENT> return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterID2IUC2_UseNonContiguousLabelsOff(self) <NEW_LINE> <DEDENT> def SetImageRegion(self, *args): <NEW_LINE> <INDENT> return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterID2IUC2_SetImageRegion(self, *args) <NEW_LINE> <DEDENT> def GetImageRegion(self): <NEW_LINE> <INDENT> return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterID2IUC2_GetImageRegion(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkScalarImageKmeansImageFilterPython.delete_itkScalarImageKmeansImageFilterID2IUC2 <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterID2IUC2_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterID2IUC2_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkScalarImageKmeansImageFilterID2IUC2.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New)
Proxy of C++ itkScalarImageKmeansImageFilterID2IUC2 class
6259907a7d43ff2487428114
class IRamlApiDefinition(Interface): <NEW_LINE> <INDENT> pass
Marker interface for API Definition
6259907a009cb60464d02f3d
class Visualizer(object): <NEW_LINE> <INDENT> def __init__(self, env='default', **kwargs): <NEW_LINE> <INDENT> self.vis = visdom.Visdom(env=env,use_incoming_socket=False, **kwargs) <NEW_LINE> self.index = {} <NEW_LINE> self.log_text = '' <NEW_LINE> <DEDENT> def plot_many(self, d): <NEW_LINE> <INDENT> for k, v in d.items(): <NEW_LINE> <INDENT> self.plot(k, v) <NEW_LINE> <DEDENT> <DEDENT> def img_many(self, d): <NEW_LINE> <INDENT> for k, v in d.items(): <NEW_LINE> <INDENT> self.img(k, v) <NEW_LINE> <DEDENT> <DEDENT> def plot(self, name, y, **kwargs): <NEW_LINE> <INDENT> x = self.index.get(name, 0) <NEW_LINE> self.vis.line(Y=np.array([y]), X=np.array([x]),win=name,opts=dict(title=name),update=None if x == 0 else 'append',**kwargs) <NEW_LINE> self.index[name] = x + 1 <NEW_LINE> <DEDENT> def img(self, name, img_, **kwargs): <NEW_LINE> <INDENT> self.vis.images(img_.cpu().numpy(),win=name,opts=dict(title=name),**kwargs) <NEW_LINE> <DEDENT> def log(self, info, win='log_text'): <NEW_LINE> <INDENT> self.log_text += ('[{time}] {info} <br>'.format(time=time.strftime('%m%d_%H%M%S'), info=info)) <NEW_LINE> self.vis.text(self.log_text, win) <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> return getattr(self.vis, name)
Wraps the basic visdom operations, but you can still call the native visdom API via `self.vis.function`
6259907aa05bb46b3848be28
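A brief usage sketch for the Visualizer wrapper above; it assumes a visdom server is already running (e.g. started with `python -m visdom.server`) and that visdom, numpy and time are imported as the class requires. The window names are made up for illustration.

vis = Visualizer(env='demo')
for loss in (0.9, 0.7, 0.55, 0.41):
    vis.plot('loss', loss)                  # each call appends one point to the 'loss' window
vis.log('epoch finished')                   # accumulates timestamped lines in a text window
vis.text('hello', win='scratch')            # unknown attributes fall through to the raw visdom client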
class Restaurant(): <NEW_LINE> <INDENT> def __init__(self, restaurant_name, cuisine_type): <NEW_LINE> <INDENT> self.restaurant_name = restaurant_name <NEW_LINE> self.cuisine_type = cuisine_type <NEW_LINE> <DEDENT> def describe_restaurant(self): <NEW_LINE> <INDENT> print(f"Restaurant name : {self.restaurant_name.title()}") <NEW_LINE> print(f"Cuisine type : {self.cuisine_type.title()}") <NEW_LINE> <DEDENT> def open_restaurant(self): <NEW_LINE> <INDENT> print(f"{self.restaurant_name.title()} is Open.")
A class which represents a restaurant.
6259907a23849d37ff852ab7
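Usage of the Restaurant class above is straightforward; the restaurant name and cuisine below are made up for illustration.

restaurant = Restaurant('dragon palace', 'cantonese')
restaurant.describe_restaurant()            # prints the title-cased name and cuisine type
restaurant.open_restaurant()                # prints "Dragon Palace is Open."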
class Seq2VecEncoder(torch.nn.Module, Registrable): <NEW_LINE> <INDENT> def get_input_dim(self) -> int: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_output_dim(self) -> int: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_params(cls, params: Params) -> 'Seq2VecEncoder': <NEW_LINE> <INDENT> choice = params.pop_choice('type', cls.list_available()) <NEW_LINE> return cls.by_name(choice).from_params(params)
A ``Seq2VecEncoder`` is a ``Module`` that takes as input a sequence of vectors and returns a single vector. Input shape: ``(batch_size, sequence_length, input_dim)``; output shape: ``(batch_size, output_dim)``. We add two methods to the basic ``Module`` API: :func:`get_input_dim()` and :func:`get_output_dim()`. You might need this if you want to construct a ``Linear`` layer using the output of this encoder, or to raise sensible errors for mis-matching input dimensions.
6259907a4428ac0f6e659f2e
class PathInteractor: <NEW_LINE> <INDENT> showverts = True <NEW_LINE> epsilon = 5 <NEW_LINE> def __init__(self, pathpatch): <NEW_LINE> <INDENT> self.ax = pathpatch.axes <NEW_LINE> canvas = self.ax.figure.canvas <NEW_LINE> self.pathpatch = pathpatch <NEW_LINE> self.pathpatch.set_animated(True) <NEW_LINE> x, y = zip(*self.pathpatch.get_path().vertices) <NEW_LINE> self.line, = ax.plot( x, y, marker='o', markerfacecolor='r', animated=True) <NEW_LINE> self._ind = None <NEW_LINE> canvas.mpl_connect('draw_event', self.on_draw) <NEW_LINE> canvas.mpl_connect('button_press_event', self.on_button_press) <NEW_LINE> canvas.mpl_connect('key_press_event', self.on_key_press) <NEW_LINE> canvas.mpl_connect('button_release_event', self.on_button_release) <NEW_LINE> canvas.mpl_connect('motion_notify_event', self.on_mouse_move) <NEW_LINE> self.canvas = canvas <NEW_LINE> <DEDENT> def get_ind_under_point(self, event): <NEW_LINE> <INDENT> xy = np.asarray(self.pathpatch.get_path().vertices) <NEW_LINE> xyt = self.pathpatch.get_transform().transform(xy) <NEW_LINE> xt, yt = xyt[:, 0], xyt[:, 1] <NEW_LINE> d = np.sqrt((xt - event.x)**2 + (yt - event.y)**2) <NEW_LINE> ind = d.argmin() <NEW_LINE> if d[ind] >= self.epsilon: <NEW_LINE> <INDENT> ind = None <NEW_LINE> <DEDENT> return ind <NEW_LINE> <DEDENT> def on_draw(self, event): <NEW_LINE> <INDENT> self.background = self.canvas.copy_from_bbox(self.ax.bbox) <NEW_LINE> self.ax.draw_artist(self.pathpatch) <NEW_LINE> self.ax.draw_artist(self.line) <NEW_LINE> self.canvas.blit(self.ax.bbox) <NEW_LINE> <DEDENT> def on_button_press(self, event): <NEW_LINE> <INDENT> if (event.inaxes is None or event.button != MouseButton.LEFT or not self.showverts): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._ind = self.get_ind_under_point(event) <NEW_LINE> <DEDENT> def on_button_release(self, event): <NEW_LINE> <INDENT> if (event.button != MouseButton.LEFT or not self.showverts): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._ind = None <NEW_LINE> <DEDENT> def on_key_press(self, event): <NEW_LINE> <INDENT> if not event.inaxes: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if event.key == 't': <NEW_LINE> <INDENT> self.showverts = not self.showverts <NEW_LINE> self.line.set_visible(self.showverts) <NEW_LINE> if not self.showverts: <NEW_LINE> <INDENT> self._ind = None <NEW_LINE> <DEDENT> <DEDENT> self.canvas.draw() <NEW_LINE> <DEDENT> def on_mouse_move(self, event): <NEW_LINE> <INDENT> if (self._ind is None or event.inaxes is None or event.button != MouseButton.LEFT or not self.showverts): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> vertices = self.pathpatch.get_path().vertices <NEW_LINE> vertices[self._ind] = event.xdata, event.ydata <NEW_LINE> self.line.set_data(zip(*vertices)) <NEW_LINE> self.canvas.restore_region(self.background) <NEW_LINE> self.ax.draw_artist(self.pathpatch) <NEW_LINE> self.ax.draw_artist(self.line) <NEW_LINE> self.canvas.blit(self.ax.bbox)
A path editor. Press 't' to toggle vertex markers on and off. When vertex markers are on, they can be dragged with the mouse.
6259907a5fdd1c0f98e5f97e
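A usage sketch for PathInteractor, assuming the module-level imports the class relies on (numpy as np, MouseButton from matplotlib.backend_bases) and, as in the matplotlib gallery example this mirrors, a module-level `ax` that the constructor references; the path coordinates are arbitrary.

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.backend_bases import MouseButton
from matplotlib.patches import PathPatch
from matplotlib.path import Path

fig, ax = plt.subplots()
pathdata = [
    (Path.MOVETO, (1.58, -2.57)),
    (Path.CURVE4, (0.35, -1.1)),
    (Path.CURVE4, (-1.75, 2.0)),
    (Path.CURVE4, (0.375, 2.0)),
    (Path.CLOSEPOLY, (1.58, -2.57)),
]
codes, verts = zip(*pathdata)
patch = PathPatch(Path(verts, codes), facecolor='green', edgecolor='yellow', alpha=0.5)
ax.add_patch(patch)
interactor = PathInteractor(patch)          # keep a reference so the event callbacks stay alive
ax.set_title('drag vertices; press "t" to toggle markers')
ax.set_xlim(-3, 4)
ax.set_ylim(-3, 4)
plt.show()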
class ProvisionSpec: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.disks = [] <NEW_LINE> <DEDENT> def toElement(self, doc): <NEW_LINE> <INDENT> element = doc.createElement("provision") <NEW_LINE> for disk in self.disks: <NEW_LINE> <INDENT> element.appendChild(disk.toElement(doc)) <NEW_LINE> <DEDENT> return element <NEW_LINE> <DEDENT> def setSR(self, sr): <NEW_LINE> <INDENT> for disk in self.disks: <NEW_LINE> <INDENT> disk.sr = sr
Represents a provisioning specification: currently a list of required disks
6259907a44b2445a339b765d
class TestStreams(BaseTest): <NEW_LINE> <INDENT> def test_add_stream(self): <NEW_LINE> <INDENT> response = self.client.post( '/api/v1/streams', data=json.dumps(add_stream), content_type='application/json', headers=self.get_registrar_token()) <NEW_LINE> result = json.loads(response.data.decode()) <NEW_LINE> self.assertEqual(result['message'], 'Stream added successfully') <NEW_LINE> assert response.status_code == 201 <NEW_LINE> <DEDENT> def test_add_stream_keys(self): <NEW_LINE> <INDENT> response = self.client.post( '/api/v1/streams', data=json.dumps(add_stream_keys), content_type='application/json', headers=self.get_registrar_token()) <NEW_LINE> result = json.loads(response.data.decode()) <NEW_LINE> self.assertEqual(result['message'], 'Invalid stream_name key') <NEW_LINE> assert response.status_code == 400 <NEW_LINE> <DEDENT> def test_get_all_streams(self): <NEW_LINE> <INDENT> self.client.post( '/api/v1/streams', data=json.dumps(add_stream), content_type='application/json', headers=self.get_registrar_token()) <NEW_LINE> response = self.client.get( '/api/v1/streams', content_type='application/json', headers=self.get_token()) <NEW_LINE> result = json.loads(response.data.decode()) <NEW_LINE> self.assertEqual(result['message'], 'Streams retrived successfully') <NEW_LINE> assert response.status_code == 200 <NEW_LINE> <DEDENT> def test_get_streams_by_id(self): <NEW_LINE> <INDENT> self.client.post( '/api/v1/streams', data=json.dumps(add_stream), content_type='application/json', headers=self.get_registrar_token()) <NEW_LINE> response = self.client.get( '/api/v1/streams/1', content_type='application/json', headers=self.get_token()) <NEW_LINE> result = json.loads(response.data.decode()) <NEW_LINE> self.assertEqual(result['message'], 'Stream retrived successfully') <NEW_LINE> assert response.status_code == 200 <NEW_LINE> <DEDENT> def test_get_streams_by_name(self): <NEW_LINE> <INDENT> self.client.post( '/api/v1/streams', data=json.dumps(add_stream), content_type='application/json', headers=self.get_registrar_token()) <NEW_LINE> response = self.client.get( '/api/v1/streams/Form 1A', content_type='application/json', headers=self.get_token()) <NEW_LINE> result = json.loads(response.data.decode()) <NEW_LINE> self.assertEqual(result['message'], 'Stream retrived successfully') <NEW_LINE> assert response.status_code == 200 <NEW_LINE> <DEDENT> def test_get_non_existing_stream_by_id(self): <NEW_LINE> <INDENT> self.client.post( '/api/v1/streams', data=json.dumps(add_stream), content_type='application/json', headers=self.get_registrar_token()) <NEW_LINE> response = self.client.get( '/api/v1/streams/10', content_type='application/json', headers=self.get_token()) <NEW_LINE> result = json.loads(response.data.decode()) <NEW_LINE> self.assertEqual(result['message'], 'Stream not found') <NEW_LINE> assert response.status_code == 404 <NEW_LINE> <DEDENT> def test_get_non_existing_stream_by_name(self): <NEW_LINE> <INDENT> self.client.post( '/api/v1/streams', data=json.dumps(add_stream), content_type='application/json', headers=self.get_registrar_token()) <NEW_LINE> response = self.client.get( '/api/v1/streams/Form 2A', content_type='application/json', headers=self.get_token()) <NEW_LINE> result = json.loads(response.data.decode()) <NEW_LINE> self.assertEqual(result['message'], 'Stream not found') <NEW_LINE> assert response.status_code == 404
Test streams endpoints.
6259907aadb09d7d5dc0bf68
class NumpyFmDemod(): <NEW_LINE> <INDENT> def __init__(self, frequency, sample_rate=1800000, sample_count=8192000, dc_offset=250000): <NEW_LINE> <INDENT> self.freq = int(frequency) <NEW_LINE> self.sample_rate = sample_rate <NEW_LINE> self.sample_count = sample_count <NEW_LINE> self.dc_offset = dc_offset <NEW_LINE> <DEDENT> def capture_samples(self): <NEW_LINE> <INDENT> sdr = RtlSdr() <NEW_LINE> sdr.sample_rate = self.sample_rate <NEW_LINE> sdr.center_freq = self.freq - self.dc_offset <NEW_LINE> sdr.gain = 'auto' <NEW_LINE> self.samples = sdr.read_samples(self.sample_count) <NEW_LINE> self.samples_to_np() <NEW_LINE> sdr.close() <NEW_LINE> <DEDENT> def load_samples(self, filename): <NEW_LINE> <INDENT> self.samples = np.load(filename) <NEW_LINE> <DEDENT> def dump_samples(self, filename): <NEW_LINE> <INDENT> np.save(filename, self.samples) <NEW_LINE> <DEDENT> def decimate(self, rate): <NEW_LINE> <INDENT> self.samples = signal.decimate(self.samples, rate) <NEW_LINE> self.sample_rate /= rate <NEW_LINE> <DEDENT> def samples_to_np(self): <NEW_LINE> <INDENT> self.samples = np.array(self.samples).astype('complex64') <NEW_LINE> <DEDENT> def mix_down_dc_offset(self): <NEW_LINE> <INDENT> fc1 = np.exp(-1.0j * 2.0 * np.pi * self.dc_offset / self.sample_rate * np.arange(len(self.samples))) <NEW_LINE> self.samples *= fc1 <NEW_LINE> <DEDENT> def lowpass_filter(self): <NEW_LINE> <INDENT> BANDWIDTH = 200000 <NEW_LINE> decimation_rate = int(self.sample_rate / BANDWIDTH) <NEW_LINE> self.decimate(decimation_rate) <NEW_LINE> <DEDENT> def polar_discriminator(self): <NEW_LINE> <INDENT> self.samples = np.angle(self.samples[1:] * np.conj(self.samples[:-1])) <NEW_LINE> <DEDENT> def de_emphasis_filter(self): <NEW_LINE> <INDENT> d = self.sample_rate * 75e-6 <NEW_LINE> x = np.exp(-1/d) <NEW_LINE> b, a = [1-x], [1,-x] <NEW_LINE> self.samples = signal.lfilter(b, a, self.samples) <NEW_LINE> <DEDENT> def mono_decimate(self): <NEW_LINE> <INDENT> audio_freq = 44100.0 <NEW_LINE> decimation_rate = int(self.sample_rate / audio_freq) <NEW_LINE> self.decimate(decimation_rate) <NEW_LINE> <DEDENT> def scale_volume(self): <NEW_LINE> <INDENT> self.samples *= 10000 / np.max(np.abs(self.samples)) <NEW_LINE> <DEDENT> def output_file(self, filename, astype='int16'): <NEW_LINE> <INDENT> self.samples.astype(astype).tofile(filename) <NEW_LINE> <DEDENT> def demod(self): <NEW_LINE> <INDENT> self.mix_down_dc_offset() <NEW_LINE> self.lowpass_filter() <NEW_LINE> self.polar_discriminator() <NEW_LINE> self.de_emphasis_filter() <NEW_LINE> self.mono_decimate() <NEW_LINE> self.scale_volume()
Numpy-based FM signal demodulation. Based on the great tutorial by Fraida Fund: https://witestlab.poly.edu/blog/capture-and-decode-fm-radio/
6259907a60cbc95b06365a6d
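An offline usage sketch for NumpyFmDemod: instead of capturing from an RTL-SDR dongle it loads IQ samples previously saved with dump_samples (the filenames are hypothetical), runs the demodulation chain, and writes raw 16-bit audio. It assumes the saved samples were captured with the class's default sample rate and DC offset.

demod = NumpyFmDemod(frequency=94.9e6)      # station frequency in Hz (example value)
demod.load_samples('iq_samples.npy')        # hypothetical file created earlier via dump_samples()
demod.demod()                               # mix down, filter, discriminate, de-emphasize, decimate, scale
print(demod.sample_rate)                    # effective audio rate after the two decimation steps
demod.output_file('audio_s16le.raw')        # raw mono int16 samples at that rate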
class Invalid(DictizationError): <NEW_LINE> <INDENT> error: str <NEW_LINE> def __init__(self, error: str, key: Optional[Any] = None) -> None: <NEW_LINE> <INDENT> self.error = error
Exception raised by some validator, converter and dictization functions when the given value is invalid.
6259907a5166f23b2e244dd7
class LocalsDictNodeNG(LookupMixIn, NodeNG): <NEW_LINE> <INDENT> def qname(self): <NEW_LINE> <INDENT> if self.parent is None: <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> return '%s.%s' % (self.parent.frame().qname(), self.name) <NEW_LINE> <DEDENT> def frame(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def scope(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def _scope_lookup(self, node, name, offset=0): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> stmts = node._filter_stmts(self.locals[name], self, offset) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> stmts = () <NEW_LINE> <DEDENT> if stmts: <NEW_LINE> <INDENT> return self, stmts <NEW_LINE> <DEDENT> if self.parent: <NEW_LINE> <INDENT> pscope = self.parent.scope() <NEW_LINE> if not pscope.is_function: <NEW_LINE> <INDENT> pscope = pscope.root() <NEW_LINE> <DEDENT> return pscope.scope_lookup(node, name) <NEW_LINE> <DEDENT> return builtin_lookup(name) <NEW_LINE> <DEDENT> def set_local(self, name, stmt): <NEW_LINE> <INDENT> self.locals.setdefault(name, []).append(stmt) <NEW_LINE> <DEDENT> __setitem__ = set_local <NEW_LINE> def _append_node(self, child): <NEW_LINE> <INDENT> self.body.append(child) <NEW_LINE> child.parent = self <NEW_LINE> <DEDENT> def add_local_node(self, child_node, name=None): <NEW_LINE> <INDENT> if name != '__class__': <NEW_LINE> <INDENT> self._append_node(child_node) <NEW_LINE> <DEDENT> self.set_local(name or child_node.name, child_node) <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> return self.locals[item][0] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.keys()) <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return list(self.locals.keys()) <NEW_LINE> <DEDENT> def values(self): <NEW_LINE> <INDENT> return [self[key] for key in self.keys()] <NEW_LINE> <DEDENT> def items(self): <NEW_LINE> <INDENT> return list(zip(self.keys(), self.values())) <NEW_LINE> <DEDENT> def __contains__(self, name): <NEW_LINE> <INDENT> return name in self.locals <NEW_LINE> <DEDENT> has_key = __contains__
This class provides locals handling common to Module, Function and Class nodes, including a dict-like interface for direct access to locals information
6259907ad268445f2663a85e
class WidgetRedirector: <NEW_LINE> <INDENT> def __init__(self, widget): <NEW_LINE> <INDENT> self.dict = {} <NEW_LINE> self.widget = widget <NEW_LINE> self.tk = tk = widget.tk <NEW_LINE> w = widget._w <NEW_LINE> self.orig = w + "_orig" <NEW_LINE> tk.call("rename", w, self.orig) <NEW_LINE> tk.createcommand(w, self.dispatch) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "WidgetRedirector(%s<%s>)" % (self.widget.__class__.__name__, self.widget._w) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> for name in list(self.dict.keys()): <NEW_LINE> <INDENT> self.unregister(name) <NEW_LINE> <DEDENT> widget = self.widget; del self.widget <NEW_LINE> orig = self.orig; del self.orig <NEW_LINE> tk = widget.tk <NEW_LINE> w = widget._w <NEW_LINE> tk.deletecommand(w) <NEW_LINE> tk.call("rename", orig, w) <NEW_LINE> <DEDENT> def register(self, name, function): <NEW_LINE> <INDENT> if name in self.dict: <NEW_LINE> <INDENT> previous = dict[name] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> previous = OriginalCommand(self, name) <NEW_LINE> <DEDENT> self.dict[name] = function <NEW_LINE> setattr(self.widget, name, function) <NEW_LINE> return previous <NEW_LINE> <DEDENT> def unregister(self, name): <NEW_LINE> <INDENT> if name in self.dict: <NEW_LINE> <INDENT> function = self.dict[name] <NEW_LINE> del self.dict[name] <NEW_LINE> if hasattr(self.widget, name): <NEW_LINE> <INDENT> delattr(self.widget, name) <NEW_LINE> <DEDENT> return function <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def dispatch(self, cmd, *args): <NEW_LINE> <INDENT> m = self.dict.get(cmd) <NEW_LINE> try: <NEW_LINE> <INDENT> if m: <NEW_LINE> <INDENT> return m(*args) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.tk.call((self.orig, cmd) + args) <NEW_LINE> <DEDENT> <DEDENT> except tk.TclError: <NEW_LINE> <INDENT> return ""
Support for redirecting arbitrary widget subcommands.
6259907b92d797404e38985c
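A usage sketch for WidgetRedirector in the classic idlelib style. It assumes the companion OriginalCommand class from the same module is available (register() returns one when no previous handler exists); it intercepts a Text widget's "insert" subcommand.

import tkinter as tk

root = tk.Tk()
text = tk.Text(root)
text.pack()
redirector = WidgetRedirector(text)

def my_insert(*args):
    print('insert intercepted:', args)
    original_insert(*args)                  # delegate to the original Tk "insert" command

original_insert = redirector.register('insert', my_insert)
text.insert('end', 'hello')                 # now routed through my_insert
root.mainloop()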
class Node(graphene.relay.Node): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> name = "Node" <NEW_LINE> <DEDENT> name = graphene.String(required=True) <NEW_LINE> family = graphene.String(required=True) <NEW_LINE> @staticmethod <NEW_LINE> def to_global_id(type_, id): <NEW_LINE> <INDENT> print(f"Node.to_global_id: type:{type_}, id={id}") <NEW_LINE> raise NotImplementedError("NYI!") <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_node_from_global_id(info, global_id, only_type=None): <NEW_LINE> <INDENT> print(f"Node.get_node_from_gloabl_id: info:{info}, global_id:{global_id}") <NEW_LINE> raise NotImplementedError("NYI!")
Requirements of all flow nodes
6259907b7d847024c075dddd
class SingleSheet(object): <NEW_LINE> <INDENT> def __init__(self, sheet_id, client_factory=SheetsClient): <NEW_LINE> <INDENT> self.client = client_factory() <NEW_LINE> self.sheet_id = sheet_id <NEW_LINE> <DEDENT> def read_range(self, sheet_range): <NEW_LINE> <INDENT> return self.client.read_range(self.sheet_id, sheet_range) <NEW_LINE> <DEDENT> def read_range_as_DataFrame(self, sheet_range): <NEW_LINE> <INDENT> data = self.client.read_range(self.sheet_id, sheet_range) <NEW_LINE> headers = data.pop(0) <NEW_LINE> return pandas.DataFrame(data, columns=headers)
A handle to a single Google sheet.
6259907b66673b3332c31e00
class LinearIOSystem(InputOutputSystem, StateSpace): <NEW_LINE> <INDENT> def __init__(self, linsys, inputs=None, outputs=None, states=None, name=None): <NEW_LINE> <INDENT> if not isinstance(linsys, StateSpace): <NEW_LINE> <INDENT> raise TypeError("Linear I/O system must be a state space object") <NEW_LINE> <DEDENT> super(LinearIOSystem, self).__init__( inputs=linsys.inputs, outputs=linsys.outputs, states=linsys.states, params={}, dt=linsys.dt, name=name) <NEW_LINE> StateSpace.__init__(self, linsys, remove_useless=False) <NEW_LINE> ninputs, self.input_index = self._process_signal_list( inputs if inputs is not None else linsys.inputs, prefix='u') <NEW_LINE> if ninputs is not None and linsys.inputs != ninputs: <NEW_LINE> <INDENT> raise ValueError("Wrong number/type of inputs given.") <NEW_LINE> <DEDENT> noutputs, self.output_index = self._process_signal_list( outputs if outputs is not None else linsys.outputs, prefix='y') <NEW_LINE> if noutputs is not None and linsys.outputs != noutputs: <NEW_LINE> <INDENT> raise ValueError("Wrong number/type of outputs given.") <NEW_LINE> <DEDENT> nstates, self.state_index = self._process_signal_list( states if states is not None else linsys.states, prefix='x') <NEW_LINE> if nstates is not None and linsys.states != nstates: <NEW_LINE> <INDENT> raise ValueError("Wrong number/type of states given.") <NEW_LINE> <DEDENT> <DEDENT> def _update_params(self, params={}, warning=True): <NEW_LINE> <INDENT> if params and warning: <NEW_LINE> <INDENT> warn("Parameters passed to LinearIOSystems are ignored.") <NEW_LINE> <DEDENT> <DEDENT> def _rhs(self, t, x, u): <NEW_LINE> <INDENT> xdot = np.dot(self.A, np.reshape(x, (-1, 1))) + np.dot(self.B, np.reshape(u, (-1, 1))) <NEW_LINE> return np.array(xdot).reshape((-1,)) <NEW_LINE> <DEDENT> def _out(self, t, x, u): <NEW_LINE> <INDENT> y = np.dot(self.C, np.reshape(x, (-1, 1))) + np.dot(self.D, np.reshape(u, (-1, 1))) <NEW_LINE> return np.array(y).reshape((-1,))
Input/output representation of a linear (state space) system. This class is used to implement a system that is a linear state space system (defined by the StateSpace system object).
6259907b60cbc95b06365a6e
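A usage sketch for LinearIOSystem, assuming the python-control package version this class comes from (where control.ss and control.input_output_response are available); the state-space matrices and simulation horizon are arbitrary.

import numpy as np
import control

# Wrap a plain StateSpace model so it can be simulated with the I/O system tools.
ss_model = control.ss([[0., 1.], [-4., -1.]], [[0.], [1.]], [[1., 0.]], [[0.]])
plant = LinearIOSystem(ss_model, name='plant')
T = np.linspace(0., 10., 200)
t, y = control.input_output_response(plant, T, U=np.ones_like(T))  # unit-step response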
class TD_IPSO_3302_02(CoAPTestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> @typecheck <NEW_LINE> def get_stimulis(cls) -> list_of(Value): <NEW_LINE> <INDENT> return [CoAP(type='con', code='get')] <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.match('server', CoAP(type='con', code='get', opt=self.uri('3302/0/5500')), 'fail') <NEW_LINE> self.match('server', CoAP(opt=Opt(CoAPOptionAccept('40'))), 'fail') <NEW_LINE> self.next() <NEW_LINE> self.match('client', CoAP(code=2.05, pl=Not(b'')), 'fail') <NEW_LINE> self.match('client', CoAP(opt=Opt(CoAPOptionContentFormat('40'))), 'fail')
testcase_id: TD_IPSO_3302_02 uri : http://openmobilealliance.org/iot/lightweight-m2m-lwm2m configuration: LWM2M_CFG_01 objective: - Querying of Digital input state resource (ID = 5500) value of Presence object (ID = 3302) in plain text format pre_conditions: Device is registered at the LWM2M server sequence: - step_id: 'TD_IPSO_3302_02_step_01' type: stimuli node : lwm2m_server description: - 'Server sends a READ request (COAP GET) on Digital input state resource of Presence object' - - Type = 0 (CON) - Code = 1 (GET) - step_id: 'TD_IPSO_3302_02_step_02' type: check description: - 'The request sent by the server contains' - - Type=0 and Code=1 - Accept option = text/plain - Uri-Path option = 3302/0/5500 - step_id: 'TD_IPSO_3302_02_step_03' type: check description: - 'Client sends response containing' - - Code = 2.05 (Content) - content-format option = text/plain - Non-empty Payload - step_id: 'TD_IPSO_3302_02_step_04' type: verify node: lwm2m_server description: - 'Requested data is successfully displayed'
6259907b442bda511e95da58
class BrowserBotStartLink(GenericWaitPageMixin, HTTPEndpoint): <NEW_LINE> <INDENT> url_pattern = '/browser_bot_start/{admin_secret_code}' <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> admin_secret_code = request.path_params['admin_secret_code'] <NEW_LINE> if admin_secret_code != otree.common.get_admin_secret_code(): <NEW_LINE> <INDENT> return Response('Incorrect code', status_code=404) <NEW_LINE> <DEDENT> session_code = GlobalState.browser_bots_launcher_session_code <NEW_LINE> if session_code: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> session = Session.objects_get(code=session_code) <NEW_LINE> <DEDENT> except NoResultFound: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> participant = ( session.pp_set.filter_by(visited=False).order_by('id').first() ) <NEW_LINE> if not participant: <NEW_LINE> <INDENT> return no_participants_left_http_response() <NEW_LINE> <DEDENT> participant.visited = True <NEW_LINE> return RedirectResponse(participant._start_url(), status_code=302) <NEW_LINE> <DEDENT> <DEDENT> ctx = dict( view=self, title_text='Please wait', body_text='Waiting for browser bots session to begin', ) <NEW_LINE> return render("otree/WaitPage.html", ctx) <NEW_LINE> <DEDENT> def socket_url(self): <NEW_LINE> <INDENT> return '/browser_bot_wait/'
Should I move this to another module? The rest of these views are accessible without password login.
6259907b55399d3f05627f15
class UpdateNetwork(neutronV20.UpdateCommand): <NEW_LINE> <INDENT> log = logging.getLogger(__name__ + '.UpdateNetwork') <NEW_LINE> resource = 'network'
Update network's information.
6259907bbf627c535bcb2ed1
class Test_parse_functions_1(unittest.TestCase): <NEW_LINE> <INDENT> def run_checks(self, doc): <NEW_LINE> <INDENT> self.assertEqual(len(doc.xml_children), 1) <NEW_LINE> self.assertEqual(doc.xml_children[0].xml_type, tree.element.xml_type) <NEW_LINE> self.assertEqual(doc.xml_children[0].xml_qname, 'monty') <NEW_LINE> self.assertEqual(doc.xml_children[0].xml_namespace, None) <NEW_LINE> self.assertEqual(doc.xml_children[0].xml_prefix, None) <NEW_LINE> self.assertEqual(len(doc.monty.xml_children), 5) <NEW_LINE> self.assertEqual(doc.monty.xml_children[0].xml_type, tree.text.xml_type) <NEW_LINE> self.assertEqual(doc.monty.xml_children[1].xml_type, tree.element.xml_type) <NEW_LINE> self.assertEqual(doc.monty.xml_children[1].xml_qname, 'python') <NEW_LINE> self.assertEqual(doc.monty.xml_children[1].xml_namespace, None) <NEW_LINE> self.assertEqual(doc.monty.xml_children[1].xml_prefix, None) <NEW_LINE> self.assertEqual(len(doc.monty.python), 2) <NEW_LINE> self.assertEqual(doc.monty.python[1].xml_qname, 'python') <NEW_LINE> self.assertEqual(';'.join([ e.xml_qname for e in doc.monty.python ]), u'python;python') <NEW_LINE> <DEDENT> def test_parse_with_string(self): <NEW_LINE> <INDENT> doc = parse(MONTY_XML) <NEW_LINE> self.run_checks(doc) <NEW_LINE> <DEDENT> def test_parse_with_stream(self): <NEW_LINE> <INDENT> fname = tempfile.mktemp('.xml') <NEW_LINE> fout = open(fname, 'w') <NEW_LINE> fout.write(MONTY_XML) <NEW_LINE> fout.close() <NEW_LINE> fout = open(fname, 'r') <NEW_LINE> doc = parse(fout) <NEW_LINE> fout.close() <NEW_LINE> self.run_checks(doc) <NEW_LINE> <DEDENT> def test_parse_with_file_path(self): <NEW_LINE> <INDENT> fname = tempfile.mktemp('.xml') <NEW_LINE> fout = open(fname, 'w') <NEW_LINE> fout.write(MONTY_XML) <NEW_LINE> fout.close() <NEW_LINE> doc = parse(fname) <NEW_LINE> self.run_checks(doc) <NEW_LINE> <DEDENT> def test_attribute_series(self): <NEW_LINE> <INDENT> doc = parse(SILLY_XML) <NEW_LINE> self.assertEqual(';'.join([ e.name for e in doc.parent.element ]), u'a;b') <NEW_LINE> <DEDENT> def test_attribute_series(self): <NEW_LINE> <INDENT> doc = parse(SILLY_NS_XML) <NEW_LINE> self.assertEqual(';'.join([ e.name for e in doc.parent.sillywrap.element ]), u'a;b') <NEW_LINE> <DEDENT> def test_nasty_xml_1(self): <NEW_LINE> <INDENT> doc = parse(NASTY_NS_XML1) <NEW_LINE> self.assertEqual(len(doc.top.xml_children), 5) <NEW_LINE> self.assertEqual(len(list(doc.top.monty)), 1) <NEW_LINE> self.assertEqual(len(list(doc.top.monty_)), 1) <NEW_LINE> self.assertEqual(doc.top.monty.xml_namespace, u"urn:bogus:a") <NEW_LINE> self.assertEqual(doc.top.monty_.xml_namespace, u"urn:bogus:b") <NEW_LINE> self.assertEqual(doc.top.monty.xml_following_sibling.xml_following_sibling, doc.top.monty_) <NEW_LINE> <DEDENT> def test_non_xml_1(self): <NEW_LINE> <INDENT> arg = '{spam}{eggs}'*500 <NEW_LINE> self.assertRaises(ValueError, lambda arg=arg: parse(arg))
Testing local sources
6259907bdc8b845886d54fbd
class SongSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Song <NEW_LINE> fields = ('id', 'title', 'song_length', 'release_date', 'artist', 'genre', 'album') <NEW_LINE> depth = 1
Class for data serialization of a specific Model: Song
6259907b76e4537e8c3f0f81
class EnableWafWhiteRulesRequest(JDCloudRequest): <NEW_LINE> <INDENT> def __init__(self, parameters, header=None, version="v1"): <NEW_LINE> <INDENT> super(EnableWafWhiteRulesRequest, self).__init__( '/domain/{domain}/wafWhiteRule:enable', 'POST', header, version) <NEW_LINE> self.parameters = parameters
Enable the WAF whitelist rules
6259907b3346ee7daa338362
class Identity(object): <NEW_LINE> <INDENT> def __init__(self, dataset=None): <NEW_LINE> <INDENT> self.mean = 0 <NEW_LINE> self.std = 1 <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_file(path): <NEW_LINE> <INDENT> return Identity() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_state(path): <NEW_LINE> <INDENT> return Identity() <NEW_LINE> <DEDENT> def state_dict(self): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> def __call__(self, sample): <NEW_LINE> <INDENT> return sample
Does nothing.
6259907b91f36d47f2231b90
class TraktAuthCommand(Command): <NEW_LINE> <INDENT> def handle(self): <NEW_LINE> <INDENT> from trakt_scrobbler.trakt_auth import TraktAuth <NEW_LINE> trakt_auth = TraktAuth() <NEW_LINE> if self.option("force"): <NEW_LINE> <INDENT> self.line("Forcing trakt authentication") <NEW_LINE> trakt_auth.clear_token() <NEW_LINE> <DEDENT> if not trakt_auth.get_access_token(): <NEW_LINE> <INDENT> self.line("Failed to retrieve trakt token.", "error") <NEW_LINE> return 1 <NEW_LINE> <DEDENT> expiry_date = trakt_auth.token_expires_at().date() <NEW_LINE> self.line(f"Token valid until {expiry_date:%x}")
Runs the authentication flow for trakt.tv auth {--f|force : Force run the flow, ignoring already existing credentials.}
6259907bd486a94d0ba2d9ba
class CourseAccessRoleForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta(object): <NEW_LINE> <INDENT> model = CourseAccessRole <NEW_LINE> <DEDENT> email = forms.EmailField(required=True) <NEW_LINE> COURSE_ACCESS_ROLES = [(role_name, role_name) for role_name in REGISTERED_ACCESS_ROLES.keys()] <NEW_LINE> role = forms.ChoiceField(choices=COURSE_ACCESS_ROLES) <NEW_LINE> def clean_course_id(self): <NEW_LINE> <INDENT> if self.cleaned_data['course_id']: <NEW_LINE> <INDENT> course_id = self.cleaned_data['course_id'] <NEW_LINE> try: <NEW_LINE> <INDENT> course_key = CourseKey.from_string(course_id) <NEW_LINE> <DEDENT> except InvalidKeyError: <NEW_LINE> <INDENT> raise forms.ValidationError(u"Invalid CourseID. Please check the format and re-try.") <NEW_LINE> <DEDENT> if not modulestore().has_course(course_key): <NEW_LINE> <INDENT> raise forms.ValidationError(u"Cannot find course with id {} in the modulestore".format(course_id)) <NEW_LINE> <DEDENT> return course_key <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def clean_org(self): <NEW_LINE> <INDENT> if self.cleaned_data.get('course_id') and self.cleaned_data['org']: <NEW_LINE> <INDENT> org = self.cleaned_data['org'] <NEW_LINE> org_name = self.cleaned_data.get('course_id').org <NEW_LINE> if org.lower() != org_name.lower(): <NEW_LINE> <INDENT> raise forms.ValidationError( u"Org name {} is not valid. Valid name is {}.".format( org, org_name ) ) <NEW_LINE> <DEDENT> <DEDENT> return self.cleaned_data['org'] <NEW_LINE> <DEDENT> def clean_email(self): <NEW_LINE> <INDENT> email = self.cleaned_data['email'] <NEW_LINE> try: <NEW_LINE> <INDENT> user = User.objects.get(email=email) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> raise forms.ValidationError( u"Email does not exist. Could not find {email}. Please re-enter email address".format( email=email ) ) <NEW_LINE> <DEDENT> return user <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> cleaned_data = super(CourseAccessRoleForm, self).clean() <NEW_LINE> if not self.errors: <NEW_LINE> <INDENT> if CourseAccessRole.objects.filter( user=cleaned_data.get("email"), org=cleaned_data.get("org"), course_id=cleaned_data.get("course_id"), role=cleaned_data.get("role") ).exists(): <NEW_LINE> <INDENT> raise forms.ValidationError("Duplicate Record.") <NEW_LINE> <DEDENT> <DEDENT> return cleaned_data <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(CourseAccessRoleForm, self).__init__(*args, **kwargs) <NEW_LINE> if self.instance.user_id: <NEW_LINE> <INDENT> self.fields['email'].initial = self.instance.user.email
Form for adding new Course Access Roles via the Django Admin Panel.
6259907b23849d37ff852abb
class AnnotateFileRequest(proto.Message): <NEW_LINE> <INDENT> input_config = proto.Field(proto.MESSAGE, number=1, message="InputConfig",) <NEW_LINE> features = proto.RepeatedField(proto.MESSAGE, number=2, message="Feature",) <NEW_LINE> image_context = proto.Field(proto.MESSAGE, number=3, message="ImageContext",) <NEW_LINE> pages = proto.RepeatedField(proto.INT32, number=4,)
A request to annotate one single file, e.g. a PDF, TIFF or GIF file. Attributes: input_config (google.cloud.vision_v1.types.InputConfig): Required. Information about the input file. features (Sequence[google.cloud.vision_v1.types.Feature]): Required. Requested features. image_context (google.cloud.vision_v1.types.ImageContext): Additional context that may accompany the image(s) in the file. pages (Sequence[int]): Pages of the file to perform image annotation. Pages starts from 1, we assume the first page of the file is page 1. At most 5 pages are supported per request. Pages can be negative. Page 1 means the first page. Page 2 means the second page. Page -1 means the last page. Page -2 means the second to the last page. If the file is GIF instead of PDF or TIFF, page refers to GIF frames. If this field is empty, by default the service performs image annotation for the first 5 pages of the file.
6259907b01c39578d7f14436
class MetricOutputCallback(BaseModelCallback): <NEW_LINE> <INDENT> def __init__(self, metric_functions: Dict[str, Callable]): <NEW_LINE> <INDENT> super(MetricOutputCallback, self).__init__() <NEW_LINE> self.metric_functions = metric_functions <NEW_LINE> <DEDENT> def on_forward_end(self, module, batch, batch_idx, model_output=None): <NEW_LINE> <INDENT> _, y = module.next_batch <NEW_LINE> probabilities, loss = model_output <NEW_LINE> model_output = {} <NEW_LINE> for metric_name in self.metric_functions: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> model_output[metric_name] = self.metric_functions[metric_name](probabilities, y) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> model_output[metric_name] = torch.tensor(0.) <NEW_LINE> <DEDENT> <DEDENT> model_output['loss'] = loss <NEW_LINE> return model_output <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def initialize_metric_functions(cls): <NEW_LINE> <INDENT> return { 'acc': acc, 'auc': auc, 'probs': probs_identity, 'targets': targets_identity }
Wraps a model to return a dictionary of metrics / diagnostic variables for a given batch.
6259907b1f5feb6acb1645fa
class Rule(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(User) <NEW_LINE> rule_title = models.CharField(max_length=50, blank=True) <NEW_LINE> primary_instrument = models.ForeignKey(Instrument) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.operator
Stores a trading rule, related to :model:`specify.Instrument` and :model:`auth.User`.
6259907b3617ad0b5ee07b51
class SemanticFandomIntent(AskFandomIntentBase): <NEW_LINE> <INDENT> def get_smw_property_for_page(self, wiki_domain: str, page: str, prop: str): <NEW_LINE> <INDENT> self.logger.info("Asking %s SMW for '%s' page %s property", wiki_domain, page, prop) <NEW_LINE> site = self.get_mw_client(wiki_domain) <NEW_LINE> res = site.get(action='browsebysubject', subject=page) <NEW_LINE> query_data = res['query']['data'] <NEW_LINE> for item in query_data: <NEW_LINE> <INDENT> if item['property'].lower() == prop.lower(): <NEW_LINE> <INDENT> values = [ str(value['item']).replace('#0#', '').replace('_', ' ') for value in item['dataitem'] ] <NEW_LINE> self.logger.info("Got the value for %s: %s", prop, values) <NEW_LINE> self._set_wikia_reference(wiki_domain, article_name=page) <NEW_LINE> return values <NEW_LINE> <DEDENT> <DEDENT> return None
A base class for an intent that queries SemanticMediaWiki data
6259907b97e22403b383c904
class BFGS(Optimizer): <NEW_LINE> <INDENT> def __init__(self, model, searchConfig = None): <NEW_LINE> <INDENT> super(BFGS, self).__init__() <NEW_LINE> self.model = model <NEW_LINE> self.searchConfig = searchConfig <NEW_LINE> self.trailsCounter = 0 <NEW_LINE> self.errorCounter = 0 <NEW_LINE> <DEDENT> def findMin(self, x, y, numIters = 100): <NEW_LINE> <INDENT> meanfunc = self.model.meanfunc <NEW_LINE> covfunc = self.model.covfunc <NEW_LINE> likfunc = self.model.likfunc <NEW_LINE> inffunc = self.model.inffunc <NEW_LINE> hypInArray = self._convert_to_array() <NEW_LINE> try: <NEW_LINE> <INDENT> opt = bfgs(self._nlml, hypInArray, self._dnlml, maxiter=numIters, disp=False, full_output=True) <NEW_LINE> optimalHyp = deepcopy(opt[0]) <NEW_LINE> funcValue = opt[1] <NEW_LINE> warnFlag = opt[6] <NEW_LINE> if warnFlag == 1: <NEW_LINE> <INDENT> print("Maximum number of iterations exceeded.") <NEW_LINE> <DEDENT> elif warnFlag == 2: <NEW_LINE> <INDENT> print("Gradient and/or function calls not changing.") <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> self.errorCounter += 1 <NEW_LINE> if not self.searchConfig: <NEW_LINE> <INDENT> raise Exception("Can not learn hyperparamters using BFGS.") <NEW_LINE> <DEDENT> <DEDENT> self.trailsCounter += 1 <NEW_LINE> if self.searchConfig: <NEW_LINE> <INDENT> searchRange = self.searchConfig.meanRange + self.searchConfig.covRange + self.searchConfig.likRange <NEW_LINE> if not (self.searchConfig.num_restarts or self.searchConfig.min_threshold): <NEW_LINE> <INDENT> raise Exception('Specify at least one of the stop conditions') <NEW_LINE> <DEDENT> while True: <NEW_LINE> <INDENT> self.trailsCounter += 1 <NEW_LINE> for i in range(hypInArray.shape[0]): <NEW_LINE> <INDENT> hypInArray[i]= np.random.uniform(low=searchRange[i][0], high=searchRange[i][1]) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> thisopt = bfgs(self._nlml, hypInArray, self._dnlml, maxiter=100, disp=False, full_output=True) <NEW_LINE> if thisopt[1] < funcValue: <NEW_LINE> <INDENT> funcValue = thisopt[1] <NEW_LINE> optimalHyp = thisopt[0] <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> self.errorCounter += 1 <NEW_LINE> <DEDENT> if self.searchConfig.num_restarts and self.errorCounter > self.searchConfig.num_restarts/2: <NEW_LINE> <INDENT> print("[BFGS] %d out of %d trails failed during optimization" % (self.errorCounter, self.trailsCounter)) <NEW_LINE> raise Exception("Over half of the trails failed for BFGS") <NEW_LINE> <DEDENT> if self.searchConfig.num_restarts and self.trailsCounter > self.searchConfig.num_restarts-1: <NEW_LINE> <INDENT> print("[BFGS] %d out of %d trails failed during optimization" % (self.errorCounter, self.trailsCounter)) <NEW_LINE> return optimalHyp, funcValue <NEW_LINE> <DEDENT> if self.searchConfig.min_threshold and funcValue <= self.searchConfig.min_threshold: <NEW_LINE> <INDENT> print("[BFGS] %d out of %d trails failed during optimization" % (self.errorCounter, self.trailsCounter)) <NEW_LINE> return optimalHyp, funcValue <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return optimalHyp, funcValue
quasi-Newton method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS)
6259907b71ff763f4b5e91af
class Reader(object): <NEW_LINE> <INDENT> def __init__(self, handle): <NEW_LINE> <INDENT> self.handle = handle <NEW_LINE> self.seek_entry(0, 2) <NEW_LINE> self.__entry_count = int(self.handle.tell() / ENTRY_STRUCT.size) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.__entry_count <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if key >= self.__entry_count: <NEW_LINE> <INDENT> raise IndexError() <NEW_LINE> <DEDENT> self.seek_entry(key) <NEW_LINE> return self.next() <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> self.seek_entry(0) <NEW_LINE> return self <NEW_LINE> <DEDENT> def __reversed__(self): <NEW_LINE> <INDENT> for i in xrange(self.__entry_count - 1, -1, -1): <NEW_LINE> <INDENT> self.seek_entry(i) <NEW_LINE> yield self.next() <NEW_LINE> <DEDENT> <DEDENT> def seek_entry(self, offset, whence=0): <NEW_LINE> <INDENT> self.handle.seek(offset * ENTRY_STRUCT.size, whence) <NEW_LINE> <DEDENT> def seek_position(self, position): <NEW_LINE> <INDENT> key = position.zobrist_hash() <NEW_LINE> start = 0 <NEW_LINE> end = len(self) <NEW_LINE> while end >= start: <NEW_LINE> <INDENT> middle = int((start + end) / 2) <NEW_LINE> self.seek_entry(middle) <NEW_LINE> raw_entry = self.next_raw() <NEW_LINE> if raw_entry[0] < key: <NEW_LINE> <INDENT> start = middle + 1 <NEW_LINE> <DEDENT> elif raw_entry[0] > key: <NEW_LINE> <INDENT> end = middle - 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.seek_entry(-1, 1) <NEW_LINE> while raw_entry[0] == key and middle > start: <NEW_LINE> <INDENT> middle -= 1 <NEW_LINE> self.seek_entry(middle) <NEW_LINE> raw_entry = self.next_raw() <NEW_LINE> if middle == start and raw_entry[0] == key: <NEW_LINE> <INDENT> self.seek_entry(-1, 1) <NEW_LINE> <DEDENT> <DEDENT> return <NEW_LINE> <DEDENT> <DEDENT> raise KeyError() <NEW_LINE> <DEDENT> def next_raw(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return ENTRY_STRUCT.unpack(self.handle.read(ENTRY_STRUCT.size)) <NEW_LINE> <DEDENT> except struct.error: <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> <DEDENT> def next(self): <NEW_LINE> <INDENT> key, raw_move, weight, learn = self.next_raw() <NEW_LINE> return Entry(key, raw_move, weight, learn) <NEW_LINE> <DEDENT> def get_entries_for_position(self, position): <NEW_LINE> <INDENT> zobrist_hash = position.zobrist_hash() <NEW_LINE> try: <NEW_LINE> <INDENT> self.seek_position(position) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> entry = self.next() <NEW_LINE> while entry.key == zobrist_hash: <NEW_LINE> <INDENT> if entry.move() in position.legal_moves: <NEW_LINE> <INDENT> yield entry <NEW_LINE> <DEDENT> entry = self.next()
A reader for a polyglot opening book opened in binary mode. The file has to be seekable. Provides methods to seek entries for specific positions but also ways to efficiently use the opening book like a list. >>> # Get the number of entries >>> len(reader) 92954 >>> # Get the nth entry >>> entry = reader[n] >>> # Iteration >>> for entry in reader: >>> pass >>> # Backwards iteration >>> for entry in reversed(reader): >>> pass
6259907b32920d7e50bc7a45
class TestFlattenArguments(object): <NEW_LINE> <INDENT> def test_example(self): <NEW_LINE> <INDENT> args = { '--flag': True, '--single': 5, '--multiple': ('test1', 'test2'), '-s': ('a', )} <NEW_LINE> flat = util.flatten_arguments(args) <NEW_LINE> assert flat == [ '--flag', '--multiple', 'test1', 'test2', '--single', '5', '-s', 'a' ]
Tests for the flatten_arguments function.
6259907b1b99ca4002290237
class RestView(MethodView): <NEW_LINE> <INDENT> content_type = 'application/json; charset=utf-8' <NEW_LINE> method_decorators = [] <NEW_LINE> def handler_error(self, exception): <NEW_LINE> <INDENT> data = { 'ok': False, 'message': exception.message } <NEW_LINE> result = dumps(data) + '\n' <NEW_LINE> resp = make_response(result, exception.code) <NEW_LINE> resp.headers['Content-Type'] = self.content_type <NEW_LINE> return resp <NEW_LINE> <DEDENT> def dispatch_request(self, *args, **kwargs): <NEW_LINE> <INDENT> method = getattr(self, request.method.lower(), None) <NEW_LINE> if method is None and request.method == 'HEAD': <NEW_LINE> <INDENT> method = getattr(self, 'get', None) <NEW_LINE> <DEDENT> assert method is not None, 'Unimplemented method %r' % request.method <NEW_LINE> if isinstance(self.method_decorators, Mapping): <NEW_LINE> <INDENT> decorators = self.method_decorators.get(request.method.lower(), []) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> decorators = self.method_decorators <NEW_LINE> <DEDENT> for decorator in decorators: <NEW_LINE> <INDENT> method = decorator(method) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> resp = method(*args, **kwargs) <NEW_LINE> <DEDENT> except RestError as e: <NEW_LINE> <INDENT> resp = self.handler_error(e) <NEW_LINE> <DEDENT> if isinstance(resp, Response): <NEW_LINE> <INDENT> return resp <NEW_LINE> <DEDENT> data, code, headers = RestView.unpack(resp) <NEW_LINE> if code >= 400 and isinstance(data, dict): <NEW_LINE> <INDENT> for key in data: <NEW_LINE> <INDENT> if isinstance(data[key], list) and len(data[key]) > 0: <NEW_LINE> <INDENT> message = data[key][0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> message = data[key] <NEW_LINE> <DEDENT> <DEDENT> data = {'ok': False, 'message': message} <NEW_LINE> <DEDENT> result = dumps(data) + '\n' <NEW_LINE> response = make_response(result, code) <NEW_LINE> response.headers.extend(headers) <NEW_LINE> response.headers['Content-Type'] = self.content_type <NEW_LINE> return response <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def unpack(value): <NEW_LINE> <INDENT> headers = {} <NEW_LINE> if not isinstance(value, tuple): <NEW_LINE> <INDENT> return value, 200, {} <NEW_LINE> <DEDENT> if len(value) == 3: <NEW_LINE> <INDENT> data, code, headers = value <NEW_LINE> <DEDENT> elif len(value) == 2: <NEW_LINE> <INDENT> data, code = value <NEW_LINE> <DEDENT> return data, code, headers
Custom View class: JSON serialization, exception handling, and decorator support
6259907b2c8b7c6e89bd51ef
class Cap(Parseable): <NEW_LINE> <INDENT> def __init__(self, name, value=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.value = value or None <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.value: <NEW_LINE> <INDENT> return CAP_VALUE_SEP.join((self.name, self.value)) <NEW_LINE> <DEDENT> return self.name <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, Cap): <NEW_LINE> <INDENT> return self.name == other.name <NEW_LINE> <DEDENT> return NotImplemented <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def parse(text: str): <NEW_LINE> <INDENT> name, _, value = text.partition(CAP_VALUE_SEP) <NEW_LINE> return Cap(name, value)
Represents a CAP entity as defined in IRCv3.2
6259907b8a349b6b43687c60
class EightPuzzleSearchProblem(search.SearchProblem): <NEW_LINE> <INDENT> def __init__(self,puzzle): <NEW_LINE> <INDENT> self.puzzle = puzzle <NEW_LINE> <DEDENT> def getStartState(self): <NEW_LINE> <INDENT> return puzzle <NEW_LINE> <DEDENT> def isGoalState(self,state): <NEW_LINE> <INDENT> return state.isGoal() <NEW_LINE> <DEDENT> def getSuccessors(self,state): <NEW_LINE> <INDENT> succ = [] <NEW_LINE> for a in state.legalMoves(): <NEW_LINE> <INDENT> succ.append((state.result(a), a, 1)) <NEW_LINE> <DEDENT> return succ <NEW_LINE> <DEDENT> def getCostOfActions(self, actions): <NEW_LINE> <INDENT> return len(actions)
Implementation of a SearchProblem for the Eight Puzzle domain. Each state is represented by an instance of an eightPuzzle.
6259907b627d3e7fe0e0888b
class Response(ResponseBase, swob.Response): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> swob.Response.__init__(self, *args, **kwargs) <NEW_LINE> if self.etag: <NEW_LINE> <INDENT> self.headers['etag'] = self.etag <NEW_LINE> <DEDENT> sw_sysmeta_headers = swob.HeaderKeyDict() <NEW_LINE> sw_headers = swob.HeaderKeyDict() <NEW_LINE> headers = HeaderKeyDict() <NEW_LINE> self.is_slo = False <NEW_LINE> for key, val in self.headers.iteritems(): <NEW_LINE> <INDENT> _key = key.lower() <NEW_LINE> if _key.startswith(sysmeta_prefix('object')) or _key.startswith(sysmeta_prefix('container')): <NEW_LINE> <INDENT> sw_sysmeta_headers[key] = val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sw_headers[key] = val <NEW_LINE> <DEDENT> <DEDENT> for key, val in sw_headers.iteritems(): <NEW_LINE> <INDENT> _key = key.lower() <NEW_LINE> if _key.startswith('x-object-meta-'): <NEW_LINE> <INDENT> if any(_str in _key for _str in ('object-type', 'hash-crc64ecma')): <NEW_LINE> <INDENT> headers['x-oss-' + _key[14:]] = val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> headers['x-oss-meta-' + _key[14:]] = val <NEW_LINE> <DEDENT> <DEDENT> elif _key.startswith('x-container-meta-'): <NEW_LINE> <INDENT> headers['x-oss-meta-' + _key[17:]] = val <NEW_LINE> <DEDENT> elif _key in ('content-length', 'content-type', 'content-range', 'content-encoding', 'content-disposition', 'content-language', 'etag', 'last-modified', 'x-robots-tag', 'cache-control', 'expires'): <NEW_LINE> <INDENT> headers[key] = val <NEW_LINE> <DEDENT> elif _key == 'x-static-large-object': <NEW_LINE> <INDENT> self.is_slo = config_true_value(val) <NEW_LINE> <DEDENT> <DEDENT> if headers['x-oss-meta-location'] is None: <NEW_LINE> <INDENT> headers['x-oss-meta-location'] = '' <NEW_LINE> <DEDENT> self.headers = headers <NEW_LINE> self.sw_headers = sw_headers <NEW_LINE> self.sysmeta_headers = sw_sysmeta_headers <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_swift_resp(cls, sw_resp): <NEW_LINE> <INDENT> if sw_resp.app_iter: <NEW_LINE> <INDENT> body = None <NEW_LINE> app_iter = sw_resp.app_iter <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> body = sw_resp.body <NEW_LINE> app_iter = None <NEW_LINE> <DEDENT> resp = Response(status=sw_resp.status, headers=sw_resp.headers, request=sw_resp.request, body=body, app_iter=app_iter, conditional_response=sw_resp.conditional_response) <NEW_LINE> resp.environ.update(sw_resp.environ) <NEW_LINE> return resp <NEW_LINE> <DEDENT> def append_copy_resp_body(self, controller_name, last_modified): <NEW_LINE> <INDENT> elem = Element('Copy%sResult' % controller_name) <NEW_LINE> SubElement(elem, 'LastModified').text = last_modified <NEW_LINE> SubElement(elem, 'ETag').text = '"%s"' % self.etag <NEW_LINE> self.headers['Content-Type'] = 'application/xml' <NEW_LINE> self.body = tostring(elem)
Similar to the Response class in Swift, but uses our HeaderKeyDict for headers instead of Swift's HeaderKeyDict. This also translates Swift specific headers to OSS headers.
6259907bf548e778e596cf95
class Function: <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> self.func = func <NEW_LINE> <DEDENT> def execute(self, element, debug=False): <NEW_LINE> <INDENT> if not isinstance(element, numbers.Number): <NEW_LINE> <INDENT> raise TypeError("The element must be a number") <NEW_LINE> <DEDENT> result = self.func(element) <NEW_LINE> if debug: <NEW_LINE> <INDENT> print(f"Function: {self.func.__name__}. Input: {element:.2f}. Output: {result:.2f}") <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.func.__name__
Gives all mathematical functions a common interface to interact with
6259907b3539df3088ecdc9c
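As an illustrative usage sketch of the Function wrapper above (not part of the dataset; the wrapped callables are arbitrary examples), it accepts any one-argument numeric callable and can print a debug trace:

import math

square = Function(lambda x: x ** 2)      # wrap any one-argument numeric callable
print(square.execute(3, debug=True))     # prints the debug trace, then 9
print(str(Function(math.sqrt)))          # __str__ returns the wrapped function's __name__, i.e. "sqrt"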
class AnalyzeInterface(Popup): <NEW_LINE> <INDENT> score = ObjectProperty() <NEW_LINE> progression = ObjectProperty() <NEW_LINE> def __init__(self, parent): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._screen = parent <NEW_LINE> self.bind(score=self.setScore) <NEW_LINE> self.bind(progression=self.setProgress) <NEW_LINE> <DEDENT> def analyze(self, fileName="temp.wav"): <NEW_LINE> <INDENT> analyzer = Analyzer(fileName, self) <NEW_LINE> analyzer.analyze() <NEW_LINE> <DEDENT> @mainthread <NEW_LINE> def setScore(self, *args): <NEW_LINE> <INDENT> self._screen.setScore(self.score) <NEW_LINE> self.dismiss() <NEW_LINE> <DEDENT> @mainthread <NEW_LINE> def setProgress(self, *args): <NEW_LINE> <INDENT> self.ids["progress"].value = self.progression[0] <NEW_LINE> self.ids["label"].text = self.progression[1]
Analysis pop-up. Starts the analysis and displays its progression. Can't be dismissed before the end of the analysis.
6259907b4c3428357761bcbe
class Solution: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.mp = {} <NEW_LINE> <DEDENT> def isRatelimited(self, timestamp, event, rate, increment): <NEW_LINE> <INDENT> start = rate.find("/") <NEW_LINE> total_time = int(rate[:start]) <NEW_LINE> type = rate[start+1:] <NEW_LINE> time = 1 <NEW_LINE> if type == 'm': <NEW_LINE> <INDENT> time *= 60 <NEW_LINE> <DEDENT> elif type == 'h': <NEW_LINE> <INDENT> time = time * 60 * 60 <NEW_LINE> <DEDENT> elif type == 'd': <NEW_LINE> <INDENT> time = time * 60 * 60 * 24 <NEW_LINE> <DEDENT> last_time = timestamp - time + 1 <NEW_LINE> if event not in self.mp: <NEW_LINE> <INDENT> self.mp[event] = [] <NEW_LINE> <DEDENT> rt = self.find_event(self.mp[event], last_time) >= total_time <NEW_LINE> if increment and not rt: <NEW_LINE> <INDENT> self.insert_event(self.mp[event], timestamp) <NEW_LINE> <DEDENT> return rt <NEW_LINE> <DEDENT> def insert_event(self, event, timestamp): <NEW_LINE> <INDENT> event.append(timestamp) <NEW_LINE> <DEDENT> def find_event(self, event, last_time): <NEW_LINE> <INDENT> l, r = 0, len(event) - 1 <NEW_LINE> if r < 0 or event[r] < last_time: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> ans = 0 <NEW_LINE> while l <=r: <NEW_LINE> <INDENT> mid = (l + r) >> 1 <NEW_LINE> if event[mid] >= last_time: <NEW_LINE> <INDENT> ans = mid <NEW_LINE> r = mid - 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> l = mid + 1 <NEW_LINE> <DEDENT> <DEDENT> return len(event) - 1 - ans + 1
@param: timestamp: the current timestamp @param: event: the string used to distinguish different events @param: rate: the format is [integer]/[s/m/h/d] @param: increment: whether we should increase the counter @return: true or false to indicate whether the event is limited
6259907b91f36d47f2231b91
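A hedged usage sketch of the rate limiter documented above (the event name, rate string, and timestamps are invented for illustration): with a "3/m" rate, three events inside a 60-second window are allowed and the fourth is limited.

limiter = Solution()
for ts in (1, 2, 3):
    assert limiter.isRatelimited(ts, "login", "3/m", increment=True) is False  # first three pass
assert limiter.isRatelimited(4, "login", "3/m", increment=True) is True        # fourth call in the window is limited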
class WESTTool(WESTToolComponent): <NEW_LINE> <INDENT> prog = None <NEW_LINE> usage = None <NEW_LINE> description = None <NEW_LINE> epilog = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(WESTTool,self).__init__() <NEW_LINE> <DEDENT> def add_args(self, parser): <NEW_LINE> <INDENT> westpa.rc.add_args(parser) <NEW_LINE> <DEDENT> def process_args(self, args): <NEW_LINE> <INDENT> westpa.rc.process_args(args, config_required = self.config_required) <NEW_LINE> <DEDENT> def make_parser(self, prog=None, usage=None, description=None, epilog=None, args=None): <NEW_LINE> <INDENT> prog = prog or self.prog <NEW_LINE> usage = usage or self.usage <NEW_LINE> description = description or self.description <NEW_LINE> epilog = epilog or self.epilog <NEW_LINE> parser = argparse.ArgumentParser(prog=prog, usage=usage, description=description, epilog=epilog, formatter_class=argparse.RawDescriptionHelpFormatter, conflict_handler='resolve') <NEW_LINE> self.add_all_args(parser) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def make_parser_and_process(self, prog=None, usage=None, description=None, epilog=None, args=None): <NEW_LINE> <INDENT> parser = self.make_parser(prog,usage,description,epilog,args) <NEW_LINE> args = parser.parse_args(args) <NEW_LINE> self.process_all_args(args) <NEW_LINE> return args <NEW_LINE> <DEDENT> def go(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def main(self): <NEW_LINE> <INDENT> self.make_parser_and_process() <NEW_LINE> self.go()
Base class for WEST command line tools
6259907b44b2445a339b7660
class ListAssetsRequest(proto.Message): <NEW_LINE> <INDENT> parent = proto.Field( proto.STRING, number=1, ) <NEW_LINE> read_time = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) <NEW_LINE> asset_types = proto.RepeatedField( proto.STRING, number=3, ) <NEW_LINE> content_type = proto.Field( proto.ENUM, number=4, enum='ContentType', ) <NEW_LINE> page_size = proto.Field( proto.INT32, number=5, ) <NEW_LINE> page_token = proto.Field( proto.STRING, number=6, )
ListAssets request. Attributes: parent (str): Required. Name of the organization or project the assets belong to. Format: "organizations/[organization-number]" (such as "organizations/123"), "projects/[project-id]" (such as "projects/my-project-id"), or "projects/[project-number]" (such as "projects/12345"). read_time (google.protobuf.timestamp_pb2.Timestamp): Timestamp to take an asset snapshot. This can only be set to a timestamp between the current time and the current time minus 35 days (inclusive). If not specified, the current time will be used. Due to delays in resource data collection and indexing, there is a volatile window during which running the same query may get different results. asset_types (Sequence[str]): A list of asset types to take a snapshot for. For example: "compute.googleapis.com/Disk". Regular expression is also supported. For example: - "compute.googleapis.com.*" snapshots resources whose asset type starts with "compute.googleapis.com". - ".*Instance" snapshots resources whose asset type ends with "Instance". - ".*Instance.*" snapshots resources whose asset type contains "Instance". See `RE2 <https://github.com/google/re2/wiki/Syntax>`__ for all supported regular expression syntax. If the regular expression does not match any supported asset type, an INVALID_ARGUMENT error will be returned. If specified, only matching assets will be returned, otherwise, it will snapshot all asset types. See `Introduction to Cloud Asset Inventory <https://cloud.google.com/asset-inventory/docs/overview>`__ for all supported asset types. content_type (google.cloud.asset_v1.types.ContentType): Asset content type. If not specified, no content but the asset name will be returned. page_size (int): The maximum number of assets to be returned in a single response. Default is 100, minimum is 1, and maximum is 1000. page_token (str): The ``next_page_token`` returned from the previous ``ListAssetsResponse``, or unspecified for the first ``ListAssetsRequest``. It is a continuation of a prior ``ListAssets`` call, and the API should return the next page of assets.
6259907b60cbc95b06365a70
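A construction sketch for the request message above, assuming the usual proto-plus keyword constructor from google-cloud-asset; the project id and asset type below are placeholders, not values from the dataset:

request = ListAssetsRequest(
    parent="projects/my-project-id",              # placeholder project id
    asset_types=["compute.googleapis.com/Disk"],  # only snapshot disk resources
    page_size=100,                                # within the documented 1-1000 range
)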
class Process(object): <NEW_LINE> <INDENT> def __init__(self, proc, outfile): <NEW_LINE> <INDENT> self.proc = proc <NEW_LINE> self.outfile = outfile <NEW_LINE> self.output = None <NEW_LINE> <DEDENT> def poll(self): <NEW_LINE> <INDENT> return self.proc.poll() <NEW_LINE> <DEDENT> def get_output(self): <NEW_LINE> <INDENT> if not self.output: <NEW_LINE> <INDENT> self.proc.wait() <NEW_LINE> self.outfile.seek(0) <NEW_LINE> self.output = self.outfile.read().decode("utf-8") <NEW_LINE> self.outfile.close() <NEW_LINE> <DEDENT> return self.output <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def start(cls, invocation): <NEW_LINE> <INDENT> outfile = tempfile.TemporaryFile(prefix='iwyu') <NEW_LINE> process = subprocess.Popen( invocation.command, cwd=invocation.cwd, stdout=outfile, stderr=subprocess.STDOUT) <NEW_LINE> return cls(process, outfile)
Manages an IWYU process in flight
6259907b5166f23b2e244ddc
class FifeAgent(Base): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Base.__init__(self, layer=object, behaviour=object) <NEW_LINE> <DEDENT> @property <NEW_LINE> def saveable_fields(self): <NEW_LINE> <INDENT> fields = self.fields.keys() <NEW_LINE> fields.remove("layer") <NEW_LINE> fields.remove("behaviour") <NEW_LINE> return fields
Component that stores the values for a fife agent
6259907be1aae11d1e7cf513
class Player: <NEW_LINE> <INDENT> def __init__(self, name, team, url): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.team = team <NEW_LINE> self.url = url <NEW_LINE> self.ppg = 0 <NEW_LINE> self.bpg = 0 <NEW_LINE> self.rpg = 0 <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.ppg < other.ppg <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> return { "name": self.name, "team": self.team, "ppg": self.ppg, "bpg": self.bpg, "rpg": self.rpg }
Class to store information on NBA players
6259907b091ae35668706643
class PluggageMeta(type): <NEW_LINE> <INDENT> def __init__(cls, name, bases, dct): <NEW_LINE> <INDENT> factory = get_factory(cls.PLUGGAGE_FACTORY_NAME) <NEW_LINE> object_name = cls.PLUGGAGE_OBJECT_NAME <NEW_LINE> if object_name is None: <NEW_LINE> <INDENT> object_name = cls.__name__ <NEW_LINE> <DEDENT> factory.register(object_name, cls) <NEW_LINE> super(PluggageMeta, cls).__init__(name, bases, dct)
Metaclass for Pluggage-derived objects that registers them with the appropriate registry/factory object
6259907b796e427e53850180
class XY(Line): <NEW_LINE> <INDENT> _dual = True <NEW_LINE> @cached_property <NEW_LINE> def xvals(self): <NEW_LINE> <INDENT> return [val[0] for serie in self.all_series for val in serie.values if val[0] is not None] <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def yvals(self): <NEW_LINE> <INDENT> return [val[1] for serie in self.series for val in serie.values if val[1] is not None] <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def _min(self): <NEW_LINE> <INDENT> return (self.range[0] if (self.range and self.range[0] is not None) else (min(self.yvals) if self.yvals else None)) <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def _max(self): <NEW_LINE> <INDENT> return (self.range[1] if (self.range and self.range[1] is not None) else (max(self.yvals) if self.yvals else None)) <NEW_LINE> <DEDENT> def _has_data(self): <NEW_LINE> <INDENT> return sum( map(len, map(lambda s: s.safe_values, self.series))) != 0 and any(( sum(map(abs, self.xvals)) != 0, sum(map(abs, self.yvals)) != 0)) <NEW_LINE> <DEDENT> def _compute(self): <NEW_LINE> <INDENT> if self.xvals: <NEW_LINE> <INDENT> xmin = min(self.xvals) <NEW_LINE> xmax = max(self.xvals) <NEW_LINE> xrng = (xmax - xmin) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> xrng = None <NEW_LINE> <DEDENT> if self.yvals: <NEW_LINE> <INDENT> ymin = self._min <NEW_LINE> ymax = self._max <NEW_LINE> if self.include_x_axis: <NEW_LINE> <INDENT> ymin = min(self._min or 0, 0) <NEW_LINE> ymax = max(self._max or 0, 0) <NEW_LINE> <DEDENT> yrng = (ymax - ymin) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yrng = None <NEW_LINE> <DEDENT> for serie in self.all_series: <NEW_LINE> <INDENT> serie.points = serie.values <NEW_LINE> if self.interpolate and xrng: <NEW_LINE> <INDENT> vals = list(zip(*sorted( filter(lambda t: None not in t, serie.points), key=lambda x: x[0]))) <NEW_LINE> serie.interpolated = self._interpolate(vals[0], vals[1]) <NEW_LINE> <DEDENT> <DEDENT> if self.interpolate and xrng: <NEW_LINE> <INDENT> self.xvals = [val[0] for serie in self.all_series for val in serie.interpolated] <NEW_LINE> self.yvals = [val[1] for serie in self.series for val in serie.interpolated] <NEW_LINE> if self.xvals: <NEW_LINE> <INDENT> xmin = min(self.xvals) <NEW_LINE> xmax = max(self.xvals) <NEW_LINE> xrng = (xmax - xmin) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> xrng = None <NEW_LINE> <DEDENT> <DEDENT> if xrng: <NEW_LINE> <INDENT> self._box.xmin, self._box.xmax = xmin, xmax <NEW_LINE> <DEDENT> if yrng: <NEW_LINE> <INDENT> self._box.ymin, self._box.ymax = ymin, ymax <NEW_LINE> <DEDENT> x_pos = compute_scale( self._box.xmin, self._box.xmax, self.logarithmic, self.order_min) <NEW_LINE> y_pos = compute_scale( self._box.ymin, self._box.ymax, self.logarithmic, self.order_min) <NEW_LINE> self._x_labels = list(zip(map(self._format, x_pos), x_pos)) <NEW_LINE> self._y_labels = list(zip(map(self._format, y_pos), y_pos))
XY Line graph
6259907ba8370b77170f1dd5
class TestXTime: <NEW_LINE> <INDENT> def test_from_python_object(self): <NEW_LINE> <INDENT> time = datetime.time(16, 53, 12) <NEW_LINE> translated = MockXTime().translate(time, topython=False) <NEW_LINE> assert translated.__class__ is suds.sax.date.Time <NEW_LINE> assert str(translated) == "16:53:12" <NEW_LINE> <DEDENT> @pytest.mark.parametrize("source", ( None, object(), _Dummy(), datetime.date(2101, 1, 1), datetime.datetime(2101, 1, 1, 22, 47, 9, 981))) <NEW_LINE> def test_from_python_object__invalid(self, source): <NEW_LINE> <INDENT> assert MockXTime().translate(source, topython=False) is source <NEW_LINE> <DEDENT> def test_to_python_object(self): <NEW_LINE> <INDENT> assert MockXTime().translate("10:30:22") == datetime.time(10, 30, 22) <NEW_LINE> <DEDENT> def test_to_python_object__empty_string(self): <NEW_LINE> <INDENT> assert MockXTime().translate("") == None
suds.xsd.sxbuiltin.XTime.translate() tests. Related Python object <--> string conversion details are tested in a separate date/time related test module. These tests are only concerned with basic translate() functionality.
6259907b3d592f4c4edbc861
class SqlServer(object): <NEW_LINE> <INDENT> def __init__(self, host, user, pwd, db): <NEW_LINE> <INDENT> self.host = host <NEW_LINE> self.user = user <NEW_LINE> self.pwd = pwd <NEW_LINE> self.db = db <NEW_LINE> <DEDENT> def get_connect(self): <NEW_LINE> <INDENT> if not self.db: <NEW_LINE> <INDENT> raise (NameError, '没有设置数据库信息') <NEW_LINE> <DEDENT> conn = pymssql.connect(host=self.host, user=self.user, password=self.pwd, database=self.db, charset='utf8') <NEW_LINE> if conn.cursor(): <NEW_LINE> <INDENT> return conn <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise (NameError, '连接数据库失败') <NEW_LINE> <DEDENT> <DEDENT> def exec_query(self, sql): <NEW_LINE> <INDENT> conn = self.get_connect() <NEW_LINE> cur = conn.cursor() <NEW_LINE> cur.execute(sql) <NEW_LINE> res_list = cur.fetchall() <NEW_LINE> conn.close() <NEW_LINE> return res_list <NEW_LINE> <DEDENT> def exec_non_query(self, sql): <NEW_LINE> <INDENT> conn = self.get_connect() <NEW_LINE> cur = conn.cursor() <NEW_LINE> try: <NEW_LINE> <INDENT> cur.execute(sql) <NEW_LINE> conn.commit() <NEW_LINE> return True <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> print(sql) <NEW_LINE> print('提交sql失败') <NEW_LINE> print(traceback.format_exc()) <NEW_LINE> return False <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> conn.close() <NEW_LINE> <DEDENT> <DEDENT> def exec_safety_non_query(self, sql): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> conn = self.get_connect() <NEW_LINE> cur = conn.cursor() <NEW_LINE> cur.execute(sql) <NEW_LINE> conn.commit() <NEW_LINE> return True <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> print(sql) <NEW_LINE> print("提交sql失败,重新提交中...") <NEW_LINE> cur.execute(sql) <NEW_LINE> conn.commit() <NEW_LINE> return True <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print('提交sql失败,报错原因为%s,请检查sql代码' % e) <NEW_LINE> print(traceback.format_exc()) <NEW_LINE> return False
SqlServer utility class
6259907b23849d37ff852abf
class ConnectionMonitorResultProperties(ConnectionMonitorParameters): <NEW_LINE> <INDENT> _validation = { 'source': {'required': True}, 'destination': {'required': True}, } <NEW_LINE> _attribute_map = { 'source': {'key': 'source', 'type': 'ConnectionMonitorSource'}, 'destination': {'key': 'destination', 'type': 'ConnectionMonitorDestination'}, 'auto_start': {'key': 'autoStart', 'type': 'bool'}, 'monitoring_interval_in_seconds': {'key': 'monitoringIntervalInSeconds', 'type': 'int'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, 'monitoring_status': {'key': 'monitoringStatus', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(ConnectionMonitorResultProperties, self).__init__(**kwargs) <NEW_LINE> self.provisioning_state = kwargs.get('provisioning_state', None) <NEW_LINE> self.start_time = kwargs.get('start_time', None) <NEW_LINE> self.monitoring_status = kwargs.get('monitoring_status', None)
Describes the properties of a connection monitor. All required parameters must be populated in order to send to Azure. :param source: Required. Describes the source of connection monitor. :type source: ~azure.mgmt.network.v2018_12_01.models.ConnectionMonitorSource :param destination: Required. Describes the destination of connection monitor. :type destination: ~azure.mgmt.network.v2018_12_01.models.ConnectionMonitorDestination :param auto_start: Determines if the connection monitor will start automatically once created. :type auto_start: bool :param monitoring_interval_in_seconds: Monitoring interval in seconds. :type monitoring_interval_in_seconds: int :param provisioning_state: The provisioning state of the connection monitor. Possible values include: "Succeeded", "Updating", "Deleting", "Failed". :type provisioning_state: str or ~azure.mgmt.network.v2018_12_01.models.ProvisioningState :param start_time: The date and time when the connection monitor was started. :type start_time: ~datetime.datetime :param monitoring_status: The monitoring status of the connection monitor. :type monitoring_status: str
6259907bfff4ab517ebcf21f
class KFTQuerySet(models.query.QuerySet): <NEW_LINE> <INDENT> def update(self, **kwargs): <NEW_LINE> <INDENT> raw = kwargs.get('__raw', False) <NEW_LINE> if raw: <NEW_LINE> <INDENT> del kwargs['__raw'] <NEW_LINE> <DEDENT> super(KFTQuerySet, self).update(**kwargs) <NEW_LINE> for instance in self._clone(): <NEW_LINE> <INDENT> post_save.send( sender=self.model, instance=instance, raw=raw )
KFT Query Set. Contains an overridden update method: update sends the post_save signal and passes it the required data. Moreover, it adds a '__raw' flag, which works like the 'raw' flag of the base save method.
6259907b2c8b7c6e89bd51f2
class UserCodeDriver(metaclass=ABCMeta): <NEW_LINE> <INDENT> drive: 'Drive' <NEW_LINE> _status: CodeStatus <NEW_LINE> def __init__(self, drive: 'Drive', daemon_controller: 'Controller'): <NEW_LINE> <INDENT> self.drive = drive <NEW_LINE> self.daemon_controller = daemon_controller <NEW_LINE> self.status = CodeStatus.STARTING <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def start_execution(self) -> None: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def stop_execution(self) -> None: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @property <NEW_LINE> def status(self) -> CodeStatus: <NEW_LINE> <INDENT> return self._status <NEW_LINE> <DEDENT> @status.setter <NEW_LINE> def status(self, status: CodeStatus) -> None: <NEW_LINE> <INDENT> self._status = status <NEW_LINE> self.daemon_controller.inform_code_status(status)
User Code Driver class. This class defines a set of functionality that must be implemented in a usercode driver. A usercode driver runs and manages some user code through a uniform interface. This allows us to execute usercode in a variety of formats and environments, depending on the kit.
6259907bbf627c535bcb2ed7
class Pythonista(_Available): <NEW_LINE> <INDENT> def __init__(self, from_version=None, to_version=None, appex=None): <NEW_LINE> <INDENT> super().__init__(from_version, to_version) <NEW_LINE> self._appex = appex <NEW_LINE> <DEDENT> def _available(self): <NEW_LINE> <INDENT> available = super()._available() <NEW_LINE> if available and self._appex is not None: <NEW_LINE> <INDENT> import appex <NEW_LINE> available = appex.is_running_extension() == self._appex <NEW_LINE> <DEDENT> return available <NEW_LINE> <DEDENT> def version(self): <NEW_LINE> <INDENT> return PYTHONISTA_VERSION_TUPLE
Decorator to execute function under specific Pythonista versions. By default, function is not executed under application extension. You have to pass ``appex=True`` if you'd like to run some function under appex as well. Return value is return value of decorated function or `None` if Pythonista condition isn't met. Examples: Run function only within any Pythonista version:: @Pythonista() def run_me(): pass Run function only within any Pythonista version and allow appex:: @Pythonista(appex=True) def run_me(): pass Run function only within any Pythonista version and disallow appex:: @Pythonista(appex=False) def run_me(): pass Run function only within Pythonista >= 3.1.1:: @Pythonista('3.1.1') # or @Pythonista(from_version='3.1.1') def run_me(): pass Run function only within Pythonista <= 3.2:: @Pythonista(None, '3.2') # or @Pythonista(to_version='3.2') def run_me(): pass
6259907b26068e7796d4e346
class MergeUniquesBase(luigi.Task): <NEW_LINE> <INDENT> task_name = 'merge_uniques' <NEW_LINE> src_file = os.path.abspath(__file__) <NEW_LINE> allow_retry = False <NEW_LINE> input_path = luigi.Parameter() <NEW_LINE> input_key = luigi.Parameter() <NEW_LINE> output_path = luigi.Parameter() <NEW_LINE> output_key = luigi.Parameter() <NEW_LINE> dependency = luigi.TaskParameter() <NEW_LINE> def requires(self): <NEW_LINE> <INDENT> return self.dependency <NEW_LINE> <DEDENT> def run_impl(self): <NEW_LINE> <INDENT> shebang, block_shape, roi_begin, roi_end = self.global_config_values() <NEW_LINE> self.init(shebang) <NEW_LINE> shape = vu.get_shape(self.input_path, self.input_key) <NEW_LINE> block_list = vu.blocks_in_volume(shape, block_shape, roi_begin, roi_end) <NEW_LINE> n_jobs = min(len(block_list), self.max_jobs) <NEW_LINE> config = self.get_task_config() <NEW_LINE> config.update({'shape': shape, 'output_path': self.output_path, 'output_key': self.output_key, 'tmp_folder': self.tmp_folder, 'n_jobs': n_jobs}) <NEW_LINE> self.prepare_jobs(1, None, config) <NEW_LINE> self.submit_jobs(1) <NEW_LINE> self.wait_for_jobs() <NEW_LINE> self.check_jobs(1)
MergeUniques base class
6259907b8a349b6b43687c64
class List(tls.Unicode, TypeMeta): <NEW_LINE> <INDENT> info_text = "Communities.List" <NEW_LINE> class v1_0(tls.Unicode, TypeMeta): <NEW_LINE> <INDENT> info_text = "Communities.List-1.0"
List type
6259907ba8370b77170f1dd7
class ITextTitleWidget(ITextWidget): <NEW_LINE> <INDENT> pass
Marker interface for TextDate
6259907b97e22403b383c909
class PythonObjectToProtoVisitor(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._protos = {} <NEW_LINE> <DEDENT> def GetProtos(self): <NEW_LINE> <INDENT> return self._protos <NEW_LINE> <DEDENT> def __call__(self, path, parent, children): <NEW_LINE> <INDENT> lib_path = 'tensorflow.%s' % path if path else 'tensorflow' <NEW_LINE> _, parent = tf_decorator.unwrap(parent) <NEW_LINE> def _AddMember(member_name, member_obj, proto): <NEW_LINE> <INDENT> _, member_obj = tf_decorator.unwrap(member_obj) <NEW_LINE> if (_SkipMember(parent, member_name) or isinstance(member_obj, deprecation.HiddenTfApiAttribute)): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if member_name == '__init__' or not six.ensure_str( member_name).startswith('_'): <NEW_LINE> <INDENT> if tf_inspect.isroutine(member_obj): <NEW_LINE> <INDENT> new_method = proto.member_method.add() <NEW_LINE> new_method.name = member_name <NEW_LINE> if hasattr(member_obj, '__code__'): <NEW_LINE> <INDENT> new_method.argspec = _SanitizedArgSpec(member_obj) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> new_member = proto.member.add() <NEW_LINE> new_member.name = member_name <NEW_LINE> if tf_inspect.ismodule(member_obj): <NEW_LINE> <INDENT> new_member.mtype = "<type \'module\'>" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_member.mtype = _NormalizeType(str(type(member_obj))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> parent_corner_cases = _CORNER_CASES.get(path, {}) <NEW_LINE> if path not in _CORNER_CASES or parent_corner_cases: <NEW_LINE> <INDENT> if tf_inspect.ismodule(parent): <NEW_LINE> <INDENT> module_obj = api_objects_pb2.TFAPIModule() <NEW_LINE> for name, child in children: <NEW_LINE> <INDENT> if name in parent_corner_cases: <NEW_LINE> <INDENT> if parent_corner_cases[name]: <NEW_LINE> <INDENT> module_obj.member.add(**(parent_corner_cases[name])) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> _AddMember(name, child, module_obj) <NEW_LINE> <DEDENT> <DEDENT> self._protos[lib_path] = api_objects_pb2.TFAPIObject( path=lib_path, tf_module=module_obj) <NEW_LINE> <DEDENT> elif _IsProtoClass(parent): <NEW_LINE> <INDENT> proto_obj = api_objects_pb2.TFAPIProto() <NEW_LINE> parent.DESCRIPTOR.CopyToProto(proto_obj.descriptor) <NEW_LINE> self._protos[lib_path] = api_objects_pb2.TFAPIObject( path=lib_path, tf_proto=proto_obj) <NEW_LINE> <DEDENT> elif tf_inspect.isclass(parent): <NEW_LINE> <INDENT> class_obj = api_objects_pb2.TFAPIClass() <NEW_LINE> class_obj.is_instance.extend( _NormalizeIsInstance(i) for i in _SanitizedMRO(parent)) <NEW_LINE> for name, child in children: <NEW_LINE> <INDENT> if name in parent_corner_cases: <NEW_LINE> <INDENT> if parent_corner_cases[name]: <NEW_LINE> <INDENT> class_obj.member.add(**(parent_corner_cases[name])) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> _AddMember(name, child, class_obj) <NEW_LINE> <DEDENT> <DEDENT> self._protos[lib_path] = api_objects_pb2.TFAPIObject( path=lib_path, tf_class=class_obj) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.error('Illegal call to ApiProtoDump::_py_obj_to_proto.' 'Object is neither a module nor a class: %s', path)
A visitor that summarizes given python objects as protobufs.
6259907b99fddb7c1ca63adb
class NextcloudBinarySensor(BinarySensorEntity): <NEW_LINE> <INDENT> def __init__(self, item): <NEW_LINE> <INDENT> self._name = item <NEW_LINE> self._is_on = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> return "mdi:cloud" <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return self._is_on == "yes" <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_id(self): <NEW_LINE> <INDENT> return f"{self.hass.data[DOMAIN]['instance']}#{self._name}" <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self._is_on = self.hass.data[DOMAIN][self._name]
Represents a Nextcloud binary sensor.
6259907b7d847024c075dde5
class Milestone(SourceModel): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def _builder(cls, milestones): <NEW_LINE> <INDENT> return instance_builder(cls, milestones) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Milestone.id={}.name={}>'.format( self['id'], self['name'] )
This object stores all information about a milestone. Data stored in the milestones can be used to configure the ``Parameter`` object. Args: id (str): unique identifier of the milestone name (str): name of the milestone description (str, optional): description of the milestone start_date (datetime.datetime): start date of the time period of the milestone. end_date (datetime.datetime): end date of the time period of the milestone interval (str): interval for the time period. Can be ``month``, ``week``, ``day`` or ``hour``. picture_id (str): ID of the custom picture for the milestone timezone (pytz.timezone): timezone of the milestone visibility (str): ``public`` or ``private``
6259907b21bff66bcd724671
class XMLLink(TargetRelativeLink): <NEW_LINE> <INDENT> def __init__(self, target, tagFactory=tag('a'), text=None): <NEW_LINE> <INDENT> TargetRelativeLink.__init__(self, target, ('.xml',)) <NEW_LINE> self.tagFactory = tagFactory <NEW_LINE> self.text = text <NEW_LINE> <DEDENT> def render(self, context): <NEW_LINE> <INDENT> text = self.text <NEW_LINE> if text is None: <NEW_LINE> <INDENT> text = "Unformatted XML" <NEW_LINE> <DEDENT> return self.tagFactory(href=self.getURL(context))[text]
An anchor tag linking to the XML feed for a given stats target
6259907b67a9b606de5477aa
class FlatMapLatest(Stream): <NEW_LINE> <INDENT> def __init__(self, stream, method): <NEW_LINE> <INDENT> if not hasattr(method, '__call__'): <NEW_LINE> <INDENT> self.method = lambda x: method <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.method = method <NEW_LINE> <DEDENT> stream.register(self) <NEW_LINE> super().__init__() <NEW_LINE> self.unsubscribe = None <NEW_LINE> <DEDENT> def notify(self, value): <NEW_LINE> <INDENT> if self.unsubscribe is not None: <NEW_LINE> <INDENT> self.unsubscribe() <NEW_LINE> <DEDENT> self.unsubscribe = self.method(value).subscribe(self.emit)
Flat map but ignores all but latest stream
6259907bfff4ab517ebcf221
class AddToPrivileged(ServerMessage): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.user = None <NEW_LINE> <DEDENT> def parse_network_message(self, message): <NEW_LINE> <INDENT> _pos, self.user = self.get_object(message, str)
Server code: 91
6259907be1aae11d1e7cf515
class _ARUnblockTweak(_ARaceTweak): <NEW_LINE> <INDENT> _sig_and_attr = (b'OVERRIDE', u'OVERRIDE') <NEW_LINE> def wants_record(self, record): <NEW_LINE> <INDENT> if record._rec_sig != b'RACE': <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> race_sig, race_attr = self._sig_and_attr <NEW_LINE> tweak_data = self.tweak_races_data <NEW_LINE> return tweak_data is None or getattr( record, race_attr) != tweak_data[race_sig] <NEW_LINE> <DEDENT> def tweak_record(self, record): <NEW_LINE> <INDENT> race_sig, race_attr = self._sig_and_attr <NEW_LINE> setattr(record, race_attr, self.tweak_races_data[race_sig])
Shared code of 'races have all X' tweaks.
6259907bbe7bc26dc9252b5a
class ExpectimaxAgent(MultiAgentSearchAgent): <NEW_LINE> <INDENT> def getAction(self, gameState): <NEW_LINE> <INDENT> action = None <NEW_LINE> if 0 == self.depth: <NEW_LINE> <INDENT> return self.evaluationFunction(gameState) <NEW_LINE> <DEDENT> if self.index == 0: <NEW_LINE> <INDENT> actions = gameState.getLegalActions(self.index) <NEW_LINE> value = float("inf") <NEW_LINE> v = [] <NEW_LINE> for a in actions: <NEW_LINE> <INDENT> v.append((self.expValue(gameState.generateSuccessor(self.index, a), 0, 1), a)) <NEW_LINE> <DEDENT> value, action = max(v) <NEW_LINE> <DEDENT> return action <NEW_LINE> <DEDENT> def maxValue(self, gameState, depth, agentIndex): <NEW_LINE> <INDENT> value = float("-inf") <NEW_LINE> actions = gameState.getLegalActions(agentIndex) <NEW_LINE> v = [] <NEW_LINE> if depth == self.depth or len(actions) == 0: <NEW_LINE> <INDENT> return self.evaluationFunction(gameState) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for a in actions: <NEW_LINE> <INDENT> v.append(self.expValue(gameState.generateSuccessor(agentIndex, a), depth, (agentIndex + 1))) <NEW_LINE> <DEDENT> value = max(v) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def expValue(self, gameState, depth, agentIndex): <NEW_LINE> <INDENT> value = 0 <NEW_LINE> v = [] <NEW_LINE> actions = gameState.getLegalActions(agentIndex) <NEW_LINE> if depth == self.depth or len(actions) == 0: <NEW_LINE> <INDENT> return self.evaluationFunction(gameState) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for a in actions: <NEW_LINE> <INDENT> if agentIndex == gameState.getNumAgents() - 1: <NEW_LINE> <INDENT> v.append(self.maxValue(gameState.generateSuccessor(agentIndex, a), (depth + 1), 0)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> v.append(self.expValue(gameState.generateSuccessor(agentIndex, a), depth, (agentIndex + 1))) <NEW_LINE> <DEDENT> <DEDENT> weight = sum(v) / len(v) <NEW_LINE> <DEDENT> return weight
Your expectimax agent (question 4)
6259907b283ffb24f3cf52aa
class Processing: <NEW_LINE> <INDENT> __slots__ = ('contexts', 'calls') <NEW_LINE> def __init__(self, contexts): <NEW_LINE> <INDENT> assert isinstance(contexts, dict), 'Invalid contexts %s' % contexts <NEW_LINE> if __debug__: <NEW_LINE> <INDENT> for key, clazz in contexts.items(): <NEW_LINE> <INDENT> assert isinstance(key, str), 'Invalid context name %s' % key <NEW_LINE> assert isinstance(clazz, ContextMetaClass), 'Invalid context class %s for %s' % (clazz, key) <NEW_LINE> <DEDENT> <DEDENT> self.contexts = contexts <NEW_LINE> self.calls = deque()
Container for processors; provides chains for their execution. !!! Attention: never ever use a processing in multiple threads; only one thread is allowed to execute a processing at a time.
6259907b4527f215b58eb6a5
class ComparisonTestFramework(BitcoinTestFramework): <NEW_LINE> <INDENT> def set_test_params(self): <NEW_LINE> <INDENT> self.num_nodes = 2 <NEW_LINE> self.setup_clean_chain = True <NEW_LINE> <DEDENT> def add_options(self, parser): <NEW_LINE> <INDENT> parser.add_option("--testbinary", dest="testbinary", default=os.getenv("BITCOIND", "ufyd"), help="ufyd binary to test") <NEW_LINE> parser.add_option("--refbinary", dest="refbinary", default=os.getenv("BITCOIND", "ufyd"), help="ufyd binary to use for reference nodes (if any)") <NEW_LINE> <DEDENT> def setup_network(self): <NEW_LINE> <INDENT> extra_args = [['-whitelist=127.0.0.1']] * self.num_nodes <NEW_LINE> if hasattr(self, "extra_args"): <NEW_LINE> <INDENT> extra_args = self.extra_args <NEW_LINE> <DEDENT> self.add_nodes(self.num_nodes, extra_args, binary=[self.options.testbinary] + [self.options.refbinary] * (self.num_nodes - 1)) <NEW_LINE> self.start_nodes()
Test framework for doing p2p comparison testing Sets up some ufyd binaries: - 1 binary: test binary - 2 binaries: 1 test binary, 1 ref binary - n>2 binaries: 1 test binary, n-1 ref binaries
6259907b7047854f46340dc5
class CommViewSet(CORSMixin, MarketplaceView, GenericViewSet): <NEW_LINE> <INDENT> parser_classes = (FormParser, JSONParser) <NEW_LINE> def patched_get_request(self): <NEW_LINE> <INDENT> return lambda x: self.request <NEW_LINE> <DEDENT> def get_serializer_class(self): <NEW_LINE> <INDENT> original = super(CommViewSet, self).get_serializer_class() <NEW_LINE> original.get_request = self.patched_get_request() <NEW_LINE> return original <NEW_LINE> <DEDENT> def partial_update(self, request, *args, **kwargs): <NEW_LINE> <INDENT> val = BooleanField().from_native(request.DATA.get('is_read')) <NEW_LINE> if val: <NEW_LINE> <INDENT> self.mark_as_read(request.amo_user) <NEW_LINE> return Response(status=status.HTTP_204_NO_CONTENT) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response('Requested update operation not supported', status=status.HTTP_403_FORBIDDEN)
Some overriding and mixin stuff to adapt other viewsets.
6259907b167d2b6e312b8298
class CoGameEntity: <NEW_LINE> <INDENT> COOFFSET = None <NEW_LINE> def __init__(self, phyworld): <NEW_LINE> <INDENT> self.entity_id = id(self) <NEW_LINE> self.myworld = phyworld <NEW_LINE> self.mybody = None <NEW_LINE> self.myimage = None <NEW_LINE> self.image_idx = 0 <NEW_LINE> self.shift_len = 0 <NEW_LINE> self.front_back = 0 <NEW_LINE> self.image_rotate = 0 <NEW_LINE> self.render_level = 0 <NEW_LINE> self.current_state = [] <NEW_LINE> self.hp = 1 <NEW_LINE> self.alliance = ALLIANCE_GOD <NEW_LINE> self.attach = [] <NEW_LINE> self.myworld.addEntityToPhy(self) <NEW_LINE> <DEDENT> def exportRenderInfo(self, renderList, render_filter): <NEW_LINE> <INDENT> tmpvec = self.COOFFSET.rotate(int(0.0-self.image_rotate)) <NEW_LINE> pos = (self.mybody.position.x + tmpvec.x, self.mybody.position.y + tmpvec.y) <NEW_LINE> encode_data = encodeRenderInfo(self.myimage, self.image_idx, int(self.image_rotate%360), self.front_back) <NEW_LINE> for pid in render_filter(pos): <NEW_LINE> <INDENT> renderList[pid][self.render_level].append((pos, encode_data)) <NEW_LINE> <DEDENT> <DEDENT> def process(self): <NEW_LINE> <INDENT> if self.hp <= 0: <NEW_LINE> <INDENT> self.preDied() <NEW_LINE> self.destroy() <NEW_LINE> <DEDENT> <DEDENT> def updateAttachment(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def changeHP(self, val): <NEW_LINE> <INDENT> self.hp += val <NEW_LINE> <DEDENT> def getEntityPosition(self): <NEW_LINE> <INDENT> return self.mybody.position.x, self.mybody.position.y <NEW_LINE> <DEDENT> def preDied(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def destroy(self): <NEW_LINE> <INDENT> if self.mybody and self.myworld: <NEW_LINE> <INDENT> self.myworld.destroyEntity(self)
Base Class For Game Entity
6259907bdc8b845886d54fc5
class PlayerState: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRUCT, 'location', (Location, Location.thrift_spec), None, ), (2, TType.I32, 'health', None, None, ), (3, TType.I32, 'type', None, None, ), ) <NEW_LINE> def __init__(self, type=None, location=None, health=None,): <NEW_LINE> <INDENT> self.type = type <NEW_LINE> self.location = location <NEW_LINE> self.health = health <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 3: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.type = iprot.readI32(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.location = Location() <NEW_LINE> self.location.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.health = iprot.readI32(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('PlayerState') <NEW_LINE> if self.location is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('location', TType.STRUCT, 1) <NEW_LINE> self.location.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.health is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('health', TType.I32, 2) <NEW_LINE> oprot.writeI32(self.health) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.type is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('type', TType.I32, 3) <NEW_LINE> oprot.writeI32(self.type) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - type - location - health
6259907bd268445f2663a863
class STD_ANON_ (pyxb.binding.datatypes.string): <NEW_LINE> <INDENT> _ExpandedName = None <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('http://www.sat.gob.mx/sitio_internet/cfd/3/cfdv33.xsd', 58, 16) <NEW_LINE> _Documentation = None
An atomic simple type.
6259907b5fdd1c0f98e5f989
class AgentTest(TestCaseImproved): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> mqttclient_mock.MQTTClientMock.mock_reset() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> mqttclient_mock.MQTTClientMock.mock_reset()
Setup and cleanup Agent mock.
6259907b4c3428357761bcc4
class WorkListIndexer(ValueIndexer): <NEW_LINE> <INDENT> indexName = 'worklist-value' <NEW_LINE> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return getattr(self.context, 'worklist', 'undefined')
Indexes work item in the worklist-value index.
6259907b67a9b606de5477ab
class DictionaryLanguageSetter(DefaultProfileSetter): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__('setSpellCheckLanguages', default=[]) <NEW_LINE> <DEDENT> def _find_installed(self, code): <NEW_LINE> <INDENT> local_filename = spell.local_filename(code) <NEW_LINE> if not local_filename: <NEW_LINE> <INDENT> message.warning( "Language {} is not installed - see scripts/dictcli.py " "in qutebrowser's sources".format(code)) <NEW_LINE> <DEDENT> return local_filename <NEW_LINE> <DEDENT> def _set(self, value, settings=None): <NEW_LINE> <INDENT> if settings is not None: <NEW_LINE> <INDENT> raise ValueError("'settings' may not be set with " "DictionaryLanguageSetter!") <NEW_LINE> <DEDENT> filenames = [self._find_installed(code) for code in value] <NEW_LINE> log.config.debug("Found dicts: {}".format(filenames)) <NEW_LINE> super()._set([f for f in filenames if f], settings)
Sets paths to dictionary files based on language codes.
6259907bd486a94d0ba2d9c2
class Type(ClassBasedTraitType): <NEW_LINE> <INDENT> def __init__ (self, default_value=None, klass=None, allow_none=True, **metadata ): <NEW_LINE> <INDENT> if default_value is None: <NEW_LINE> <INDENT> if klass is None: <NEW_LINE> <INDENT> klass = object <NEW_LINE> <DEDENT> <DEDENT> elif klass is None: <NEW_LINE> <INDENT> klass = default_value <NEW_LINE> <DEDENT> if not (inspect.isclass(klass) or isinstance(klass, py3compat.string_types)): <NEW_LINE> <INDENT> raise TraitError("A Type trait must specify a class.") <NEW_LINE> <DEDENT> self.klass = klass <NEW_LINE> self._allow_none = allow_none <NEW_LINE> super(Type, self).__init__(default_value, **metadata) <NEW_LINE> <DEDENT> def validate(self, obj, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if issubclass(value, self.klass): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> if (value is None) and (self._allow_none): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> <DEDENT> self.error(obj, value) <NEW_LINE> <DEDENT> def info(self): <NEW_LINE> <INDENT> if isinstance(self.klass, py3compat.string_types): <NEW_LINE> <INDENT> klass = self.klass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> klass = self.klass.__name__ <NEW_LINE> <DEDENT> result = 'a subclass of ' + klass <NEW_LINE> if self._allow_none: <NEW_LINE> <INDENT> return result + ' or None' <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def instance_init(self, obj): <NEW_LINE> <INDENT> self._resolve_classes() <NEW_LINE> super(Type, self).instance_init(obj) <NEW_LINE> <DEDENT> def _resolve_classes(self): <NEW_LINE> <INDENT> if isinstance(self.klass, py3compat.string_types): <NEW_LINE> <INDENT> self.klass = import_item(self.klass) <NEW_LINE> <DEDENT> if isinstance(self.default_value, py3compat.string_types): <NEW_LINE> <INDENT> self.default_value = import_item(self.default_value) <NEW_LINE> <DEDENT> <DEDENT> def get_default_value(self): <NEW_LINE> <INDENT> return self.default_value
A trait whose value must be a subclass of a specified class.
6259907b91f36d47f2231b94
class AuthAPI(Resource): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> raise NotAllowed() <NEW_LINE> <DEDENT> def put(self): <NEW_LINE> <INDENT> raise NotAllowed() <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> raise NotAllowed() <NEW_LINE> <DEDENT> def post(self): <NEW_LINE> <INDENT> obj = request.get_json() <NEW_LINE> if 'username' not in obj: <NEW_LINE> <INDENT> raise CustomError('Parameter <username> missing', status_code=400) <NEW_LINE> <DEDENT> if 'password' not in obj: <NEW_LINE> <INDENT> raise CustomError('Parameter <password> missing', status_code=400) <NEW_LINE> <DEDENT> ext_id = DBHelper.get_ext_id(obj) <NEW_LINE> if ext_id is None: <NEW_LINE> <INDENT> raise CustomError('Invalid username or password!', status_code=400) <NEW_LINE> <DEDENT> return { 'message': 'login successfully!', 'ext_id': ext_id, 'status_code': 200 }, 200
Exchange a username and password for an ext_id
6259907b1f5feb6acb164602
class CommandlineItem(ConfigItem): <NEW_LINE> <INDENT> default = False <NEW_LINE> def __init__(self, default_value=None): <NEW_LINE> <INDENT> super(CommandlineItem, self).__init__(None, default_value)
A special ConfigItem, which is passed through the commandline
6259907b32920d7e50bc7a4d
class Channel(grpc.Channel): <NEW_LINE> <INDENT> def __init__(self, target, options, credentials): <NEW_LINE> <INDENT> self._channel = cygrpc.Channel( _common.encode(target), _options(options), credentials) <NEW_LINE> self._call_state = _ChannelCallState(self._channel) <NEW_LINE> self._connectivity_state = _ChannelConnectivityState(self._channel) <NEW_LINE> cygrpc.fork_register_channel(self) <NEW_LINE> <DEDENT> def subscribe(self, callback, try_to_connect=None): <NEW_LINE> <INDENT> _subscribe(self._connectivity_state, callback, try_to_connect) <NEW_LINE> <DEDENT> def unsubscribe(self, callback): <NEW_LINE> <INDENT> _unsubscribe(self._connectivity_state, callback) <NEW_LINE> <DEDENT> def unary_unary(self, method, request_serializer=None, response_deserializer=None): <NEW_LINE> <INDENT> return _UnaryUnaryMultiCallable( self._channel, _channel_managed_call_management(self._call_state), _common.encode(method), request_serializer, response_deserializer) <NEW_LINE> <DEDENT> def unary_stream(self, method, request_serializer=None, response_deserializer=None): <NEW_LINE> <INDENT> return _UnaryStreamMultiCallable( self._channel, _channel_managed_call_management(self._call_state), _common.encode(method), request_serializer, response_deserializer) <NEW_LINE> <DEDENT> def stream_unary(self, method, request_serializer=None, response_deserializer=None): <NEW_LINE> <INDENT> return _StreamUnaryMultiCallable( self._channel, _channel_managed_call_management(self._call_state), _common.encode(method), request_serializer, response_deserializer) <NEW_LINE> <DEDENT> def stream_stream(self, method, request_serializer=None, response_deserializer=None): <NEW_LINE> <INDENT> return _StreamStreamMultiCallable( self._channel, _channel_managed_call_management(self._call_state), _common.encode(method), request_serializer, response_deserializer) <NEW_LINE> <DEDENT> def _close(self): <NEW_LINE> <INDENT> self._channel.close(cygrpc.StatusCode.cancelled, 'Channel closed!') <NEW_LINE> cygrpc.fork_unregister_channel(self) <NEW_LINE> _moot(self._connectivity_state) <NEW_LINE> <DEDENT> def _close_on_fork(self): <NEW_LINE> <INDENT> self._channel.close_on_fork(cygrpc.StatusCode.cancelled, 'Channel closed due to fork') <NEW_LINE> _moot(self._connectivity_state) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> self._close() <NEW_LINE> return False <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self._close() <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> if _moot is not None and hasattr(self, '_connectivity_state'): <NEW_LINE> <INDENT> _moot(self._connectivity_state)
A cygrpc.Channel-backed implementation of grpc.Channel.
6259907b97e22403b383c90c
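Application code normally reaches this class through the public grpc helpers rather than by constructing it directly; a typical unary-unary setup looks roughly like this (the target address and method path are placeholders, and real serializers usually come from generated protobuf code):

import grpc

channel = grpc.insecure_channel("localhost:50051")
say_hello = channel.unary_unary(
    "/example.Greeter/SayHello",   # hypothetical fully-qualified method name
    request_serializer=None,       # e.g. HelloRequest.SerializeToString
    response_deserializer=None,    # e.g. HelloReply.FromString
)
channel.close()                    # or use the channel as a context manager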
class MarkEmbeddedHyperlinkReferencesAnonymous(docutils.transforms.Transform): <NEW_LINE> <INDENT> default_priority = 480 <NEW_LINE> def apply(self): <NEW_LINE> <INDENT> for ref in self.document.traverse(docutils.nodes.reference): <NEW_LINE> <INDENT> if ref.get("refname"): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if not ref.get("refuri"): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if ref.get("refuri") == ref.astext().strip(): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> ref["anonymous"] = 1
Mark all hyperlink references with embedded targets as ``anonymous``. Hyperlink references with embedded targets [1]_ are syntactically always anonymous references [2]_, but the standard parser only marks them as anonymous if they refer to a separate anonymous target (i.e. don't embed the target using angle brackets) [3]_. Presumably this is because such anonymous reference and anonymous target pairs need special processing that anonymous references with embedded targets don't [4]_. For our use, we want these embedded-target references to be picked up by the :cls:`docutils.transforms.references.TargetNotes` transform, which requires them to be marked anonymous. .. [1] https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#embedded-uris-and-aliases .. [2] https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#anonymous-hyperlinks .. [3] https://sourceforge.net/p/docutils/code/HEAD/tree/trunk/docutils/docutils/parsers/rst/states.py#l859 (see phrase_ref() method) .. [4] See :cls:`docutils.transforms.references.AnonymousHyperlinks`. >>> rst_to_text('`foo <https://example.com/foo>`__') 'foo [1]\n\n[1] https://example.com/foo\n' >>> rst_to_text(''' ... `bar <https://example.com/bar>`_ ... `baz <bar_>`_ ... ''') 'bar [1] baz [1]\n\n[1] https://example.com/bar\n' >>> rst_to_text('just a link, https://example.com/standalone, sitting right there') 'just a link, https://example.com/standalone, sitting right there\n'
6259907b442bda511e95da5d
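A sketch of attaching the transform by hand, assuming a docutils document with its usual Transformer; in a full pipeline the transform would more likely be contributed through a Reader/Writer's get_transforms():

import docutils.transforms.references

def apply_link_transforms(document):
    # TargetNotes is what turns the (now anonymous) embedded-target references
    # into the numbered footnotes shown in the doctests above.
    document.transformer.add_transform(MarkEmbeddedHyperlinkReferencesAnonymous)
    document.transformer.add_transform(docutils.transforms.references.TargetNotes)
    document.transformer.apply_transforms()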
class Space(Enum): <NEW_LINE> <INDENT> def to_float(self): <NEW_LINE> <INDENT> if self.value == 1: <NEW_LINE> <INDENT> return 0.0 <NEW_LINE> <DEDENT> elif self.value == 2: <NEW_LINE> <INDENT> return 1.0 <NEW_LINE> <DEDENT> elif self.value == 3: <NEW_LINE> <INDENT> return 2.0 <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.value == 1: <NEW_LINE> <INDENT> return '_' <NEW_LINE> <DEDENT> elif self.value == 2: <NEW_LINE> <INDENT> return 'x' <NEW_LINE> <DEDENT> elif self.value == 3: <NEW_LINE> <INDENT> return 'o' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '?' <NEW_LINE> <DEDENT> <DEDENT> EMPTY = 1 <NEW_LINE> X = 2 <NEW_LINE> O = 3
Represents a single space, either occupied by a player or empty
6259907bf9cc0f698b1c5fd2
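A quick sketch of the two conversions the enum provides:

row = [Space.X, Space.EMPTY, Space.O]
print(" ".join(str(s) for s in row))   # x _ o
print([s.to_float() for s in row])     # [1.0, 0.0, 2.0]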
class GenerateUpdate(superdesk.Command): <NEW_LINE> <INDENT> option_list = [ superdesk.Option('--resource', '-r', dest='resource_name', required=True, help='Resource to update'), superdesk.Option('--global', '-g', dest='global_update', required=False, action='store_true', help='This data update belongs to superdesk core'), ] <NEW_LINE> def run(self, resource_name, global_update=False): <NEW_LINE> <INDENT> timestamp = time.strftime('%Y%m%d-%H%M%S') <NEW_LINE> try: <NEW_LINE> <INDENT> last_file = get_data_updates_files()[-1] <NEW_LINE> name_id = int(last_file.replace('.py', '').split('_')[0]) + 1 <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> name_id = 0 <NEW_LINE> <DEDENT> if global_update: <NEW_LINE> <INDENT> update_dir = MAIN_DATA_UPDATES_DIR <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> update_dir = get_dirs(only_relative_folder=True)[0] <NEW_LINE> <DEDENT> if not os.path.exists(update_dir): <NEW_LINE> <INDENT> os.makedirs(update_dir) <NEW_LINE> <DEDENT> data_update_filename = os.path.join(update_dir, '{:05d}_{}_{}.py'.format(name_id, timestamp, resource_name)) <NEW_LINE> if os.path.exists(data_update_filename): <NEW_LINE> <INDENT> raise Exception('The file "%s" already exists' % (data_update_filename)) <NEW_LINE> <DEDENT> with open(data_update_filename, 'w+') as f: <NEW_LINE> <INDENT> template_context = { 'resource': resource_name, 'current_date': time.strftime("%Y-%m-%d %H:%M"), 'user': getpass.getuser(), 'default_fw_implementation': DEFAULT_DATA_UPDATE_FW_IMPLEMENTATION, 'default_bw_implementation': DEFAULT_DATA_UPDATE_BW_IMPLEMENTATION } <NEW_LINE> f.write(Template(DATA_UPDATE_TEMPLATE).substitute(template_context)) <NEW_LINE> print('Data update file created %s' % (data_update_filename))
Generate a file in which to define a new data update.
6259907b2c8b7c6e89bd51f7
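The command can also be driven programmatically with the same arguments it takes on the command line; the resource name is just an example:

# Writes a numbered, timestamped file such as 00000_<timestamp>_archive.py
# into the data-updates directory and fills it from DATA_UPDATE_TEMPLATE.
GenerateUpdate().run(resource_name="archive", global_update=False)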
class TestFeatureSelector: <NEW_LINE> <INDENT> def setup_method(self, test_method): <NEW_LINE> <INDENT> self.conf = Conf() <NEW_LINE> self.dt = FeatureSelector(self.conf) <NEW_LINE> self.dt.training_file = self.conf.training_file <NEW_LINE> try: <NEW_LINE> <INDENT> os.remove(self.conf.output_file) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("No test output file found.") <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> def test_C04T01_feature_selection(self): <NEW_LINE> <INDENT> self.dt.feature_selection() <NEW_LINE> self.dt.store() <NEW_LINE> rows_i = list(csv.reader(open(self.dt.input_file))) <NEW_LINE> rows_o = list(csv.reader(open(self.dt.output_file))) <NEW_LINE> assert (len(rows_i[0]) > len(rows_o[0])) <NEW_LINE> <DEDENT> def test_C04T02_cmdline_feature_selection(self): <NEW_LINE> <INDENT> os.system( "feature_selector --input_file " + self.conf.input_file + " --output_file " + self.conf.output_file + " --training_file " + self.conf.training_file ) <NEW_LINE> print("output_file = ", self.conf.output_file) <NEW_LINE> rows_i = list(csv.reader(open(self.conf.input_file))) <NEW_LINE> rows_o = list(csv.reader(open(self.conf.output_file))) <NEW_LINE> assert (len(rows_i[0]) > len(rows_o[0])) <NEW_LINE> <DEDENT> def test_C04T03_config_save_load(self): <NEW_LINE> <INDENT> os.system( "feature_selector --input_file " + self.conf.input_file + " --output_file " + self.conf.output_file + " --output_config_file " + self.conf.output_config_file + " --training_file " + self.conf.training_file ) <NEW_LINE> os.system( "feature_selector --input_file " + self.conf.input_file + " --input_config_file " + self.conf.output_config_file + " --output_file " + self.conf.output_file + ".c04t03" ) <NEW_LINE> assert filecmp.cmp(self.conf.output_file, self.conf.output_file + ".c04t03", shallow=True)
Component Tests
6259907ba8370b77170f1dda
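These component tests are written for pytest; one way to run just this class from Python (where the test module lives is an assumption of the project layout):

import pytest

pytest.main(["-v", "-k", "TestFeatureSelector"])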
class CopySerializer(serializers.Serializer): <NEW_LINE> <INDENT> config = serializers.JSONField( help_text=_("A JSON document describing sources, destinations, and content to be copied") ) <NEW_LINE> structured = serializers.BooleanField( help_text=_( "Also copy any distributions, components, and releases as needed for any packages " "being copied. This will allow for structured publications of the target repository." "Default is set to True" ), default=True, ) <NEW_LINE> dependency_solving = serializers.BooleanField( help_text=_( "Also copy dependencies of any packages being copied. NOT YET" 'IMPLEMENTED! You must keep this at "False"!' ), default=False, ) <NEW_LINE> def validate(self, data): <NEW_LINE> <INDENT> super().validate(data) <NEW_LINE> if hasattr(self, "initial_data"): <NEW_LINE> <INDENT> validate_unknown_fields(self.initial_data, self.fields) <NEW_LINE> <DEDENT> if "config" in data: <NEW_LINE> <INDENT> validator = Draft7Validator(COPY_CONFIG_SCHEMA) <NEW_LINE> err = [] <NEW_LINE> for error in sorted(validator.iter_errors(data["config"]), key=str): <NEW_LINE> <INDENT> err.append(error.message) <NEW_LINE> <DEDENT> if err: <NEW_LINE> <INDENT> raise serializers.ValidationError( _("Provided copy criteria is invalid:'{}'".format(err)) ) <NEW_LINE> <DEDENT> <DEDENT> return data
A serializer for the Content Copy API.
6259907b7047854f46340dc7
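A minimal sketch of validating a copy request with this serializer; the shape of the config document is dictated by COPY_CONFIG_SCHEMA, which is not shown here, so the keys below are purely illustrative:

serializer = CopySerializer(data={
    "config": [{"source_repo_version": "<source href>", "dest_repo": "<dest href>"}],  # illustrative keys
    "structured": True,
})
serializer.is_valid(raise_exception=True)  # raises ValidationError on unknown fields or schema errors
validated = serializer.validated_data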
class DAG: <NEW_LINE> <INDENT> def __init__(self, tasks, filter_regex='', execute_dependents=False, force_run=False): <NEW_LINE> <INDENT> all_tasks = {} <NEW_LINE> def recurse(task): <NEW_LINE> <INDENT> if task.get_full_id() in all_tasks: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for dep_task in task.dependencies: <NEW_LINE> <INDENT> recurse(dep_task) <NEW_LINE> <DEDENT> all_tasks[task.get_full_id()] = task <NEW_LINE> <DEDENT> for task in tasks: <NEW_LINE> <INDENT> recurse(task) <NEW_LINE> <DEDENT> self.execute_dependents = execute_dependents <NEW_LINE> self.force_run = force_run <NEW_LINE> self.filter_regex = filter_regex <NEW_LINE> self.tasks = all_tasks <NEW_LINE> <DEDENT> def run(self, period): <NEW_LINE> <INDENT> downstream_tasks = {} <NEW_LINE> tasks = self.tasks <NEW_LINE> if self.filter_regex: <NEW_LINE> <INDENT> tasks = { k: v for (k, v) in tasks.items() if re.search(self.filter_regex, k)} <NEW_LINE> <DEDENT> for task_id in tasks: <NEW_LINE> <INDENT> task = tasks[task_id] <NEW_LINE> for dependent_task in task.dependencies: <NEW_LINE> <INDENT> if dependent_task.get_full_id() not in downstream_tasks: <NEW_LINE> <INDENT> downstream_tasks[dependent_task.get_full_id()] = Set() <NEW_LINE> <DEDENT> downstream_tasks[dependent_task.get_full_id()].add(task.get_full_id()) <NEW_LINE> <DEDENT> <DEDENT> leaf_tasks = [] <NEW_LINE> for task_id in tasks: <NEW_LINE> <INDENT> task = tasks[task_id] <NEW_LINE> if task.get_full_id() not in downstream_tasks: <NEW_LINE> <INDENT> leaf_tasks.append(task) <NEW_LINE> <DEDENT> <DEDENT> tasks_to_execute = leaf_tasks <NEW_LINE> executed_tasks = Set() <NEW_LINE> def recurse(task): <NEW_LINE> <INDENT> if task.get_full_id() in executed_tasks: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> executed_tasks.add(task.get_full_id()) <NEW_LINE> for dependent_task in task.dependencies: <NEW_LINE> <INDENT> if self.execute_dependents: <NEW_LINE> <INDENT> recurse(dependent_task) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.info('skipping %s', dependent_task.get_full_id()) <NEW_LINE> <DEDENT> <DEDENT> if self.force_run: <NEW_LINE> <INDENT> task.set_force_run() <NEW_LINE> <DEDENT> task.run(period) <NEW_LINE> <DEDENT> for task_to_execute in tasks_to_execute: <NEW_LINE> <INDENT> recurse(task_to_execute)
Directed Acyclic Graph. Give it a collection of tasks and it will try to execute them.
6259907b8a349b6b43687c67
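The Task interface is only implied here (get_full_id(), dependencies, run(period), set_force_run()); a stub built on those assumptions shows how a small graph is wired up and run:

class StubTask:
    # Bare-bones task satisfying the interface DAG appears to expect.
    def __init__(self, name, dependencies=None):
        self.name = name
        self.dependencies = dependencies or []
    def get_full_id(self):
        return self.name
    def set_force_run(self):
        self.force_run = True
    def run(self, period):
        print("running {} for {}".format(self.name, period))

extract = StubTask("extract")
load = StubTask("load", dependencies=[extract])

dag = DAG([load], execute_dependents=True)
dag.run("2024-01-01")  # executes extract first, then load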
class PointAnnotateCallback(TextLabelCallback): <NEW_LINE> <INDENT> _type_name = "point" <NEW_LINE> def __init__(self, pos, text, data_coords=False, coord_system='data', text_args=None, inset_box_args=None): <NEW_LINE> <INDENT> super(PointAnnotateCallback, self).__init__(pos, text, data_coords, coord_system, text_args, inset_box_args) <NEW_LINE> warnings.warn("The PointAnnotateCallback (annotate_point()) is " "deprecated. Please use the TextLabelCallback " "(annotate_point()) instead.") <NEW_LINE> <DEDENT> def __call__(self, plot): <NEW_LINE> <INDENT> super(PointAnnotateCallback, self).__call__(plot)
annotate_point(pos, text, coord_system='data', text_args=None, inset_box_args=None) This callback is deprecated, as it is simply a wrapper around the TextLabelCallback (i.e., annotate_text()). Please see TextLabelCallback for more information.
6259907b627d3e7fe0e08893
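At the call site the deprecated spelling and its replacement take the same arguments; a hedged yt example (the dataset name is one of yt's sample datasets and the coordinates are arbitrary):

import yt

ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
slc = yt.SlicePlot(ds, "z", ("gas", "density"))
slc.annotate_text((0.5, 0.5, 0.5), "galaxy center", coord_system="data")  # preferred
# slc.annotate_point(...) accepts the same arguments but emits the deprecation warning above
slc.save()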
class CasetaPicoRemote(Entity): <NEW_LINE> <INDENT> def __init__(self, pico, data, mac): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> self._name = pico[CONF_NAME] <NEW_LINE> self._area_name = None <NEW_LINE> if CONF_AREA_NAME in pico: <NEW_LINE> <INDENT> self._area_name = pico[CONF_AREA_NAME] <NEW_LINE> self._name = pico[CONF_AREA_NAME] + " " + pico[CONF_NAME] <NEW_LINE> <DEDENT> self._integration = int(pico[CONF_ID]) <NEW_LINE> self._buttons = pico[CONF_BUTTONS] <NEW_LINE> self._minbutton = 100 <NEW_LINE> for button_num in self._buttons: <NEW_LINE> <INDENT> if button_num < self._minbutton: <NEW_LINE> <INDENT> self._minbutton = button_num <NEW_LINE> <DEDENT> <DEDENT> self._state = 0 <NEW_LINE> self._mac = mac <NEW_LINE> <DEDENT> @property <NEW_LINE> def integration(self): <NEW_LINE> <INDENT> return self._integration <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_id(self) -> str: <NEW_LINE> <INDENT> if self._mac is not None: <NEW_LINE> <INDENT> return "{}_{}_{}_{}".format(COMPONENT_DOMAIN, DOMAIN, self._mac, self._integration) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_state_attributes(self): <NEW_LINE> <INDENT> attr = {ATTR_INTEGRATION_ID: self._integration} <NEW_LINE> if self._area_name: <NEW_LINE> <INDENT> attr[ATTR_AREA_NAME] = self._area_name <NEW_LINE> <DEDENT> return attr <NEW_LINE> <DEDENT> @property <NEW_LINE> def minbutton(self): <NEW_LINE> <INDENT> return self._minbutton <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> def update_state(self, state): <NEW_LINE> <INDENT> self._state = state
Representation of a Lutron Pico remote.
6259907b3d592f4c4edbc864
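A hedged sketch of constructing the entity from configuration; the CONF_* keys are the constants referenced in the class, and the data object and MAC address are placeholders:

pico_conf = {
    CONF_NAME: "Pico",
    CONF_AREA_NAME: "Kitchen",
    CONF_ID: 21,
    CONF_BUTTONS: [2, 3, 4, 5, 6],
}
remote = CasetaPicoRemote(pico_conf, data=None, mac="aa:bb:cc:dd:ee:ff")
print(remote.name)       # "Kitchen Pico"
print(remote.minbutton)  # 2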