code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class Mallory1(AbstractMallory): <NEW_LINE> <INDENT> def request_groups(self, p, g): <NEW_LINE> <INDENT> self.bob.request_groups(p, 1) <NEW_LINE> <DEDENT> def easy_decrypt(self, ct: bytes, iv: bytes): <NEW_LINE> <INDENT> recovered_pt = aes_cbc_decrypt(1, ct, iv) <NEW_LINE> print("recovered pt (g = 1):", recovered_pt)
g = 1
6259906be5267d203ee6cfc3
class UserDelegateSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Delegate <NEW_LINE> fields = ('id', 'delegate',)
Serializer for UserDelegate Model
6259906b0c0af96317c57964
class GroupMemberRecord(BaseGroupMember): <NEW_LINE> <INDENT> group = models.ForeignKey(BaseGroup, related_name="member_records", verbose_name=_('group')) <NEW_LINE> user = models.ForeignKey(User, related_name="group_records", verbose_name=_('user')) <NEW_LINE> datetime = models.DateTimeField(auto_now_add=True) <NEW_LINE> membership_start = models.BooleanField(default=False, help_text=_('Whether this record signifies the start of a membership or not.')) <NEW_LINE> membership_end = models.BooleanField(default=False, help_text=_('Whether this record signifies the end of a membership or not.')) <NEW_LINE> class Meta(BaseGroupMember.Meta): <NEW_LINE> <INDENT> get_latest_by = 'datetime' <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> instance = kwargs.pop('instance', None) <NEW_LINE> super(GroupMemberRecord, self).__init__(*args, **kwargs) <NEW_LINE> if instance is not None: <NEW_LINE> <INDENT> self.group = instance.group <NEW_LINE> self.user = instance.user <NEW_LINE> self.is_admin = instance.is_admin <NEW_LINE> self.admin_title = instance.admin_title <NEW_LINE> self.admin_order = instance.admin_order <NEW_LINE> self.joined = instance.joined <NEW_LINE> self.datetime = instance.joined
A snapshot of a user's group status at a particular point in time.
6259906b1b99ca400229013b
class ConnectivityInformation(Model): <NEW_LINE> <INDENT> _validation = { 'hops': {'readonly': True}, 'connection_status': {'readonly': True}, 'avg_latency_in_ms': {'readonly': True}, 'min_latency_in_ms': {'readonly': True}, 'max_latency_in_ms': {'readonly': True}, 'probes_sent': {'readonly': True}, 'probes_failed': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'hops': {'key': 'hops', 'type': '[ConnectivityHop]'}, 'connection_status': {'key': 'connectionStatus', 'type': 'str'}, 'avg_latency_in_ms': {'key': 'avgLatencyInMs', 'type': 'int'}, 'min_latency_in_ms': {'key': 'minLatencyInMs', 'type': 'int'}, 'max_latency_in_ms': {'key': 'maxLatencyInMs', 'type': 'int'}, 'probes_sent': {'key': 'probesSent', 'type': 'int'}, 'probes_failed': {'key': 'probesFailed', 'type': 'int'}, } <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(ConnectivityInformation, self).__init__() <NEW_LINE> self.hops = None <NEW_LINE> self.connection_status = None <NEW_LINE> self.avg_latency_in_ms = None <NEW_LINE> self.min_latency_in_ms = None <NEW_LINE> self.max_latency_in_ms = None <NEW_LINE> self.probes_sent = None <NEW_LINE> self.probes_failed = None
Information on the connectivity status. Variables are only populated by the server, and will be ignored when sending a request. :ivar hops: List of hops between the source and the destination. :vartype hops: list[~azure.mgmt.network.v2017_06_01.models.ConnectivityHop] :ivar connection_status: The connection status. Possible values include: 'Unknown', 'Connected', 'Disconnected', 'Degraded' :vartype connection_status: str or ~azure.mgmt.network.v2017_06_01.models.ConnectionStatus :ivar avg_latency_in_ms: Average latency in milliseconds. :vartype avg_latency_in_ms: int :ivar min_latency_in_ms: Minimum latency in milliseconds. :vartype min_latency_in_ms: int :ivar max_latency_in_ms: Maximum latency in milliseconds. :vartype max_latency_in_ms: int :ivar probes_sent: Total number of probes sent. :vartype probes_sent: int :ivar probes_failed: Number of failed probes. :vartype probes_failed: int
6259906b99cbb53fe68326f2
class Assignment(base.Assignment): <NEW_LINE> <INDENT> implements(ILatestSectionableNITFPortlet) <NEW_LINE> header = u"" <NEW_LINE> limit = 10 <NEW_LINE> pretty_date = True <NEW_LINE> filter_collection = None <NEW_LINE> def __init__(self, header=u"", limit=10, pretty_date=True, filter_collection=None): <NEW_LINE> <INDENT> self.header = header <NEW_LINE> self.limit = limit <NEW_LINE> self.pretty_date = pretty_date <NEW_LINE> self.filter_collection = filter_collection <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self): <NEW_LINE> <INDENT> return _(u"Latest Sectionable NITF")
Portlet assignment. This is what is actually managed through the portlets UI and associated with columns.
6259906b97e22403b383c718
class YubikeyFactor(SecondFactor, type='yubikey'): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.yubikey_client_id = kwargs.pop('yubikey_client_id', None) <NEW_LINE> self.yubikey_secret_key = kwargs.pop('yubikey_secret_key', None) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def args_from_app(cls, app): <NEW_LINE> <INDENT> return { 'yubikey_client_id': getattr(app, 'yubikey_client_id', None), 'yubikey_secret_key': getattr(app, 'yubikey_secret_key', None) } <NEW_LINE> <DEDENT> def is_configured(self): <NEW_LINE> <INDENT> return self.yubikey_client_id and self.yubikey_secret_key <NEW_LINE> <DEDENT> def is_valid(self, user_specific_config, factor): <NEW_LINE> <INDENT> return is_valid_yubikey( client_id=self.yubikey_client_id, secret_key=self.yubikey_secret_key, expected_yubikey_id=user_specific_config, yubikey=factor )
Implements a yubikey factor for the :class:`Auth` class.
6259906b66673b3332c31c09
class HexSnowflake(Model): <NEW_LINE> <INDENT> def __init__(self, width=50, height=50): <NEW_LINE> <INDENT> self.schedule = SimultaneousActivation(self) <NEW_LINE> self.grid = HexGrid(width, height, torus=True) <NEW_LINE> for (contents, x, y) in self.grid.coord_iter(): <NEW_LINE> <INDENT> cell = Cell((x, y), self) <NEW_LINE> self.grid.place_agent(cell, (x, y)) <NEW_LINE> self.schedule.add(cell) <NEW_LINE> <DEDENT> centerishCell = self.grid[width // 2][height // 2] <NEW_LINE> centerishCell.state = 1 <NEW_LINE> for a in centerishCell.neighbors: <NEW_LINE> <INDENT> a.isConsidered = True <NEW_LINE> <DEDENT> self.running = True <NEW_LINE> <DEDENT> def step(self): <NEW_LINE> <INDENT> self.schedule.step()
Represents the hex grid of cells. The grid is represented by a 2-dimensional array of cells with adjacency rules specific to hexagons.
6259906bfff4ab517ebcf026
class Command(BaseCommand): <NEW_LINE> <INDENT> def __copy__(self): <NEW_LINE> <INDENT> _ = Command(copy.copy(self._receiver), self._method) <NEW_LINE> _.append_arg(*self._args) <NEW_LINE> return _
An runnable/executable Command that acts as a prototype through the 'copy' python magic function. When a command instance is invoked with 'copy', the receiver is copied explicitly in a shallow way. The rest of the command arguments are assumed to be performance invariant (eg it is not expensive to copy the 'method' attribute, which is a string) and are handled automatically.
6259906b21bff66bcd724472
class DescribeLimitsInputSet(InputSet): <NEW_LINE> <INDENT> def set_AWSAccessKeyId(self, value): <NEW_LINE> <INDENT> super(DescribeLimitsInputSet, self)._set_input('AWSAccessKeyId', value) <NEW_LINE> <DEDENT> def set_AWSSecretKeyId(self, value): <NEW_LINE> <INDENT> super(DescribeLimitsInputSet, self)._set_input('AWSSecretKeyId', value) <NEW_LINE> <DEDENT> def set_UserRegion(self, value): <NEW_LINE> <INDENT> super(DescribeLimitsInputSet, self)._set_input('UserRegion', value)
An InputSet with methods appropriate for specifying the inputs to the DescribeLimits Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
6259906bcc0a2c111447c6d6
class V1PyTorchJobList(object): <NEW_LINE> <INDENT> openapi_types = { 'api_version': 'str', 'items': 'list[V1PyTorchJob]', 'kind': 'str', 'metadata': 'V1ListMeta' } <NEW_LINE> attribute_map = { 'api_version': 'apiVersion', 'items': 'items', 'kind': 'kind', 'metadata': 'metadata' } <NEW_LINE> def __init__(self, api_version=None, items=None, kind=None, metadata=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._api_version = None <NEW_LINE> self._items = None <NEW_LINE> self._kind = None <NEW_LINE> self._metadata = None <NEW_LINE> self.discriminator = None <NEW_LINE> if api_version is not None: <NEW_LINE> <INDENT> self.api_version = api_version <NEW_LINE> <DEDENT> self.items = items <NEW_LINE> if kind is not None: <NEW_LINE> <INDENT> self.kind = kind <NEW_LINE> <DEDENT> if metadata is not None: <NEW_LINE> <INDENT> self.metadata = metadata <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def api_version(self): <NEW_LINE> <INDENT> return self._api_version <NEW_LINE> <DEDENT> @api_version.setter <NEW_LINE> def api_version(self, api_version): <NEW_LINE> <INDENT> self._api_version = api_version <NEW_LINE> <DEDENT> @property <NEW_LINE> def items(self): <NEW_LINE> <INDENT> return self._items <NEW_LINE> <DEDENT> @items.setter <NEW_LINE> def items(self, items): <NEW_LINE> <INDENT> if self.local_vars_configuration.client_side_validation and items is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `items`, must not be `None`") <NEW_LINE> <DEDENT> self._items = items <NEW_LINE> <DEDENT> @property <NEW_LINE> def kind(self): <NEW_LINE> <INDENT> return self._kind <NEW_LINE> <DEDENT> @kind.setter <NEW_LINE> def kind(self, kind): <NEW_LINE> <INDENT> self._kind = kind <NEW_LINE> <DEDENT> @property <NEW_LINE> def metadata(self): <NEW_LINE> <INDENT> return self._metadata 
<NEW_LINE> <DEDENT> @metadata.setter <NEW_LINE> def metadata(self, metadata): <NEW_LINE> <INDENT> self._metadata = metadata <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V1PyTorchJobList): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V1PyTorchJobList): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
6259906b0a50d4780f7069c6
class HousesIndexHandler(BaseHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ret = self.redis.get("home_page_data") <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error(e) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if ret: <NEW_LINE> <INDENT> resp = '{"errno": "0", "errmsg": "数据返回成功", "houses": %s}' %ret <NEW_LINE> return self.write(resp) <NEW_LINE> <DEDENT> <DEDENT> if not ret: <NEW_LINE> <INDENT> sql = "select hi_house_id, hi_title, hi_index_image_url from ih_house_info order by hi_ctime DESC LIMIT %s" <NEW_LINE> try: <NEW_LINE> <INDENT> resp = self.db.query(sql, constants.HOUSE_INDEX_SHOW_COUNTS) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error(e) <NEW_LINE> return self.write({"errno": RET.DBERR, "errmsg": "数据库读取失败"}) <NEW_LINE> <DEDENT> houses = [] <NEW_LINE> if resp: <NEW_LINE> <INDENT> for l in resp: <NEW_LINE> <INDENT> if l["hi_index_image_url"]: <NEW_LINE> <INDENT> house = { "house_id": l["hi_house_id"], "title": l["hi_title"], "image_url": constants.QINIU_DOMAIN_PREFIX + l["hi_index_image_url"] } <NEW_LINE> <DEDENT> houses.append(house) <NEW_LINE> <DEDENT> <DEDENT> json_houses = json.dumps(houses) <NEW_LINE> try: <NEW_LINE> <INDENT> self.redis.setex("home_page_data", constants.HOUSE_INDEX_REDIS_EXPIRES, json_houses) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error(e) <NEW_LINE> <DEDENT> resp = '{"errno": 0, "errmsg": "OK", "houses": %s}' % json_houses <NEW_LINE> return self.write(resp)
获取房屋主页图片信息
6259906b32920d7e50bc7852
class SigGraInferNet_GCN(nn.Module): <NEW_LINE> <INDENT> def __init__(self,feature_input_size,feature_output_size,PPI_input_size,PPI_output_size,num_GCN,drop_prob): <NEW_LINE> <INDENT> super(SigGraInferNet_GCN, self).__init__() <NEW_LINE> self.PPIGE_GCN=PPIGE_GCN(PPI_input_size,PPI_output_size,num_GCN,drop_prob) <NEW_LINE> self.GGE=GGE(feature_input_size,feature_output_size,drop_prob) <NEW_LINE> self.LED=LED(feature_output_size) <NEW_LINE> <DEDENT> def forward(self,a,bio_a,A,b,bio_b,B): <NEW_LINE> <INDENT> G_a=self.PPIGE_GCN(bio_a,A) <NEW_LINE> G_b=self.PPIGE_GCN(bio_b,B) <NEW_LINE> h_a=self.GGE(a) <NEW_LINE> h_b=self.GGE(b) <NEW_LINE> e_a=GAGA(G_a,h_a) <NEW_LINE> e_b=GAGA(G_b,h_b) <NEW_LINE> e_a=torch.cat([h_a,e_a],dim=-1) <NEW_LINE> e_b=torch.cat([h_b,e_b],dim=-1) <NEW_LINE> predict=self.LED(e_a,e_b) <NEW_LINE> return F.log_softmax(predict,dim=-1)
SigGraInferNet_GCN with PPIGE-GCN Args: feature_input_size: the input dimension of genomic feature feature_output_size: the output dimension of genomic feature PPI_input_size: the input dimension of protein-protein database feature PPI_output_size:the output dimension of protein-protein database feature num_GCN: number of GCN layer in model drop_prob: dropout probability
6259906b5fc7496912d48e6e
class WTASelection: <NEW_LINE> <INDENT> def __init__(self, network, threshold): <NEW_LINE> <INDENT> self.network = network <NEW_LINE> self.threshold = threshold <NEW_LINE> <DEDENT> def process(self, inputs): <NEW_LINE> <INDENT> results = self.network.process(inputs) <NEW_LINE> winner_index = max(range(len(results)), key=results.__getitem__) <NEW_LINE> winner_result = results[winner_index] <NEW_LINE> if winner_result < self.threshold: <NEW_LINE> <INDENT> return ([], winner_result) <NEW_LINE> <DEDENT> if self.network.is_single_layer: <NEW_LINE> <INDENT> return ([self.network.neuron_specs[winner_index]], winner_result) <NEW_LINE> <DEDENT> output_index = [ i for i, spec in enumerate(self.network.neuron_specs) if spec.role is NeuronRole.OUTPUT ][winner_index] <NEW_LINE> neuron_chain = [None for _ in self.network.neuron_specs] <NEW_LINE> neuron_chain[output_index] = deepcopy(self.network.neuron_specs[output_index]) <NEW_LINE> chain_indices = {output_index} <NEW_LINE> for i in range(output_index - 1, -1, -1): <NEW_LINE> <INDENT> intersection = self.network.neuron_specs[i].links.intersection( chain_indices ) <NEW_LINE> if intersection: <NEW_LINE> <INDENT> neuron_chain[i] = deepcopy(self.network.neuron_specs[i]) <NEW_LINE> neuron_chain[i].links = intersection <NEW_LINE> chain_indices.add(i) <NEW_LINE> <DEDENT> <DEDENT> neuron_chain = [elem for elem in neuron_chain if elem is not None] <NEW_LINE> sorted_ci = sorted(chain_indices) <NEW_LINE> link_correction_map = { src_i: dst_i for dst_i, src_i in enumerate(sorted_ci) } <NEW_LINE> for elem in neuron_chain: <NEW_LINE> <INDENT> elem.links = {link_correction_map[link] for link in elem.links} <NEW_LINE> <DEDENT> return (neuron_chain, winner_result)
Selects a neuron chain which produced the strongest response, which has passed the threshold. The chain can be used to construct a new network.
6259906b67a9b606de5476a8
class Meta: <NEW_LINE> <INDENT> types = registry.settings.types
Can change over time.
6259906bbe8e80087fbc0898
class ContentStream(BaseMixin, db.Model): <NEW_LINE> <INDENT> __tablename__ = u'content_stream' <NEW_LINE> name = db.Column(db.String(STRING_LEN)) <NEW_LINE> uri = db.Column(db.String(200)) <NEW_LINE> description = db.Column(db.String(1000)) <NEW_LINE> ok_to_play = db.Column(db.Boolean) <NEW_LINE> created_by = db.Column(db.ForeignKey('user_user.id')) <NEW_LINE> date_created = db.Column(db.DateTime(timezone=True), server_default=func.now()) <NEW_LINE> updated_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) <NEW_LINE> deleted = db.Column(db.Boolean)
Definition of a stream
6259906b92d797404e389761
class CheckThreads(argparse.Action): <NEW_LINE> <INDENT> def __init__(self, option_strings, dest, nargs=None, **kwargs): <NEW_LINE> <INDENT> if nargs is not None: <NEW_LINE> <INDENT> raise ValueError('nargs not allowed for ThreadCheck') <NEW_LINE> <DEDENT> super(CheckThreads, self).__init__(option_strings, dest, **kwargs) <NEW_LINE> <DEDENT> def __call__(self, parser, namespace, values, option_string=None): <NEW_LINE> <INDENT> threads = values <NEW_LINE> try: <NEW_LINE> <INDENT> assert type(threads) is int <NEW_LINE> <DEDENT> except AssertionError: <NEW_LINE> <INDENT> raise TypeError('{0} is not an integer'.format(str(threads))) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> assert threads >= 1 <NEW_LINE> <DEDENT> except AssertionError: <NEW_LINE> <INDENT> raise ValueError('Must use at least one thread') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> assert threads <= cpu_count() <NEW_LINE> <DEDENT> except AssertionError: <NEW_LINE> <INDENT> raise ValueError('Cannot use more threads than available: {0}' .format(str(cpu_count()))) <NEW_LINE> <DEDENT> setattr(namespace, self.dest, threads)
Argparse Action that ensures number of threads requested is valid Example: .. code-block:: Python >>> from arandomness.argparse import CheckThreads >>> parser = argparse.ArgumentParser() >>> parser.add_argument('test', ... type=int, ... action=CheckThreads) >>> args = parser.parse_args([1]) >>> args.test 1
6259906b3cc13d1c6d466f52
class Judgment(SaveReversionMixin, TimestampMixin): <NEW_LINE> <INDENT> url = models.URLField( verbose_name=_("URL"), max_length=250, ) <NEW_LINE> actor = models.TextField( verbose_name=_("Actor"), null=False, ) <NEW_LINE> resume = models.TextField( verbose_name=_("Resumen"), null=False, ) <NEW_LINE> defendant = models.CharField( verbose_name=_("Demandado"), max_length=300, null=False, ) <NEW_LINE> court = models.CharField( verbose_name=_("Juzgado"), max_length=300, null=False, ) <NEW_LINE> state = models.CharField( verbose_name=_("Estado"), max_length=200, null=False, ) <NEW_LINE> case_file = models.TextField( verbose_name=_("Expediente"), max_length=200, null=False, ) <NEW_LINE> notifications = models.IntegerField( verbose_name=_("Notificaciones"), null=False, ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _("Lista de Items") <NEW_LINE> verbose_name_plural = _("Listas de items") <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"{self.resume}"
Judgment info.
6259906b6e29344779b01e61
class TwoLayerNet(object): <NEW_LINE> <INDENT> def __init__(self, input_dim=3*32*32, hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0): <NEW_LINE> <INDENT> self.params = {} <NEW_LINE> self.reg = reg <NEW_LINE> self.params['W1'] = np.random.normal(scale=weight_scale, size=(input_dim, hidden_dim)) <NEW_LINE> self.params['b1'] = np.zeros(hidden_dim) <NEW_LINE> self.params['W2'] = np.random.normal(scale=weight_scale, size=(hidden_dim, num_classes)) <NEW_LINE> self.params['b2'] = np.zeros(num_classes) <NEW_LINE> <DEDENT> def loss(self, X, y=None): <NEW_LINE> <INDENT> scores = None <NEW_LINE> out_h, cache_h = affine_relu_forward(X, self.params['W1'], self.params['b1']) <NEW_LINE> out_o, cache_o = affine_forward(out_h, self.params['W2'], self.params['b2']) <NEW_LINE> scores = out_o <NEW_LINE> if y is None: <NEW_LINE> <INDENT> return scores <NEW_LINE> <DEDENT> loss, grads = 0, {} <NEW_LINE> loss, dx_softmax = softmax_loss(out_o, y) <NEW_LINE> dx_o, dw_o, db_o = affine_backward(dout=dx_softmax, cache=cache_o) <NEW_LINE> dx_h, dw_h, db_h = affine_relu_backward(dout=dx_o, cache=cache_h) <NEW_LINE> loss += 0.5 * self.reg * (np.sum(self.params['W1'] ** 2) + np.sum(self.params['W2'] ** 2)) <NEW_LINE> grads['W1'] = dw_h + self.reg * self.params['W1'] <NEW_LINE> grads['b1'] = db_h <NEW_LINE> grads['W2'] = dw_o + self.reg * self.params['W2'] <NEW_LINE> grads['b2'] = db_o <NEW_LINE> return loss, grads
A two-layer fully-connected neural network with ReLU nonlinearity and softmax loss that uses a modular layer design. We assume an input dimension of D, a hidden dimension of H, and perform classification over C classes. The architecure should be affine - relu - affine - softmax. Note that this class does not implement gradient descent; instead, it will interact with a separate Solver object that is responsible for running optimization. The learnable parameters of the model are stored in the dictionary self.params that maps parameter names to numpy arrays.
6259906b8da39b475be049f8
class InjectShell(Callback): <NEW_LINE> <INDENT> def __init__(self, file='INJECT_SHELL.tmp', shell='ipython'): <NEW_LINE> <INDENT> self._file = file <NEW_LINE> assert shell in ['ipython', 'pdb'] <NEW_LINE> self._shell = shell <NEW_LINE> logger.info("Create a file '{}' to open {} shell.".format(file, shell)) <NEW_LINE> <DEDENT> def _trigger(self): <NEW_LINE> <INDENT> if os.path.isfile(self._file): <NEW_LINE> <INDENT> logger.info("File {} exists, entering shell.".format(self._file)) <NEW_LINE> self._inject() <NEW_LINE> <DEDENT> <DEDENT> def _inject(self): <NEW_LINE> <INDENT> trainer = self.trainer <NEW_LINE> if self._shell == 'ipython': <NEW_LINE> <INDENT> import IPython as IP <NEW_LINE> IP.embed() <NEW_LINE> <DEDENT> elif self._shell == 'pdb': <NEW_LINE> <INDENT> import pdb <NEW_LINE> pdb.set_trace() <NEW_LINE> <DEDENT> <DEDENT> def _after_train(self): <NEW_LINE> <INDENT> if os.path.isfile(self._file): <NEW_LINE> <INDENT> os.unlink(self._file)
Allow users to create a specific file as a signal to pause and iteratively debug the training. Once the :meth:`trigger` method is called, it detects whether the file exists, and opens an IPython/pdb shell if yes. In the shell, ``self`` is this callback, ``self.trainer`` is the trainer, and from that you can access everything else. Example: .. code-block:: none callbacks=[InjectShell('/path/to/pause-training.tmp'), ...] # the following command will pause the training when the epoch finishes: $ touch /path/to/pause-training.tmp
6259906b45492302aabfdce4
class Coordinate: <NEW_LINE> <INDENT> def __init__(self, pos: complex, **kwargs: Any) -> None: <NEW_LINE> <INDENT> self._pos: complex = pos <NEW_LINE> <DEDENT> @property <NEW_LINE> def pos(self) -> complex: <NEW_LINE> <INDENT> return self._pos <NEW_LINE> <DEDENT> @pos.setter <NEW_LINE> def pos(self, value: complex) -> None: <NEW_LINE> <INDENT> self._pos = value <NEW_LINE> <DEDENT> def calc_dist(self, other: "Coordinate") -> float: <NEW_LINE> <INDENT> dist: float = np.abs(self.pos - other.pos) <NEW_LINE> return dist <NEW_LINE> <DEDENT> def move_by_relative_coordinate(self, rel_pos: complex) -> None: <NEW_LINE> <INDENT> self.pos += rel_pos <NEW_LINE> <DEDENT> def move_by_relative_polar_coordinate(self, radius: float, angle: float) -> None: <NEW_LINE> <INDENT> rel_pos = cmath.rect(radius, angle) <NEW_LINE> self.move_by_relative_coordinate(rel_pos) <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return "{0}({1})".format(self.__class__.__name__, self.pos)
Base class for a coordinate in a 2D grid. A Coordinate object knows its location in the grid (represented as a complex number) and how to calculate the distance from it to another location.
6259906b8a43f66fc4bf39a0
class BridgeLibTest(base.BaseTestCase): <NEW_LINE> <INDENT> _NAMESPACE = 'test-namespace' <NEW_LINE> _BR_NAME = 'test-br' <NEW_LINE> _IF_NAME = 'test-if' <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super(BridgeLibTest, self).setUp() <NEW_LINE> mock.patch.object(netutils, 'is_ipv6_enabled', return_value=True).start() <NEW_LINE> ip_wrapper = mock.patch('neutron.agent.linux.ip_lib.IPWrapper').start() <NEW_LINE> self.execute = ip_wrapper.return_value.netns.execute <NEW_LINE> self.create_p = mock.patch.object(priv_lib, 'create_interface') <NEW_LINE> self.create = self.create_p.start() <NEW_LINE> self.delete_p = mock.patch.object(priv_lib, 'delete_interface') <NEW_LINE> self.delete = self.delete_p.start() <NEW_LINE> <DEDENT> def _verify_bridge_mock(self, cmd): <NEW_LINE> <INDENT> self.execute.assert_called_once_with(cmd, run_as_root=True) <NEW_LINE> self.execute.reset_mock() <NEW_LINE> <DEDENT> def _verify_bridge_sysctl_mock(self, cmd): <NEW_LINE> <INDENT> self.execute.assert_called_once_with(cmd, run_as_root=True, log_fail_as_error=True) <NEW_LINE> self.execute.reset_mock() <NEW_LINE> <DEDENT> def test_is_bridged_interface(self): <NEW_LINE> <INDENT> exists = lambda path: path == "/sys/class/net/tapOK/brport" <NEW_LINE> with mock.patch('os.path.exists', side_effect=exists): <NEW_LINE> <INDENT> self.assertTrue(bridge_lib.is_bridged_interface("tapOK")) <NEW_LINE> self.assertFalse(bridge_lib.is_bridged_interface("tapKO")) <NEW_LINE> <DEDENT> <DEDENT> def test_get_interface_bridge(self): <NEW_LINE> <INDENT> with mock.patch('os.readlink', side_effect=["prefix/br0", OSError()]): <NEW_LINE> <INDENT> br = bridge_lib.BridgeDevice.get_interface_bridge('tap0') <NEW_LINE> self.assertIsInstance(br, bridge_lib.BridgeDevice) <NEW_LINE> self.assertEqual("br0", br.name) <NEW_LINE> br = bridge_lib.BridgeDevice.get_interface_bridge('tap0') <NEW_LINE> self.assertIsNone(br) <NEW_LINE> <DEDENT> <DEDENT> def test_addbr_exists(self): <NEW_LINE> <INDENT> self.create.side_effect = 
priv_lib.InterfaceAlreadyExists <NEW_LINE> bridge_lib.BridgeDevice.addbr(self._BR_NAME) <NEW_LINE> bridge_lib.BridgeDevice.addbr(self._BR_NAME) <NEW_LINE> <DEDENT> def test_owns_interface(self): <NEW_LINE> <INDENT> br = bridge_lib.BridgeDevice('br-int') <NEW_LINE> exists = lambda path: path == "/sys/class/net/br-int/brif/abc" <NEW_LINE> with mock.patch('os.path.exists', side_effect=exists): <NEW_LINE> <INDENT> self.assertTrue(br.owns_interface("abc")) <NEW_LINE> self.assertFalse(br.owns_interface("def")) <NEW_LINE> <DEDENT> <DEDENT> def test_get_interfaces(self): <NEW_LINE> <INDENT> br = bridge_lib.BridgeDevice('br-int') <NEW_LINE> interfaces = ["tap1", "tap2"] <NEW_LINE> with mock.patch('os.listdir', side_effect=[interfaces, OSError()]): <NEW_LINE> <INDENT> self.assertEqual(interfaces, br.get_interfaces()) <NEW_LINE> self.assertEqual([], br.get_interfaces())
A test suite to exercise the bridge libraries
6259906bf548e778e596cd9a
class TestOperators(unittest.TestCase): <NEW_LINE> <INDENT> def runSingleParticleBasis(self): <NEW_LINE> <INDENT> basis = SingleParticleBasis([['u', 'd'], [0, 1]]) <NEW_LINE> self.assertTrue(basis.getStateAlgebraically(13) == 'c^(\'u\', 0) c^(\'d\', 0) c^(\'d\', 1)') <NEW_LINE> self.assertTrue(basis.getFockspaceNr((1,0,1,1)) == 13) <NEW_LINE> self.assertTrue(basis.getOccupationRep(13) == '1011') <NEW_LINE> self.assertTrue(basis.getFockspaceNr((0,1,0,0)) == 2) <NEW_LINE> self.assertTrue(basis.getSingleParticleStateNr('u', 1) == 1) <NEW_LINE> self.assertTrue(basis.getOccupationRep(2) == '0100') <NEW_LINE> for sps, fockNr in zip(basis.orderedSingleParticleStates, [1,2,4,8]): <NEW_LINE> <INDENT> self.assertTrue(basis.getFockspaceNr(singleParticleState = sps) == fockNr) <NEW_LINE> <DEDENT> <DEDENT> def runAnnihilationOperator(self): <NEW_LINE> <INDENT> pass
def __init__(self, *args, **kwargs): super(TestOperators, self).__init__(*args, **kwargs)
6259906b66673b3332c31c0b
class ThreadPool: <NEW_LINE> <INDENT> def __init__(self,num_workers,q_size=0,resq_size=0,poll_timeout=5): <NEW_LINE> <INDENT> self._requestQueue = queue.Queue() <NEW_LINE> self._resultQueue = queue.Queue() <NEW_LINE> self.workers = [] <NEW_LINE> self.dismissedWorkers = [] <NEW_LINE> self.workRequests = {} <NEW_LINE> self.createWorkers(num_workers,poll_timeout) <NEW_LINE> <DEDENT> def createWorkers(self,num_workers,poll_timeout=5): <NEW_LINE> <INDENT> for i in range(num_workers): <NEW_LINE> <INDENT> self.workers.append(WorkerThread(self._requestQueue,self._resultQueue,poll_timeout=poll_timeout)) <NEW_LINE> <DEDENT> <DEDENT> def dismissWorkers(self,num_workers,do_join=False): <NEW_LINE> <INDENT> dismiss_list = [] <NEW_LINE> for i in range(min(num_workers,len(self.workers))): <NEW_LINE> <INDENT> worker = self.workers.pop() <NEW_LINE> worker.dismiss() <NEW_LINE> dismiss_list.append(worker) <NEW_LINE> <DEDENT> if do_join : <NEW_LINE> <INDENT> for worker in dismiss_list: <NEW_LINE> <INDENT> worker.join() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.dismissedWorkers.extend(dismiss_list) <NEW_LINE> <DEDENT> <DEDENT> def joinAllDismissedWorkers(self): <NEW_LINE> <INDENT> for worker in self.dismissedWorkers: <NEW_LINE> <INDENT> worker.join() <NEW_LINE> <DEDENT> self.dismissedWorkers = [] <NEW_LINE> <DEDENT> def putRequest(self,request ,block=True,timeout=None): <NEW_LINE> <INDENT> assert isinstance(request,WorkRequest) <NEW_LINE> assert not getattr(request,'exception',None) <NEW_LINE> self._requestQueue.put(request, block, timeout) <NEW_LINE> self.workRequests[request.requestID] = request <NEW_LINE> <DEDENT> def poll(self,block = False): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> if not self.workRequests: <NEW_LINE> <INDENT> raise NoResultsPending <NEW_LINE> <DEDENT> elif block and not self.workers: <NEW_LINE> <INDENT> raise NoWorkersAvailable <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> request , result = self._resultQueue.get(block=block) <NEW_LINE> 
if request.exception and request.exc_callback: <NEW_LINE> <INDENT> request.exc_callback(request,result) <NEW_LINE> <DEDENT> if request.callback and not (request.exception and request.exc_callback): <NEW_LINE> <INDENT> request.callback(request,result) <NEW_LINE> <DEDENT> del self.workRequests[request.requestID] <NEW_LINE> <DEDENT> except queue.Empty as e: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def wait(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.poll(True) <NEW_LINE> <DEDENT> except NoResultsPending: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def workersize(self): <NEW_LINE> <INDENT> return len(self.workers) <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.dismissWorkers(self.workersize(),True) <NEW_LINE> self.joinAllDismissedWorkers()
@param num_workers:初始化的线程数量 @param q_size,resq_size: requestQueue和result队列的初始大小 @param poll_timeout: 设置工作线程WorkerThread的timeout,也就是等待requestQueue的timeout
6259906b44b2445a339b7566
class SAPHDBPart(PacketNoPadded): <NEW_LINE> <INDENT> name = "SAP HANA SQL Command Network Protocol Part" <NEW_LINE> fields_desc = [ EnumField("partkind", 0, hdb_partkind_values, fmt="<b"), LESignedByteField("partattributes", 0), FieldLenField("argumentcount", None, count_of="buffer", fmt="<h"), LESignedIntField("bigargumentcount", 0), FieldLenField("bufferlength", None, length_of="buffer", fmt="<i"), LESignedIntField("buffersize", 2**17 - 32 - 24), PadField(PacketListField("buffer", [], next_cls_cb=saphdb_determine_part_class, count_from=lambda x: x.argumentcount, length_from=lambda x: x.bufferlength), 8), ]
SAP HANA SQL Command Network Protocol Part This packet represents a part within a HDB packet. The part header is comprised of 16 bytes.
6259906b8e7ae83300eea89d
class DatabaseAccountPatchParameters(Model): <NEW_LINE> <INDENT> _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, 'capabilities': {'key': 'properties.capabilities', 'type': '[Capability]'}, } <NEW_LINE> def __init__(self, tags=None, capabilities=None): <NEW_LINE> <INDENT> super(DatabaseAccountPatchParameters, self).__init__() <NEW_LINE> self.tags = tags <NEW_LINE> self.capabilities = capabilities
Parameters for patching Azure Cosmos DB database account properties. :param tags: :type tags: dict[str, str] :param capabilities: List of Cosmos DB capabilities for the account :type capabilities: list[~azure.mgmt.cosmosdb.models.Capability]
6259906b4a966d76dd5f06f8
class UserEditForm(FlaskForm): <NEW_LINE> <INDENT> username = StringField('Username', validators=[DataRequired()]) <NEW_LINE> email = StringField('E-mail', validators=[DataRequired(), Email()]) <NEW_LINE> image_url = StringField('(Optional) Image URL') <NEW_LINE> header_image_url = StringField('(Optional) Header Image URL') <NEW_LINE> bio = TextAreaField('(Optional) Bio') <NEW_LINE> password = PasswordField('Password', validators=[DataRequired(), Length(min=6)])
Form for editing users.
6259906b0a50d4780f7069c7
class FakeConfig(object): <NEW_LINE> <INDENT> def IsCluster(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def GetNodeList(self): <NEW_LINE> <INDENT> return ["a", "b", "c"] <NEW_LINE> <DEDENT> def GetRsaHostKey(self): <NEW_LINE> <INDENT> return FAKE_CLUSTER_KEY <NEW_LINE> <DEDENT> def GetDsaHostKey(self): <NEW_LINE> <INDENT> return FAKE_CLUSTER_KEY <NEW_LINE> <DEDENT> def GetClusterName(self): <NEW_LINE> <INDENT> return "test.cluster" <NEW_LINE> <DEDENT> def GetMasterNode(self): <NEW_LINE> <INDENT> return "a" <NEW_LINE> <DEDENT> def GetMasterNodeName(self): <NEW_LINE> <INDENT> return netutils.Hostname.GetSysName() <NEW_LINE> <DEDENT> def GetDefaultIAllocator(self): <NEW_LINE> <INDENT> return "testallocator" <NEW_LINE> <DEDENT> def GetNodeName(self, node_uuid): <NEW_LINE> <INDENT> if node_uuid in self.GetNodeList(): <NEW_LINE> <INDENT> return "node_%s.example.com" % (node_uuid,) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def GetNodeNames(self, node_uuids): <NEW_LINE> <INDENT> return map(self.GetNodeName, node_uuids)
Fake configuration object
6259906b3346ee7daa338265
class Aggregation(object): <NEW_LINE> <INDENT> def __init__(self, agg=None): <NEW_LINE> <INDENT> if hasattr(agg, 'items'): <NEW_LINE> <INDENT> self.__dict__.update(agg) <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> return self.get(name, None) <NEW_LINE> <DEDENT> def __setattr__(self, name, value): <NEW_LINE> <INDENT> self.__dict__[name] = value <NEW_LINE> <DEDENT> def get_buckets(self): <NEW_LINE> <INDENT> if 'buckets' in self.__dict__: <NEW_LINE> <INDENT> return self.__dict__['buckets'] <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def get_docs_in_buckets(self, obj_document=Document): <NEW_LINE> <INDENT> buckets = self.get_buckets() <NEW_LINE> doc_buckets = {} <NEW_LINE> for bucket in buckets: <NEW_LINE> <INDENT> for k in bucket: <NEW_LINE> <INDENT> if k != 'key' and hasattr(bucket[k], '__contains__') and 'hits' in bucket[k]: <NEW_LINE> <INDENT> hits = bucket[k]['hits']['hits'] <NEW_LINE> docs = [obj_document(hit) for hit in hits] <NEW_LINE> doc_buckets[bucket['key']] = { 'docs': docs, 'doc_count': bucket['doc_count'] } <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return doc_buckets <NEW_LINE> <DEDENT> def get_hits(self): <NEW_LINE> <INDENT> return self.__dict__['hits']['hits'] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def build_aggs(cls, json_response): <NEW_LINE> <INDENT> aggs = None <NEW_LINE> if 'aggregations' in json_response: <NEW_LINE> <INDENT> aggs_list = json_response['aggregations'] <NEW_LINE> for agg_name in aggs_list: <NEW_LINE> <INDENT> if aggs is None: <NEW_LINE> <INDENT> aggs = {} <NEW_LINE> <DEDENT> aggs[agg_name] = Aggregation(aggs_list[agg_name]) <NEW_LINE> <DEDENT> <DEDENT> return aggs
Generic object to hold Elastic aggregation.
6259906baad79263cf42ffc3
class AbstractAuthenticator: <NEW_LINE> <INDENT> def __init__(self, token_type): <NEW_LINE> <INDENT> self.access_token = None <NEW_LINE> self.token_type = token_type <NEW_LINE> <DEDENT> def has_token(self): <NEW_LINE> <INDENT> return self.access_token != None <NEW_LINE> <DEDENT> def get_websocket_auth_query(self): <NEW_LINE> <INDENT> assert self.has_token() <NEW_LINE> return 'token={}&tokenType={}'.format(self.access_token, self.token_type)
Abstract base class for managing a user's auth token.
6259906ba17c0f6771d5d7af
class LazyTrello(object): <NEW_LINE> <INDENT> __metaclass__ = TrelloMeta <NEW_LINE> @property <NEW_LINE> def _prefix(self): <NEW_LINE> <INDENT> raise NotImplementedError("LazyTrello subclasses MUST define a _prefix") <NEW_LINE> <DEDENT> def __init__(self, conn, obj_id, data=None): <NEW_LINE> <INDENT> self._id = obj_id <NEW_LINE> self._conn = conn <NEW_LINE> self._path = self._prefix + obj_id <NEW_LINE> if data: <NEW_LINE> <INDENT> self._data = data <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> if attr == '_data': <NEW_LINE> <INDENT> if not '_data' in self.__dict__: <NEW_LINE> <INDENT> self._data = json.loads(self._conn.get(self._path)) <NEW_LINE> <DEDENT> return self._data <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError("%r object has no attribute %r" % (type(self).__name__, attr)) <NEW_LINE> <DEDENT> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> tmpl = u'<%(cls)s: %(name_or_id)s>' <NEW_LINE> if 'name' in self._data: <NEW_LINE> <INDENT> return tmpl % {'cls': self.__class__.__name__, 'name_or_id': self._data['name']} <NEW_LINE> <DEDENT> return tmpl % {'cls': self.__class__.__name__, 'name_or_id': self._id} <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.__unicode__()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self.__unicode__()) <NEW_LINE> <DEDENT> def reload(self): <NEW_LINE> <INDENT> self.__dict__.pop("_data", None)
Parent class for Trello objects (cards, lists, boards, members, etc). This should always be subclassed, never used directly.
6259906bd486a94d0ba2d7cd
class CollectionListResource(BaseResource): <NEW_LINE> <INDENT> path = ['collections'] <NEW_LINE> methods = { 'find': FindListResourceMethod, 'count': CountListResourceMethod, }
Many collections manupulation object
6259906be76e3b2f99fda20f
class VeranstaltungAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> fieldsets = [ ('Stammdaten', {'fields': ['typ', 'name', 'semester', 'status', 'lv_nr', 'grundstudium', 'evaluieren', 'veranstalter', 'link_veranstalter', ]}), ('Bestellung', {'fields': ['sprache', 'anzahl', 'verantwortlich', 'ergebnis_empfaenger', 'primaerdozent', 'auswertungstermin', 'freiefrage1', 'freiefrage2', 'kleingruppen', ]}), ] <NEW_LINE> list_display = ('typ', 'name', 'semester', 'grundstudium', 'evaluieren', 'anzahl', 'sprache', 'status', 'veranstalter_list') <NEW_LINE> list_display_links = ['name'] <NEW_LINE> list_filter = ('typ', 'semester', 'status', 'grundstudium', 'evaluieren', 'sprache') <NEW_LINE> search_fields = ['name'] <NEW_LINE> filter_horizontal = ('veranstalter', 'ergebnis_empfaenger') <NEW_LINE> readonly_fields = ('link_veranstalter',) <NEW_LINE> inlines = [LogInline, ] <NEW_LINE> def save_model(self, request, obj, form, change): <NEW_LINE> <INDENT> super(VeranstaltungAdmin, self).save_model(request, obj, form, change) <NEW_LINE> for changed_att in form.changed_data: <NEW_LINE> <INDENT> if changed_att == "status": <NEW_LINE> <INDENT> obj.log(request.user) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> class StatusAendernForm(forms.Form): <NEW_LINE> <INDENT> _selected_action = forms.CharField(widget=forms.MultipleHiddenInput) <NEW_LINE> status = forms.ChoiceField(choices=Veranstaltung.STATUS_CHOICES) <NEW_LINE> <DEDENT> def status_aendern_action(self, request, queryset): <NEW_LINE> <INDENT> form = None <NEW_LINE> if 'apply' in request.POST: <NEW_LINE> <INDENT> form = self.StatusAendernForm(request.POST) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> status = form.cleaned_data['status'] <NEW_LINE> queryset.update(status=status) <NEW_LINE> for veranstaltung in queryset: <NEW_LINE> <INDENT> veranstaltung.log(request.user) <NEW_LINE> <DEDENT> self.message_user(request, "Status erfolgreich geändert.") <NEW_LINE> return HttpResponseRedirect(request.get_full_path()) <NEW_LINE> <DEDENT> 
<DEDENT> if not form: <NEW_LINE> <INDENT> form = self.StatusAendernForm(initial={'_selected_action': request.POST.getlist(admin.ACTION_CHECKBOX_NAME)}) <NEW_LINE> <DEDENT> return render(request, 'admin/status_aendern.html', {'veranstaltungen': queryset, 'status': form, }) <NEW_LINE> <DEDENT> status_aendern_action.short_description = "Ändere den Status einer Veranstaltung" <NEW_LINE> actions = [status_aendern_action]
Admin View für Veranstaltung
6259906ba8370b77170f1bd4
class ArduinoUno(_ArduinoABC): <NEW_LINE> <INDENT> CLASS = pyfirmata.Arduino <NEW_LINE> IDS = [(0x2341, 0x0043), (0x2341, 0x0001), (0x2A03, 0x0043), (0x2341, 0x0243)] <NEW_LINE> def __init__(self, serial_number): <NEW_LINE> <INDENT> _ArduinoABC.__init__(self, serial_number)
Object wrapper for an Arduino Uno running StandardFirmata. Args: serial_number: String of the USB iSerialNumber associated with the device.
6259906b92d797404e389762
class PdfrateProxy(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.submit_url = config.get('pdfrateproxy', 'submit_url') <NEW_LINE> self.report_url = config.get('pdfrateproxy', 'report_url') <NEW_LINE> self.metadata_url = config.get('pdfrateproxy', 'metadata_url') <NEW_LINE> <DEDENT> def _invoke_curl(self, url, infile=None): <NEW_LINE> <INDENT> args = ['curl'] <NEW_LINE> if infile is not None: <NEW_LINE> <INDENT> args.extend(['--form', 'filesubmission=@{file}'.format(file=infile.encode('utf-8'))]) <NEW_LINE> <DEDENT> args.extend([url]) <NEW_LINE> sys.stderr.write("run$ {args}\n".format(args=' '.join(args))) <NEW_LINE> curl = subprocess.Popen(args, stdout=subprocess.PIPE) <NEW_LINE> (stdoutdata, stderrdata) = curl.communicate() <NEW_LINE> if curl.returncode != 0 and stderrdata is not None and len(stderrdata) > 0: <NEW_LINE> <INDENT> raise PdfrateProxyError(stderrdata) <NEW_LINE> <DEDENT> return stdoutdata <NEW_LINE> <DEDENT> def submit_file(self, infile): <NEW_LINE> <INDENT> reply = json.loads(self._invoke_curl(self.submit_url, infile)) <NEW_LINE> reply['status'] = 'success' if reply['fileinfo']['size'] > 0 else 'failsubmit' <NEW_LINE> return reply <NEW_LINE> <DEDENT> def get_report(self, search_hash): <NEW_LINE> <INDENT> reply = json.loads(self._invoke_curl(self.report_url.format(search_hash=search_hash))) <NEW_LINE> reply['status'] = 'success' if reply['fileinfo']['size'] > 0 else 'noreport' <NEW_LINE> return reply <NEW_LINE> <DEDENT> def get_metadata(self, search_hash): <NEW_LINE> <INDENT> reply = {'metadata':self._invoke_curl(self.metadata_url.format(search_hash=search_hash)).strip()} <NEW_LINE> reply['status'] = 'success' if len(reply['metadata']) > 0 else 'nometadata' <NEW_LINE> return reply
A class representing a local proxy object for PDFrate. Submit your PDFrate queries to this class and it will run them against PDFrate. It's not designed be used directly, but by a query scheduler. For batch or manual submissions, use the PdfrateQueryHandler class.
6259906b8da39b475be049fa
class Class(Base): <NEW_LINE> <INDENT> __tablename__ = 'class' <NEW_LINE> class_id = Column(Integer, primary_key=True) <NEW_LINE> name = Column(String) <NEW_LINE> description = Column(Text) <NEW_LINE> club_id = Column(Integer, ForeignKey('club.club_id')) <NEW_LINE> club = relationship("Club", back_populates="classes") <NEW_LINE> class_members = relationship("Member", secondary=class_members_table)
Represents a class offered by the club.
6259906b7047854f46340bc5
class VDSR3D(BaseNet): <NEW_LINE> <INDENT> def __init__(self, num_classes, w_initializer=None, w_regularizer=None, b_initializer=None, b_regularizer=None, acti_func='relu', name='VDSR3D'): <NEW_LINE> <INDENT> super(VDSR3D, self).__init__( num_classes=num_classes, w_initializer=w_initializer, w_regularizer=w_regularizer, b_initializer=b_initializer, b_regularizer=b_regularizer, acti_func=acti_func, name=name) <NEW_LINE> self.layers = [ {'name': 'conv_0', 'n_features': 64, 'kernel_size': 3}, {'name': 'conv_1', 'n_features': 64, 'kernel_size': 3, 'repeat':18}, {'name': 'conv_2', 'n_features': num_classes, 'kernel_size': 3}] <NEW_LINE> <DEDENT> def layer_op(self, images, is_training=True, layer_id=-1, **unused_kwargs): <NEW_LINE> <INDENT> assert (layer_util.check_spatial_dims( images, lambda x: x % 8 == 0)) <NEW_LINE> layer_instances = [] <NEW_LINE> input_tensor_res = images <NEW_LINE> params = self.layers[0] <NEW_LINE> first_conv_layer = ConvolutionalLayer( n_output_chns=params['n_features'], kernel_size=params['kernel_size'], with_bias=True, with_bn=False, acti_func=self.acti_func, w_initializer=self.initializers['w'], w_regularizer=self.regularizers['w'], name=params['name']) <NEW_LINE> flow = first_conv_layer(images, is_training) <NEW_LINE> layer_instances.append((first_conv_layer, flow)) <NEW_LINE> params = self.layers[1] <NEW_LINE> for j in range(params['repeat']): <NEW_LINE> <INDENT> conv_layer = ConvolutionalLayer( n_output_chns=params['n_features'], kernel_size=params['kernel_size'], with_bias=True, with_bn=False, acti_func=self.acti_func, w_initializer=self.initializers['w'], w_regularizer=self.regularizers['w'], name='%s_%d' % (params['name'], j)) <NEW_LINE> flow = conv_layer(flow, is_training) <NEW_LINE> layer_instances.append((conv_layer, flow)) <NEW_LINE> <DEDENT> params = self.layers[2] <NEW_LINE> fc_layer = ConvolutionalLayer( n_output_chns=params['n_features'], kernel_size=params['kernel_size'], with_bias=True, with_bn=False, acti_func=None, 
w_initializer=self.initializers['w'], w_regularizer=self.regularizers['w'], name=params['name']) <NEW_LINE> flow = fc_layer(flow, is_training) <NEW_LINE> layer_instances.append((fc_layer, flow)) <NEW_LINE> output_tensor_res = ElementwiseLayer('SUM')(input_tensor_res, flow) <NEW_LINE> if is_training: <NEW_LINE> <INDENT> self._print(layer_instances) <NEW_LINE> return output_tensor_res <NEW_LINE> <DEDENT> return output_tensor_res <NEW_LINE> <DEDENT> def _print(self, list_of_layers): <NEW_LINE> <INDENT> for (op, _) in list_of_layers: <NEW_LINE> <INDENT> print(op)
Implementation of VDSR [1] with 3D Kernel Spatial Support, based on NiftyNet [2]. This implementation utilizes highres3dnet.py [3] as template. [1] J. Kim et al., "Accurate Image Super-Resolution Using Very Deep Convolutional Networks". In 2016 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), pages 1646-1654, June 2016. [2] https://github.com/NifTK/NiftyNet [3] https://github.com/NifTK/NiftyNet/blob/dev/niftynet/network/highres3dnet.py
6259906b2c8b7c6e89bd4ff4
class CreateView(generics.ListCreateAPIView): <NEW_LINE> <INDENT> queryset = Bucketlist.objects.all() <NEW_LINE> serializer_class = BucketlistSerializer <NEW_LINE> def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save()
get list of bucketlists and post a bucketlist
6259906b5166f23b2e244be1
class CharacteristicUserDescriptionDescriptor(Descriptor): <NEW_LINE> <INDENT> def __init__(self, bus, index, characteristic, description): <NEW_LINE> <INDENT> self.writable = False <NEW_LINE> self.value = bytes(description, encoding='utf-8') <NEW_LINE> Descriptor.__init__( self, bus, index, CHAR_USER_DESCRIPTION_DESC_UUID, ['read'], characteristic) <NEW_LINE> <DEDENT> def ReadValue(self, options): <NEW_LINE> <INDENT> return self.value <NEW_LINE> <DEDENT> def WriteValue(self, value, options): <NEW_LINE> <INDENT> raise NotPermittedException()
Read only User descriptions, useful while using gatt tools on remote side.
6259906b32920d7e50bc7855
class SecurityProfile(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'uefi_settings': {'key': 'uefiSettings', 'type': 'UefiSettings'}, 'encryption_at_host': {'key': 'encryptionAtHost', 'type': 'bool'}, 'security_type': {'key': 'securityType', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, uefi_settings: Optional["UefiSettings"] = None, encryption_at_host: Optional[bool] = None, security_type: Optional[Union[str, "SecurityTypes"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(SecurityProfile, self).__init__(**kwargs) <NEW_LINE> self.uefi_settings = uefi_settings <NEW_LINE> self.encryption_at_host = encryption_at_host <NEW_LINE> self.security_type = security_type
Specifies the Security profile settings for the virtual machine or virtual machine scale set. :ivar uefi_settings: Specifies the security settings like secure boot and vTPM used while creating the virtual machine. :code:`<br>`:code:`<br>`Minimum api-version: 2020-12-01. :vartype uefi_settings: ~azure.mgmt.compute.v2021_04_01.models.UefiSettings :ivar encryption_at_host: This property can be used by user in the request to enable or disable the Host Encryption for the virtual machine or virtual machine scale set. This will enable the encryption for all the disks including Resource/Temp disk at host itself. :code:`<br>`:code:`<br>` Default: The Encryption at host will be disabled unless this property is set to true for the resource. :vartype encryption_at_host: bool :ivar security_type: Specifies the SecurityType of the virtual machine. It is set as TrustedLaunch to enable UefiSettings. :code:`<br>`:code:`<br>` Default: UefiSettings will not be enabled unless this property is set as TrustedLaunch. Possible values include: "TrustedLaunch". :vartype security_type: str or ~azure.mgmt.compute.v2021_04_01.models.SecurityTypes
6259906bd486a94d0ba2d7ce
class TestGenericThrottlePolicy4(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testGenericThrottlePolicy4(self): <NEW_LINE> <INDENT> pass
GenericThrottlePolicy4 unit test stubs
6259906b71ff763f4b5e8fb6
class SafePickler(pickle.Unpickler): <NEW_LINE> <INDENT> def find_class(self, module, name): <NEW_LINE> <INDENT> global sk_whitelist <NEW_LINE> if not sk_whitelist: <NEW_LINE> <INDENT> whitelist_file = os.path.join(os.path.dirname(__file__), 'sk_whitelist.json') <NEW_LINE> with open(whitelist_file, 'r') as f: <NEW_LINE> <INDENT> sk_whitelist = json.load(f) <NEW_LINE> <DEDENT> <DEDENT> bad_names = ('and', 'as', 'assert', 'break', 'class', 'continue', 'def', 'del', 'elif', 'else', 'except', 'exec', 'finally', 'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda', 'not', 'or', 'pass', 'print', 'raise', 'return', 'try', 'system', 'while', 'with', 'True', 'False', 'None', 'eval', 'execfile', '__import__', '__package__', '__subclasses__', '__bases__', '__globals__', '__code__', '__closure__', '__func__', '__self__', '__module__', '__dict__', '__class__', '__call__', '__get__', '__getattribute__', '__subclasshook__', '__new__', '__init__', 'func_globals', 'func_code', 'func_closure', 'im_class', 'im_func', 'im_self', 'gi_code', 'gi_frame', '__asteval__', 'f_locals', '__mro__') <NEW_LINE> good_names = ['copy_reg._reconstructor', '__builtin__.object'] <NEW_LINE> if re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', name): <NEW_LINE> <INDENT> fullname = module + '.' + name <NEW_LINE> if (fullname in good_names) or ( ( module.startswith('sklearn.') or module.startswith('xgboost.') or module.startswith('skrebate.') or module.startswith('imblearn') or module.startswith('numpy.') or module == 'numpy' ) and (name not in bad_names) ): <NEW_LINE> <INDENT> if fullname not in sk_whitelist['SK_NAMES'] + sk_whitelist['SKR_NAMES'] + sk_whitelist['XGB_NAMES'] + sk_whitelist['NUMPY_NAMES'] + sk_whitelist['IMBLEARN_NAMES'] + good_names: <NEW_LINE> <INDENT> print("Warning: global %s is not in pickler whitelist yet and will loss support soon. 
Contact tool author or leave a message at github.com" % fullname) <NEW_LINE> <DEDENT> mod = sys.modules[module] <NEW_LINE> return getattr(mod, name) <NEW_LINE> <DEDENT> <DEDENT> raise pickle.UnpicklingError("global '%s' is forbidden" % fullname)
Used to safely deserialize scikit-learn model objects serialized by cPickle.dump Usage: eg.: SafePickler.load(pickled_file_object)
6259906b3d592f4c4edbc6ef
@register_exporter(name="tflite_dynamic_range") <NEW_LINE> class TFLiteDynamicRangeExporter(TFLiteExporter): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__(quantization="dynamic_range")
TensorFlow Lite exporter with dynamic range quantization.
6259906b21bff66bcd724476
class BoxWithOne(gegede.builder.Builder): <NEW_LINE> <INDENT> defaults = dict( material = 'Air', dim = (Q('1m'),Q('1m'),Q('1m')), off = (Q('0m'),Q('0m'),Q('0m')), sbind = 0, volind = 0, ) <NEW_LINE> def construct(self, geom): <NEW_LINE> <INDENT> dim = [0.5*d for d in self.dim] <NEW_LINE> shape = geom.shapes.Box(self.name, *dim) <NEW_LINE> pos = geom.structure.Position(None, *self.off) <NEW_LINE> child = self.get_builder(self.sbind).get_volume(self.volind) <NEW_LINE> place = geom.structure.Placement(None, volume = child, pos = pos) <NEW_LINE> vol = geom.structure.Volume('vol'+self.name, material = self.material, shape=shape, placements = [place]) <NEW_LINE> self.add_volume(vol) <NEW_LINE> return
Build a simple box that holds one child taken from a particular builder.
6259906b56ac1b37e63038ea
class HTTPError(OpenTDBException): <NEW_LINE> <INDENT> pass
The HTTP request returned an unsuccessful status code.
6259906b8e7ae83300eea89f
class FesB2304(FesRequest): <NEW_LINE> <INDENT> def __init__(self, settle_date, org_nick_name): <NEW_LINE> <INDENT> super().__init__("2304") <NEW_LINE> self.data = { "trans_code": "2304", "settle_date": settle_date, "org_nick_name": org_nick_name }
联机交易生成清算数据请求类
6259906b44b2445a339b7567
class TestHash: <NEW_LINE> <INDENT> def test_hash_uses_the_entity_id_and_model_name(self) -> None: <NEW_LINE> <INDENT> entity = Entity(id_=1) <NEW_LINE> result = entity.__hash__() <NEW_LINE> assert result == hash("Entity-1")
Test the hashing of entities.
6259906b3317a56b869bf14b
class iface_engin_ioctrl_result: <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.I32, 'success', None, None, ), ) <NEW_LINE> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.success = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('iface_engin_ioctrl_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.I32, 0) <NEW_LINE> oprot.writeI32(self.success) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.success) <NEW_LINE> return value <NEW_LINE> <DEDENT> def 
__repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - success
6259906bd486a94d0ba2d7cf
class SynchronousSuiteClearingTests(SuiteClearingMixin, unittest.SynchronousTestCase): <NEW_LINE> <INDENT> TestCase = unittest.SynchronousTestCase
Tests for our extension that allows us to clear out a L{TestSuite} in the synchronous case. See L{twisted.trial.test.test_tests.SuiteClearingMixin}
6259906bfff4ab517ebcf02b
class Place(BaseModel): <NEW_LINE> <INDENT> city_id = "" <NEW_LINE> user_id = "" <NEW_LINE> name = "" <NEW_LINE> description = "" <NEW_LINE> number_rooms = 0 <NEW_LINE> number_bathrooms = 0 <NEW_LINE> max_guest = 0 <NEW_LINE> price_by_night = 0 <NEW_LINE> latitude = 0.0 <NEW_LINE> longitude = 0.0 <NEW_LINE> amenity_ids = []
[Place] Args: BaseModel ([class]): class that inherited by Place
6259906b67a9b606de5476aa
class SwiftBackendTest(BaseTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> BaseTest.setUp(self) <NEW_LINE> self.backend = Swift() <NEW_LINE> self.conn = mock.Mock() <NEW_LINE> mock.patch.object(self.backend, 'conn', self.conn).start() <NEW_LINE> <DEDENT> def test_download_missing(self): <NEW_LINE> <INDENT> self.conn.get_object.side_effect = ClientException('Missing') <NEW_LINE> with self.assertRaises(DownloadException) as manager: <NEW_LINE> <INDENT> self.backend.download('bucket', 'key', StringIO(), 1) <NEW_LINE> <DEDENT> the_exception = manager.exception <NEW_LINE> self.assertIn('Missing', str(the_exception)) <NEW_LINE> <DEDENT> def test_download_success(self): <NEW_LINE> <INDENT> data = 'content' <NEW_LINE> headers = { 'content-length': len(data) } <NEW_LINE> self.conn.get_object.return_value = (headers, list(data)) <NEW_LINE> result = StringIO() <NEW_LINE> self.backend.download('bucket', 'key', result, 1) <NEW_LINE> self.assertEqual(result.getvalue(), data) <NEW_LINE> <DEDENT> def test_download_partial(self): <NEW_LINE> <INDENT> data = 'content' <NEW_LINE> headers = { 'content-length': len(data) * 10 } <NEW_LINE> self.conn.get_object.return_value = (headers, list(data)) <NEW_LINE> self.assertRaises( DownloadException, self.backend.download, 'bucket', 'key', StringIO(), 1) <NEW_LINE> <DEDENT> def test_missing_content_length(self): <NEW_LINE> <INDENT> data = 'content' <NEW_LINE> headers = {} <NEW_LINE> self.conn.get_object.return_value = (headers, list(data)) <NEW_LINE> result = StringIO() <NEW_LINE> self.backend.download('bucket', 'key', result, 1) <NEW_LINE> self.assertEqual(result.getvalue(), data) <NEW_LINE> <DEDENT> def test_upload_exception(self): <NEW_LINE> <INDENT> self.conn.put_object.side_effect = ClientException('Failed to upload') <NEW_LINE> self.assertRaises(UploadException, self.backend.upload, 'bucket', 'key', StringIO('content'), 1) <NEW_LINE> <DEDENT> def test_list(self): <NEW_LINE> <INDENT> self.conn.get_container.side_effect = 
[(None, [{'name':'key'}]), (None, [])] <NEW_LINE> self.assertEqual(list(self.backend.list('bucket')), ['key']) <NEW_LINE> <DEDENT> def test_delete(self): <NEW_LINE> <INDENT> self.conn.delete_object.side_effect = ClientException('Failed to delete') <NEW_LINE> with self.assertRaises(DeleteException): <NEW_LINE> <INDENT> self.backend.delete('bucket', 'key', 1)
We can talk to Swift as expected.
6259906b3cc13d1c6d466f56
class RoleType(TypeDefinition): <NEW_LINE> <INDENT> DEVELOPER = 1 <NEW_LINE> SELLER = 2
角色对象类型
6259906b92d797404e389763
class PygameMouseController(Controller): <NEW_LINE> <INDENT> import pygame <NEW_LINE> import pygame.locals <NEW_LINE> def __init__(self,event_handler): <NEW_LINE> <INDENT> Controller.__init__(self,event_handler) <NEW_LINE> self.MOUSE_MOVE=event_handler.new_event_type() <NEW_LINE> self.MOUSE_BTN1_DOWN=event_handler.new_event_type() <NEW_LINE> self.MOUSE_BTN2_DOWN=event_handler.new_event_type() <NEW_LINE> self.MOUSE_BTN3_DOWN=event_handler.new_event_type() <NEW_LINE> self.MOUSE_BTN1_UP=event_handler.new_event_type() <NEW_LINE> self.MOUSE_BTN2_UP=event_handler.new_event_type() <NEW_LINE> self.MOUSE_BTN3_UP=event_handler.new_event_type() <NEW_LINE> self.pygame.init() <NEW_LINE> self.pygame.mouse.set_visible(True) <NEW_LINE> self.last_btn=[False, False, False] <NEW_LINE> <DEDENT> def on_update(self, event): <NEW_LINE> <INDENT> self.update() <NEW_LINE> <DEDENT> def hide(self): <NEW_LINE> <INDENT> self.pygame.mouse.set_visible(False) <NEW_LINE> <DEDENT> def show(self): <NEW_LINE> <INDENT> self.pygame.mouse.set_visible(True) <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> output_events=[] <NEW_LINE> pygame=self.pygame <NEW_LINE> btn=self.last_btn <NEW_LINE> for event in pygame.event.get([pygame.MOUSEMOTION, pygame.MOUSEBUTTONUP, pygame.MOUSEBUTTONDOWN]): <NEW_LINE> <INDENT> int_event=dict() <NEW_LINE> if pygame.mouse.get_focused(): <NEW_LINE> <INDENT> if event.type == pygame.MOUSEBUTTONDOWN: <NEW_LINE> <INDENT> pos=event.pos <NEW_LINE> if event.button == 1: <NEW_LINE> <INDENT> typ=self.MOUSE_BTN1_DOWN <NEW_LINE> btn[0]=True <NEW_LINE> <DEDENT> elif event.button == 2: <NEW_LINE> <INDENT> typ=self.MOUSE_BTN2_DOWN <NEW_LINE> btn[1]=True <NEW_LINE> <DEDENT> elif event.button == 3: <NEW_LINE> <INDENT> typ=self.MOUSE_BTN3_DOWN <NEW_LINE> btn[2]=True <NEW_LINE> <DEDENT> <DEDENT> elif event.type == pygame.MOUSEBUTTONUP: <NEW_LINE> <INDENT> pos=event.pos <NEW_LINE> if event.button == 1: <NEW_LINE> <INDENT> typ=self.MOUSE_BTN1_UP <NEW_LINE> btn[0]=False <NEW_LINE> <DEDENT> 
elif event.button == 2: <NEW_LINE> <INDENT> typ=self.MOUSE_BTN2_UP <NEW_LINE> btn[1]=False <NEW_LINE> <DEDENT> elif event.button == 3: <NEW_LINE> <INDENT> typ=self.MOUSE_BTN3_UP <NEW_LINE> btn[2]=False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert False, str(event.button) <NEW_LINE> <DEDENT> <DEDENT> elif event.type == pygame.MOUSEMOTION: <NEW_LINE> <INDENT> pos=event.pos <NEW_LINE> btn=[but==1 for but in event.buttons] <NEW_LINE> typ=self.MOUSE_MOVE <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert False, str(event) <NEW_LINE> <DEDENT> int_event["Type"]=typ <NEW_LINE> int_event["Pos"]=pos <NEW_LINE> int_event["BTN"]=btn <NEW_LINE> output_events.append(int_event) <NEW_LINE> <DEDENT> <DEDENT> self.last_btn=btn <NEW_LINE> self.event_handler.post(output_events) <NEW_LINE> return True
A Mouse controller The tyes have the format {'Type': ID, 'Pos': Position (where? in Viewer?)where the event ocurred, 'BTN1': True o False if button 1 is pressed, 'BTN2' idem, 'BTN3' idem}
6259906b8da39b475be049fc
class invisibility_of(object): <NEW_LINE> <INDENT> def __init__(self, element): <NEW_LINE> <INDENT> self.element = element <NEW_LINE> <DEDENT> def __call__(self, driver): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return (not self.element.is_displayed()) <NEW_LINE> <DEDENT> except EC.StaleElementReferenceException: <NEW_LINE> <INDENT> return True
Checks for a known element to be invisible. Much like the builtin visibility_of: https://github.com/SeleniumHQ/selenium/search?utf8=%E2%9C%93&q=visibility_of
6259906b6e29344779b01e65
class FlipUD(Scale): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__(1, [-1, 1])
Flip up-down.
6259906b2ae34c7f260ac8f9
class CustomLoginViewTests(ViewTestingMixin, TestCase): <NEW_LINE> <INDENT> data = TestData() <NEW_LINE> view_class = CustomLoginView <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> data = self.data.get_customer_data() <NEW_LINE> form = CustomerCreationForm(data=data) <NEW_LINE> form.is_valid() <NEW_LINE> self.user_customer = form.save() <NEW_LINE> self.redirect_to = self.view_class.permission_denied_redirect <NEW_LINE> <DEDENT> def test_get(self): <NEW_LINE> <INDENT> self.is_callable(user=self.user_customer.user, post=True, redirect_to=self.redirect_to ) <NEW_LINE> <DEDENT> def test_post(self): <NEW_LINE> <INDENT> self.is_callable(user=self.user_customer.user, post=True, redirect_to=self.redirect_to, data={ 'username': self.user_customer.user.email, 'password': self.user_customer.user.password, })
Testing login view permissions and redirection. Only the perrmisions are tested because the view inherts the login behaviour from the Django built-in LoginView.
6259906bdd821e528d6da589
class AuthorizationMixin(object): <NEW_LINE> <INDENT> authorization_filter_class = None <NEW_LINE> def get_authorization_filter_class(self): <NEW_LINE> <INDENT> return self.authorization_filter_class <NEW_LINE> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> queryset = super().get_queryset() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> from django.core.exceptions import ImproperlyConfigured <NEW_LINE> if self.model is not None: <NEW_LINE> <INDENT> queryset = self.model._default_manager.all() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ImproperlyConfigured( "{cls} is missing a QuerySet. Define {cls}.model, {cls}.queryset, or override " "{cls}.get_queryset().".format({'cls': self.__class__.__name__}) ) <NEW_LINE> <DEDENT> <DEDENT> if self.full_permission_for_superuser() or self.full_permission_for_staff(): <NEW_LINE> <INDENT> return queryset <NEW_LINE> <DEDENT> return self.filter_queryset(queryset) <NEW_LINE> <DEDENT> def full_permission_for_superuser(self): <NEW_LINE> <INDENT> return self.request.user.is_superuser and settings.FULL_PERMISSION_FOR_SUPERUSERS <NEW_LINE> <DEDENT> def full_permission_for_staff(self): <NEW_LINE> <INDENT> return self.request.user.is_staff and settings.FULL_PERMISSION_FOR_STAFF <NEW_LINE> <DEDENT> def filter_queryset(self, queryset): <NEW_LINE> <INDENT> filter_class = self.get_authorization_filter_class() <NEW_LINE> if filter_class: <NEW_LINE> <INDENT> queryset = filter_class().filter_queryset(self.request, queryset, self) <NEW_LINE> <DEDENT> return queryset
Mixin for filter queryset according the defined list of backend. :keyword filter_backends: filter backend list
6259906bac7a0e7691f73cf8
class EventsTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> events.unbindAll() <NEW_LINE> self.ctr = 0 <NEW_LINE> self.responses = None <NEW_LINE> <DEDENT> def _raiseException(self, event): <NEW_LINE> <INDENT> raise Exception('Failure condition') <NEW_LINE> <DEDENT> def _increment(self, event): <NEW_LINE> <INDENT> self.ctr += event.info['amount'] <NEW_LINE> <DEDENT> def _incrementWithResponse(self, event): <NEW_LINE> <INDENT> self._increment(event) <NEW_LINE> event.addResponse('foo') <NEW_LINE> <DEDENT> def _eatEvent(self, event): <NEW_LINE> <INDENT> event.addResponse({'foo': 'bar'}) <NEW_LINE> event.stopPropagation() <NEW_LINE> event.preventDefault() <NEW_LINE> <DEDENT> def _shouldNotBeCalled(self, event): <NEW_LINE> <INDENT> self.fail('This should not be called due to stopPropagation().') <NEW_LINE> <DEDENT> def testSynchronousEvents(self): <NEW_LINE> <INDENT> name, failname = '_test.event', '_test.failure' <NEW_LINE> handlerName = '_test.handler' <NEW_LINE> events.bind(name, handlerName, self._increment) <NEW_LINE> events.bind(failname, handlerName, self._raiseException) <NEW_LINE> try: <NEW_LINE> <INDENT> events.trigger(failname) <NEW_LINE> self.assertTrue(False) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.assertEqual(e.message, 'Failure condition') <NEW_LINE> <DEDENT> self.assertEqual(self.ctr, 0) <NEW_LINE> event = events.trigger(name, {'amount': 2}) <NEW_LINE> self.assertEqual(self.ctr, 2) <NEW_LINE> self.assertTrue(event.propagate) <NEW_LINE> self.assertFalse(event.defaultPrevented) <NEW_LINE> self.assertEqual(event.responses, []) <NEW_LINE> events.unbind(name, 'not the handler name') <NEW_LINE> events.trigger(name, {'amount': 2}) <NEW_LINE> self.assertEqual(self.ctr, 4) <NEW_LINE> events.unbind(name, handlerName) <NEW_LINE> events.trigger(name, {'amount': 2}) <NEW_LINE> self.assertEqual(self.ctr, 4) <NEW_LINE> events.bind(name, handlerName, self._eatEvent) <NEW_LINE> events.bind(name, 'other 
handler name', self._shouldNotBeCalled) <NEW_LINE> event = events.trigger(name) <NEW_LINE> self.assertTrue(event.defaultPrevented) <NEW_LINE> self.assertFalse(event.propagate) <NEW_LINE> self.assertEqual(event.responses, [{'foo': 'bar'}]) <NEW_LINE> <DEDENT> def testAsyncEvents(self): <NEW_LINE> <INDENT> name, failname = '_test.event', '_test.failure' <NEW_LINE> handlerName = '_test.handler' <NEW_LINE> events.bind(failname, handlerName, self._raiseException) <NEW_LINE> events.bind(name, handlerName, self._incrementWithResponse) <NEW_LINE> def callback(event): <NEW_LINE> <INDENT> self.ctr += 1 <NEW_LINE> self.responses = event.responses <NEW_LINE> <DEDENT> self.assertEqual(events.daemon.eventQueue.qsize(), 0) <NEW_LINE> events.daemon.trigger(failname, handlerName, callback) <NEW_LINE> self.assertEqual(events.daemon.eventQueue.qsize(), 1) <NEW_LINE> events.daemon.trigger(name, {'amount': 2}, callback) <NEW_LINE> self.assertEqual(events.daemon.eventQueue.qsize(), 2) <NEW_LINE> self.assertEqual(self.ctr, 0) <NEW_LINE> events.daemon.start() <NEW_LINE> time.sleep(0.1) <NEW_LINE> self.assertEqual(events.daemon.eventQueue.qsize(), 0) <NEW_LINE> self.assertEqual(self.ctr, 3) <NEW_LINE> self.assertEqual(self.responses, ['foo'])
This test case is just a unit test of the girder.events system. It does not require the server to be running, or any use of the database.
6259906b23849d37ff8528c7
class HotClient(redis.Redis): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> kwargs.setdefault("decode_responses", True) <NEW_LINE> super(HotClient, self).__init__(*args, **kwargs) <NEW_LINE> requires_luabit = ("number_and", "number_or", "number_xor", "number_lshift", "number_rshift") <NEW_LINE> with open(self._get_lua_path("bit.lua")) as f: <NEW_LINE> <INDENT> luabit = f.read() <NEW_LINE> <DEDENT> for name, snippet in self._get_lua_funcs(): <NEW_LINE> <INDENT> if name in requires_luabit: <NEW_LINE> <INDENT> snippet = luabit + snippet <NEW_LINE> <DEDENT> self._create_lua_method(name, snippet) <NEW_LINE> <DEDENT> <DEDENT> def _get_lua_path(self, name): <NEW_LINE> <INDENT> parts = (os.path.dirname(os.path.abspath(__file__)), "lua", name) <NEW_LINE> return os.path.join(*parts) <NEW_LINE> <DEDENT> def _get_lua_funcs(self): <NEW_LINE> <INDENT> with open(self._get_lua_path("atoms.lua")) as f: <NEW_LINE> <INDENT> for func in f.read().strip().split("function "): <NEW_LINE> <INDENT> if func: <NEW_LINE> <INDENT> bits = func.split("\n", 1) <NEW_LINE> name = bits[0].split("(")[0].strip() <NEW_LINE> snippet = bits[1].rsplit("end", 1)[0].strip() <NEW_LINE> yield name, snippet <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def _create_lua_method(self, name, code): <NEW_LINE> <INDENT> script = self.register_script(code) <NEW_LINE> method = lambda key, *a, **k: script(keys=[key], args=a, **k) <NEW_LINE> setattr(self, name, method)
A Redis client wrapper that loads Lua functions and creates client methods for calling them.
6259906b3d592f4c4edbc6f1
class loginTest(myunit.MyTest): <NEW_LINE> <INDENT> def user_login_verify(self,username,password): <NEW_LINE> <INDENT> login(self.driver).user_login(username,password) <NEW_LINE> <DEDENT> def test_login(self): <NEW_LINE> <INDENT> self.user_login_verify("username111","password111") <NEW_LINE> function.screenshot_name(self.driver) <NEW_LINE> text=login(self.driver).error_message() <NEW_LINE> self.assertEqual(text,"请XX输入验证码",msg="Qingbo登录失败!")
百度登录测试
6259906b1b99ca400229013e
class AsyncRequestHandler(RequestHandler): <NEW_LINE> <INDENT> def on_response(self, response): <NEW_LINE> <INDENT> if response.error: <NEW_LINE> <INDENT> self.send_error(500) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = json.loads(response.body) <NEW_LINE> self.write(data) <NEW_LINE> <DEDENT> self.finish() <NEW_LINE> <DEDENT> @tornado.web.asynchronous <NEW_LINE> def get(self, *args, **kwargs): <NEW_LINE> <INDENT> client = AsyncHTTPClient() <NEW_LINE> url = 'http://www.baidu.com' <NEW_LINE> client.fetch(url, self.on_response)
类比diango中的视图
6259906bf548e778e596cd9e
class User: <NEW_LINE> <INDENT> def __init__(self, username, password): <NEW_LINE> <INDENT> self.username = username <NEW_LINE> self.password = password
A user object.
6259906b5fcc89381b266d5f
class AccelStamped(metaclass=Metaclass): <NEW_LINE> <INDENT> __slots__ = [ '_header', '_accel', ] <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> assert all(['_' + key in self.__slots__ for key in kwargs.keys()]), 'Invalid arguments passed to constructor: %r' % kwargs.keys() <NEW_LINE> from std_msgs.msg import Header <NEW_LINE> self.header = kwargs.get('header', Header()) <NEW_LINE> from geometry_msgs.msg import Accel <NEW_LINE> self.accel = kwargs.get('accel', Accel()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> typename = self.__class__.__module__.split('.') <NEW_LINE> typename.pop() <NEW_LINE> typename.append(self.__class__.__name__) <NEW_LINE> args = [s[1:] + '=' + repr(getattr(self, s, None)) for s in self.__slots__] <NEW_LINE> return '%s(%s)' % ('.'.join(typename), ', '.join(args)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def header(self): <NEW_LINE> <INDENT> return self._header <NEW_LINE> <DEDENT> @header.setter <NEW_LINE> def header(self, value): <NEW_LINE> <INDENT> from std_msgs.msg import Header <NEW_LINE> assert isinstance(value, Header), "The 'header' field must be a sub message of type 'Header'" <NEW_LINE> self._header = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def accel(self): <NEW_LINE> <INDENT> return self._accel <NEW_LINE> <DEDENT> @accel.setter <NEW_LINE> def accel(self, value): <NEW_LINE> <INDENT> from geometry_msgs.msg import Accel <NEW_LINE> assert isinstance(value, Accel), "The 'accel' field must be a sub message of type 'Accel'" <NEW_LINE> self._accel = value
Message class 'AccelStamped'.
6259906b8e7ae83300eea8a0
class TensorBoardCallBack(tf.keras.callbacks.TensorBoard): <NEW_LINE> <INDENT> def on_train_batch_begin(self, batch, logs=None): <NEW_LINE> <INDENT> super(TensorBoardCallBack, self).on_train_batch_begin(batch, logs) <NEW_LINE> try: <NEW_LINE> <INDENT> lr = self.model.optimizer.learning_rate(batch) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> lr = self.model.optimizer.learning_rate <NEW_LINE> <DEDENT> if batch % 100 == 0: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with self.writer.as_default(): <NEW_LINE> <INDENT> tf.summary.scalar('learning rate', data=lr) <NEW_LINE> self.writer.flush() <NEW_LINE> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> logging.info('TensorBoard not init yet')
Tensorboard callback with added metrics.
6259906b4a966d76dd5f06fc
class WorkloadProtectableItem(Model): <NEW_LINE> <INDENT> _validation = { 'protectable_item_type': {'required': True}, } <NEW_LINE> _attribute_map = { 'backup_management_type': {'key': 'backupManagementType', 'type': 'str'}, 'friendly_name': {'key': 'friendlyName', 'type': 'str'}, 'protection_state': {'key': 'protectionState', 'type': 'str'}, 'protectable_item_type': {'key': 'protectableItemType', 'type': 'str'}, } <NEW_LINE> _subtype_map = { 'protectable_item_type': {'IaaSVMProtectableItem': 'IaaSVMProtectableItem'} } <NEW_LINE> def __init__(self, backup_management_type=None, friendly_name=None, protection_state=None): <NEW_LINE> <INDENT> self.backup_management_type = backup_management_type <NEW_LINE> self.friendly_name = friendly_name <NEW_LINE> self.protection_state = protection_state <NEW_LINE> self.protectable_item_type = None
Base class for backup item. Workload-specific backup items are derived from this class. :param backup_management_type: Type of backup managemenent to backup an item. :type backup_management_type: str :param friendly_name: Friendly name of the backup item. :type friendly_name: str :param protection_state: State of the back up item. Possible values include: 'Invalid', 'NotProtected', 'Protecting', 'Protected' :type protection_state: str or :class:`ProtectionStatus <azure.mgmt.recoveryservicesbackup.models.ProtectionStatus>` :param protectable_item_type: Polymorphic Discriminator :type protectable_item_type: str
6259906b4e4d562566373c18
class SegmentationFeeder: <NEW_LINE> <INDENT> def __call__(self, affinities, segmentation, foreground_mask=None): <NEW_LINE> <INDENT> if foreground_mask is not None: <NEW_LINE> <INDENT> assert foreground_mask.shape == segmentation.shape <NEW_LINE> segmentation = segmentation.astype('int64') <NEW_LINE> segmentation = np.where(foreground_mask, segmentation, np.ones_like(segmentation) * (-1)) <NEW_LINE> <DEDENT> return segmentation
A simple function that expects affinities and initial segmentation (with optional foreground mask) and can be used as "superpixel_generator" for GASP
6259906b4f6381625f19a0b0
class TestTransactionFlags(TestPythonClientBase, vtgate_client_testsuite.TestTransactionFlags): <NEW_LINE> <INDENT> pass
Success test cases for the Python client.
6259906b7b25080760ed88eb
class TestHeldKarp(unittest.TestCase): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def read_process_tsp(file_name): <NEW_LINE> <INDENT> file_ = open(file_name, 'rb') <NEW_LINE> count, cities = q1.parse_cities(file_) <NEW_LINE> return held_karp.held_karp_dicts(cities, count, False) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def read_process_tsp2(file_name): <NEW_LINE> <INDENT> file_ = open(file_name, 'rb') <NEW_LINE> count, cities = q1.parse_cities(file_) <NEW_LINE> return held_karp.held_karp_scipy(cities, count, False) <NEW_LINE> <DEDENT> def test_tsp_examples(self): <NEW_LINE> <INDENT> actual1 = TestHeldKarp.read_process_tsp('../data/tsp2.txt') <NEW_LINE> self.assertEqual(actual1, 4.0) <NEW_LINE> actual2 = round(TestHeldKarp.read_process_tsp('../data/tsp3.txt'), 4) <NEW_LINE> self.assertEqual(actual2, 10.4721) <NEW_LINE> actual3 = round(TestHeldKarp.read_process_tsp('../data/tsp4.txt'), 5) <NEW_LINE> self.assertEqual(actual3, 6.17986) <NEW_LINE> actual4 = round(TestHeldKarp.read_process_tsp('../data/tsp5.txt'), 5) <NEW_LINE> self.assertEqual(actual4, 6.26533) <NEW_LINE> actual5 = round(TestHeldKarp.read_process_tsp('../data/tsp6.txt'), 3) <NEW_LINE> self.assertEqual(actual5, 124.966) <NEW_LINE> actual6 = round(TestHeldKarp.read_process_tsp('../data/tsp8.txt'), 1) <NEW_LINE> self.assertEqual(actual6, 16898.1) <NEW_LINE> actual7 = round(TestHeldKarp.read_process_tsp('../data/tsp9.txt'), 1) <NEW_LINE> self.assertEqual(actual7, 26714.9) <NEW_LINE> <DEDENT> def test_tsp_examples2(self): <NEW_LINE> <INDENT> actual1 = TestHeldKarp.read_process_tsp2('../data/tsp2.txt') <NEW_LINE> self.assertEqual(actual1, 4.0) <NEW_LINE> actual2 = round(TestHeldKarp.read_process_tsp('../data/tsp3.txt'), 4) <NEW_LINE> self.assertEqual(actual2, 10.4721) <NEW_LINE> actual3 = round(TestHeldKarp.read_process_tsp('../data/tsp4.txt'), 5) <NEW_LINE> self.assertEqual(actual3, 6.17986) <NEW_LINE> actual4 = round(TestHeldKarp.read_process_tsp('../data/tsp5.txt'), 5) <NEW_LINE> 
self.assertEqual(actual4, 6.26533) <NEW_LINE> actual5 = round(TestHeldKarp.read_process_tsp('../data/tsp6.txt'), 3) <NEW_LINE> self.assertEqual(actual5, 124.966) <NEW_LINE> actual6 = round(TestHeldKarp.read_process_tsp('../data/tsp8.txt'), 1) <NEW_LINE> self.assertEqual(actual6, 16898.1) <NEW_LINE> actual7 = round(TestHeldKarp.read_process_tsp('../data/tsp9.txt'), 1) <NEW_LINE> self.assertEqual(actual7, 26714.9)
unittest module for testing the held-karp algo on example TSPs.
6259906b8da39b475be049fe
class CosmosPatchTransformPolicy(SansIOHTTPPolicy): <NEW_LINE> <INDENT> def on_request(self, request): <NEW_LINE> <INDENT> if request.http_request.method == "PATCH": <NEW_LINE> <INDENT> _transform_patch_to_cosmos_post(request.http_request)
Policy to transform PATCH requests into POST requests with the "X-HTTP-Method":"MERGE" header set.
6259906b92d797404e389764
class TestAuthentication(TestController): <NEW_LINE> <INDENT> application_under_test = 'main' <NEW_LINE> def test_forced_login(self): <NEW_LINE> <INDENT> resp = self.app.get('/secc/', status=302) <NEW_LINE> ok_( resp.location.startswith('http://localhost/login')) <NEW_LINE> resp = resp.follow(status=200) <NEW_LINE> form = resp.form <NEW_LINE> form['login'] = 'manager' <NEW_LINE> form['password'] = 'managepass' <NEW_LINE> post_login = form.submit(status=302) <NEW_LINE> ok_(post_login.location.startswith('http://localhost/post_login')) <NEW_LINE> initial_page = post_login.follow(status=302) <NEW_LINE> ok_('authtkt' in initial_page.request.cookies, "Session cookie wasn't defined: %s" % initial_page.request.cookies) <NEW_LINE> ok_(initial_page.location.startswith('http://localhost/secc/'), initial_page.location) <NEW_LINE> <DEDENT> def test_voluntary_login(self): <NEW_LINE> <INDENT> resp = self.app.get('/login', status=200) <NEW_LINE> form = resp.form <NEW_LINE> form['login'] = 'manager' <NEW_LINE> form['password'] = 'managepass' <NEW_LINE> post_login = form.submit(status=302) <NEW_LINE> ok_(post_login.location.startswith('http://localhost/post_login')) <NEW_LINE> home_page = post_login.follow(status=302) <NEW_LINE> ok_('authtkt' in home_page.request.cookies, 'Session cookie was not defined: %s' % home_page.request.cookies) <NEW_LINE> eq_(home_page.location, 'http://localhost/') <NEW_LINE> <DEDENT> def test_logout(self): <NEW_LINE> <INDENT> resp = self.app.get('/login_handler?login=manager&password=managepass', status=302) <NEW_LINE> resp = resp.follow(status=302) <NEW_LINE> ok_('authtkt' in resp.request.cookies, 'Session cookie was not defined: %s' % resp.request.cookies) <NEW_LINE> resp = self.app.get('/logout_handler', status=302) <NEW_LINE> ok_(resp.location.startswith('http://localhost/post_logout')) <NEW_LINE> home_page = resp.follow(status=302) <NEW_LINE> authtkt = home_page.request.cookies.get('authtkt') <NEW_LINE> ok_(not authtkt or authtkt == 'INVALID', 
'Session cookie was not deleted: %s' % home_page.request.cookies) <NEW_LINE> eq_(home_page.location, 'http://localhost/')
Tests for the default authentication setup. If your application changes how the authentication layer is configured those tests should be updated accordingly
6259906b2ae34c7f260ac8fb
class AbsoluteURLRedirectMiddleware(object): <NEW_LINE> <INDENT> def process_response(self, request, response): <NEW_LINE> <INDENT> if (_uses_relative_redirects and 'Location' in response and request.get_host()): <NEW_LINE> <INDENT> response['Location'] = request.build_absolute_uri(response['Location']) <NEW_LINE> <DEDENT> return response
Middleware that turns all relative URL redirects to absolute. Django 1.9 changed URL redirects to be relative by default (so long as they're redirecting to the same host). For compatibility across Django versions (and to theoretically work around some broken HTTP implementations out in the wild), this middleware will turn those relative URLs back into absolute URLs.
6259906b45492302aabfdcea
class ResetInstancesResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RequestId = params.get("RequestId")
ResetInstances返回参数结构体
6259906bdd821e528d6da58a
class CadernetaList(generics.ListAPIView): <NEW_LINE> <INDENT> queryset = Caderneta.objects.all() <NEW_LINE> serializer_class = CadernetaSerializer <NEW_LINE> paginate_by = 10
List all Cadernetas in EBD.
6259906b435de62698e9d619
class frontend: <NEW_LINE> <INDENT> title_color = color("steelblue2") <NEW_LINE> subtitle_color = color("dodgerblue4") <NEW_LINE> @staticmethod <NEW_LINE> def flavortext(body, delay_ms, *, end=None, factor = 10): <NEW_LINE> <INDENT> if end == None: <NEW_LINE> <INDENT> end = '' <NEW_LINE> <DEDENT> stream.echo(" *" + " "* int(stream.width()/factor), 0, end='') <NEW_LINE> _ = stream.echo(body, delay_ms, end=end) <NEW_LINE> return _ <NEW_LINE> del end <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def setting(cls, title, subtitle, t_delay_ms, s_delay_ms): <NEW_LINE> <INDENT> e = cls.flavortext(cls.title_color.w(title), t_delay_ms, end='\n') <NEW_LINE> if e != 0: <NEW_LINE> <INDENT> cls.flavortext(cls.subtitle_color.w(subtitle), s_delay_ms) <NEW_LINE> <DEDENT> elif e == 0: <NEW_LINE> <INDENT> cls.flavortext(cls.subtitle_color.w(subtitle), 0) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def block(cls, body, delay_ms, downtime, *, end=None, check=None, cont=False, factor=10, begin=None): <NEW_LINE> <INDENT> if check == None: <NEW_LINE> <INDENT> sys.stdout.write(t.move_y(8)) <NEW_LINE> <DEDENT> if begin != None: <NEW_LINE> <INDENT> sys.stdout.write(begin) <NEW_LINE> <DEDENT> mone = False <NEW_LINE> if check: <NEW_LINE> <INDENT> delay_ms = 0 <NEW_LINE> downtime = 0 <NEW_LINE> mone = True <NEW_LINE> <DEDENT> if end==None: <NEW_LINE> <INDENT> end = '\n' <NEW_LINE> <DEDENT> for thing in body: <NEW_LINE> <INDENT> if thing == "\n" or thing == ("\n") or thing == ["\n"]: <NEW_LINE> <INDENT> e = stream.echo(thing, delay_ms, end=end) <NEW_LINE> <DEDENT> elif cont: <NEW_LINE> <INDENT> e = stream.echo(thing, delay_ms, end=end) <NEW_LINE> <DEDENT> elif not cont: <NEW_LINE> <INDENT> e = cls.flavortext(thing, delay_ms, end=end, factor=factor) <NEW_LINE> <DEDENT> if e: <NEW_LINE> <INDENT> delay_ms = 0 <NEW_LINE> downtime = 0 <NEW_LINE> mone = True <NEW_LINE> <DEDENT> if not (thing == "\n" or thing == ("\n") or thing == ["\n"]): <NEW_LINE> <INDENT> stream.wait(timeout_ms = downtime) 
<NEW_LINE> <DEDENT> <DEDENT> return mone
Frontend collections
6259906b3539df3088ecdab1
class SynoNasSensor(Entity): <NEW_LINE> <INDENT> def __init__(self, api, variable, variable_info, monitor_device=None): <NEW_LINE> <INDENT> self.var_id = variable <NEW_LINE> self.var_name = variable_info[0] <NEW_LINE> self.var_units = variable_info[1] <NEW_LINE> self.var_icon = variable_info[2] <NEW_LINE> self.monitor_device = monitor_device <NEW_LINE> self._api = api <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> if self.monitor_device is not None: <NEW_LINE> <INDENT> return "{} ({})".format(self.var_name, self.monitor_device) <NEW_LINE> <DEDENT> return self.var_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> return self.var_icon <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> if self.var_id in ['volume_disk_temp_avg', 'volume_disk_temp_max', 'disk_temp']: <NEW_LINE> <INDENT> return self._api.temp_unit <NEW_LINE> <DEDENT> return self.var_units <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> if self._api is not None: <NEW_LINE> <INDENT> self._api.update()
Representation of a Synology NAS Sensor.
6259906ba8370b77170f1bd9
class TestKernel(kernels_lib.PositiveSemidefiniteKernel): <NEW_LINE> <INDENT> def __init__(self, multiplier): <NEW_LINE> <INDENT> self._multiplier = tf.convert_to_tensor(multiplier) <NEW_LINE> super(TestKernel, self).__init__(feature_ndims=1) <NEW_LINE> <DEDENT> def _batch_shape(self): <NEW_LINE> <INDENT> return self._multiplier.shape <NEW_LINE> <DEDENT> def _batch_shape_tensor(self): <NEW_LINE> <INDENT> return tf.shape(self._multiplier) <NEW_LINE> <DEDENT> def _apply(self, x1, x2, param_expansion_ndims=0): <NEW_LINE> <INDENT> x1 = tf.convert_to_tensor(x1) <NEW_LINE> x2 = tf.convert_to_tensor(x2) <NEW_LINE> multiplier = kernels_util.pad_shape_right_with_ones( self._multiplier, param_expansion_ndims) <NEW_LINE> return multiplier * tf.reduce_sum(x1 + x2, axis=-1)
A PositiveSemidefiniteKernel implementation just for testing purposes. k(x, y) = m * sum(x + y) Not at all positive semidefinite, but we don't care about this here.
6259906b23849d37ff8528c9
class MFloatVectorArray( api.MFloatVectorArray, ArrayBase ): <NEW_LINE> <INDENT> _apicls = api.MFloatVectorArray <NEW_LINE> def __iter__( self ): <NEW_LINE> <INDENT> for i in xrange(len(self)): <NEW_LINE> <INDENT> yield _floatvectorarray_getitem( self, i )
Wrap MFloatVector to make it compatible to pythonic contructs. :note: for performance reasons, we do not provide negative index support
6259906b97e22403b383c720
class TestMain(unittest.TestCase): <NEW_LINE> <INDENT> @patch.object(main, 'get_logger') <NEW_LINE> @patch.object(main.docker, 'from_env') <NEW_LINE> @patch.object(main.time, 'sleep') <NEW_LINE> def test_main(self, fake_sleep, fake_from_env, fake_get_logger): <NEW_LINE> <INDENT> fake_sleep.side_effect = RuntimeError('breaking loop') <NEW_LINE> with self.assertRaises(RuntimeError): <NEW_LINE> <INDENT> main.main() <NEW_LINE> <DEDENT> call_args, _ = fake_sleep.call_args <NEW_LINE> slept_for = call_args[0] <NEW_LINE> self.assertTrue(main.LOOP_INTERVAL > slept_for) <NEW_LINE> <DEDENT> @patch.object(main.time, 'time') <NEW_LINE> @patch.object(main, 'get_logger') <NEW_LINE> @patch.object(main.docker, 'from_env') <NEW_LINE> @patch.object(main.time, 'sleep') <NEW_LINE> def test_main_slow_spawn(self, fake_sleep, fake_from_env, fake_get_logger, fake_time): <NEW_LINE> <INDENT> fake_time.side_effect = [100, 200] <NEW_LINE> fake_sleep.side_effect = RuntimeError('breaking loop') <NEW_LINE> with self.assertRaises(RuntimeError): <NEW_LINE> <INDENT> main.main() <NEW_LINE> <DEDENT> call_args, _ = fake_sleep.call_args <NEW_LINE> slept_for = call_args[0] <NEW_LINE> expected = 0 <NEW_LINE> self.assertEqual(slept_for, expected) <NEW_LINE> <DEDENT> @patch.object(main.time, 'time') <NEW_LINE> @patch.object(main, 'get_logger') <NEW_LINE> @patch.object(main.docker, 'from_env') <NEW_LINE> @patch.object(main.time, 'sleep') <NEW_LINE> def test_main_wonky_clock(self, fake_sleep, fake_from_env, fake_get_logger, fake_time): <NEW_LINE> <INDENT> fake_time.side_effect = [300, 200] <NEW_LINE> fake_sleep.side_effect = RuntimeError('breaking loop') <NEW_LINE> with self.assertRaises(RuntimeError): <NEW_LINE> <INDENT> main.main() <NEW_LINE> <DEDENT> call_args, _ = fake_sleep.call_args <NEW_LINE> slept_for = call_args[0] <NEW_LINE> expected = 10 <NEW_LINE> self.assertEqual(slept_for, expected)
A suite of test cases for the ``main`` function
6259906bf548e778e596cda0
class HostnamePoll(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.counterlist = [] <NEW_LINE> <DEDENT> def highest_count_type(self): <NEW_LINE> <INDENT> winner = '' <NEW_LINE> notzeroflag = False <NEW_LINE> for o in self.counterlist: <NEW_LINE> <INDENT> if o.count > 0: <NEW_LINE> <INDENT> notzeroflag = True <NEW_LINE> <DEDENT> <DEDENT> if notzeroflag: <NEW_LINE> <INDENT> self.counterlist.sort(key=operator.attrgetter('count')) <NEW_LINE> w = self.counterlist.pop() <NEW_LINE> winner = w.hname <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.debug("TypePoll: Unable to determine winner.") <NEW_LINE> winner = "UNDETERMINED" <NEW_LINE> <DEDENT> return winner
Holder for all HostnameCounters. Has methods to sort based on highest count.
6259906ba219f33f346c801b
class Volume_type_access(extensions.ExtensionDescriptor): <NEW_LINE> <INDENT> name = "VolumeTypeAccess" <NEW_LINE> alias = "os-volume-type-access" <NEW_LINE> namespace = ("http://docs.openstack.org/volume/" "ext/os-volume-type-access/api/v1") <NEW_LINE> updated = "2014-06-26T00:00:00Z" <NEW_LINE> def get_resources(self): <NEW_LINE> <INDENT> resources = [] <NEW_LINE> res = extensions.ResourceExtension( Volume_type_access.alias, VolumeTypeAccessController(), parent=dict(member_name='type', collection_name='types')) <NEW_LINE> resources.append(res) <NEW_LINE> return resources <NEW_LINE> <DEDENT> def get_controller_extensions(self): <NEW_LINE> <INDENT> controller = VolumeTypeActionController() <NEW_LINE> extension = extensions.ControllerExtension(self, 'types', controller) <NEW_LINE> return [extension]
Volume type access support.
6259906b76e4537e8c3f0d96
class Stream(PlotlyDict): <NEW_LINE> <INDENT> pass
A dictionary-like object representing a data stream.
6259906b99fddb7c1ca639da
class ST_Transform(GenericFunction): <NEW_LINE> <INDENT> name = 'ST_Transform' <NEW_LINE> type = Geometry
Exposes PostGIS ST_Transform function
6259906b3539df3088ecdab2
class NytNotFoundError(NytCongressError): <NEW_LINE> <INDENT> pass
Exception for things not found
6259906be1aae11d1e7cf417
class LIRCMOP4(LIRCMOP2): <NEW_LINE> <INDENT> def __init__(self, number_of_variables: int = 30): <NEW_LINE> <INDENT> super(LIRCMOP4, self).__init__(number_of_variables) <NEW_LINE> <DEDENT> def evaluate_constraints(self, solution: FloatSolution) -> FloatSolution: <NEW_LINE> <INDENT> x = solution.variables <NEW_LINE> constraints = [0.0 for _ in range(self.number_of_constraints)] <NEW_LINE> a = 0.51 <NEW_LINE> b = 0.5 <NEW_LINE> c = 20.0 <NEW_LINE> constraints[0] = (a - self.g1(x)) * (self.g1(x) - b) <NEW_LINE> constraints[1] = (a - self.g2(x)) * (self.g2(x) - b) <NEW_LINE> constraints[2] = sin(c * pi * x[0]) - 0.5 <NEW_LINE> solution.constraints = constraints <NEW_LINE> return solution <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return 'LIR-CMOP4'
Class representing problem LIR-CMOP4, defined in: * An Improved epsilon-constrained Method in MOEA/D for CMOPs with Large Infeasible Regions. Fan, Z., Li, W., Cai, X. et al. Soft Comput (2019). https://doi.org/10.1007/s00500-019-03794-x
6259906b2ae34c7f260ac8fc
class TaskRegistry(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._registry = {} <NEW_LINE> <DEDENT> def __contains__(self, item): <NEW_LINE> <INDENT> return item in self._registry <NEW_LINE> <DEDENT> def register(self, task): <NEW_LINE> <INDENT> if not isinstance(task, Task): <NEW_LINE> <INDENT> raise VPollerException('The task should be an instance of Task class') <NEW_LINE> <DEDENT> self._registry[task.name] = task <NEW_LINE> <DEDENT> def unregister(self, name): <NEW_LINE> <INDENT> self._registry.pop(name) <NEW_LINE> <DEDENT> def get(self, name): <NEW_LINE> <INDENT> return self._registry.get(name)
A registry for the vPoller tasks
6259906be76e3b2f99fda215
class ConnectionMonitorResultProperties(ConnectionMonitorParameters): <NEW_LINE> <INDENT> _validation = { 'source': {'required': True}, 'destination': {'required': True}, } <NEW_LINE> _attribute_map = { 'source': {'key': 'source', 'type': 'ConnectionMonitorSource'}, 'destination': {'key': 'destination', 'type': 'ConnectionMonitorDestination'}, 'auto_start': {'key': 'autoStart', 'type': 'bool'}, 'monitoring_interval_in_seconds': {'key': 'monitoringIntervalInSeconds', 'type': 'int'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, 'monitoring_status': {'key': 'monitoringStatus', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, source: "ConnectionMonitorSource", destination: "ConnectionMonitorDestination", auto_start: Optional[bool] = True, monitoring_interval_in_seconds: Optional[int] = 60, provisioning_state: Optional[Union[str, "ProvisioningState"]] = None, start_time: Optional[datetime.datetime] = None, monitoring_status: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(ConnectionMonitorResultProperties, self).__init__(source=source, destination=destination, auto_start=auto_start, monitoring_interval_in_seconds=monitoring_interval_in_seconds, **kwargs) <NEW_LINE> self.provisioning_state = provisioning_state <NEW_LINE> self.start_time = start_time <NEW_LINE> self.monitoring_status = monitoring_status
Describes the properties of a connection monitor. All required parameters must be populated in order to send to Azure. :param source: Required. Describes the source of connection monitor. :type source: ~azure.mgmt.network.v2018_08_01.models.ConnectionMonitorSource :param destination: Required. Describes the destination of connection monitor. :type destination: ~azure.mgmt.network.v2018_08_01.models.ConnectionMonitorDestination :param auto_start: Determines if the connection monitor will start automatically once created. :type auto_start: bool :param monitoring_interval_in_seconds: Monitoring interval in seconds. :type monitoring_interval_in_seconds: int :param provisioning_state: The provisioning state of the connection monitor. Possible values include: "Succeeded", "Updating", "Deleting", "Failed". :type provisioning_state: str or ~azure.mgmt.network.v2018_08_01.models.ProvisioningState :param start_time: The date and time when the connection monitor was started. :type start_time: ~datetime.datetime :param monitoring_status: The monitoring status of the connection monitor. :type monitoring_status: str
6259906b01c39578d7f1433f
class SD(_SDPacketBase):
    """SOME/IP Service Discovery (SD) packet.

    NOTE: when adding 'entries' or 'options', do not use ``list.append()``
    but assign a fresh list, e.g.::

        p = SD()
        p.option_array = [SDOption_Config(), SDOption_IP6_EndPoint()]
    """

    # Fixed SOME/IP header values used when an SD payload is carried
    # (service/sub/event id, client id, version and message-type fields).
    SOMEIP_MSGID_SRVID = 0xffff
    SOMEIP_MSGID_SUBID = 0x1
    SOMEIP_MSGID_EVENTID = 0x100
    SOMEIP_CLIENT_ID = 0x0000
    SOMEIP_MINIMUM_SESSION_ID = 0x0001
    SOMEIP_PROTO_VER = 0x01
    SOMEIP_IFACE_VER = 0x01
    SOMEIP_MSG_TYPE = SOMEIP.TYPE_NOTIFICATION
    SOMEIP_RETCODE = SOMEIP.RET_E_OK

    # Flag definitions: bit mask plus the bit offset within the flags byte.
    _sdFlag = collections.namedtuple('Flag', 'mask offset')
    FLAGSDEF = {
        "REBOOT": _sdFlag(mask=0x80, offset=7),
        "UNICAST": _sdFlag(mask=0x40, offset=6)
    }

    name = "SD"
    # Wire layout: flags byte, 3 reserved bytes, then length-prefixed
    # entry and option arrays (lengths are big-endian 32-bit).
    fields_desc = [
        XByteField("flags", 0),
        X3BytesField("res", 0),
        FieldLenField("len_entry_array", None, length_of="entry_array", fmt="!I"),
        PacketListField("entry_array", None, _sdentry_class, length_from=lambda pkt: pkt.len_entry_array),
        FieldLenField("len_option_array", None, length_of="option_array", fmt="!I"),
        PacketListField("option_array", None, _sdoption_class, length_from=lambda pkt: pkt.len_option_array)
    ]

    def get_flag(self, name):
        """Return the value (0 or 1) of the named flag, or None if unknown."""
        name = name.upper()
        if name in self.FLAGSDEF:
            return ((self.flags & self.FLAGSDEF[name].mask) >> self.FLAGSDEF[name].offset)
        else:
            return None

    def set_flag(self, name, value):
        """Set the named flag to ``value & 1``; unknown names are ignored."""
        name = name.upper()
        if name in self.FLAGSDEF:
            # Clear the flag's bit (c_ubyte keeps the inverted mask in byte
            # range) and OR in the new bit at the flag's offset.
            self.flags = (self.flags & (ctypes.c_ubyte(~self.FLAGSDEF[name].mask).value)) | ((value & 0x01) << self.FLAGSDEF[name].offset)

    def set_entryArray(self, entry_list):
        """Replace the entry array; a single entry is wrapped into a list."""
        if isinstance(entry_list, list):
            self.entry_array = entry_list
        else:
            self.entry_array = [entry_list]

    def set_optionArray(self, option_list):
        """Replace the option array; a single option is wrapped into a list."""
        if isinstance(option_list, list):
            self.option_array = option_list
        else:
            self.option_array = [option_list]
6259906b7b25080760ed88ec
class HostBase(object):
    """Mixin describing the host collection.

    Expected format::

        "hosts": [{"id": UUID, "name": String}, ...]
    """

    @property
    def key(self):
        # Collection key under which host entries are stored.
        return const.MN_HOSTS

    @property
    def fields(self):
        # Attributes exposed for each host entry.
        return {"name", "id"}
6259906b4e4d562566373c1b
class Highlander(Exception):
    """Raised when more than one Layout or Widget is configured to fill the
    remaining screen space: there can be only one.  Hitting this exception
    means the application itself is misconfigured.

    If the name puzzles you, see:
    https://en.wikipedia.org/wiki/Highlander_(film)
    """
6259906b8e71fb1e983bd2dc
class GlobalWaypoint:
    """Utility class representing a waypoint in the global frame."""

    def __init__(self, lat=None, lon=None, alt=None):
        # Any coordinate component may be left unset (None).
        self.latitude, self.longitude, self.altitude = lat, lon, alt

    @classmethod
    def from_waypoint_message(cls, wp):
        """Build a GlobalWaypoint from a Waypoint message.

        Returns None when the message is not expressed in the global frame.
        """
        if wp.frame != msg.Waypoint.FRAME_GLOBAL:
            return None
        return cls(lat=wp.x, lon=wp.y, alt=wp.z)

    def to_waypoint_message(self):
        """Convert this waypoint back into a global-frame Waypoint message."""
        wp = msg.Waypoint()
        wp.frame = msg.Waypoint.FRAME_GLOBAL
        wp.x = self.latitude
        wp.y = self.longitude
        wp.z = self.altitude
        # Non-positional fields get fixed defaults: no autocontinue,
        # zero acceptance radius, zero wait time.
        wp.autocontinue = False
        wp.radius = 0.0
        wp.waitTime = rospy.Duration(secs=0.0)
        return wp
6259906b2ae34c7f260ac8fd
class DnsService(service_description.ServiceDescription):
    """The DNS service."""

    # Maps each supported major API version to its proxy implementation.
    supported_versions = {
        '2': _proxy.Proxy
    }
6259906bd486a94d0ba2d7d4
class Collections(enum.Enum):
    """Collections for all supported apis.

    Each member's value is a 4-tuple:
    (collection_name, path template, flat-path templates, required params).
    """

    PROJECTS = (
        'projects',
        'projects/{projectsId}',
        {},
        [u'projectsId']
    )
    PROJECTS_LOCATIONS = (
        'projects.locations',
        '{+name}',
        {
            '': 'projects/{projectsId}/locations/{locationsId}',
        },
        [u'name']
    )
    PROJECTS_LOCATIONS_INSTANCES = (
        'projects.locations.instances',
        '{+name}',
        {
            '': 'projects/{projectsId}/locations/{locationsId}/instances/'
                '{instancesId}',
        },
        [u'name']
    )
    PROJECTS_LOCATIONS_OPERATIONS = (
        'projects.locations.operations',
        '{+name}',
        {
            '': 'projects/{projectsId}/locations/{locationsId}/operations/'
                '{operationsId}',
        },
        [u'name']
    )

    def __init__(self, collection_name, path, flat_paths, params):
        # Enum.__init__ receives the member's value tuple unpacked; expose
        # the components as named attributes on each member.
        self.collection_name = collection_name
        self.path = path
        self.flat_paths = flat_paths
        self.params = params
6259906b460517430c432c60
class ExecComparison(object):
    """Interface for comparisons that diff two files via an external binary."""

    def get_command(self, reference_filepath, other_filepath, **kwargs):
        """Return the command used to compare the two given files.

        Subclasses must override this method.
        """
        raise NotImplementedError("Method must be implemented")
6259906b55399d3f05627d36
@implementer(IATTopicSearchCriterion)
class ATRelativePathCriterion(ATBaseCriterion):
    """A path criterion relative to the current location.

    Resolves a relative path ('..' and '.' segments allowed) against this
    criterion's container — or against the portal root when the path starts
    with '/' — and emits a catalog path query, optionally recursing into
    subfolders.
    """

    security = ClassSecurityInfo()
    schema = ATRelativePathCriterionSchema
    meta_type = 'ATRelativePathCriterion'
    archetype_name = 'Relative Path Criterion'
    shortDesc = 'Location in site relative to the current location'

    def getNavTypes(self):
        """Return the portal types configured for navigation."""
        ptool = self.plone_utils
        nav_types = ptool.typesToList()
        return nav_types

    @security.protected(View)
    def getCriteriaItems(self):
        """Return the catalog query items for this criterion.

        Returns a tuple of ``(field, {'query': path, 'depth': depth})``
        pairs; empty when the resolved path is empty.
        """
        result = []
        # depth == 1 restricts to direct children; -1 means unlimited depth.
        depth = (not self.Recurse() and 1) or -1
        relPath = self.getRelativePath()
        relPath = relPath.replace("\\", "/")
        portalPath = list(getToolByName(
            self, 'portal_url').getPortalObject().getPhysicalPath())
        # NOTE(review): an empty relative path would raise IndexError here —
        # presumably the schema guarantees a non-empty value; confirm.
        if relPath[0] == '/':
            # Leading slash: interpret the path relative to the portal root.
            path = '/'.join(portalPath) + relPath
        else:
            # Walk from the criterion's container, honouring '..' (never
            # climbing above the portal root) and ignoring '.' segments.
            folders = relPath.split('/')
            path = list(aq_parent(self).getPhysicalPath())
            for folder in folders:
                if folder == '..':
                    if path == portalPath:
                        break
                    else:
                        path = path[:-1]
                elif folder == '.':
                    pass
                else:
                    path.append(folder)
            path = '/'.join(path)
        # BUGFIX: the original tested ``path is not ''`` — an identity
        # comparison against a string literal that only worked through
        # CPython's interning accident (and raises a SyntaxWarning on
        # modern Pythons). Test for a non-empty string instead.
        if path:
            result.append((self.Field(), {'query': path, 'depth': depth}))
        return tuple(result)
6259906b99cbb53fe68326fc
class BadPropertyError (PyXBException):
    """Raised when a schema-component property is accessed on a component
    instance that does not define that property."""
6259906bd268445f2663a767
class AccessoryMDNSServiceInfo(ServiceInfo):
    """A mDNS service info representation of an accessory."""

    def __init__(self, accessory, state, zeroconf_server=None):
        """Build the zeroconf ServiceInfo advertising this accessory.

        :param accessory: the accessory being advertised (display_name and
            category are read from it)
        :param state: pairing/server state (mac, port, address, setup_id,
            config_version, paired flag)
        :param zeroconf_server: optional explicit mDNS server name; when
            omitted one is derived from the display name and MAC
        """
        self.accessory = accessory
        self.state = state
        adv_data = self._get_advert_data()
        valid_name = self._valid_name()
        # Last three MAC octets (colons stripped) disambiguate accessories
        # that share a display name.
        short_mac = self.state.mac[-8:].replace(":", "")
        name = f"{valid_name} {short_mac}.{HAP_SERVICE_TYPE}"
        valid_host_name = self._valid_host_name()
        server = zeroconf_server or f"{valid_host_name}-{short_mac}.local."
        super().__init__(
            HAP_SERVICE_TYPE,
            name=name,
            server=server,
            port=self.state.port,
            weight=0,
            priority=0,
            properties=adv_data,
            addresses=[socket.inet_aton(self.state.address)],
        )

    def _valid_name(self):
        """Return the display name sanitized for use in the service name.

        Invalid characters become spaces; leading/trailing space-dash runs
        are stripped (regexes are module-level constants).
        """
        return re.sub(
            LEADING_TRAILING_SPACE_DASH,
            "",
            re.sub(VALID_MDNS_REGEX, " ", self.accessory.display_name),
        )

    def _valid_host_name(self):
        """Return the display name sanitized into a dash-separated hostname."""
        return re.sub(
            DASH_REGEX,
            "-",
            re.sub(VALID_MDNS_REGEX, " ", self.accessory.display_name)
            .strip()
            .replace(" ", "-")
            .strip("-"),
        )

    def _setup_hash(self):
        """Return the setup hash: base64 of the first 4 bytes of
        SHA-512(setup_id + mac)."""
        setup_hash_material = self.state.setup_id + self.state.mac
        temp_hash = hashlib.sha512()
        temp_hash.update(setup_hash_material.encode())
        return base64.b64encode(temp_hash.digest()[:4]).decode()

    def _get_advert_data(self):
        """Return the mDNS TXT-record properties for the advertisement."""
        return {
            "md": self._valid_name(),
            "pv": HAP_PROTOCOL_SHORT_VERSION,
            "id": self.state.mac,
            "c#": str(self.state.config_version),
            "s#": "1",
            "ff": "0",
            "ci": str(self.accessory.category),
            # 'sf' advertises discoverability: "1" while unpaired.
            "sf": "0" if self.state.paired else "1",
            "sh": self._setup_hash(),
        }
6259906b1b99ca4002290140
class Region(object):
    """A specific region bound to a specific set of credentials.

    From a ``Region`` you can create ``ServiceEndpoint`` objects to talk to
    a particular service within that region using those credentials.

    :type region_name: str
    :param region_name: The name of the region (e.g. us-east-1).
    :type profile: str
    :param profile: Profile to associate with this object; any valid
        profile from your botocore config file. Defaults to the default
        profile when omitted.
    """

    def __init__(self, region_name, profile=None):
        self.region_name = region_name
        # A fresh botocore session carries the credential profile.
        self.session = botocore.session.get_session()
        self.session.profile = profile

    def __repr__(self):
        return self.region_name

    def debug(self):
        """Turn on botocore's debug logging for this session."""
        self.session.set_debug_logger()

    def get_service_endpoint(self, service_name):
        """Return a ``ServiceEndpoint`` for *service_name* in this region."""
        svc = self.session.get_service(service_name)
        return ServiceEndpoint(self, svc, svc.get_endpoint(self.region_name))
6259906b76e4537e8c3f0d98
@dependency.provider('endpoint_filter_api')
@dependency.requires('catalog_api', 'resource_api')
class Manager(manager.Manager):
    """Default pivot point for the Endpoint Filter backend.

    See :mod:`keystone.common.manager.Manager` for more details on how this
    dynamically calls the backend.
    """

    driver_namespace = 'keystone.endpoint_filter'

    def __init__(self):
        super(Manager, self).__init__(CONF.endpoint_filter.driver)

    def _get_endpoint_groups_for_project(self, project_id):
        """Return the endpoint groups associated with a project.

        Raises the resource backend's not-found error for an unknown
        project; returns an empty list when no groups are associated.
        """
        # Validate the project exists before querying associations.
        self.resource_api.get_project(project_id)
        try:
            refs = self.driver.list_endpoint_groups_for_project(
                project_id)
            endpoint_groups = [self.driver.get_endpoint_group(
                ref['endpoint_group_id']) for ref in refs]
            return endpoint_groups
        except exception.EndpointGroupNotFound:
            return []

    def _get_endpoints_filtered_by_endpoint_group(self, endpoint_group_id):
        """Return catalog endpoints matching every filter of the group."""
        endpoints = self.catalog_api.list_endpoints()
        filters = self.driver.get_endpoint_group(endpoint_group_id)['filters']
        filtered_endpoints = []
        for endpoint in endpoints:
            # An endpoint qualifies only if it matches ALL filter criteria.
            is_candidate = True
            for key, value in filters.items():
                if endpoint[key] != value:
                    is_candidate = False
                    break
            if is_candidate:
                filtered_endpoints.append(endpoint)
        return filtered_endpoints

    def list_endpoints_for_project(self, project_id):
        """Return the endpoints visible to a project, keyed by endpoint id.

        Combines direct project-endpoint associations with endpoints
        matched via the project's endpoint groups. Stale direct
        associations (pointing at deleted endpoints) are removed lazily
        as a side effect.
        """
        refs = self.driver.list_endpoints_for_project(project_id)
        filtered_endpoints = {}
        for ref in refs:
            try:
                endpoint = self.catalog_api.get_endpoint(ref['endpoint_id'])
                filtered_endpoints.update({ref['endpoint_id']: endpoint})
            except exception.EndpointNotFound:
                # The endpoint no longer exists; drop the stale association.
                self.remove_endpoint_from_project(ref['endpoint_id'],
                                                  project_id)
        endpoint_groups = self._get_endpoint_groups_for_project(project_id)
        for endpoint_group in endpoint_groups:
            endpoint_refs = self._get_endpoints_filtered_by_endpoint_group(
                endpoint_group['id'])
            for endpoint_ref in endpoint_refs:
                # Direct associations take precedence; only add new ids.
                if endpoint_ref['id'] not in filtered_endpoints:
                    filtered_endpoints[endpoint_ref['id']] = endpoint_ref
        return filtered_endpoints
6259906bf7d966606f7494c6
class SearchForUsers(ListModelView):
    """API suited for autocompleting users.

    ``GET`` searches users by username, email or full name using a
    case-insensitive contains match on the ``query`` querystring
    parameter. At most ``response_limit`` users are returned; queries of
    ``minimum_characters`` or fewer characters yield an empty result.
    """

    resource = UserResource
    permissions = (IsAuthenticated, IsAnyAdmin)
    minimum_characters = 2
    response_limit = 10

    def get_queryset(self):
        query = self.request.GET.get('query', '')
        # Too-short queries would match far too much; return nothing.
        if len(query) <= self.minimum_characters:
            return EmptyQuerySet()
        matches = (Q(username__icontains=query)
                   | Q(email__icontains=query)
                   | Q(devilryuserprofile__full_name__icontains=query))
        users = User.objects.filter(matches).select_related(
            'devilryuserprofile')
        return users[:self.response_limit]
6259906b44b2445a339b756a
class CityJsonLoaderDialogTest(unittest.TestCase):
    """Test that the dialog accepts and rejects correctly."""

    def setUp(self):
        # A fresh dialog instance for every test.
        self.dialog = CityJsonLoaderDialog(None)

    def tearDown(self):
        self.dialog = None

    def _click_and_result(self, standard_button):
        # Press the given standard button and report the dialog result code.
        self.dialog.button_box.button(standard_button).click()
        return self.dialog.result()

    def test_dialog_ok(self):
        self.assertEqual(self._click_and_result(QDialogButtonBox.Ok),
                         QDialog.Accepted)

    def test_dialog_cancel(self):
        self.assertEqual(self._click_and_result(QDialogButtonBox.Cancel),
                         QDialog.Rejected)
6259906b4428ac0f6e659d48
class TissueRadiusFitter(object):
    """Fit tissue oxygen extraction and tissue radius with the Krogh model.

    Provides fitting functions for the oxygen extraction from a capillary
    as a function of RBC flow and hemoglobin saturation drop.

    Attributes:
        sim_params (SimulationParameters): simulation parameters
        krogh_sol (KroghSolution2DCone): solver for the Krogh model
    """

    def __init__(self, sim_params):
        self.sim_params = sim_params
        self.krogh_sol = KroghSolution2DCone(sim_params)

    def fit_mean_oxygen_extraction_rate(self, hb_a, hb_v):
        """Return the oxygen extraction rate per length that reproduces the
        given arterial (hb_a) to venous (hb_v) saturation drop.
        """
        self.sim_params['HbInlet'] = hb_a
        # BUGFIX: the original wrote ``abs(self.krogh_sol.RBCFlux() < 1e-12)``
        # which takes abs() of a *boolean*, so any flux below 1e-12 —
        # including large negative fluxes — was treated as zero flow.
        # The intended check is on the magnitude of the flux.
        if abs(self.krogh_sol.RBCFlux()) < 1e-12:
            return 0.0
        # Keep a handle on the original consumption profile before the
        # residual function temporarily overrides it during root finding.
        self.krogh_sol.old_consumptionPerLengthAtX = self.krogh_sol.consumptionPerLengthAtX
        # Generous symmetric bracket for the root search.
        max_bound = np.pi*1e4*100e-6**2
        min_bound = -max_bound
        jt = brentq(self._residual_oxygen_extraction_rate_fit,
                    min_bound, max_bound, args=hb_v)
        return jt

    def fit_mean_tissue_radius(self, hb_a, hb_v):
        """Return the tissue cylinder radius consistent with the fitted
        extraction rate (negative sqrt convention for infeasible fits)."""
        jt = self.fit_mean_oxygen_extraction_rate(hb_a, hb_v)
        rt_squared = jt/(self.krogh_sol.M*np.pi) + self.sim_params['radiusWall']**2
        if rt_squared >= 0:
            return np.sqrt(rt_squared)
        else:
            # Signal an unphysical (negative) squared radius by returning
            # the negated root, preserving the original convention.
            return -np.sqrt(-rt_squared)

    def fit_tissue_volume(self, hb_a, hb_v):
        """Return the tissue volume consistent with the fitted extraction
        rate over the domain length."""
        jt = self.fit_mean_oxygen_extraction_rate(hb_a, hb_v)
        return self.sim_params['domainLength']*jt/self.krogh_sol.M

    def _residual_oxygen_extraction_rate_fit(self, jt, hb_v_goal):
        """Residual for brentq: venous saturation error at extraction jt."""
        self.krogh_sol.consumptionPerLengthAtX = lambda x: jt
        hb_v = self.krogh_sol.saturationAtX(self.krogh_sol.geometry['domainLength'])
        return hb_v - hb_v_goal
6259906b4f6381625f19a0b2