code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class MatanFormulaCaptchaTask(MatanCaptchaTask):
    """Matan task whose default representation is its formula."""

    # Concrete formula object; expected to be set by subclasses/instances.
    _formula = None

    def get(self):
        """Return the task rendered as the string form of its formula."""
        formula = self._formula
        return str(formula)
Matan task with default representation as a formula
62599034d6c5a102081e3243
class SoyoungSpider:
    """Crawl soyoung.com hospital search results for a keyword and dump the
    matching products to a CSV file.
    """

    def __init__(self, keyword):
        # Keyword used both in the search URL and to filter product titles.
        self.keyword = keyword
        # Search endpoint; the two {} slots are filled with (keyword, page).
        self.hospital_url = r'http://www.soyoung.com/searchNew/hospital?keyword={}&page={}'
        # Fixed query parameters appended once; site-specific values
        # (presumably cityId 1 selects a particular city — TODO confirm).
        query = {'cityId': 1, 'page_size': 100, '_json': 1, 'sort': 3}
        self.hospital_url = self.hospital_url + '&' + urlencode(query)
        self.page = 1  # current results page, advanced by get_info()
        self.headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36' }
        self.item = []  # accumulated product info dicts
        self.count = 1  # running counter used only for log output

    def get_info(self):
        """Fetch one page of results, keep products whose title contains the
        keyword, and recurse while the server reports more pages.

        NOTE(review): recursion depth equals the number of result pages;
        a very large result set could hit the recursion limit.
        """
        url = self.hospital_url.format(self.keyword, self.page)
        # MY_COOKIE is a module-level cookie mapping defined elsewhere.
        r = requests.get(url, headers=self.headers, cookies=MY_COOKIE)
        hospitals = r.json()['responseData']['hospital_list']
        hasmore = r.json()['responseData']['has_more']
        for hospital in hospitals:
            for product in hospital['products']:
                # Keep only products whose title mentions the keyword.
                if self.keyword in product['title']:
                    info = { 'hospital_id': hospital['hospital_id'], 'hospital': hospital['name_cn'], 'address': hospital['address'], 'title': product['title'], 'price': product['price_online'], 'link': 'http://y.soyoung.com/cp' + product['pid'], }
                    self.item.append(info)
                    log(f"[+] {self.count} Start to download {info['link']}")
                    self.count += 1
        if hasmore:
            # Server says more pages exist: advance and fetch recursively.
            self.page += 1
            self.get_info()

    def save(self, save_path):
        """Write all collected items to a dated CSV file under save_path."""
        log(f'[+] Total item: {len(self.item)}')
        today = datetime.datetime.today().strftime('%Y-%m-%d')
        file = f'{today}{self.keyword}新氧销售情况.csv'
        path = os.path.join(save_path, file)
        log(f'[+] Start to save file to {path}')
        with open(path, "w+", newline='', encoding='utf-8') as csvfile:
            fieldnames = [ 'title', 'price', 'link', 'address', 'hospital', 'hospital_id' ]
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
            writer.writeheader()
            for row in self.item:
                writer.writerow(row)
        log('[+] Save success')

    def run(self):
        """Entry point: start crawling from page 1."""
        self.get_info()
docstring for SoyoungSpider
625990341f5feb6acb163d0f
class RecursiveA:
    """RPC service that calls a peer `RecursiveB` service in order to
    return its result."""

    def __init__(self, url):
        # URL at which the peer RecursiveB service is reachable.
        self.url = url

    @rpc()
    def status(self) -> List[str]:
        """Return the list of status names this service reports."""
        return ['running', 'pending']

    @rpc()
    def poll(self, name: str = 'running') -> int:
        """Ask the remote RecursiveB service how many entries have status
        *name* and return that count."""
        # 1-second total timeout; horz=False — presumably selects a
        # vertical (client→server) connection mode — TODO confirm.
        s = service(RecursiveB, self.url, ClientConfig(timeout_total=1., horz=False))
        return s.count_status(name)

    @signal()
    def exit(self):
        """Signal handler: terminate the service loop."""
        raise TerminationException()
Call `RecursiveB` in order to return its result.
6259903426238365f5fadc71
class StorageBucketAccessControlsDeleteRequest(_messages.Message):
    """A StorageBucketAccessControlsDeleteRequest object.

    Fields:
      bucket: Name of a bucket.
      entity: The entity holding the permission. Can be user-userId,
        user-emailAddress, group-groupId, group-emailAddress, allUsers,
        or allAuthenticatedUsers.
      userProject: The project to be billed for this request. Required
        for Requester Pays buckets.
    """
    bucket = _messages.StringField(1, required=True)
    entity = _messages.StringField(2, required=True)
    userProject = _messages.StringField(3)
A StorageBucketAccessControlsDeleteRequest object. Fields: bucket: Name of a bucket. entity: The entity holding the permission. Can be user-userId, user- emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers. userProject: The project to be billed for this request. Required for Requester Pays buckets.
625990348c3a8732951f7676
class VerticaLinkedService(LinkedService):
    """Vertica linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection.
    :type additional_properties: dict[str, object]
    :param type: Required. Type of linked service. Constant filled by server.
    :type type: str
    :param connect_via: The integration runtime reference.
    :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param connection_string: An ODBC connection string. Type: string,
     SecureString or AzureKeyVaultSecretReference.
    :type connection_string: object
    :param pwd: The Azure key vault secret reference of password in
     connection string.
    :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(VerticaLinkedService, self).__init__(**kwargs)
        self.type = 'Vertica'
        self.connection_string = kwargs.get('connection_string', None)
        self.pwd = kwargs.get('pwd', None)
        self.encrypted_credential = kwargs.get('encrypted_credential', None)
Vertica linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object
6259903415baa723494630b8
class Gamma(Prior):
    """Gamma probability distribution prior (Bishop 2006 notation).

    :param a: shape parameter
    :param b: rate parameter (warning: it's the *inverse* of the scale)

    Instances are interned: constructing ``Gamma(a, b)`` twice with the
    same parameters returns the same (weakly referenced) object.
    """
    domain = _POSITIVE
    # Weak references to live instances, used to intern by (a, b).
    _instances = []

    def __new__(cls, a=1, b=.5):
        if cls._instances:
            # Drop dead weak references, then reuse a live matching instance.
            cls._instances[:] = [instance for instance in cls._instances if instance()]
            for instance in cls._instances:
                if instance().a == a and instance().b == b:
                    return instance()
        newfunc = super(Prior, cls).__new__
        if newfunc is object.__new__:
            # object.__new__ accepts no extra arguments.
            o = newfunc(cls)
        else:
            o = newfunc(cls, a, b)
        cls._instances.append(weakref.ref(o))
        return cls._instances[-1]()

    @property
    def a(self):
        """Shape parameter."""
        return self._a

    @property
    def b(self):
        """Rate parameter."""
        return self._b

    def __init__(self, a, b):
        self._a = float(a)
        self._b = float(b)
        # Log normalization constant of the Gamma pdf:
        # log(b**a / Gamma(a)) = a*log(b) - gammaln(a).
        # (Now computed from the float-converted attributes for consistency.)
        self.constant = -gammaln(self._a) + self._a * np.log(self._b)

    def __str__(self):
        return "Ga({:.2g}, {:.2g})".format(self.a, self.b)

    def summary(self):
        """Return a dict of standard summaries of the distribution."""
        ret = {"E[x]": self.a / self.b,
               "E[ln x]": digamma(self.a) - np.log(self.b),
               "var[x]": self.a / self.b / self.b,
               "Entropy": gammaln(self.a) - (self.a - 1.) * digamma(self.a) - np.log(self.b) + self.a}
        # BUGFIX: both branches now use the same 'Mode' key; the else branch
        # previously wrote a lowercase 'mode' key, producing inconsistent
        # dictionaries depending on the value of a.
        if self.a > 1:
            ret['Mode'] = (self.a - 1.) / self.b
        else:
            ret['Mode'] = np.nan
        return ret

    def lnpdf(self, x):
        """Log density at x (x > 0)."""
        return self.constant + (self.a - 1) * np.log(x) - self.b * x

    def lnpdf_grad(self, x):
        """Derivative of the log density with respect to x."""
        return (self.a - 1.) / x - self.b

    def rvs(self, n):
        """Draw n random samples (note numpy parameterizes by scale=1/rate)."""
        return np.random.gamma(scale=1. / self.b, shape=self.a, size=n)

    @staticmethod
    def from_EV(E, V):
        """Build a Gamma whose mean is E and variance is V."""
        a = np.square(E) / V
        b = E / V
        return Gamma(a, b)

    def __getstate__(self):
        return self.a, self.b

    def __setstate__(self, state):
        self._a = state[0]
        self._b = state[1]
        # Recompute the cached normalization constant after unpickling.
        self.constant = -gammaln(self.a) + self.a * np.log(self.b)
Implementation of the Gamma probability function, coupled with random variables. :param a: shape parameter :param b: rate parameter (warning: it's the *inverse* of the scale) .. Note:: Bishop 2006 notation is used throughout the code
625990344e696a045264e6b1
class MemArchive(Archive):
    """Provides an archive interface over FileObjects kept in memory."""

    def __init__(self, items, mode='r'):
        # name -> FileObject mapping. `mode` is accepted for interface
        # compatibility with other Archive subclasses but is unused here.
        self.dic = {}
        for name, csvstr in items:
            self.add(name, csvstr)
        self.opened = set()

    def add(self, name, csvstr):
        """Store csvstr under name as an in-memory FileObject."""
        self.dic[name] = FileObject(name, csvstr)

    def _open(self, name, mode='r'):
        # Any writing mode starts from a fresh, empty in-memory file.
        if mode in ('w', 'w+', 'r+'):
            self.dic[name] = f = FileObject(name, '')
            return f
        try:
            return self.dic[name]
        except KeyError:
            # Reading a name that was never added is an archive error.
            raise NotInArchive(name)

    def extract_filenames(self, prefix=''):
        """Return the stored names starting with prefix ('' matches all)."""
        return [f for f in self.dic if f.startswith(prefix)]
Provides an archive interface over FileObjects in memory
625990348c3a8732951f7677
class BaseXMLSchemaValidator:
    """Validate XML fragments against an XML Schema (XSD).

    Collaborators (schema loader, XML parser, error-message translator)
    are injectable for testing; sensible defaults are created otherwise.
    """

    def __init__(self, schema_loader=None, parser=None, translator=None):
        self._schema_loader = schema_loader or XMLSchemaFileLoader()
        self._parser = parser or etree.XMLParser()
        self._translator = translator or Libxml2Translator()

    def _run(self, xml, schema_type):
        """Parse `xml`, load the schema for `schema_type`, and validate."""
        xml_doc = self._parse_xml(xml)
        schema = self._load_schema(schema_type)
        return self._validate_xml(xml_doc, schema)

    def _parse_xml(self, xml):
        return etree.fromstring(xml, parser=self._parser)

    def _load_schema(self, schema_type):
        return self._schema_loader.load(schema_type)

    def _validate_xml(self, xml_doc, schema):
        try:
            schema.assertValid(xml_doc)
        except Exception:
            # assertValid populated schema.error_log; convert it into a
            # translated XMLSchemaValidationError.
            self._handle_errors(schema.error_log)

    def _handle_errors(self, error_log):
        """Build, localize and raise validation errors from an error log.

        Always raises XMLSchemaValidationError.
        """
        errors = self._build_errors(error_log)
        # BUGFIX: removed a leftover `print(errors)` debug statement that
        # wrote raw error details to stdout on every validation failure.
        localized_errors = self._localize_messages(errors)
        raise XMLSchemaValidationError(localized_errors)

    @staticmethod
    def _build_errors(error_log):
        """Map libxml2 log entries to ValidationDetail records."""
        return [
            ValidationDetail(None, err.line, err.column, err.domain_name,
                             err.type_name, _strip_namespaces(err.message),
                             err.path)
            for err in error_log
        ]

    def _localize_messages(self, errors):
        return self._translator.translate_many(errors)
Validate XML fragments against XML Schema (XSD).
625990348e05c05ec3f6f6ea
class CannotChangeSubscription(BaseSubscriptionErrors):
    """The subscription change cannot be fulfilled.

    Raised when the person is not allowed to change their subscription
    address — for example, when the person is not a member of the team
    linked to this mailing list, when the person is a team, or when the
    person does not own the given email address.
    """
    pass
The subscription change cannot be fulfilled. This is raised when the person is not allowed to change their subscription address. For example, this is raised when the person is not a member of the team linked to this mailing list, when `person` is a team, or when `person` does not own the given email address.
6259903423e79379d538d629
class MainDriver:
    """Simple driver for executing a series of *Run_* sections as specified
    by an :obj:`InputParmManager`-style parameter object."""

    def __init__(self, prmMngr):
        # Parameter manager providing the Run_* sections to execute.
        self.prmMngr = prmMngr

    def executeRuns(self):
        """Execute every enabled section whose name starts with 'Run_'.

        For each filter sub-section of an enabled run: log a banner (on MPI
        rank 0 only, or always for serial runs), synchronise all ranks,
        resolve the filter class for the run's input data type, then run it
        via loadExecuteWrite(). Bookkeeping is reset after each filter.
        """
        self.prmMngr.resetUsedParmsList()
        logstream.mstOut.resetStreamContent();
        sectionNameList = self.prmMngr.getSectionNames()
        for name in sectionNameList:
            # Only sections named 'Run_*' are run candidates.
            if (name.find("Run_") == 0):
                runSect = self.prmMngr.getSection(name)
                if (runSect.getParameter("enabled").lower() == "true"):
                    doRunParmSectionHacks(runSect)
                    for filtSectName in runSect.getSectionNames():
                        # Banner logging happens once: on the MPI root rank,
                        # or unconditionally when running without MPI.
                        if ((mango.mpi.world == None) or (mango.mpi.world.Get_rank() == 0)):
                            doFilterRunBannerLogging(filtSectName)
                        if (mango.mpi.world != None):
                            # Keep all ranks in step before running the filter.
                            mango.mpi.world.barrier()
                        filtSect = runSect.getSection(filtSectName)
                        filtDataType = mango.makeMType(runSect.getParameter("input_data_type"))
                        filtCls = findMainDriverFilter(filtSectName, filtDataType)
                        filt = filtCls()
                        filt.setFilterParms(filtSect)
                        filt.setRunParms(runSect)
                        filt.loadExecuteWrite()
                        # Reset bookkeeping so each filter run starts clean.
                        self.prmMngr.resetUsedParmsList()
                        logstream.mstOut.resetStreamContent();
Simple driver for executing a series of *Run* sections as specified by a :obj:`InputParmManager` object.
625990348a349b6b4368735e
class StreamCollectionEmbedded(object):
    """Embedded collection of Stream resources.

    NOTE: This class is auto generated by the swagger code generator
    program. Do not edit the class manually.
    """

    # Attribute name -> swagger type string.
    swagger_types = {
        'streams': 'list[Stream]'
    }

    # Attribute name -> JSON key.
    attribute_map = {
        'streams': 'streams'
    }

    def __init__(self, streams=None):
        self._streams = None
        if streams is not None:
            self.streams = streams

    @property
    def streams(self):
        """The streams held by this collection."""
        return self._streams

    @streams.setter
    def streams(self, streams):
        self._streams = streams

    def to_dict(self):
        """Return the model's properties as a dict, converting nested
        swagger models recursively."""
        def unpack(value):
            # Nested swagger models serialize themselves; plain values pass.
            return value.to_dict() if hasattr(value, "to_dict") else value

        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [unpack(element) for element in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: unpack(val) for key, val in value.items()}
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return a pretty-printed string of the model's dict form."""
        return pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        """True when other is the same model type with equal attributes."""
        if not isinstance(other, StreamCollectionEmbedded):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259903407d97122c4217dc6
class AbstractChannel(object):
    """Superclass for both the Connection, which is treated as channel 0,
    and other user-created Channel objects.

    Subclasses must have a _METHOD_MAP class property, mapping between
    AMQP method signatures and Python methods.
    """

    def __init__(self, connection, channel_id):
        self.connection = connection
        self.channel_id = channel_id
        # Register ourselves in the connection's channel table.
        connection.channels[channel_id] = self
        # Incoming methods queued for this channel but not yet dispatched.
        self.method_queue = []
        self.auto_encode_decode = False

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        # Context-manager exit always closes the channel.
        self.close()

    def _send_method(self, method_sig, args=bytes(), content=None):
        """Send a method frame over the channel.

        Raises RecoverableConnectionError if the connection is gone.
        """
        conn = self.connection
        if conn is None:
            raise RecoverableConnectionError('connection already closed')
        if isinstance(args, AMQPWriter):
            # Accept an AMQPWriter and serialize it to raw bytes.
            args = args.getvalue()
        conn.method_writer.write_method( self.channel_id, method_sig, args, content, self.auto_encode_decode )

    def close(self):
        """Close the channel; must be implemented by subclasses."""
        raise NotImplementedError('Must be overriden in subclass')

    def wait(self, allowed_methods=None):
        """Block until a method (optionally restricted to allowed_methods)
        arrives on this channel, dispatch it, and return the result."""
        method_sig, args, content = self.connection._wait_method( self.channel_id, allowed_methods)
        return self.dispatch_method(method_sig, args, content)

    def dispatch_method(self, method_sig, args, content):
        """Look up method_sig in _METHOD_MAP and invoke the handler.

        Raises AMQPNotImplementedError for unknown signatures.
        """
        if content and self.auto_encode_decode and hasattr(content, 'content_encoding'):
            try:
                # Best-effort decode of the body via its declared encoding.
                content.body = content.body.decode(content.content_encoding)
            except Exception:
                # Leave the body as raw bytes when decoding fails.
                pass
        try:
            amqp_method = self._METHOD_MAP[method_sig]
        except KeyError:
            raise AMQPNotImplementedError( 'Unknown AMQP method {0!r}'.format(method_sig))
        if content is None:
            return amqp_method(self, args)
        else:
            return amqp_method(self, args, content)

    # Overridden by subclasses with a real signature -> handler mapping.
    _METHOD_MAP = {}
Superclass for both the Connection, which is treated as channel 0, and other user-created Channel objects. The subclasses must have a _METHOD_MAP class property, mapping between AMQP method signatures and Python methods.
6259903463f4b57ef0086603
class CommercialTests(support.tests.SupportTests):
    """Test functionality regarding commercial releases."""

    def test_remove_gpl(self):
        """remove_gpl() must strip the GPL notice, reject files without
        one, and leave the file untouched in dry-run mode."""
        pyfile = 'python2/mysql/connector/connection.py'

        # Normal operation: the GPL block is replaced by marker comments.
        tmpfile = tempfile.NamedTemporaryFile(mode='w+')
        with open(pyfile, 'r') as source:
            tmpfile.write(source.read())
        commercial.remove_gpl(tmpfile.name)
        tmpfile.seek(0)
        data = tmpfile.read()
        self.assertTrue('# Following empty comments are intentional.' in data)
        self.assertTrue('# End empty comments.' in data)
        self.assertFalse('GPLv2' in data)
        self.assertFalse('Free Software' in data)
        tmpfile.close()

        # A file without a GPL notice must raise DistutilsError.
        tmpfile = tempfile.NamedTemporaryFile(mode='w+')
        tmpfile.write("# No content.")
        self.assertRaises(DistutilsError, commercial.remove_gpl, tmpfile.name)
        tmpfile.close()

        # Dry run: the file content must be left unchanged.
        tmpfile = tempfile.NamedTemporaryFile(mode='w+', delete=False)
        with open(pyfile, 'r') as source:
            exp = source.read()
        tmpfile.write(exp)
        # Flush so remove_gpl (which reopens the file by name) sees the data.
        tmpfile.flush()
        commercial.remove_gpl(tmpfile.name, dry_run=1)
        # BUGFIX: the original called assertTrue(exp, <content>), which
        # always passes because the file content was used as the assertion
        # *message*; assertEqual performs the intended comparison.
        with open(tmpfile.name, 'r') as result:
            self.assertEqual(exp, result.read())
        tmpfile.close()
Test functionality regarding commercial releases
625990345e10d32532ce4193
class FakeUser(models.User):
    """A user object built independently of any database."""

    def __init__(
        self,
        name,
        id,
        have_access_to_free_instances=True,
        is_super_user=False,
        is_blocked=False,
        shape=None,
        default_coord=None,
    ):
        self.id = id
        self.login = name
        # Derive the account type from the flags; super-user wins over the
        # free-instances flag (matching the original assignment order).
        if is_super_user:
            self.type = 'super_user'
        elif have_access_to_free_instances:
            self.type = 'with_free_instances'
        else:
            self.type = 'without_free_instances'
        self.end_point_id = None
        self._is_blocked = is_blocked
        self.shape = shape
        self.default_coord = default_coord

    @classmethod
    def get_from_token(cls, token, valid_until):
        # Fake users must never be resolved from tokens in tests.
        assert False

    def is_blocked(self, datetime_utc):
        """Return whether this fake user is blocked (the date is ignored)."""
        return self._is_blocked
We create a user independent from a database
625990348c3a8732951f7679
class LoginView(APIView):
    """Log a user in and return a Knox token plus serialized user data."""

    authentication_classes = api_settings.DEFAULT_AUTHENTICATION_CLASSES
    serializer_class = LogInSerializer

    def post(self, request, format=None):
        """Validate credentials and issue a new auth token.

        Raises rest_framework.exceptions.ValidationError (HTTP 400) when
        the submitted data is invalid.
        """
        serializer = LogInSerializer(
            data=request.data, context={'request': request})
        # raise_exception=True makes is_valid() raise ValidationError on bad
        # input, so the previous `else` branch that returned
        # serializer.errors was unreachable and has been removed.
        serializer.is_valid(raise_exception=True)
        token = AuthToken.objects.create(request.user)
        return Response({
            "user": KNOXUSERSERIALIZER(request.user).data,
            "token": token,
        })
Login View
625990349b70327d1c57fea5
class BatchedDiffPoolLayer(nn.Module):
    """DIFFPOOL layer from `"Hierarchical Graph Representation Learning with
    Differentiable Pooling" <https://arxiv.org/pdf/1806.08804.pdf>`__.

    Parameters
    ----------
    in_feats : int
        Size of each input sample.
    out_feats : int
        Size of each output sample.
    assign_dim : int
        Size of the next (pooled) adjacency matrix.
    batch_size : int
        Size of each mini-batch.
    dropout : float, optional
        Dropout rate, default ``0.5``.
    link_pred_loss : bool, optional
        Add a link-prediction auxiliary loss if True, default ``True``.
    entropy_loss : bool, optional
        NOTE(review): this flag is currently ignored — the entropy loss is
        always computed in ``forward``.
    """

    def __init__(self, in_feats, out_feats, assign_dim, batch_size, dropout=0.5, link_pred_loss=True, entropy_loss=True):
        super(BatchedDiffPoolLayer, self).__init__()
        self.assign_dim = assign_dim
        self.dropout = dropout
        self.use_link_pred = link_pred_loss
        self.batch_size = batch_size
        # GNNs producing node embeddings Z and soft cluster assignments S.
        self.embd_gnn = SAGEConv(in_feats, out_feats, normalize=False)
        self.pool_gnn = SAGEConv(in_feats, assign_dim, normalize=False)
        # Auxiliary losses collected during forward(), summed by get_loss().
        self.loss_dict = dict()

    def forward(self, x, edge_index, batch, edge_weight=None):
        """Pool a batched graph; returns (pooled adjacency, pooled features).

        ``batch`` maps each node to its graph id. Assignments are masked so
        the nodes of each graph only use that graph's slice of the
        assignment columns.
        """
        embed = self.embd_gnn(x, edge_index)
        # Soft assignment matrix S: softmax over assignment columns.
        pooled = F.softmax(self.pool_gnn(x, edge_index), dim=-1)
        device = x.device
        masked_tensor = []
        value_set, value_counts = torch.unique(batch, return_counts=True)
        batch_size = len(value_set)
        for i in value_counts:
            # Ones block per graph: graph g's nodes may only be assigned to
            # the g-th slice of the assignment dimension.
            masked = torch.ones((i, int(pooled.size()[1]/batch_size)))
            masked_tensor.append(masked)
        # Block-diagonal mask over (num_nodes, assign_dim).
        masked = torch.FloatTensor(block_diag(*masked_tensor)).to(device)
        # Renormalize the masked assignments so each row sums to 1.
        result = torch.nn.functional.softmax(masked * pooled, dim=-1)
        result = result * masked
        result = result / (result.sum(dim=-1, keepdim=True) + 1e-13)
        # Pooled node features: h = S^T Z.
        h = torch.matmul(result.t(), embed)
        if not edge_weight:
            # Default to unit edge weights.
            edge_weight = torch.ones(edge_index.shape[1]).to(x.device)
        adj = torch.sparse_coo_tensor(edge_index, edge_weight)
        # Pooled adjacency: A' = S^T A S.
        adj_new = torch.sparse.mm(adj, result)
        adj_new = torch.mm(result.t(), adj_new)
        if self.use_link_pred:
            # Link-prediction loss: || A - S S^T ||_F / N^2.
            adj_loss = torch.norm((adj.to_dense() - torch.mm(result, result.t()))) / np.power((len(batch)), 2)
            self.loss_dict["adj_loss"] = adj_loss
        # Mean per-node assignment entropy (encourages crisp assignments).
        entropy_loss = (torch.distributions.Categorical(probs=pooled).entropy()).mean()
        assert not torch.isnan(entropy_loss)
        self.loss_dict["entropy_loss"] = entropy_loss
        return adj_new, h

    def get_loss(self):
        """Sum of all auxiliary losses collected in the last forward pass."""
        loss_n = 0
        for _, value in self.loss_dict.items():
            loss_n += value
        return loss_n
DIFFPOOL from paper `"Hierarchical Graph Representation Learning with Differentiable Pooling" <https://arxiv.org/pdf/1806.08804.pdf>`__. .. math:: X^{(l+1)} = {S^{(l)}}^T Z^{(l)} A^{(l+1)} = {S^{(l)}}^T A^{(l)} S^{(l)} Z^{(l)} = GNN_{l, embed}(A^{(l)}, X^{(l)}) S^{(l)} = softmax(GNN_{l,pool}(A^{(l)}, X^{(l)})) Parameters ---------- in_feats : int Size of each input sample. out_feats : int Size of each output sample. assign_dim : int Size of next adjacency matrix. batch_size : int Size of each mini-batch. dropout : float, optional Size of dropout, default: ``0.5``. link_pred_loss : bool, optional Use link prediction loss if True, default: ``True``.
6259903407d97122c4217dc8
class OrderItem(BaseContent):
    """An order item holds price, tax and product information captured at
    the moment the customer checked out their cart, so no further
    recalculation is needed.
    """

    implements(IOrderItem)
    security = ClassSecurityInfo()
    _at_rename_after_creation = True
    # Archetypes schema: base fields plus the module-level `schema` fields.
    schema = BaseSchema.copy() + schema.copy()

    security.declarePublic('getProduct')
    def getProduct(self):
        """Return the referenced product, or None when no reference exists."""
        try:
            return self.getRefs('orderitem_product')[0]
        except IndexError:
            return None

    security.declarePublic('setProduct')
    def setProduct(self,product):
        """Reference *product* as this order item's product."""
        self.addReference(product, "orderitem_product")
An order item holds price, tax and product information from the moment the customer bought, i.e. checked out, their cart. This means it doesn't need any calculations any more.
62599034d4950a0f3b1116cf
class Function(object):
    """Wrap a callable together with the arguments to invoke it with.

    Usage:
        f = Function(self.do_this, path)
        f.start()          # invoke and store the result
        f.get_result()     # retrieve the stored result
    """

    def __init__(self, func, *args, **kwargs):
        self.func = func
        self.args = args
        self.kwargs = kwargs
        # Populated by start(); stays None until then.
        self.result = None

    def start(self):
        """Invoke the wrapped callable and remember its return value."""
        self.result = self.func(*self.args, **self.kwargs)

    def call(self):
        """Invoke the wrapped callable and return the value without
        storing it."""
        return self.func(*self.args, **self.kwargs)

    def set_args(self, *args, **kwargs):
        """Replace the positional and keyword arguments for later calls."""
        self.args, self.kwargs = args, kwargs

    def get_result(self):
        """Return the value stored by the last start() (None before)."""
        return self.result
Provides an interface to define and call a function. Usage: f = Function(self.do_this, path) f.start()
6259903491af0d3eaad3af50
class KnnModel(object):
    """K-nearest-neighbours (KNN) classification model."""

    def __init__(self, data, k=20, similarity="euclidean"):
        # Training samples; each item is expected to expose `.data` and
        # `.target` attributes (see classify()).
        self.data = data
        self.k = k  # number of neighbours considered per query
        # Name (or dotted path) of the similarity/distance function.
        self.similarity = similarity
        self.getsimilarityfunc()

    def getsimilarityfunc(self):
        """Resolve and cache the similarity function by name.

        A bare name is looked up under knn.SimilarityFunc; a dotted path
        is imported as-is via ReflectUtil.
        """
        if not hasattr(self, 'similarityfunc'):
            method_name = self.similarity
            if self.similarity.find('.')==-1:
                method_name = "knn.SimilarityFunc." + self.similarity
            self.similarityfunc = ReflectUtil.import_class(method_name)
        return self.similarityfunc

    def classify(self, data):
        """Classify each item by majority target among its k nearest
        training samples; returns a numpy array of predictions."""
        result = []
        for item in data:
            # The k training samples closest to `item` (smallest similarity
            # value first — this assumes the function is a distance).
            similar_set = sorted(self.data, key=lambda c: self.getsimilarityfunc()(item, c.data))[:self.k]
            # Majority vote: the statistical mode of the neighbours' targets.
            ser = pd.Series(item.target for item in similar_set)
            result.append(ser.mode().values[0])
        return np.array(result)
K-nearest neighbours (KNN) algorithm model.
6259903466673b3332c31514
class Config(File):
    """Represents the repository's .git/config file."""
    pass
.git/config
62599034711fe17d825e152c
class NumpyDataLoader:
    """Expose one or more numpy arrays through a dataloader interface.

    :param numpy.array *inputs: arrays sharing the same first dimension.
    """

    def __init__(self, *inputs):
        self.inputs = inputs
        self.n_inputs = len(inputs)

    def __len__(self):
        """Number of samples: the length of the first array's first axis."""
        return self.inputs[0].shape[0]

    def __getitem__(self, item):
        """Return the item-th sample.

        With a single input array the bare slice is returned; with several
        arrays a list of slices (one per array) is returned.
        """
        if self.n_inputs == 1:
            return self.inputs[0][item]
        return [arr[item] for arr in self.inputs]
Convert numpy arrays into a dataloader. :param numpy.array *inputs: Numpy arrays.
62599034e76e3b2f99fd9b2d
class Base(object):
    """Base class for BaseSocket and BaseServer.

    Subclasses may wish to override class variables:
    - _AllStates: a set of states (strings)
    - _DoneStates: states indicating the object is done (e.g. Closed, Failed)
    - _ReadyStates: states indicating the object is ready for use
      (e.g. Connected)
    """

    def __init__(self, state, stateCallback=None, name=""):
        self._state = state
        self._reason = ""
        # A single construction-time callback becomes a one-element list.
        self._stateCallbackList = [stateCallback] if stateCallback else []
        self.name = name

    @property
    def fullState(self):
        """The (state, reason) pair."""
        return (self._state, self._reason)

    @property
    def state(self):
        """The current state."""
        return self._state

    @property
    def isDone(self):
        """True when the current state is terminal."""
        return self._state in self._DoneStates

    @property
    def isReady(self):
        """True when the object is ready for use."""
        return self._state in self._ReadyStates

    @property
    def didFail(self):
        """True when the current state is a failure state."""
        return self._state in self._FailedStates

    def addStateCallback(self, callFunc):
        """Register an additional state callback."""
        self._stateCallbackList.append(callFunc)

    def removeStateCallback(self, callFunc, doRaise=False):
        """Remove a previously registered callback.

        Returns True on success and False when absent; raises ValueError
        instead of returning False when doRaise is true.
        """
        if callFunc in self._stateCallbackList:
            self._stateCallbackList.remove(callFunc)
            return True
        if doRaise:
            raise ValueError("Callback %r not found" % callFunc)
        return False

    def setStateCallback(self, callFunc=None):
        """Replace all callbacks with callFunc (or clear them when None)."""
        self._stateCallbackList = [callFunc] if callFunc else []

    def setName(self, newName):
        """Set the object's name."""
        self.name = newName

    def _clearCallbacks(self):
        # Drop every registered state callback.
        self._stateCallbackList = []

    def _setState(self, newState, reason=None):
        """Set the state (and optionally the reason) and fire callbacks.

        Callbacks are cleared once a done state is reached. Raises
        RuntimeError if the object is already done.
        """
        if self.isDone:
            raise RuntimeError("Already done; cannot change state")
        self._state = newState
        if reason is not None:
            self._reason = str(reason)
        for stateCallback in self._stateCallbackList:
            try:
                stateCallback(self)
            except Exception as e:
                # A failing callback must not prevent the others from running.
                sys.stderr.write("%s state stateCallback %s failed: %s\n" % (self, stateCallback, e,))
                traceback.print_exc(file=sys.stderr)
        if self.isDone:
            try:
                self._clearCallbacks()
            except Exception as e:
                sys.stderr.write("%s failed to clear callbacks: %s\n" % (self, e,))
                traceback.print_exc(file=sys.stderr)

    def _getArgStr(self):
        # Argument portion of repr; subclasses extend this.
        return "name=%r" % (self.name)

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self._getArgStr())
Base class for BaseSocket and BaseServer Subclasses may wish to override class variables: - _AllStates: a set of states (strings) - _DoneStates: a set of states indicating the object is done (e.g. Closed or Failed) - _ReadyStates: a set of states indicating the object is ready for use (e.g. Connected)
6259903576d4e153a661db02
class HtmlPageView(DetailView):
    """Render a single HtmlPage object."""

    model = HtmlPage
    template_name = 'html_page.html'

    def get_context_data(self, **kwargs):
        """Extend the default context with the page's SEO tag fields."""
        context = super(HtmlPageView, self).get_context_data(**kwargs)
        context.update(
            title_tag=self.object.title_tag,
            description_tag=self.object.description_tag,
        )
        return context
View for render a HtmlPage object.
625990355e10d32532ce4194
class TestSourceTypes(unittest.TestCase):
    """SourceTypes unit test stubs."""

    def setUp(self):
        # No fixtures required.
        pass

    def tearDown(self):
        pass

    def testSourceTypes(self):
        """Smoke-test construction of the SourceTypes model."""
        model = smartrecruiters_python_client.models.source_types.SourceTypes()
SourceTypes unit test stubs
62599035c432627299fa4119
class ExpQWeightedPolicy(snt.Module): <NEW_LINE> <INDENT> def __init__(self, actor_network: snt.Module, critic_network: snt.Module, beta: float = 1.0, num_action_samples: int = 16): <NEW_LINE> <INDENT> super().__init__(name='ExpQWeightedPolicy') <NEW_LINE> self._actor_network = actor_network <NEW_LINE> self._critic_network = critic_network <NEW_LINE> self._num_action_samples = num_action_samples <NEW_LINE> self._beta = beta <NEW_LINE> <DEDENT> def __call__(self, inputs: types.NestedTensor) -> tf.Tensor: <NEW_LINE> <INDENT> tiled_inputs = tf2_utils.tile_nested(inputs, self._num_action_samples) <NEW_LINE> shape = tf.shape(tree.flatten(tiled_inputs)[0]) <NEW_LINE> n, b = shape[0], shape[1] <NEW_LINE> tf.debugging.assert_equal(n, self._num_action_samples, 'Internal Error. Unexpected tiled_inputs shape.') <NEW_LINE> dummy_zeros_n_b = tf.zeros((n, b)) <NEW_LINE> merge = lambda x: snt.merge_leading_dims(x, 2) <NEW_LINE> tiled_inputs = tree.map_structure(merge, tiled_inputs) <NEW_LINE> tiled_actions = self._actor_network(tiled_inputs) <NEW_LINE> q = self._critic_network(tiled_inputs, tiled_actions) <NEW_LINE> boltzmann_probs = tf.nn.softmax(q / self._beta) <NEW_LINE> boltzmann_probs = snt.split_leading_dim(boltzmann_probs, dummy_zeros_n_b, 2) <NEW_LINE> boltzmann_probs = tf.transpose(boltzmann_probs, perm=(1, 0)) <NEW_LINE> action_idx = tfp.distributions.Categorical(probs=boltzmann_probs).sample() <NEW_LINE> action_idx = tf.stack((tf.range(b), action_idx), axis=1) <NEW_LINE> tiled_actions = snt.split_leading_dim(tiled_actions, dummy_zeros_n_b, 2) <NEW_LINE> action_dim = len(tiled_actions.get_shape().as_list()) <NEW_LINE> tiled_actions = tf.transpose(tiled_actions, perm=[1, 0] + list(range(2, action_dim))) <NEW_LINE> action_sample = tf.gather_nd(tiled_actions, action_idx) <NEW_LINE> return action_sample
Exponentially Q-weighted policy. Given a stochastic policy and a critic, returns a (stochastic) policy which samples multiple actions from the underlying policy, computes the Q-values for each action, and chooses the final action among the sampled ones with probability proportional to the exponentiated Q values, tempered by a parameter beta.
625990353eb6a72ae038b789
class StringToTime(ElementFilter): <NEW_LINE> <INDENT> def __init__(self, obj): <NEW_LINE> <INDENT> super(StringToTime, self).__init__(obj) <NEW_LINE> <DEDENT> def process(self, obj, key, valDict, fmt="%Y%m%d%H%M%SZ"): <NEW_LINE> <INDENT> if type(valDict[key]['value'] is not None): <NEW_LINE> <INDENT> valDict[key]['value'] = map(lambda x: datetime.datetime.strptime(x, fmt), valDict[key]['value']) <NEW_LINE> <DEDENT> return key, valDict
Converts a string object into a datetime.datetime object.. =========== =========================== Key Description =========== =========================== fmt The format string. E.g. '%Y%m%d%H%M%SZ' =========== =========================== e.g.: >>> <FilterEntry> >>> <Filter> >>> <Name>StringToTime</Name> >>> <Param>%Y%m%d%H%M%SZ</Param> >>> </Filter> >>> </FilterEntry> >>> ...
62599035d164cc6175822095
class ColorPickerWidget(JinjaWidget): <NEW_LINE> <INDENT> def __init__(self, show_field=True): <NEW_LINE> <INDENT> super().__init__('forms/color_picker_widget.html', single_line=True, show_field=show_field) <NEW_LINE> <DEDENT> def __call__(self, field, **kwargs): <NEW_LINE> <INDENT> return super().__call__(field, input_args=kwargs)
Render a colorpicker input field.
62599035d99f1b3c44d067c5
class FlagTestCase(TestCaseBase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(FlagTestCase, self).setUp() <NEW_LINE> self.user = user(save=True) <NEW_LINE> self.question = question(creator=self.user, save=True) <NEW_LINE> self.client.login(username=self.user.username, password='testpass') <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> super(FlagTestCase, self).tearDown() <NEW_LINE> self.client.logout() <NEW_LINE> <DEDENT> def test_flag(self): <NEW_LINE> <INDENT> d = {'content_type': ContentType.objects.get_for_model(Question).id, 'object_id': self.question.id, 'reason': 'spam', 'next': self.question.get_absolute_url()} <NEW_LINE> post(self.client, 'flagit.flag', d) <NEW_LINE> eq_(1, FlaggedObject.objects.count()) <NEW_LINE> flag = FlaggedObject.objects.all()[0] <NEW_LINE> eq_(self.user.username, flag.creator.username) <NEW_LINE> eq_('spam', flag.reason) <NEW_LINE> eq_(self.question, flag.content_object)
Test the flag view.
625990351d351010ab8f4c3d
@base.Hidden <NEW_LINE> @base.ReleaseTracks(base.ReleaseTrack.ALPHA) <NEW_LINE> class Update(base.UpdateCommand): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def Args(parser): <NEW_LINE> <INDENT> flags.FolderIdArg('you want to update.').AddToParser(parser) <NEW_LINE> parser.add_argument( '--display-name', required=True, help='New display name for the folder (unique under the same parent).') <NEW_LINE> <DEDENT> def Run(self, args): <NEW_LINE> <INDENT> folder = folders.GetFolder(args.id) <NEW_LINE> folder.displayName = args.display_name <NEW_LINE> update = folders.FoldersMessages().CloudresourcemanagerFoldersUpdateRequest( folder=folder, foldersId=args.id) <NEW_LINE> log.UpdatedResource(folders.FoldersService().Update(update))
Update the display name of a folder. Updates the given folder with new folder name. This command can fail for the following reasons: * There is no folder with the given ID. * The active account does not have permission to update the given folder. * The new display name is taken by another folder under this folder's parent. ## EXAMPLES The following command updates a folder with the ID `123456789` to have the name "Foo Bar and Grill": $ {command} 123456789 --display_name="Foo Bar and Grill"
6259903530c21e258be99930
class OrgTeacherView(View): <NEW_LINE> <INDENT> def get(self, request, org_id): <NEW_LINE> <INDENT> course_org = CourseOrg.objects.get(id=int(org_id)) <NEW_LINE> has_fav = False <NEW_LINE> if request.user.is_authenticated(): <NEW_LINE> <INDENT> if UserFavorite.objects.filter(user=request.user, fav_id=course_org.id, fav_type=2): <NEW_LINE> <INDENT> has_fav = True <NEW_LINE> <DEDENT> <DEDENT> all_teachers = course_org.teacher_set.all() <NEW_LINE> current_page = "teacher" <NEW_LINE> return render(request, "org-detail-teachers.html", { "all_teachers": all_teachers, "course_org": course_org, "current_page": current_page, "has_fav": has_fav, })
机构讲师页
6259903591af0d3eaad3af52
class Timestamp(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=30, primary_key=True) <NEW_LINE> ts = models.DateTimeField(null=False) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return '%s: %s' % (self.name, self.ts)
Model to hold metadata about the collection of external data. This model is a hash table mapping a timestamp name to the timestamp value. All timestamps values are in UTC. The utility function rpki.gui.app.timestmap.update(name) should be used to set timestamps rather than updating this model directly.
625990358a43f66fc4bf32ae
class MediaStatus(EnumTable): <NEW_LINE> <INDENT> def delete(self): <NEW_LINE> <INDENT> self.item_set.all().update(mediaStatus=self.delete_replacement()) <NEW_LINE> return super(MediaStatus, self).delete()
If an item requires some media as well, what is the status of the provision of that media
62599035be383301e0254938
class Natural_Breaks(Map_Classifier): <NEW_LINE> <INDENT> def __init__(self, y, k=K, initial=100): <NEW_LINE> <INDENT> self.k = k <NEW_LINE> self.initial = initial <NEW_LINE> Map_Classifier.__init__(self, y) <NEW_LINE> self.name = 'Natural_Breaks' <NEW_LINE> <DEDENT> def _set_bins(self): <NEW_LINE> <INDENT> x = self.y.copy() <NEW_LINE> k = self.k <NEW_LINE> values = np.array(x) <NEW_LINE> uv = np.unique(values) <NEW_LINE> uvk = len(uv) <NEW_LINE> if uvk < k: <NEW_LINE> <INDENT> Warn('Warning: Not enough unique values in array to form k classes', UserWarning) <NEW_LINE> Warn("Warning: setting k to %d" % uvk, UserWarning) <NEW_LINE> k = uvk <NEW_LINE> uv.sort() <NEW_LINE> self.bins = uv <NEW_LINE> self.k = k <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> res0 = natural_breaks(x, k) <NEW_LINE> fit = res0[2] <NEW_LINE> for i in xrange(self.initial): <NEW_LINE> <INDENT> res = natural_breaks(x, k) <NEW_LINE> fit_i = res[2] <NEW_LINE> if fit_i < fit: <NEW_LINE> <INDENT> res0 = res <NEW_LINE> <DEDENT> <DEDENT> self.bins = np.array(res0[-1]) <NEW_LINE> self.k = len(self.bins) <NEW_LINE> <DEDENT> <DEDENT> def update(self, y=None, inplace=False, **kwargs): <NEW_LINE> <INDENT> kwargs.update({'k':kwargs.pop('k', self.k)}) <NEW_LINE> kwargs.update({'initial':kwargs.pop('initial', self.initial)}) <NEW_LINE> if inplace: <NEW_LINE> <INDENT> self._update(y, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new = copy.deepcopy(self) <NEW_LINE> new._update(y, **kwargs) <NEW_LINE> return new
Natural Breaks Map Classification Parameters ---------- y : array (n,1), values to classify k : int number of classes required initial : int number of initial solutions to generate, (default=100) Attributes ---------- yb : array (n,1), bin ids for observations, bins : array (k,1), the upper bounds of each class k : int the number of classes counts : array (k,1), the number of observations falling in each class Examples -------- >>> import numpy >>> import pysal >>> numpy.random.seed(123456) >>> cal = pysal.esda.mapclassify.load_example() >>> nb = pysal.Natural_Breaks(cal, k=5) >>> nb.k 5 >>> nb.counts array([41, 9, 6, 1, 1]) >>> nb.bins array([ 29.82, 110.74, 370.5 , 722.85, 4111.45]) >>> x = numpy.array([1] * 50) >>> x[-1] = 20 >>> nb = pysal.Natural_Breaks(x, k = 5, initial = 0) Warning: Not enough unique values in array to form k classes Warning: setting k to 2 >>> nb.bins array([ 1, 20]) >>> nb.counts array([49, 1]) Notes ----- There is a tradeoff here between speed and consistency of the classification If you want more speed, set initial to a smaller value (0 would result in the best speed, if you want more consistent classes in multiple runs of Natural_Breaks on the same data, set initial to a higher value.
625990356e29344779b01774
class Streamer(): <NEW_LINE> <INDENT> def __init__(self, environment='live', heartbeat=1.0): <NEW_LINE> <INDENT> if environment == 'live': <NEW_LINE> <INDENT> self.api_url = 'https://coincheck.com/api/ticker' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.heartbeat = heartbeat <NEW_LINE> self.client = requests.Session() <NEW_LINE> <DEDENT> def start(self, **params): <NEW_LINE> <INDENT> self.connected = True <NEW_LINE> request_args = {} <NEW_LINE> content_ = {'last':None,'bid':None,'volume':None,'ask':None,'low':None,'high':None} <NEW_LINE> while self.connected: <NEW_LINE> <INDENT> response = self.client.get(self.api_url, **request_args) <NEW_LINE> content = response.content.decode('ascii') <NEW_LINE> content = json.loads(content) <NEW_LINE> if response.status_code != 200: <NEW_LINE> <INDENT> self.on_error(content) <NEW_LINE> <DEDENT> self.on_success(content) <NEW_LINE> time.sleep(self.heartbeat) <NEW_LINE> <DEDENT> <DEDENT> def on_success(self, content): <NEW_LINE> <INDENT> print(content) <NEW_LINE> return True <NEW_LINE> <DEDENT> def on_error(self, content): <NEW_LINE> <INDENT> self.connected = False <NEW_LINE> return
Provides functionality for HTTPS Streaming
625990350a366e3fb87ddb0a
class Action: <NEW_LINE> <INDENT> def __init__(self, action_name , parameter_list = [], preconditions_dict = {},pos_effects_dict = {},neg_effects_dict = {}): <NEW_LINE> <INDENT> self.action_name = str.lower(action_name) <NEW_LINE> self.parameter_list = parameter_list <NEW_LINE> self.preconditions_set = set(convert_dict_to_list(preconditions_dict)) <NEW_LINE> self.pos_effects_set = set(convert_dict_to_list(pos_effects_dict)) <NEW_LINE> self.neg_effects_set = set(convert_dict_to_list(neg_effects_dict)) <NEW_LINE> <DEDENT> def get_parameter_list(self): <NEW_LINE> <INDENT> return self.parameter_list <NEW_LINE> <DEDENT> def produce_resultant_state_dict(self,action_name,starting_state_set): <NEW_LINE> <INDENT> ret_state_set = starting_state_set <NEW_LINE> action_parts = action_name.split("_") <NEW_LINE> action_name = str.lower(action_parts[0]) <NEW_LINE> param_instantiation = action_parts[1:] <NEW_LINE> translation_dict = dict(zip(self.parameter_list,param_instantiation)) <NEW_LINE> for single_precondition in self.preconditions_set: <NEW_LINE> <INDENT> precondition_type = single_precondition.split("_")[0] <NEW_LINE> precondition_parameters = "_".join(single_precondition.split("_")[1:]) <NEW_LINE> grounded_precondition_params = translate_by_dict(precondition_parameters,translation_dict) <NEW_LINE> grounded_precondition = precondition_type + "_" + grounded_precondition_params <NEW_LINE> if not grounded_precondition in starting_state_set: <NEW_LINE> <INDENT> raise Exception("The action could not be executed from the previous state, preconditions did not match") <NEW_LINE> return starting_state_set <NEW_LINE> <DEDENT> <DEDENT> for single_neg_effect in self.neg_effects_set: <NEW_LINE> <INDENT> eff_type = single_neg_effect.split("_")[0] <NEW_LINE> eff_parameters = "_".join(single_neg_effect.split("_")[1:]) <NEW_LINE> grounded_eff_params = translate_by_dict(eff_parameters,translation_dict) <NEW_LINE> grounded_eff = eff_type + "_" + grounded_eff_params <NEW_LINE> 
ret_state_set.remove(grounded_eff) <NEW_LINE> <DEDENT> for single_pos_effect in self.pos_effects_set: <NEW_LINE> <INDENT> eff_type = single_pos_effect.split("_")[0] <NEW_LINE> eff_parameters = "_".join(single_pos_effect.split("_")[1:]) <NEW_LINE> grounded_eff_params = translate_by_dict(eff_parameters,translation_dict) <NEW_LINE> grounded_eff = eff_type + "_" + grounded_eff_params <NEW_LINE> ret_state_set.add(grounded_eff) <NEW_LINE> <DEDENT> return ret_state_set
Contains information about a LIFTED action such as preconditions and effects. Has functions that when given a state dict, and parameter instantiation, will give the resultant state dict
62599035287bf620b6272d0d
class PreResBottleneck(nn.Layer): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, strides, conv1_stride, data_format="channels_last", **kwargs): <NEW_LINE> <INDENT> super(PreResBottleneck, self).__init__(**kwargs) <NEW_LINE> mid_channels = out_channels // 4 <NEW_LINE> self.conv1 = pre_conv1x1_block( in_channels=in_channels, out_channels=mid_channels, strides=(strides if conv1_stride else 1), return_preact=True, data_format=data_format, name="conv1") <NEW_LINE> self.conv2 = pre_conv3x3_block( in_channels=mid_channels, out_channels=mid_channels, strides=(1 if conv1_stride else strides), data_format=data_format, name="conv2") <NEW_LINE> self.conv3 = pre_conv1x1_block( in_channels=mid_channels, out_channels=out_channels, data_format=data_format, name="conv3") <NEW_LINE> <DEDENT> def call(self, x, training=None): <NEW_LINE> <INDENT> x, x_pre_activ = self.conv1(x, training=training) <NEW_LINE> x = self.conv2(x, training=training) <NEW_LINE> x = self.conv3(x, training=training) <NEW_LINE> return x, x_pre_activ
PreResNet bottleneck block for residual path in PreResNet unit. Parameters: ---------- in_channels : int Number of input channels. out_channels : int Number of output channels. strides : int or tuple/list of 2 int Strides of the convolution. conv1_stride : bool Whether to use stride in the first or the second convolution layer of the block. data_format : str, default 'channels_last' The ordering of the dimensions in tensors.
625990358c3a8732951f767d
class JWTSerializer(serializers.Serializer): <NEW_LINE> <INDENT> token = serializers.CharField() <NEW_LINE> user = UserSerializer()
Serializer for JWT authentication.
6259903550485f2cf55dc0a2
class DataSet(object): <NEW_LINE> <INDENT> def __init__(self, people=[], groups=[], clubs=[]): <NEW_LINE> <INDENT> self.people = people <NEW_LINE> self.groups = groups <NEW_LINE> self.clubs = clubs <NEW_LINE> <DEDENT> def findPersonById(self, ident): <NEW_LINE> <INDENT> for p in self.people: <NEW_LINE> <INDENT> if p.ident == ident: <NEW_LINE> <INDENT> return p <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> __author__ = 'student'
Holds a list of clubs, a list of persons and a list of groups
625990356fece00bbacccace
class Breed(models.Model): <NEW_LINE> <INDENT> breed = models.CharField(max_length=75) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f"{self.breed}"
Defines an animal's breed (e.g. domestic short hair). Returns __str__ breed
6259903515baa723494630bf
class FakeSession: <NEW_LINE> <INDENT> statuses = None <NEW_LINE> content = [b"first", b"second", None] <NEW_LINE> @asynccontextmanager <NEW_LINE> async def request(self, method, url, **kwargs): <NEW_LINE> <INDENT> async def _fake_text(): <NEW_LINE> <INDENT> return method <NEW_LINE> <DEDENT> async def _fake_json(): <NEW_LINE> <INDENT> return {"method": method} <NEW_LINE> <DEDENT> async def _fake_content_read(*args, **kwargs): <NEW_LINE> <INDENT> return self.content.pop(0) <NEW_LINE> <DEDENT> if not self.statuses: <NEW_LINE> <INDENT> self.statuses = url.split(",") <NEW_LINE> <DEDENT> resp = mock.MagicMock() <NEW_LINE> resp.status = int(self.statuses.pop(0)) <NEW_LINE> resp.text = _fake_text <NEW_LINE> resp.json = _fake_json <NEW_LINE> content = mock.MagicMock() <NEW_LINE> content.read = _fake_content_read <NEW_LINE> resp.content = content <NEW_LINE> history = mock.MagicMock() <NEW_LINE> history.status = resp.status <NEW_LINE> history.text = _fake_text <NEW_LINE> resp.history = [history, history] <NEW_LINE> yield resp <NEW_LINE> <DEDENT> @asynccontextmanager <NEW_LINE> async def get(self, url, **kwargs): <NEW_LINE> <INDENT> async with self.request("get", url, **kwargs) as resp: <NEW_LINE> <INDENT> yield resp
Aiohttp session mock.
6259903521bff66bcd723d8b
class StaticTzInfo(BaseTzInfo): <NEW_LINE> <INDENT> def fromutc(self, dt): <NEW_LINE> <INDENT> return (dt + self._utcoffset).replace(tzinfo=self) <NEW_LINE> <DEDENT> def utcoffset(self, dt, is_dst=None): <NEW_LINE> <INDENT> return self._utcoffset <NEW_LINE> <DEDENT> def dst(self, dt, is_dst=None): <NEW_LINE> <INDENT> return _notime <NEW_LINE> <DEDENT> def tzname(self, dt, is_dst=None): <NEW_LINE> <INDENT> return self._tzname <NEW_LINE> <DEDENT> def localize(self, dt, is_dst=False): <NEW_LINE> <INDENT> if dt.tzinfo is not None: <NEW_LINE> <INDENT> raise ValueError('Not naive datetime (tzinfo is already set)') <NEW_LINE> <DEDENT> return dt.replace(tzinfo=self) <NEW_LINE> <DEDENT> def normalize(self, dt, is_dst=False): <NEW_LINE> <INDENT> if dt.tzinfo is self: <NEW_LINE> <INDENT> return dt <NEW_LINE> <DEDENT> if dt.tzinfo is None: <NEW_LINE> <INDENT> raise ValueError('Naive time - no tzinfo set') <NEW_LINE> <DEDENT> return dt.astimezone(self) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<StaticTzInfo %r>' % (self.zone,) <NEW_LINE> <DEDENT> def __reduce__(self): <NEW_LINE> <INDENT> return pytz._p, (self.zone,)
A timezone that has a constant offset from UTC These timezones are rare, as most locations have changed their offset at some point in their history
62599035d4950a0f3b1116d1
class ContextGusNotFound(GLException): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> GLException.error_message = "Not found a Context with the specified GUS identifier" <NEW_LINE> GLException.error_code = 12 <NEW_LINE> GLException.http_status = 404
The context_gus used do not exist in the database.
6259903566673b3332c31518
class VelocitySetter(PositionalControlNeedy): <NEW_LINE> <INDENT> def on_first_run(self, *args, **kwargs): <NEW_LINE> <INDENT> super().on_first_run(*args, **kwargs) <NEW_LINE> self.relative_to_current_setter = RelativeToCurrentSetter() <NEW_LINE> <DEDENT> def on_run(self, velocity, desire_setter, current, default_error, target=None, error=None, modulo_error=False, *args, **kwargs): <NEW_LINE> <INDENT> if error is None: <NEW_LINE> <INDENT> error = default_error <NEW_LINE> <DEDENT> velocity, current, target = call_if_function(velocity), call_if_function(current), call_if_function(target) <NEW_LINE> target_for_velocity = velocity * (self.this_run_time - self.last_run_time) <NEW_LINE> self.relative_to_current_setter.on_run(offset=target_for_velocity, desire_setter=desire_setter, current=current, error=error, modulo_error=modulo_error) <NEW_LINE> if target is not None or within_deadband(target, current, error, use_mod_error=modulo_error): <NEW_LINE> <INDENT> if target is not None: <NEW_LINE> <INDENT> desire_setter() <NEW_LINE> <DEDENT> self.finish() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> desire_setter()
Generic setter that simulates velocity controller using a positional controller
62599035cad5886f8bdc590e
class FrameStack4(gym.Wrapper): <NEW_LINE> <INDENT> STACK_SIZE = 4 <NEW_LINE> def __init__(self, env: gym.Env): <NEW_LINE> <INDENT> super(FrameStack4, self).__init__(env) <NEW_LINE> self._env = env <NEW_LINE> self._frames = collections.deque(maxlen=FrameStack4.STACK_SIZE) <NEW_LINE> space = self._env.observation_space <NEW_LINE> shape = space.shape[0:2] + (FrameStack4.STACK_SIZE,) <NEW_LINE> self.observation_space = gym.spaces.Box( low=0, high=255, shape=shape, dtype=np.uint8) <NEW_LINE> <DEDENT> def __getattr__(self, name: Text) -> Any: <NEW_LINE> <INDENT> return getattr(self._env, name) <NEW_LINE> <DEDENT> def _generate_observation(self): <NEW_LINE> <INDENT> return np.concatenate(self._frames, axis=2) <NEW_LINE> <DEDENT> def reset(self) -> np.ndarray: <NEW_LINE> <INDENT> observation = self._env.reset() <NEW_LINE> for _ in range(FrameStack4.STACK_SIZE): <NEW_LINE> <INDENT> self._frames.append(observation) <NEW_LINE> <DEDENT> return self._generate_observation() <NEW_LINE> <DEDENT> def step(self, action: np.ndarray) -> np.ndarray: <NEW_LINE> <INDENT> observation, reward, done, info = self._env.step(action) <NEW_LINE> self._frames.append(observation) <NEW_LINE> return self._generate_observation(), reward, done, info
Stack previous four frames (must be applied to Gym env, not our envs).
625990351f5feb6acb163d17
class List(BaseContainerColumn): <NEW_LINE> <INDENT> def __init__(self, value_type, default=list, **kwargs): <NEW_LINE> <INDENT> self.db_type = 'list<{}>'.format(value_type.db_type) <NEW_LINE> return super(List, self).__init__(value_type=value_type, default=default, **kwargs) <NEW_LINE> <DEDENT> def validate(self, value): <NEW_LINE> <INDENT> val = super(List, self).validate(value) <NEW_LINE> if val is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not isinstance(val, (set, list, tuple)): <NEW_LINE> <INDENT> raise ValidationError('{} {} is not a list object'.format(self.column_name, val)) <NEW_LINE> <DEDENT> if None in val: <NEW_LINE> <INDENT> raise ValidationError("{} None is not allowed in a list".format(self.column_name)) <NEW_LINE> <DEDENT> return [self.value_col.validate(v) for v in val] <NEW_LINE> <DEDENT> def to_python(self, value): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> return [self.value_col.to_python(v) for v in value] <NEW_LINE> <DEDENT> def to_database(self, value): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return [self.value_col.to_database(v) for v in value]
Stores a list of ordered values http://www.datastax.com/documentation/cql/3.1/cql/cql_using/use_list_t.html
625990356e29344779b01776
class TestUserViews(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> create_app().testing = True <NEW_LINE> self.app = create_app(config_class=config["test"]).test_client() <NEW_LINE> self.data = { "username": "pato", "name": "PitPat", "email": "[email protected]", "role": "Admin", "phone": "0733304050", "password": "qwerty", "retype_password": "qwerty" } <NEW_LINE> self.data1 = { "username": "", "name": "Pit Pat", "email": "[email protected]", "role": "Admin", "phone": "0712304050", "password": "qwerty", "retype_password": "qwerty" } <NEW_LINE> self.data2 = { "username": "pato", "name": "", "email": "[email protected]", "role": "Admin", "phone": "0712304050", "password": "qwerty", "retype_password": "qwerty" } <NEW_LINE> self.data3 = { "username": "pato", "name": "Pit Pat", "email": "pamail.com", "role": "Admin", "phone": "0712304050", "password": "qwerty", "retype_password": "qwerty" } <NEW_LINE> self.data4 = { "username": "pato", "name": "Pit Pat", "email": "[email protected]", "role": "ardmin", "phone": "0712304050", "password": "qwerty", "retype_password": "qwerty" } <NEW_LINE> self.login = { "email": "[email protected]", "password": "qwerty" } <NEW_LINE> <DEDENT> def test_create_user_with_incorrect_name(self): <NEW_LINE> <INDENT> response3 = self.app.post('/api/v2/register', data=json.dumps(self.data3), content_type='application/json;charset=utf-8') <NEW_LINE> result = json.loads(response3.data) <NEW_LINE> self.assertIn('Check your name.', str(result)) <NEW_LINE> self.assertEqual(response3.status_code, 400, msg="BAD REQUEST") <NEW_LINE> <DEDENT> def test_create_user_with_incorrect_role(self): <NEW_LINE> <INDENT> response4 = self.app.post('/api/v2/register', data=json.dumps(self.data4),content_type='application/json;charset=utf-8') <NEW_LINE> result = json.loads(response4.data) <NEW_LINE> self.assertIn('Check your name.', str(result)) <NEW_LINE> self.assertEqual(response4.status_code, 400, msg="BAD REQUEST")
Test the views for all http methods availed on test views.
62599035b830903b9686ed0c
class ObjectNode(SyntaxNodeBase): <NEW_LINE> <INDENT> def __init__(self, name, item, **kwargs): <NEW_LINE> <INDENT> super(ObjectNode, self).__init__(name, **kwargs) <NEW_LINE> self.__description = item['description'] <NEW_LINE> self.__parameters = item['parameters'] <NEW_LINE> self.__groups = dict() <NEW_LINE> self._locateGroupNames(item) <NEW_LINE> if 'tasks' in item: <NEW_LINE> <INDENT> for values in item['tasks'].itervalues(): <NEW_LINE> <INDENT> self._locateGroupNames(values) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def description(self): <NEW_LINE> <INDENT> return self.__description <NEW_LINE> <DEDENT> @property <NEW_LINE> def parameters(self): <NEW_LINE> <INDENT> return self.__parameters <NEW_LINE> <DEDENT> def markdown(self, install, absolute=True): <NEW_LINE> <INDENT> folder = self.__groups.keys()[0] <NEW_LINE> path = os.path.join(install, self.full_name.strip('/')).split('/') <NEW_LINE> path.insert(-1, folder) <NEW_LINE> if absolute: <NEW_LINE> <INDENT> return os.path.join(self.root_directory, '/'.join(path) + '.md') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return os.path.join(*path) + '.md' <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def groups(self): <NEW_LINE> <INDENT> return self.__groups <NEW_LINE> <DEDENT> def _locateGroupNames(self, item): <NEW_LINE> <INDENT> if 'file_info' in item: <NEW_LINE> <INDENT> for info in item['file_info'].iterkeys(): <NEW_LINE> <INDENT> match = re.search(r'/(?P<group>\w+)(?:App|Syntax)\.C', info) <NEW_LINE> if match: <NEW_LINE> <INDENT> heading = re.sub(r'(?<=[a-z])([A-Z])', r' \1', match.group('group')) <NEW_LINE> folder = heading.replace(' ', '_').lower() <NEW_LINE> self.__groups[folder] = heading <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__groups['framework'] = 'Framework'
Base class for nodes associated with C++ objects (Action, MooseObjectAction, or MooseObject).
625990350a366e3fb87ddb0c
class evd_manager_2D(evd_manager_base): <NEW_LINE> <INDENT> def __init__(self, _file=None): <NEW_LINE> <INDENT> super(evd_manager_2D, self).__init__(_file) <NEW_LINE> self._drawableItems = datatypes.drawableItems() <NEW_LINE> <DEDENT> def redrawProduct(self, product, producer, view_manager): <NEW_LINE> <INDENT> if producer is None: <NEW_LINE> <INDENT> if product in self._drawnClasses: <NEW_LINE> <INDENT> self._drawnClasses[product].clearDrawnObjects(view_manager) <NEW_LINE> self._drawnClasses.pop(product) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> if product in self._drawnClasses: <NEW_LINE> <INDENT> self._drawnClasses[product].setProducer(producer) <NEW_LINE> self._drawnClasses[product].clearDrawnObjects(view_manager) <NEW_LINE> self._drawnClasses[product].drawObjects(view_manager, self._io_manager, self.meta()) <NEW_LINE> return <NEW_LINE> <DEDENT> if product in self._drawableItems.getListOfTitles(): <NEW_LINE> <INDENT> drawingClass=self._drawableItems.getDict()[product][0]() <NEW_LINE> drawingClass.setProducer(producer) <NEW_LINE> self._drawnClasses.update({product: drawingClass}) <NEW_LINE> drawingClass.drawObjects(view_manager, self._io_manager, self.meta()) <NEW_LINE> <DEDENT> <DEDENT> def clearAll(self, view_manager): <NEW_LINE> <INDENT> for recoProduct in self._drawnClasses: <NEW_LINE> <INDENT> self._drawnClasses[recoProduct].clearDrawnObjects(view_manager) <NEW_LINE> <DEDENT> <DEDENT> def drawFresh(self, view_manager): <NEW_LINE> <INDENT> self.clearAll(view_manager) <NEW_LINE> order = self._drawableItems.getListOfTitles() <NEW_LINE> for item in order: <NEW_LINE> <INDENT> if item in self._drawnClasses: <NEW_LINE> <INDENT> self._drawnClasses[item].drawObjects(view_manager, self._io_manager, self.meta()) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def getPlane(self, plane): <NEW_LINE> <INDENT> if self._drawWires: <NEW_LINE> <INDENT> return self._wireDrawer.getPlane(plane)
Class to handle the 2D specific aspects of viewer
6259903550485f2cf55dc0a3
class EqualityMixin(object): <NEW_LINE> <INDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if other: <NEW_LINE> <INDENT> return vars(self) == vars(other) <NEW_LINE> <DEDENT> return False
Mixin for simple object equality testing -- ensures equality matches if all attributes match
62599035d164cc6175822099
class Sizeof(CExpression): <NEW_LINE> <INDENT> def __init__(self, base_type): <NEW_LINE> <INDENT> self.base_type = base_type <NEW_LINE> <DEDENT> def add_includes(self, program): <NEW_LINE> <INDENT> self.base_type.add_includes(program) <NEW_LINE> <DEDENT> def to_c_string(self, root=False): <NEW_LINE> <INDENT> return f"sizeof({self.base_type.to_c_string()})"
The C sizeof operator
62599035c432627299fa411d
class JsonGetter(object): <NEW_LINE> <INDENT> json = {} <NEW_LINE> def get(self, name, default=None): <NEW_LINE> <INDENT> return self.json.get(name, default) <NEW_LINE> <DEDENT> def dump(self, out_json_file, indent=2, **kwargs): <NEW_LINE> <INDENT> if hasattr(out_json_file, 'read'): <NEW_LINE> <INDENT> json.dump(self.json, out_json_file, indent=indent, **kwargs) <NEW_LINE> <DEDENT> elif isinstance(out_json_file, basestring): <NEW_LINE> <INDENT> head, tail = os.path.splitext(out_json_file) <NEW_LINE> if not tail == '.json': <NEW_LINE> <INDENT> out_json_file = head + '.json' <NEW_LINE> <DEDENT> with open(out_json_file, 'w') as f: <NEW_LINE> <INDENT> if not 'cls' in kwargs: <NEW_LINE> <INDENT> kwargs['cls'] = RestapiEncoder <NEW_LINE> <DEDENT> json.dump(self.json, f, indent=indent, ensure_ascii=False, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> return out_json_file <NEW_LINE> <DEDENT> def dumps(self, **kwargs): <NEW_LINE> <INDENT> if not 'cls' in kwargs: <NEW_LINE> <INDENT> kwargs['cls'] = RestapiEncoder <NEW_LINE> <DEDENT> kwargs['ensure_ascii'] = False <NEW_LINE> return json.dumps(self.json, **kwargs) <NEW_LINE> <DEDENT> def __getitem__(self, name): <NEW_LINE> <INDENT> if name in self.json: <NEW_LINE> <INDENT> return self.json[name] <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return object.__getattribute__(self, name) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> if name in self.json: <NEW_LINE> <INDENT> return self.json[name] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError(name) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return json.dumps(self.json, sort_keys=True, indent=2, cls=RestapiEncoder, ensure_ascii=False) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return json.dumps(self.json, sort_keys=True, indent=2, cls=NameEncoder, ensure_ascii=False)
override getters to also check its json property
625990355e10d32532ce4196
class PostDetailView(DetailView): <NEW_LINE> <INDENT> model = Post <NEW_LINE> template_name = "blog/detail.html" <NEW_LINE> context_object_name = "post" <NEW_LINE> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> response = super(PostDetailView, self).get(request, *args, **kwargs) <NEW_LINE> self.object.increase_views() <NEW_LINE> return response <NEW_LINE> <DEDENT> def get_object(self, queryset=None): <NEW_LINE> <INDENT> post = super(PostDetailView, self).get_object(queryset=None) <NEW_LINE> md = markdown.Markdown(extensions=[ 'markdown.extensions.extra', 'markdown.extensions.codehilite', TocExtension(slugify=slugify), ]) <NEW_LINE> post.body = md.convert(post.body) <NEW_LINE> post.toc = md.toc <NEW_LINE> return post <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(PostDetailView, self).get_context_data(**kwargs) <NEW_LINE> form = CommentForm <NEW_LINE> comment_list = self.object.comment_set.all() <NEW_LINE> context.update({ "form": form, "comment_list": comment_list }) <NEW_LINE> return context
单条详情数据需要继承自DetailView类
62599035d99f1b3c44d067c9
class GoogleSheets: <NEW_LINE> <INDENT> def __init__(self, auth_mode, client_secrets, delegated_email_address=None, local_server_port=None): <NEW_LINE> <INDENT> oauth2_scopes = [ "https://www.googleapis.com/auth/spreadsheets" ] <NEW_LINE> credentials = get_credentials( auth_mode=auth_mode, client_secrets=client_secrets, oauth2_scopes=oauth2_scopes, delegated_email_address=delegated_email_address, local_server_port=local_server_port ) <NEW_LINE> self.sheets = build( serviceName="sheets", version="v4", credentials=credentials ) <NEW_LINE> <DEDENT> def get_values(self, spreadsheet_id, range, major_dimension="ROWS", value_render_option="FORMATTED_VALUE"): <NEW_LINE> <INDENT> return self.sheets.spreadsheets().values().get( spreadsheetId=spreadsheet_id, range=range, majorDimension=major_dimension, valueRenderOption=value_render_option ).execute()
Class to create and manage a Google Sheets resources. Args: auth_mode (str): Mode of authentication & authorization. Valid values are only 'server_side' and 'service_account'. client_secrets (str): The path to the credentials json file or credentials information in json format (only for auth_mode=service_account). delegated_email_address (str): Must be set if using 'service_account' as auth_mode. For domain-wide delegation, the email address of the user to for which to request delegated access. local_server_port (int): Must be set if using 'server_side' as auth_mode. The port for the local redirect server.
62599035a8ecb03325872345
class Filters(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def keep_missing(events): <NEW_LINE> <INDENT> return events <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def ignore_missing(events): <NEW_LINE> <INDENT> good = list() <NEW_LINE> for i in events: <NEW_LINE> <INDENT> if is_valid(i): <NEW_LINE> <INDENT> good.append(i) <NEW_LINE> <DEDENT> <DEDENT> return good <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def zero_missing(events): <NEW_LINE> <INDENT> filled = list() <NEW_LINE> for i in events: <NEW_LINE> <INDENT> if not is_valid(i): <NEW_LINE> <INDENT> filled.append(0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> filled.append(i) <NEW_LINE> <DEDENT> <DEDENT> return filled <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def propagate_missing(events): <NEW_LINE> <INDENT> for i in events: <NEW_LINE> <INDENT> if not is_valid(i): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> return events <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def none_if_empty(events): <NEW_LINE> <INDENT> if len(events) == 0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return events
Filter functions to pass to aggregation function factory methods. These all control how the underlying aggregators handle missing/invalid values. Can pass things through (the default to all agg functions), ignore any bad values, transform any bad values to zero, or make the entire aggregation fail if there are any bad values.
62599035d18da76e235b79e2
class ChannelsError(MoopyError): <NEW_LINE> <INDENT> pass
The base channels error.
62599035be8e80087fbc01a6
class FeedbackNode(mdp.Node): <NEW_LINE> <INDENT> def __init__(self, n_timesteps=1, input_dim=None, dtype=None): <NEW_LINE> <INDENT> super(FeedbackNode, self).__init__(input_dim=input_dim, output_dim=input_dim, dtype=dtype) <NEW_LINE> self.n_timesteps = n_timesteps <NEW_LINE> self.last_value = None <NEW_LINE> self.current_timestep = 0 <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.current_timestep = 0 <NEW_LINE> <DEDENT> def is_trainable(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def _train(self, x, y): <NEW_LINE> <INDENT> self.last_value = mdp.numx.atleast_2d(y[-1, :]) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> while self.current_timestep < self.n_timesteps: <NEW_LINE> <INDENT> self.current_timestep += 1 <NEW_LINE> yield self.last_value <NEW_LINE> <DEDENT> <DEDENT> def _execute(self, x): <NEW_LINE> <INDENT> self.last_value = mdp.numx.atleast_2d(x[-1, :]) <NEW_LINE> return x
FeedbackNode creates the ability to feed back a certain part of a flow as input to the flow. It both implements the Node API and the generator API and can thus be used as input for a flow. The duration that the feedback node feeds back data can be given. Prior to using the node as data generator, it should be executed so it can store the previous state. When a FeedbackNode is reused several times, reset() should be called prior to each use which resets the internal counter. Note that this node keeps state and can thus NOT be used in parallel using threads.
6259903571ff763f4b5e88c1
class ScaleApply(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "object.dazstudio_apply_scale" <NEW_LINE> bl_label = "Apply Scale" <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> return (context.active_object is not None) <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> objBackup = bpy.context.active_object <NEW_LINE> bpy.ops.object.mode_set(mode='OBJECT') <NEW_LINE> for obj in bpy.context.selected_objects: <NEW_LINE> <INDENT> bpy.context.scene.objects.active = obj <NEW_LINE> bpy.ops.object.transform_apply(location=False, rotation=False, scale=True) <NEW_LINE> <DEDENT> bpy.context.scene.objects.active = objBackup <NEW_LINE> return {'FINISHED'}
Apply scale transforms of the selected objects
6259903596565a6dacd2d820
class MyData(Dataset): <NEW_LINE> <INDENT> def __init__(self, data, labels, return_perturb=False, sample_size=None, augmentation=None, training=False): <NEW_LINE> <INDENT> self.labels = labels <NEW_LINE> self.data = data <NEW_LINE> self.return_perturb = return_perturb <NEW_LINE> self.augmentation = augmentation <NEW_LINE> self.sample_size = sample_size <NEW_LINE> self.training = training <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.data) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> X = self.data[index] <NEW_LINE> h, w = X.shape <NEW_LINE> y = self.labels[index] <NEW_LINE> if self.return_perturb==False: <NEW_LINE> <INDENT> X = X.reshape(-1) <NEW_LINE> return X,y <NEW_LINE> <DEDENT> elif self.sample_size > 1: <NEW_LINE> <INDENT> X = X.cpu() <NEW_LINE> y = y.cpu() <NEW_LINE> X_repeated = np.tile(X, [self.sample_size, 1, 1]) <NEW_LINE> y_repeated = np.tile(y, [self.sample_size, 1]) <NEW_LINE> X_aug = self.augmentation(images=X_repeated) <NEW_LINE> if self.training: <NEW_LINE> <INDENT> X_repeated = X_repeated.reshape(self.sample_size,-1) <NEW_LINE> X_aug = X_aug.reshape(self.sample_size,-1) <NEW_LINE> <DEDENT> return X_repeated, X_aug, y_repeated <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> X_aug = self.augmentation(images=X) <NEW_LINE> X_aug = X_aug.reshape(-1) <NEW_LINE> X = X.reshape(-1) <NEW_LINE> return X, X_aug, y
Characterizes a dataset for PyTorch
62599035287bf620b6272d11
class BaseTransport: <NEW_LINE> <INDENT> def __init__(self, extra=None): <NEW_LINE> <INDENT> if extra is None: <NEW_LINE> <INDENT> extra = {} <NEW_LINE> <DEDENT> self._extra = extra <NEW_LINE> <DEDENT> def get_extra_info(self, name, default=None): <NEW_LINE> <INDENT> return self._extra.get(name, default) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> raise NotImplementedError
Base class for transports.
6259903523e79379d538d633
class KaKe(object): <NEW_LINE> <INDENT> def __init__(self, a, b, c): <NEW_LINE> <INDENT> self.a, self.b, self.c = a, b, c <NEW_LINE> <DEDENT> def __call__(self, forbidden, Ha): <NEW_LINE> <INDENT> return (self.a / np.log10(forbidden / Ha) + self.b) + self.c <NEW_LINE> <DEDENT> def classify(self, forbidden, Ha, Oiii, Hb): <NEW_LINE> <INDENT> return np.log10(Oiii / Hb) > self.__call__(forbidden, Ha)
Kauffmann-Kewley-style separation line log(Oiii / Hb) = (a / log(forbidden / Ha) + b) + c
6259903530c21e258be99935
class Hand: <NEW_LINE> <INDENT> def __init__(self, name="Dealer"): <NEW_LINE> <INDENT> self.__name = name <NEW_LINE> self.__hand = [] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if len(self.__hand) == 0: <NEW_LINE> <INDENT> show = "empty" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> show = "" <NEW_LINE> for card in self.__hand: <NEW_LINE> <INDENT> show += str(card) + " " <NEW_LINE> <DEDENT> <DEDENT> return show <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.__name <NEW_LINE> <DEDENT> @property <NEW_LINE> def total(self): <NEW_LINE> <INDENT> point = 0 <NEW_LINE> number_of_ace = 0 <NEW_LINE> for card in self.__hand: <NEW_LINE> <INDENT> if card.rank == 'A': <NEW_LINE> <INDENT> point += 11 <NEW_LINE> number_of_ace += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> point += card.value <NEW_LINE> <DEDENT> <DEDENT> while point > 21 and number_of_ace > 0: <NEW_LINE> <INDENT> point -= 10 <NEW_LINE> number_of_ace -= 1 <NEW_LINE> <DEDENT> return point <NEW_LINE> <DEDENT> def get(self, card): <NEW_LINE> <INDENT> self.__hand.append(card) <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.__hand = [] <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> for card in self.__hand: <NEW_LINE> <INDENT> if not card.face_up: <NEW_LINE> <INDENT> card.flip()
defines Hand class
6259903566673b3332c3151c
class Swarm(object): <NEW_LINE> <INDENT> def __init__(self, opts): <NEW_LINE> <INDENT> self.opts = opts <NEW_LINE> self.confs = set() <NEW_LINE> <DEDENT> def mkconf(self): <NEW_LINE> <INDENT> fd_, path = tempfile.mkstemp() <NEW_LINE> path = '{0}{1}'.format( path, hashlib.md5(str(random.randint(0, 999999))).hexdigest()) <NEW_LINE> os.close(fd_) <NEW_LINE> dpath = '{0}.d'.format(path) <NEW_LINE> os.makedirs(dpath) <NEW_LINE> data = {'id': os.path.basename(path), 'pki_dir': os.path.join(dpath, 'pki'), 'cache_dir': os.path.join(dpath, 'cache'), 'master': self.opts['master'], } <NEW_LINE> with open(path, 'w+') as fp_: <NEW_LINE> <INDENT> yaml.dump(data, fp_) <NEW_LINE> <DEDENT> self.confs.add(path) <NEW_LINE> <DEDENT> def start_minions(self): <NEW_LINE> <INDENT> for path in self.confs: <NEW_LINE> <INDENT> cmd = 'salt-minion -c {0} --pid-file {1}'.format( path, '{0}.pid'.format(path) ) <NEW_LINE> if self.opts['foreground']: <NEW_LINE> <INDENT> cmd += ' -l debug &' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cmd += ' -d &' <NEW_LINE> <DEDENT> subprocess.call(cmd, shell=True) <NEW_LINE> <DEDENT> <DEDENT> def prep_configs(self): <NEW_LINE> <INDENT> for ind in range(self.opts['minions']): <NEW_LINE> <INDENT> self.mkconf() <NEW_LINE> <DEDENT> <DEDENT> def clean_configs(self): <NEW_LINE> <INDENT> for path in self.confs: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.remove(path) <NEW_LINE> os.remove('{0}.pid'.format(path)) <NEW_LINE> shutil.rmtree('{0}.d'.format(path)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def start(self): <NEW_LINE> <INDENT> self.prep_configs() <NEW_LINE> self.start_minions()
Create a swarm of minions
625990351f5feb6acb163d1b
class Halt(Exception): <NEW_LINE> <INDENT> pass
big red stop button
6259903550485f2cf55dc0a7
class LambdaEventSourceMapping(AwsObject): <NEW_LINE> <INDENT> def __init__(self, dict_src, from_cache=False): <NEW_LINE> <INDENT> self.vpc_config = None <NEW_LINE> self._region = None <NEW_LINE> self.function_identification = None <NEW_LINE> super().__init__(dict_src) <NEW_LINE> if from_cache: <NEW_LINE> <INDENT> self._init_object_from_cache(dict_src) <NEW_LINE> return <NEW_LINE> <DEDENT> init_options = { "FunctionArn": self.init_default_attr, "UUID": self.init_default_attr, "StartingPosition": self.init_default_attr, "BatchSize": self.init_default_attr, "MaximumBatchingWindowInSeconds": self.init_default_attr, "ParallelizationFactor": self.init_default_attr, "EventSourceArn": self.init_default_attr, "LastModified": self.init_default_attr, "LastProcessingResult": self.init_default_attr, "State": self.init_default_attr, "StateTransitionReason": self.init_default_attr, "DestinationConfig": self.init_default_attr, "MaximumRecordAgeInSeconds": self.init_default_attr, "BisectBatchOnFunctionError": self.init_default_attr, "MaximumRetryAttempts": self.init_default_attr, "TumblingWindowInSeconds": self.init_default_attr, "FunctionResponseTypes": self.init_default_attr, } <NEW_LINE> self.init_attrs(dict_src, init_options) <NEW_LINE> <DEDENT> def update_from_raw_response(self, dict_src): <NEW_LINE> <INDENT> init_options = { "FunctionArn": self.init_default_attr, "UUID": self.init_default_attr, "StartingPosition": self.init_default_attr, "BatchSize": self.init_default_attr, "MaximumBatchingWindowInSeconds": self.init_default_attr, "ParallelizationFactor": self.init_default_attr, "EventSourceArn": self.init_default_attr, "LastModified": self.init_default_attr, "LastProcessingResult": self.init_default_attr, "State": self.init_default_attr, "StateTransitionReason": self.init_default_attr, "DestinationConfig": self.init_default_attr, "MaximumRecordAgeInSeconds": self.init_default_attr, "BisectBatchOnFunctionError": self.init_default_attr, "MaximumRetryAttempts": 
self.init_default_attr, "TumblingWindowInSeconds": self.init_default_attr, "FunctionResponseTypes": self.init_default_attr, } <NEW_LINE> self.init_attrs(dict_src, init_options) <NEW_LINE> <DEDENT> def _init_object_from_cache(self, dict_src): <NEW_LINE> <INDENT> options = {} <NEW_LINE> self._init_from_cache(dict_src, options) <NEW_LINE> <DEDENT> def generate_create_request(self): <NEW_LINE> <INDENT> request = dict() <NEW_LINE> request["EventSourceArn"] = self.event_source_arn <NEW_LINE> request["FunctionName"] = self.function_identification <NEW_LINE> request["Enabled"] = self.enabled <NEW_LINE> return request <NEW_LINE> <DEDENT> @property <NEW_LINE> def region(self): <NEW_LINE> <INDENT> if self._region is not None: <NEW_LINE> <INDENT> return self._region <NEW_LINE> <DEDENT> if self.arn is not None: <NEW_LINE> <INDENT> self._region = Region.get_region(self.arn.split(":")[3]) <NEW_LINE> <DEDENT> return self._region <NEW_LINE> <DEDENT> @region.setter <NEW_LINE> def region(self, value): <NEW_LINE> <INDENT> if not isinstance(value, Region): <NEW_LINE> <INDENT> raise ValueError(value) <NEW_LINE> <DEDENT> self._region = value
lambda_event_source_mapping representation class
62599035ac7a0e7691f73612
class InstanceDouble(ObjectDouble): <NEW_LINE> <INDENT> def __init__(self, path, **kwargs): <NEW_LINE> <INDENT> module_path, class_name = get_path_components(path) <NEW_LINE> module = get_module(module_path, path) <NEW_LINE> self._doubles_target = _get_doubles_target(module, class_name, path) <NEW_LINE> for k, v in kwargs.items(): <NEW_LINE> <INDENT> setattr(self, k, v)
A pure double representing an instance of the target class. Any kwargs supplied will be set as attributes on the instance that is created. :: user = InstanceDouble('myapp.User', name='Bob Barker') :param str path: The absolute module path to the class.
6259903596565a6dacd2d821
class VolumeClient(BaseClient): <NEW_LINE> <INDENT> service_type = "volume" <NEW_LINE> def __init__(self, http_client, extensions=None): <NEW_LINE> <INDENT> super(VolumeClient, self).__init__(http_client, extensions=extensions) <NEW_LINE> self.volumes = volumes.VolumeManager(self) <NEW_LINE> self.volume_snapshots = volume_snapshots.SnapshotManager(self) <NEW_LINE> self.volume_types = volume_types.VolumeTypeManager(self)
Client for the OpenStack Volume v2.0 API.
625990356fece00bbacccad4
class LetIn(AST): <NEW_LINE> <INDENT> def __init__(self, var, exp, body): <NEW_LINE> <INDENT> self.var = var <NEW_LINE> self.exp = exp <NEW_LINE> self.body = body
local variable declaration let var = exp in body
6259903515baa723494630c4
class AccountInfo(object): <NEW_LINE> <INDENT> def __init__(self, account_id, name, email, username): <NEW_LINE> <INDENT> self.account_id = account_id <NEW_LINE> self.name = name <NEW_LINE> self.email = email <NEW_LINE> self.username = username <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dict_): <NEW_LINE> <INDENT> return cls(account_id=dict_.get('_account_id', None), name=dict_.get('name', None), email=dict_.get('email', None), username=dict_.get('username', None)) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<AccountInfo()>'
http://192.168.10.48/Documentation/rest-api-accounts.html#account-info parsed output example: {u'_account_id': 1000261, u'email': u'[email protected]', u'name': u'Gao Qijie(\u9ad8\u542f\u6770)', u'username': u'qijie.gao'}
62599035ac7a0e7691f73614
class QHelpSearchEngine(__PyQt5_QtCore.QObject): <NEW_LINE> <INDENT> def cancelIndexing(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def cancelSearching(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def childEvent(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def connectNotify(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def customEvent(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def disconnectNotify(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def hitCount(self): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def hits(self, p_int, p_int_1): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def indexingFinished(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def indexingStarted(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def isSignalConnected(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def query(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def queryWidget(self): <NEW_LINE> <INDENT> return QHelpSearchQueryWidget <NEW_LINE> <DEDENT> def receivers(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def reindexDocumentation(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def resultWidget(self): <NEW_LINE> <INDENT> return QHelpSearchResultWidget <NEW_LINE> <DEDENT> def search(self, *__args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def searchingFinished(self, p_int): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def searchingStarted(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def searchInput(self): <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def searchResultCount(self): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def searchResults(self, p_int, p_int_1): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def sender(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def senderSignalIndex(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> 
def timerEvent(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self, QHelpEngineCore, parent=None): <NEW_LINE> <INDENT> pass
QHelpSearchEngine(QHelpEngineCore, parent: QObject = None)
625990355e10d32532ce4199
class Transport(ReadTransport, WriteTransport): <NEW_LINE> <INDENT> pass
ABC representing a bidirectional transport. There may be several implementations, but typically, the user does not implement new transports; rather, the platform provides some useful transports that are implemented using the platform's best practices. The user never instantiates a transport directly; they call a utility function, passing it a protocol factory and other information necessary to create the transport and protocol. (E.g. EventLoop.create_connection() or EventLoop.create_server().) The utility function will asynchronously create a transport and a protocol and hook them up by calling the protocol's connection_made() method, passing it the transport. The implementation here raises NotImplemented for every method except writelines(), which calls write() in a loop.
62599035d99f1b3c44d067cf
class Operator(Generic[T], ABC): <NEW_LINE> <INDENT> symbol: str <NEW_LINE> operations: List[T] <NEW_LINE> @abstractmethod <NEW_LINE> def __init__(self, symbol: str, operations: List[T]) -> None: <NEW_LINE> <INDENT> self.symbol = symbol <NEW_LINE> self.operations = operations <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_symbol(symbol: str) -> "Operator": <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return OPERATORS_MAP[symbol] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> msg = f"Operator {symbol} is not supported yet" <NEW_LINE> raise NotImplementedError(msg) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return self.symbol <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_operation(symbol: str, operation: Operation) -> "Operator": <NEW_LINE> <INDENT> operator_type: Union[pType[BinaryOperator], pType[UnaryOperator]] <NEW_LINE> if isinstance(operation, UnaryOperation): <NEW_LINE> <INDENT> return UnaryOperator(symbol, [operation]) <NEW_LINE> <DEDENT> elif isinstance(operation, BinaryOperation): <NEW_LINE> <INDENT> return BinaryOperator(symbol, [operation]) <NEW_LINE> <DEDENT> msg = f"Can't build Operator from type {type(operation)}" <NEW_LINE> raise RuntimeError(msg)
Abstract operator, be it unary or binary.
6259903530c21e258be99939
class NoResource(ErrorPage): <NEW_LINE> <INDENT> def __init__(self, message="Sorry. No luck finding that resource."): <NEW_LINE> <INDENT> ErrorPage.__init__(self, http.NOT_FOUND, "No Such Resource", message)
L{NoResource} is a specialization of L{ErrorPage} which returns the HTTP response code I{NOT FOUND}.
625990358da39b475be0431c
class UpdateDashboardStatsHandler(request_handler.RequestHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> datastore_hooks.SetPrivilegedRequest() <NEW_LINE> _FetchDashboardStats()
A simple request handler to refresh the cached test suites info.
6259903566673b3332c31520
class Context(cl.Context): <NEW_LINE> <INDENT> def __init__(self, devices=None, properties=None, dev_type=None, cache_dir=None): <NEW_LINE> <INDENT> super(Context, self).__init__(devices, properties, dev_type, cache_dir) <NEW_LINE> self.device_list = [] <NEW_LINE> self.program = None <NEW_LINE> for d in devices: <NEW_LINE> <INDENT> self.device_list.append(d) <NEW_LINE> d.context = self <NEW_LINE> d.queues = [] <NEW_LINE> d.create_queue() <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def default_queue(self): <NEW_LINE> <INDENT> return self.default_device.queues[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def default_device(self): <NEW_LINE> <INDENT> return self.device_list[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def devices(self): <NEW_LINE> <INDENT> return super(Context, self).devices <NEW_LINE> <DEDENT> @property <NEW_LINE> def num_devices(self): <NEW_LINE> <INDENT> return super(Context, self).num_devices <NEW_LINE> <DEDENT> @property <NEW_LINE> def properties(self): <NEW_LINE> <INDENT> return super(Context, self).properties <NEW_LINE> <DEDENT> @property <NEW_LINE> def reference_count(self): <NEW_LINE> <INDENT> return super(Context, self).reference_count <NEW_LINE> <DEDENT> def create_build_program(self, src=None, devices=None, options=[], cache_dir=None, src_file_name=None): <NEW_LINE> <INDENT> from _program import Program <NEW_LINE> import clip.cl <NEW_LINE> if src is None: <NEW_LINE> <INDENT> src = open(src_file_name).read() <NEW_LINE> <DEDENT> self.program = Program(self, src) <NEW_LINE> self.program.build(options, devices, cache_dir) <NEW_LINE> clip.cl.program = self.program <NEW_LINE> return self.program <NEW_LINE> <DEDENT> def compile_link_program(self, header_infos, source_infos, build_options=[], compile_options=[], link_options=[], info_is_filename=True): <NEW_LINE> <INDENT> from _program import Program <NEW_LINE> import clip.cl <NEW_LINE> import os <NEW_LINE> os.environ['PYOPENCL_NO_CACHE'] = 'TRUE' <NEW_LINE> from pyopencl._cffi import ffi 
<NEW_LINE> headers = [] <NEW_LINE> for info in header_infos: <NEW_LINE> <INDENT> if info_is_filename: <NEW_LINE> <INDENT> routine = open(info[0]).read() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> routine = info[0] <NEW_LINE> <DEDENT> pgm = Program(self, routine) <NEW_LINE> head_name = ffi.new("char[]", info[1]) <NEW_LINE> headers += ((head_name, pgm._prg),) <NEW_LINE> <DEDENT> sources = [] <NEW_LINE> for info in source_infos: <NEW_LINE> <INDENT> if info_is_filename: <NEW_LINE> <INDENT> routine = open(info).read() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> routine = info <NEW_LINE> <DEDENT> pgm = Program(self, routine) <NEW_LINE> pgm.compile(compile_options, headers=headers) <NEW_LINE> sources += (pgm._prg,) <NEW_LINE> <DEDENT> import pyopencl <NEW_LINE> options_bytes = '' <NEW_LINE> self.program = pyopencl.Program( pyopencl._Program.link(self, sources, options_bytes, None)) <NEW_LINE> clip.cl.program = self.program <NEW_LINE> self.program.__class__ = Program <NEW_LINE> return self.program <NEW_LINE> <DEDENT> def create_buffer(self, access_mode, size=0, hostbuf=None, host_ptr_mode=mem_host_ptr_mode.DEFAULT): <NEW_LINE> <INDENT> from _buffer import Buffer <NEW_LINE> buf = Buffer(self, access_mode | host_ptr_mode, size, hostbuf) <NEW_LINE> return buf <NEW_LINE> <DEDENT> pass
OpenCL Context Derived from pyopencl.Context Context can be created on one or more devices Properties: devices, num_devices, properties, reference_count
6259903573bcbd0ca4bcb3b5
class TestCmd(Command): <NEW_LINE> <INDENT> def do_discovery(self, *args): <NEW_LINE> <INDENT> hub_obj.discovery() <NEW_LINE> return 'Discovery Started' <NEW_LINE> <DEDENT> def do_broadcast(self, *args): <NEW_LINE> <INDENT> message = hub_obj.get_action('routing_table_request') <NEW_LINE> hub_obj.send_message(message, hub_obj.BROADCAST_LONG, hub_obj.BROADCAST_SHORT) <NEW_LINE> return 'broadcast' <NEW_LINE> <DEDENT> def do_nodes(self, *args): <NEW_LINE> <INDENT> if args[0] == 'list': <NEW_LINE> <INDENT> output = "List of Nodes: \n" <NEW_LINE> nodes = hub_obj.list_devices() <NEW_LINE> for id, node in nodes.iteritems(): <NEW_LINE> <INDENT> output += str(id) + " " + str(node) + "\n" <NEW_LINE> <DEDENT> return output <NEW_LINE> <DEDENT> if args[0][0].isdigit(): <NEW_LINE> <INDENT> node_id = args[0] <NEW_LINE> if args[1] == "rename": <NEW_LINE> <INDENT> name = " ".join(map(lambda s: '"%s"' % s if ' ' in s else s, args[2:])) <NEW_LINE> if name == '': <NEW_LINE> <INDENT> raise Exception('Name too short!') <NEW_LINE> <DEDENT> hub_obj.save_node_name(node_id, name) <NEW_LINE> return 'Node: ' + str(node_id) + ' Renamed: ' + name <NEW_LINE> <DEDENT> if args[1] == "state": <NEW_LINE> <INDENT> value = args[2] <NEW_LINE> this_device_obj = hub_obj.device_obj_from_id(node_id) <NEW_LINE> hub_obj.send_switch_state_request(this_device_obj, value) <NEW_LINE> return 'Node: ' + str(node_id) + ' State Changed: ' + value <NEW_LINE> <DEDENT> if args[1] == "mode": <NEW_LINE> <INDENT> value = args[2] <NEW_LINE> hub_obj.call_device_command(node_id, 'mode', value) <NEW_LINE> return 'Node: ' + str(node_id) + ' Mode: ' + value <NEW_LINE> <DEDENT> if args[1] == "attributes": <NEW_LINE> <INDENT> attrib_name = args[2] <NEW_LINE> return hub_obj.get_node_attribute_history(node_id, attrib_name, 338083200, 1537228800) <NEW_LINE> <DEDENT> if args[1] == "type": <NEW_LINE> <INDENT> device_obj = hub_obj.device_obj_from_id(node_id) <NEW_LINE> hub_obj.send_type_request(device_obj) <NEW_LINE> return 'Type 
Request Sent' <NEW_LINE> <DEDENT> if args[1] == "detail": <NEW_LINE> <INDENT> return hub_obj.device_obj_from_id(node_id) <NEW_LINE> <DEDENT> <DEDENT> return 'Unknown Argument' <NEW_LINE> <DEDENT> def do_halt(self, *args): <NEW_LINE> <INDENT> hub_obj.halt() <NEW_LINE> return Commander.Exit <NEW_LINE> <DEDENT> def do_echo(self, *args): <NEW_LINE> <INDENT> return ' '.join(args) <NEW_LINE> <DEDENT> def do_raise(self, *args): <NEW_LINE> <INDENT> raise Exception('Some Error')
This sets up the commands that commander will look for and what to do. The commands are - discovery - broadcast - nodes list * nodes <device_id> rename string - nodes <device_id> state [0/1] * nodes <device_id> mode [normal|range|locked|silent] * nodes <device_id> attributes ? * nodes <device_id> type * nodes <device_id> detail - halt The - point are working, the * need to be fixed the <device_id> is show in the nodes list (its a MAC address)
62599035cad5886f8bdc5912
class RSScriptWrapper(object): <NEW_LINE> <INDENT> def __init__(self, script_name): <NEW_LINE> <INDENT> self.name = os.path.basename(script_name) <NEW_LINE> self.inRS = 'IronPython' in sys.version <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if self.inRS: <NEW_LINE> <INDENT> self.start = datetime.datetime.now() <NEW_LINE> log_script_start(self.name) <NEW_LINE> <DEDENT> <DEDENT> def __exit__(self, ttype, value, traceback): <NEW_LINE> <INDENT> if self.inRS: <NEW_LINE> <INDENT> log_script_end(self.name, execution_time=datetime.datetime.now() - self.start) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True
Wraps the execution of any script imported in RayStation. As convention, every script imported to RayStation should use this wrapper object around its code. This wrapper has two purposes: 1. It logs the start and end of every script launched in RayStation. 2. It prevents execution of the code if the script is launched from python and not IronPython, the latter we assume is connected to RayStation. This is essential to use the python-sphinx autodoc feature, which is not connected to RayStation and therefore will produce errors. Typical usage for a script imported into RayStation is:: import setpath import hmrlib.hmrlib as hmrlib with hmrlib.RSScriptWrapper(__file__): script_function_call_1_() script_function_call_2_() # ... script_function_call_n_() Args: script_name (str): the name, or file name, of the script that will execute.
6259903550485f2cf55dc0ab
class ErrorType(StructType): <NEW_LINE> <INDENT> def __init__(self, name, fields, binding_class=None): <NEW_LINE> <INDENT> StructType.__init__(self, name, fields, binding_class)
Representation of Error IDL type in Python Binding :type definition: :class:`vmware.vapi.data.ErrorDefinition` :ivar definition: type representation in the API runtime :type name: :class:`str` :ivar name: Name of the structure :type binding_class: :class:`vmware.vapi.bindings.error.VapiError` :ivar binding_class: Reference to the Python native class corresponding to this error
62599035287bf620b6272d16
class ValidationError(object): <NEW_LINE> <INDENT> def __init__(self, document_path, schema_path, code, rule, constraint, value, info): <NEW_LINE> <INDENT> self.document_path = document_path <NEW_LINE> self.schema_path = schema_path <NEW_LINE> self.code = code <NEW_LINE> self.rule = rule <NEW_LINE> self.constraint = constraint <NEW_LINE> self.value = value <NEW_LINE> self.info = info <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return hash(self) == hash(other) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.document_path) ^ hash(self.schema_path) ^ hash(self.code) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> if self.document_path != other.document_path: <NEW_LINE> <INDENT> return compare_paths_lt(self.document_path, other.document_path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return compare_paths_lt(self.schema_path, other.schema_path) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{class_name} @ {memptr} ( document_path={document_path},schema_path={schema_path},code={code},constraint={constraint},value={value},info={info} )'.format(class_name=self.__class__.__name__, memptr=hex(id(self)), document_path=self.document_path, schema_path=self.schema_path, code=hex(self.code), constraint=quote_string(self.constraint), value=quote_string(self.value), info=self.info) <NEW_LINE> <DEDENT> @property <NEW_LINE> def child_errors(self): <NEW_LINE> <INDENT> if self.is_group_error: <NEW_LINE> <INDENT> return self.info[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def definitions_errors(self): <NEW_LINE> <INDENT> if not self.is_logic_error: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = defaultdict(list) <NEW_LINE> for error in self.child_errors: <NEW_LINE> <INDENT> i = error.schema_path[len(self.schema_path)] <NEW_LINE> result[i].append(error) <NEW_LINE> <DEDENT> return 
result <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def field(self): <NEW_LINE> <INDENT> if self.document_path: <NEW_LINE> <INDENT> return self.document_path[-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def is_group_error(self): <NEW_LINE> <INDENT> return bool(self.code & ERROR_GROUP.code) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_logic_error(self): <NEW_LINE> <INDENT> return bool(self.code & LOGICAL.code - ERROR_GROUP.code) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_normalization_error(self): <NEW_LINE> <INDENT> return bool(self.code & NORMALIZATION.code)
A simple class to store and query basic error information.
62599035c432627299fa4125
class Common(object): <NEW_LINE> <INDENT> _CODE_TO_LOCATION = 'https://api.weibo.com/2/common/code_to_location.json' <NEW_LINE> _GET_CITY = 'https://api.weibo.com/2/common/get_city.json' <NEW_LINE> _GET_PROVINCE = 'https://api.weibo.com/2/common/get_province.json' <NEW_LINE> _GET_COUNTRY = 'https://api.weibo.com/2/common/get_country.json' <NEW_LINE> _GET_TIMEZONE = 'https://api.weibo.com/2/common/get_timezone.json' <NEW_LINE> def __init__(self, oauth): <NEW_LINE> <INDENT> self.oauth = oauth <NEW_LINE> <DEDENT> @get_request <NEW_LINE> def code_to_location(self, codes: Iterable[str]): <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @get_request <NEW_LINE> def get_city(self, province: str, capital: str=None, language: str='zh-cn'): <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @get_request <NEW_LINE> def get_province(self, country: str, capital: str=None, language: str='zh-cn'): <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @get_request <NEW_LINE> def get_country(self, capital: str=None, language: str='zh-cn'): <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @get_request <NEW_LINE> def get_timezone(self, language: str='zh-cn'): <NEW_LINE> <INDENT> ...
公共服务接口
6259903596565a6dacd2d823
class DownloadPageMethodInjectionTests( MethodInjectionTestsMixin, unittest.SynchronousTestCase, ): <NEW_LINE> <INDENT> def attemptRequestWithMaliciousMethod(self, method): <NEW_LINE> <INDENT> uri = b'http://twisted.invalid' <NEW_LINE> client.downloadPage(uri, file=io.BytesIO(), method=method)
Test L{client.getPage} against HTTP method injections.
625990356fece00bbacccad8
class xpsiError(Exception): <NEW_LINE> <INDENT> pass
Base exception for xpsi-specific runtime errors.
6259903530c21e258be9993b
class TestIncludeTokenize(TestIncludeBase): <NEW_LINE> <INDENT> def testToken(self): <NEW_LINE> <INDENT> for i in range(4): <NEW_LINE> <INDENT> ast = self.root(i).ast <NEW_LINE> self.assertIsInstance(ast(0), tokens.Paragraph) <NEW_LINE> self.assertIsInstance(ast(0)(0), tokens.Word) <NEW_LINE> self.assertIsInstance(ast(0)(1), tokens.Space) <NEW_LINE> self.assertIsInstance(ast(0)(2), tokens.Number) <NEW_LINE> self.assertEqual(ast(0)(0).content, u'File') <NEW_LINE> self.assertEqual(ast(0)(2).content, unicode(i))
Test tokenization of Include
6259903523e79379d538d639
@override_settings(MODULESTORE=TEST_DATA_MONGO_MODULESTORE) <NEW_LINE> class MongoViewInStudioTest(ViewInStudioTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(MongoViewInStudioTest, self).setUp() <NEW_LINE> <DEDENT> def test_view_in_studio_link_studio_course(self): <NEW_LINE> <INDENT> self.setup_mongo_course() <NEW_LINE> result_fragment = self.module.render('student_view') <NEW_LINE> self.assertIn('View Unit in Studio', result_fragment.content) <NEW_LINE> <DEDENT> def test_view_in_studio_link_only_in_top_level_vertical(self): <NEW_LINE> <INDENT> self.setup_mongo_course() <NEW_LINE> result_fragment = self.module.render('student_view') <NEW_LINE> parts = result_fragment.content.split('xmodule_VerticalModule') <NEW_LINE> self.assertEqual(3, len(parts), "Did not find two vertical modules") <NEW_LINE> self.assertIn('View Unit in Studio', parts[0]) <NEW_LINE> self.assertNotIn('View Unit in Studio', parts[1]) <NEW_LINE> self.assertNotIn('View Unit in Studio', parts[2]) <NEW_LINE> <DEDENT> def test_view_in_studio_link_xml_authored(self): <NEW_LINE> <INDENT> self.setup_mongo_course(course_edit_method='XML') <NEW_LINE> result_fragment = self.module.render('student_view') <NEW_LINE> self.assertNotIn('View Unit in Studio', result_fragment.content)
Test the 'View in Studio' link visibility in a mongo backed course.
62599035a8ecb0332587234d
class InvalidName(SchemaParseException): <NEW_LINE> <INDENT> pass
User attempted to parse a schema with an invalid name.
6259903507d97122c4217dd6
class LifetimeWarning(Warning): <NEW_LINE> <INDENT> pass
Lifetime has been exceeded and is no longer supported.
62599035287bf620b6272d19
class DistributeVertices(bpy.types.Operator, VertexOperator): <NEW_LINE> <INDENT> bl_idname = "mesh.vertex_distribute" <NEW_LINE> bl_label = "Vertex Distribute" <NEW_LINE> bl_description = "Distribute selected vertices evenly along their loop" <NEW_LINE> def action(self, mesh): <NEW_LINE> <INDENT> distribute_vertices(mesh)
Distribute vertices evenly along interpolated shape of their polyline
625990351d351010ab8f4c4a
class FileWrapper(FileLikeBase): <NEW_LINE> <INDENT> _append_requires_overwrite = False <NEW_LINE> def __init__(self,wrapped_file,mode=None): <NEW_LINE> <INDENT> self.__closing = False <NEW_LINE> super(FileWrapper,self).__init__() <NEW_LINE> self.wrapped_file = wrapped_file <NEW_LINE> if mode is None: <NEW_LINE> <INDENT> self.mode = getattr(wrapped_file,"mode","r+") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.mode = mode <NEW_LINE> <DEDENT> self._validate_mode() <NEW_LINE> if hasattr(wrapped_file,"name"): <NEW_LINE> <INDENT> self.name = wrapped_file.name <NEW_LINE> <DEDENT> if "a" in self.mode: <NEW_LINE> <INDENT> if self._check_mode("r"): <NEW_LINE> <INDENT> self.wrapped_file.seek(0) <NEW_LINE> <DEDENT> self.seek(0,2) <NEW_LINE> <DEDENT> <DEDENT> def _validate_mode(self): <NEW_LINE> <INDENT> if self._append_requires_overwrite: <NEW_LINE> <INDENT> if self._check_mode("w"): <NEW_LINE> <INDENT> if "a" in getattr(self.wrapped_file,"mode",""): <NEW_LINE> <INDENT> raise ValueError("Underlying file can't be in append mode") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> if not hasattr(self,"wrapped_file"): <NEW_LINE> <INDENT> self.wrapped_file = None <NEW_LINE> <DEDENT> if not hasattr(self,"_FileWrapper__closing"): <NEW_LINE> <INDENT> self.__closing = False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if FileWrapper is not None: <NEW_LINE> <INDENT> super(FileWrapper,self).close() <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> if hasattr(getattr(self,"wrapped_file",None),"close"): <NEW_LINE> <INDENT> self.wrapped_file.close() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> if not self.closed: <NEW_LINE> <INDENT> self.__closing = True <NEW_LINE> super(FileWrapper,self).close() <NEW_LINE> if hasattr(self.wrapped_file,"close"): <NEW_LINE> <INDENT> self.wrapped_file.close() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def flush(self): <NEW_LINE> <INDENT> super(FileWrapper,self).flush() <NEW_LINE> if 
not self.__closing and hasattr(self.wrapped_file,"flush"): <NEW_LINE> <INDENT> self.wrapped_file.flush() <NEW_LINE> <DEDENT> <DEDENT> def _read(self,sizehint=-1): <NEW_LINE> <INDENT> data = self.wrapped_file.read(sizehint) <NEW_LINE> if data == b(""): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def _write(self,string,flushing=False): <NEW_LINE> <INDENT> self.wrapped_file.write(string) <NEW_LINE> <DEDENT> def _seek(self,offset,whence): <NEW_LINE> <INDENT> self.wrapped_file.seek(offset,whence) <NEW_LINE> <DEDENT> def _tell(self): <NEW_LINE> <INDENT> return self.wrapped_file.tell() <NEW_LINE> <DEDENT> def _truncate(self,size): <NEW_LINE> <INDENT> return self.wrapped_file.truncate(size)
Base class for objects that wrap a file-like object. This class provides basic functionality for implementing file-like objects that wrap another file-like object to alter its functionality in some way. It takes care of house-keeping duties such as flushing and closing the wrapped file. Access to the wrapped file is given by the attribute wrapped_file. By convention, the subclass's constructor should accept this as its first argument and pass it to its superclass's constructor in the same position. This class provides a basic implementation of _read() and _write() which just calls read() and write() on the wrapped object. Subclasses will probably want to override these.
6259903550485f2cf55dc0ae
class VirtualMachineScaleSetVMInstanceView(Model): <NEW_LINE> <INDENT> _attribute_map = { 'platform_update_domain': {'key': 'platformUpdateDomain', 'type': 'int'}, 'platform_fault_domain': {'key': 'platformFaultDomain', 'type': 'int'}, 'rdp_thumb_print': {'key': 'rdpThumbPrint', 'type': 'str'}, 'vm_agent': {'key': 'vmAgent', 'type': 'VirtualMachineAgentInstanceView'}, 'disks': {'key': 'disks', 'type': '[DiskInstanceView]'}, 'extensions': {'key': 'extensions', 'type': '[VirtualMachineExtensionInstanceView]'}, 'boot_diagnostics': {'key': 'bootDiagnostics', 'type': 'BootDiagnosticsInstanceView'}, 'statuses': {'key': 'statuses', 'type': '[InstanceViewStatus]'}, } <NEW_LINE> def __init__(self, platform_update_domain=None, platform_fault_domain=None, rdp_thumb_print=None, vm_agent=None, disks=None, extensions=None, boot_diagnostics=None, statuses=None): <NEW_LINE> <INDENT> super(VirtualMachineScaleSetVMInstanceView, self).__init__() <NEW_LINE> self.platform_update_domain = platform_update_domain <NEW_LINE> self.platform_fault_domain = platform_fault_domain <NEW_LINE> self.rdp_thumb_print = rdp_thumb_print <NEW_LINE> self.vm_agent = vm_agent <NEW_LINE> self.disks = disks <NEW_LINE> self.extensions = extensions <NEW_LINE> self.boot_diagnostics = boot_diagnostics <NEW_LINE> self.statuses = statuses
The instance view of a virtual machine scale set VM. :param platform_update_domain: The Update Domain count. :type platform_update_domain: int :param platform_fault_domain: The Fault Domain count. :type platform_fault_domain: int :param rdp_thumb_print: The Remote desktop certificate thumbprint. :type rdp_thumb_print: str :param vm_agent: The VM Agent running on the virtual machine. :type vm_agent: ~azure.mgmt.compute.v2015_06_15.models.VirtualMachineAgentInstanceView :param disks: The disks information. :type disks: list[~azure.mgmt.compute.v2015_06_15.models.DiskInstanceView] :param extensions: The extensions information. :type extensions: list[~azure.mgmt.compute.v2015_06_15.models.VirtualMachineExtensionInstanceView] :param boot_diagnostics: Boot Diagnostics is a debugging feature which allows you to view Console Output and Screenshot to diagnose VM status. <br><br> For Linux Virtual Machines, you can easily view the output of your console log. <br><br> For both Windows and Linux virtual machines, Azure also enables you to see a screenshot of the VM from the hypervisor. :type boot_diagnostics: ~azure.mgmt.compute.v2015_06_15.models.BootDiagnosticsInstanceView :param statuses: The resource status information. :type statuses: list[~azure.mgmt.compute.v2015_06_15.models.InstanceViewStatus]
62599035b57a9660fecd2bb0
class DlpProjectsJobTriggersListRequest(_messages.Message): <NEW_LINE> <INDENT> orderBy = _messages.StringField(1) <NEW_LINE> pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32) <NEW_LINE> pageToken = _messages.StringField(3) <NEW_LINE> parent = _messages.StringField(4, required=True)
A DlpProjectsJobTriggersListRequest object. Fields: orderBy: Optional comma separated list of triggeredJob fields to order by, followed by 'asc/desc' postfix, i.e. `"create_time asc,name desc,schedule_mode asc"`. This list is case-insensitive. Example: `"name asc,schedule_mode desc, status desc"` Supported filters keys and values are: - `create_time`: corresponds to time the triggeredJob was created. - `update_time`: corresponds to time the triggeredJob was last updated. - `name`: corresponds to JobTrigger's display name. - `status`: corresponds to the triggeredJob status. pageSize: Optional size of the page, can be limited by a server. pageToken: Optional page token to continue retrieval. Comes from previous call to ListJobTriggers. `order_by` and `filter` should not change for subsequent calls, but can be omitted if token is specified. parent: The parent resource name, for example projects/my-project-id.
625990358c3a8732951f7689
class DCTCoefSWFilter(BaseStatSWFilter): <NEW_LINE> <INDENT> __logger = logging.getLogger(__name__) <NEW_LINE> def aggregate_windows(self, window_seq, coef=0, **kwargs): <NEW_LINE> <INDENT> for window in window_seq: <NEW_LINE> <INDENT> window_len = len(window) <NEW_LINE> spectrum = dct(window) <NEW_LINE> yield list(spectrum)[coef] / (2 * window_len)
Implements 1D Fast Discrete COS transform. Only for experiment.
6259903521bff66bcd723d97
class Identifier(fhirelement.FHIRElement): <NEW_LINE> <INDENT> resource_name = "Identifier" <NEW_LINE> def __init__(self, jsondict=None): <NEW_LINE> <INDENT> self.assigner = None <NEW_LINE> self.period = None <NEW_LINE> self.system = None <NEW_LINE> self.type = None <NEW_LINE> self.use = None <NEW_LINE> self.value = None <NEW_LINE> super(Identifier, self).__init__(jsondict) <NEW_LINE> <DEDENT> def elementProperties(self): <NEW_LINE> <INDENT> js = super(Identifier, self).elementProperties() <NEW_LINE> js.extend([ ("assigner", "assigner", fhirreference.FHIRReference, False), ("period", "period", period.Period, False), ("system", "system", str, False), ("type", "type", codeableconcept.CodeableConcept, False), ("use", "use", str, False), ("value", "value", str, False), ]) <NEW_LINE> return js
An identifier intended for computation. A technical identifier - identifies some entity uniquely and unambiguously.
62599035596a897236128dce
class TestUtil(InstanceRepresentation): <NEW_LINE> <INDENT> pass
Abstract class for all test util classes.
62599035796e427e5384f8ac
class Operations: <NEW_LINE> <INDENT> models = _models <NEW_LINE> def __init__(self, client, config, serializer, deserializer) -> None: <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self._config = config <NEW_LINE> <DEDENT> @distributed_trace <NEW_LINE> def list( self, **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: <NEW_LINE> <INDENT> cls = kwargs.pop('cls', None) <NEW_LINE> error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } <NEW_LINE> error_map.update(kwargs.pop('error_map', {})) <NEW_LINE> def prepare_request(next_link=None): <NEW_LINE> <INDENT> if not next_link: <NEW_LINE> <INDENT> request = build_list_request( template_url=self.list.metadata['url'], ) <NEW_LINE> request = _convert_request(request) <NEW_LINE> request.url = self._client.format_url(request.url) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> request = build_list_request( template_url=next_link, ) <NEW_LINE> request = _convert_request(request) <NEW_LINE> request.url = self._client.format_url(request.url) <NEW_LINE> request.method = "GET" <NEW_LINE> <DEDENT> return request <NEW_LINE> <DEDENT> async def extract_data(pipeline_response): <NEW_LINE> <INDENT> deserialized = self._deserialize("OperationListResult", pipeline_response) <NEW_LINE> list_of_elem = deserialized.value <NEW_LINE> if cls: <NEW_LINE> <INDENT> list_of_elem = cls(list_of_elem) <NEW_LINE> <DEDENT> return deserialized.next_link or None, AsyncList(list_of_elem) <NEW_LINE> <DEDENT> async def get_next(next_link=None): <NEW_LINE> <INDENT> request = prepare_request(next_link) <NEW_LINE> pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) <NEW_LINE> response = pipeline_response.http_response <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> map_error(status_code=response.status_code, response=response, error_map=error_map) <NEW_LINE> 
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response) <NEW_LINE> raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) <NEW_LINE> <DEDENT> return pipeline_response <NEW_LINE> <DEDENT> return AsyncItemPaged( get_next, extract_data ) <NEW_LINE> <DEDENT> list.metadata = {'url': '/providers/Microsoft.Devices/operations'}
Operations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.iothub.v2021_07_01.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer.
62599035e76e3b2f99fd9b3d
class MappingPagedResponse(ModelNormal): <NEW_LINE> <INDENT> allowed_values = { } <NEW_LINE> validations = { } <NEW_LINE> additional_properties_type = None <NEW_LINE> _nullable = False <NEW_LINE> @cached_property <NEW_LINE> def openapi_types(): <NEW_LINE> <INDENT> lazy_import() <NEW_LINE> return { 'total': (int,), 'offset': (int,), 'limit': (int,), 'results': ([Mapping],), } <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def discriminator(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> attribute_map = { 'total': 'total', 'offset': 'offset', 'limit': 'limit', 'results': 'results', } <NEW_LINE> _composed_schemas = {} <NEW_LINE> required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) <NEW_LINE> @convert_js_args_to_python_args <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> _check_type = kwargs.pop('_check_type', True) <NEW_LINE> _spec_property_naming = kwargs.pop('_spec_property_naming', False) <NEW_LINE> _path_to_item = kwargs.pop('_path_to_item', ()) <NEW_LINE> _configuration = kwargs.pop('_configuration', None) <NEW_LINE> _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) <NEW_LINE> if args: <NEW_LINE> <INDENT> raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) <NEW_LINE> <DEDENT> self._data_store = {} <NEW_LINE> self._check_type = _check_type <NEW_LINE> self._spec_property_naming = _spec_property_naming <NEW_LINE> self._path_to_item = _path_to_item <NEW_LINE> self._configuration = _configuration <NEW_LINE> self._visited_composed_classes = _visited_composed_classes + (self.__class__,) <NEW_LINE> for var_name, var_value in kwargs.items(): <NEW_LINE> <INDENT> if var_name not in self.attribute_map and self._configuration is not None and self._configuration.discard_unknown_keys and self.additional_properties_type is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> setattr(self, var_name, var_value)
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values.
6259903573bcbd0ca4bcb3b9
class GetSharedDataRequest(rpc.Request): <NEW_LINE> <INDENT> def __init__(self, msg): <NEW_LINE> <INDENT> self.msg = msg <NEW_LINE> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> return self.msg <NEW_LINE> <DEDENT> def __setstate__(self, state): <NEW_LINE> <INDENT> self.msg = state <NEW_LINE> <DEDENT> def process_request(self, server_state): <NEW_LINE> <INDENT> assert self.msg == GET_SHARED_MSG <NEW_LINE> meta = {} <NEW_LINE> kv_store = server_state.kv_store <NEW_LINE> for name, data in kv_store.data_store.items(): <NEW_LINE> <INDENT> meta[name] = (F.shape(data), F.reverse_data_type_dict[F.dtype(data)], kv_store.part_policy[name].policy_str) <NEW_LINE> <DEDENT> if len(meta) == 0: <NEW_LINE> <INDENT> raise RuntimeError('There is no data on kvserver.') <NEW_LINE> <DEDENT> res = GetSharedDataResponse(meta) <NEW_LINE> return res
Send a signal (just a short string message) to get the meta data of shared-tensor from server. Parameters ---------- msg : string string message
62599035d53ae8145f919595
class ReceivablesListAPIView(TotalAnnotateMixin, SmartTransactionListMixin, ReceivablesQuerysetMixin, ListAPIView): <NEW_LINE> <INDENT> serializer_class = TransactionSerializer <NEW_LINE> pagination_class = TotalPagination
Lists provider receivables Returns a list of {{PAGE_SIZE}} transactions marked as receivables associated to to ``{organization}`` while the profile acts as a provider. The queryset can be further refined to match a search filter (``q``) and/or a range of dates ([``start_at``, ``ends_at``]), and sorted on specific fields (``o``). This API endpoint is typically used to find all sales for ``{organization}`` whether it was paid or not. **Tags**: billing, provider, transactionmodel **Examples** .. code-block:: http GET /api/billing/cowork/receivables/?start_at=2015-07-05T07:00:00.000Z HTTP/1.1 responds .. code-block:: json { "count": 1, "balance_amount": "112120", "balance_unit": "usd", "next": null, "previous": null, "results": [ { "created_at": "2015-08-01T00:00:00Z", "description": "Charge <a href='/billing/cowork/receipt/1123'>1123</a> distribution for demo562-premium", "amount": "112120", "is_debit": false, "orig_account": "Funds", "orig_organization": "stripe", "orig_amount": 112120, "orig_unit": "usd", "dest_account": "Funds", "dest_organization": "cowork", "dest_amount": 112120, "dest_unit": "usd" } ] }
62599035be8e80087fbc01b0
class UnexpectedMovement(object): <NEW_LINE> <INDENT> __slots__ = ( '_timestamp', '_movementType', ) <NEW_LINE> @property <NEW_LINE> def timestamp(self): <NEW_LINE> <INDENT> return self._timestamp <NEW_LINE> <DEDENT> @timestamp.setter <NEW_LINE> def timestamp(self, value): <NEW_LINE> <INDENT> self._timestamp = msgbuffers.validate_integer( 'UnexpectedMovement.timestamp', value, 0, 4294967295) <NEW_LINE> <DEDENT> @property <NEW_LINE> def movementType(self): <NEW_LINE> <INDENT> return self._movementType <NEW_LINE> <DEDENT> @movementType.setter <NEW_LINE> def movementType(self, value): <NEW_LINE> <INDENT> self._movementType = msgbuffers.validate_integer( 'UnexpectedMovement.movementType', value, 0, 255) <NEW_LINE> <DEDENT> def __init__(self, timestamp=0, movementType=Anki.Cozmo.UnexpectedMovementType.TURNED_BUT_STOPPED): <NEW_LINE> <INDENT> self.timestamp = timestamp <NEW_LINE> self.movementType = movementType <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def unpack(cls, buffer): <NEW_LINE> <INDENT> reader = msgbuffers.BinaryReader(buffer) <NEW_LINE> value = cls.unpack_from(reader) <NEW_LINE> if reader.tell() != len(reader): <NEW_LINE> <INDENT> raise msgbuffers.ReadError( ('UnexpectedMovement.unpack received a buffer of length {length}, ' + 'but only {position} bytes were read.').format( length=len(reader), position=reader.tell())) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def unpack_from(cls, reader): <NEW_LINE> <INDENT> _timestamp = reader.read('I') <NEW_LINE> _movementType = reader.read('B') <NEW_LINE> return cls(_timestamp, _movementType) <NEW_LINE> <DEDENT> def pack(self): <NEW_LINE> <INDENT> writer = msgbuffers.BinaryWriter() <NEW_LINE> self.pack_to(writer) <NEW_LINE> return writer.dumps() <NEW_LINE> <DEDENT> def pack_to(self, writer): <NEW_LINE> <INDENT> writer.write(self._timestamp, 'I') <NEW_LINE> writer.write(self._movementType, 'B') <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if type(self) is type(other): 
<NEW_LINE> <INDENT> return (self._timestamp == other._timestamp and self._movementType == other._movementType) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if type(self) is type(other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return (msgbuffers.size(self._timestamp, 'I') + msgbuffers.size(self._movementType, 'B')) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '{type}(timestamp={timestamp}, movementType={movementType})'.format( type=type(self).__name__, timestamp=self._timestamp, movementType=self._movementType) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{type}(timestamp={timestamp}, movementType={movementType})'.format( type=type(self).__name__, timestamp=repr(self._timestamp), movementType=repr(self._movementType))
Generated message-passing message.
62599035cad5886f8bdc5914
class CommentSerializer(NotEditableFieldsMixin, serializers.ModelSerializer): <NEW_LINE> <INDENT> content_type = serializers.PrimaryKeyRelatedField(queryset=ContentType.objects.filter(CONTENT_TYPE_CHOICES), required=True) <NEW_LINE> self_content_type = serializers.IntegerField(read_only=True) <NEW_LINE> user = UserSerializer(read_only=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Comment <NEW_LINE> fields = ('id', 'content_type', 'object_id', 'text', 'parent', 'user', 'created_at', 'updated_at', 'self_content_type') <NEW_LINE> not_editable_fields = ('content_type', 'object_id') <NEW_LINE> extra_kwargs = { 'parent': {'read_only': True}, 'object_id': {'required': True}, } <NEW_LINE> <DEDENT> def validate(self, data): <NEW_LINE> <INDENT> data = super().validate(data) <NEW_LINE> content_type = data.get('content_type') <NEW_LINE> object_id = data.get('object_id') <NEW_LINE> if content_type and not content_type.model_class().objects.filter(id=object_id).exists(): <NEW_LINE> <INDENT> raise serializers.ValidationError({'object_id': ['Object does not exists']}) <NEW_LINE> <DEDENT> if content_type and content_type.model_class() is self.Meta.model: <NEW_LINE> <INDENT> data['parent_id'] = object_id <NEW_LINE> <DEDENT> return data
Serializer for present and save Comment model.
625990358c3a8732951f768a
class Log(BaseOps): <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> temp_behav = [BehaviorFull(" 0 00:00.00 null either ")] <NEW_LINE> self.full = temp_behav <NEW_LINE> self.marks = [Mark(0, timedelta(0), "")] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_log(cls, log: "Log") -> "Log": <NEW_LINE> <INDENT> new_log = Log() <NEW_LINE> new_log.full = log.full.copy() <NEW_LINE> new_log.marks = log.marks.copy() <NEW_LINE> return new_log <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_raw_log(cls, log: "RawLog") -> "Log": <NEW_LINE> <INDENT> new_log = Log() <NEW_LINE> new_log.full = [BehaviorFull(line) for line in log.full] <NEW_LINE> new_log.marks = [Mark.from_line(line) for line in log.marks] <NEW_LINE> return new_log <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_file(cls, log_file) -> "Log": <NEW_LINE> <INDENT> raw = RawLog.from_file(log_file) <NEW_LINE> return cls.from_raw_log(raw) <NEW_LINE> <DEDENT> def sort_lists(self) -> None: <NEW_LINE> <INDENT> self.marks.sort() <NEW_LINE> self.full.sort() <NEW_LINE> <DEDENT> def extend(self, log: "Log") -> None: <NEW_LINE> <INDENT> self.marks.extend(log.marks) <NEW_LINE> self.full.extend(log.full)
Store a parsed version of a log file This version stores only the information contained in the log, not any information tied to a particular file (e.g. file name, reference to file, number of spaces separating columns). Attributes: full: A list of :py:class:`BehaviorFull` objects, each representing a line from the log file's ``FULL`` section marks: A list of :py:class:`Mark` objects, each representing a mark from the log file
6259903571ff763f4b5e88cb
@python_2_unicode_compatible <NEW_LINE> class GlobalPagePermission(AbstractPagePermission): <NEW_LINE> <INDENT> can_recover_page = models.BooleanField(_("can recover pages"), default=True, help_text=_("can recover any deleted page")) <NEW_LINE> sites = models.ManyToManyField(Site, null=True, blank=True, help_text=_('If none selected, user haves granted permissions to all sites.'), verbose_name=_('sites')) <NEW_LINE> objects = GlobalPagePermissionManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _('Page global permission') <NEW_LINE> verbose_name_plural = _('Pages global permissions') <NEW_LINE> app_label = 'cms' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s :: GLOBAL" % self.audience
Permissions for all pages (global).
62599035ec188e330fdf99c7