code: string (lengths 4 to 4.48k)
docstring: string (lengths 1 to 6.45k)
_id: string (length 24)
class Bean(dict): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> dict_attrs = dir(dict) <NEW_LINE> data = dict([(k, getattr(self, k)) for k in dir(self) if not k.startswith('__') and k not in dict_attrs]) <NEW_LINE> for k, v in list(kwargs.items()): <NEW_LINE> <INDENT> if not isinstance(v, Conditional): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if not v.isValid(): <NEW_LINE> <INDENT> del kwargs[k] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> kwargs[k] = v.value <NEW_LINE> <DEDENT> <DEDENT> data.update(kwargs) <NEW_LINE> self.update(data) <NEW_LINE> if getattr(self, '__type__', None): <NEW_LINE> <INDENT> result = deepcopy(dict( type=self.__type__, result=self )) <NEW_LINE> for k in list(self.keys()): <NEW_LINE> <INDENT> del self[k] <NEW_LINE> <DEDENT> self.update(result)
An abstract class that extends a dictionary. Its purpose is to make it easier to generate dictionaries that build JSON payloads.
6259905f7d847024c075da4e
class Action(object): <NEW_LINE> <INDENT> def __init__(self, name=None, scripts=None): <NEW_LINE> <INDENT> self.swagger_types = { 'name': 'str', 'scripts': 'list[int]' } <NEW_LINE> self.attribute_map = { 'name': 'name', 'scripts': 'scripts' } <NEW_LINE> self._name = name <NEW_LINE> self._scripts = scripts <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def scripts(self): <NEW_LINE> <INDENT> return self._scripts <NEW_LINE> <DEDENT> @scripts.setter <NEW_LINE> def scripts(self, scripts): <NEW_LINE> <INDENT> self._scripts = scripts <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259905f8e7ae83300eea708
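The generated Action model above is typically constructed with keyword values and serialized through `to_dict()`/`to_str()`. A minimal usage sketch, assuming the surrounding module provides `iteritems` (e.g. from `six`) and `pformat` (from `pprint`) as the generated code expects; the field values are hypothetical:

```python
a = Action(name="nightly-build", scripts=[101, 102])
print(a.to_dict())   # {'name': 'nightly-build', 'scripts': [101, 102]}
print(a == Action(name="nightly-build", scripts=[101, 102]))  # True: __eq__ compares __dict__
```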
class ChangeDimensionResolution(Resolution): <NEW_LINE> <INDENT> MARKER = "~+" <NEW_LINE> def get_adapters(self): <NEW_LINE> <INDENT> return [adapters.DimensionPriorChange( self.conflict.dimension.name, self.conflict.old_prior, self.conflict.new_prior)] <NEW_LINE> <DEDENT> @property <NEW_LINE> def prefix(self): <NEW_LINE> <INDENT> return '{0}{1}'.format(standard_param_name(self.conflict.dimension.name), self.MARKER) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "{0}{1}".format(self.prefix, self.conflict.new_prior)
Representation of a changed prior resolution. .. seealso:: :class:`orion.core.evc.conflicts.Resolution`
6259905f0a50d4780f7068fc
class AESModeOfOperationECB(AESBlockModeOfOperation): <NEW_LINE> <INDENT> name = "Electronic Codebook (ECB)" <NEW_LINE> def encrypt(self, plaintext): <NEW_LINE> <INDENT> if len(plaintext) != 16: <NEW_LINE> <INDENT> raise ValueError('plaintext block must be 16 bytes') <NEW_LINE> <DEDENT> plaintext = bytearray(plaintext) <NEW_LINE> return bytes(bytearray(self._aes.encrypt(plaintext))) <NEW_LINE> <DEDENT> def decrypt(self, ciphertext): <NEW_LINE> <INDENT> if len(ciphertext) != 16: <NEW_LINE> <INDENT> raise ValueError('ciphertext block must be 16 bytes') <NEW_LINE> <DEDENT> ciphertext = bytearray(ciphertext) <NEW_LINE> return bytes(bytearray(self._aes.decrypt(ciphertext)))
AES Electronic Codebook Mode of Operation. o Block-cipher, so data must be padded to 16 byte boundaries Security Notes: o This mode is not recommended o Any two identical blocks produce identical encrypted values, exposing data patterns. (See the image of Tux on wikipedia) Also see: o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Electronic_codebook_.28ECB.29 o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.1
6259905f4a966d76dd5f056e
class historischerZeitraum (pyxb.binding.basis.complexTypeDefinition): <NEW_LINE> <INDENT> _TypeDefinition = None <NEW_LINE> _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY <NEW_LINE> _Abstract = False <NEW_LINE> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'historischerZeitraum') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/Users/flipsi/projects/opengever/28/opengever/disposition/ech0160/schemas/base.xsd', 14, 2) <NEW_LINE> _ElementMap = {} <NEW_LINE> _AttributeMap = {} <NEW_LINE> __von = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'von'), 'von', '__AbsentNamespace11_historischerZeitraum_von', False, pyxb.utils.utility.Location('/Users/flipsi/projects/opengever/28/opengever/disposition/ech0160/schemas/base.xsd', 16, 6), ) <NEW_LINE> von = property(__von.value, __von.set, None, None) <NEW_LINE> __bis = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, 'bis'), 'bis', '__AbsentNamespace11_historischerZeitraum_bis', False, pyxb.utils.utility.Location('/Users/flipsi/projects/opengever/28/opengever/disposition/ech0160/schemas/base.xsd', 17, 6), ) <NEW_LINE> bis = property(__bis.value, __bis.set, None, None) <NEW_LINE> _ElementMap.update({ __von.name() : __von, __bis.name() : __bis }) <NEW_LINE> _AttributeMap.update({ })
Complex type historischerZeitraum with content type ELEMENT_ONLY
6259905f15baa7234946360e
class SearchByNDCInputSet(InputSet): <NEW_LINE> <INDENT> def set_NDC(self, value): <NEW_LINE> <INDENT> super(SearchByNDCInputSet, self)._set_input('NDC', value) <NEW_LINE> <DEDENT> def set_OutputFormat(self, value): <NEW_LINE> <INDENT> super(SearchByNDCInputSet, self)._set_input('OutputFormat', value)
An InputSet with methods appropriate for specifying the inputs to the SearchByNDC Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
6259905f627d3e7fe0e08506
class ModelCollectionAPI(BaseModelAPI): <NEW_LINE> <INDENT> allowed_methods = ["GET", "POST"] <NEW_LINE> @overrides(APIView) <NEW_LINE> def check_permissions(self, request): <NEW_LINE> <INDENT> if request.method == "POST": <NEW_LINE> <INDENT> if not p.has_perms_shortcut(self.user_object, self.model, "c"): <NEW_LINE> <INDENT> raise e.PermissionDenied("You have no permission to perform POST.") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get(self, *args, **kwargs): <NEW_LINE> <INDENT> queryset = self.filter_queryset(self.get_queryset()) <NEW_LINE> page = self.paginator.paginate_queryset(queryset, self.request, view=self) <NEW_LINE> return self.paginator.get_paginated_response([obj.json() for obj in page]) <NEW_LINE> <DEDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> serializer = self.get_serializer( data=self.request_data, context={"request": request}, ) <NEW_LINE> serializer.is_valid(raise_exception=True) <NEW_LINE> for field_name, field_instance in serializer.validated_data.items(): <NEW_LINE> <INDENT> model_field = self.get_model_field(field_name) <NEW_LINE> if model_field and isinstance(model_field, ForeignKey) and field_instance: <NEW_LINE> <INDENT> related_name = model_field.related_query_name() <NEW_LINE> if not p.has_perms_shortcut( self.user_object, field_instance, "w", field_name=related_name ): <NEW_LINE> <INDENT> raise APIPermissionDenied(field_instance, "w", field=related_name) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> instance = serializer.save() <NEW_LINE> if p.has_perms_shortcut(self.user_object, instance, "r"): <NEW_LINE> <INDENT> return Response( self.get_serializer( instance=instance, context={"request": request}, ).data, status=201, ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response( { "success": True, "info": "The object has been created but you have no permission to view it.", }, status=201, )
handle request such as GET/POST /products
6259905f8e7ae83300eea709
class CourseContentVideos(models.Model): <NEW_LINE> <INDENT> course = models.ForeignKey(Courses, on_delete=models.CASCADE, related_name="course_content_videos") <NEW_LINE> course_heading = models.ForeignKey(CourseContentHeadings, on_delete=models.CASCADE, related_name="course_heading_videos") <NEW_LINE> video_name = models.CharField(max_length=700, blank=False, null=False, ) <NEW_LINE> video_slug = models.SlugField(max_length=700, editable=False) <NEW_LINE> video_link = models.CharField(max_length=250, null=True, blank=True, default='no-link') <NEW_LINE> is_active = models.BooleanField(default=True) <NEW_LINE> created_at = models.DateTimeField(auto_now_add=True) <NEW_LINE> modified_at = models.DateTimeField(auto_now=True) <NEW_LINE> video_uid = models.UUIDField(editable=False, null=True, blank=True, default=uuid.uuid4, unique=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return '{} - {} - {}'.format(self.video_name, self.course_heading.topic_name, self.course.course_name) <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> self.video_slug = slugify(self.video_name) <NEW_LINE> super(CourseContentVideos, self).save(*args, **kwargs) <NEW_LINE> <DEDENT> def get_video_url(self): <NEW_LINE> <INDENT> cat = self.course.course_category.category_name_slug <NEW_LINE> crs = self.course.course_name_slug <NEW_LINE> link = self.video_uid <NEW_LINE> return reverse('courses:course_videos', args=[cat, crs, link]) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> constraints = [ models.UniqueConstraint(fields=['course', 'course_heading', 'video_name'], name='course_video_unique_to_heading') ] <NEW_LINE> verbose_name = 'Course Content Video' <NEW_LINE> verbose_name_plural = 'Course Content Videos'
This model is to store the course videos
6259905f56b00c62f0fb3f47
class IssueConverter(UUIDConverter): <NEW_LINE> <INDENT> def to_python(self, value): <NEW_LINE> <INDENT> return Issue.get(value)
Performs URL parameter validation against a UUID. Example: @app.route('/<projectid:uid>')
6259905f009cb60464d02bb2
class MappingTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.U = Uniform([[0, 2], [1, 3]]) <NEW_LINE> self.M = MultivariateNormal([[self.U[1], self.U[0]], [[1, 0], [0, 1]]]) <NEW_LINE> <DEDENT> def test(self): <NEW_LINE> <INDENT> self.assertTrue(self.M.get_input_connector().get_model(0) == self.U) <NEW_LINE> self.assertTrue(self.M.get_input_connector().get_model(1) == self.U)
Tests whether the mapping created during initialization is done correctly.
6259905f435de62698e9d482
class MonitoredSession(_MonitoredSession): <NEW_LINE> <INDENT> def __init__(self, session_creator=None, hooks=None): <NEW_LINE> <INDENT> super(MonitoredSession, self).__init__( session_creator, hooks, should_recover=True)
Session-like object that handles initialization, recovery and hooks. Example usage: ```python saver_hook = CheckpointSaverHook(...) summary_hook = SummaryHook(...) with MonitoredSession(session_creator=ChiefSessionCreator(...), hooks=[saver_hook, summary_hook]) as sess: while not sess.should_stop(): sess.run(train_op) ``` Initialization: At creation time the monitored session does the following things in the given order: * calls `hook.begin()` for each given hook * finalizes the graph via `scaffold.finalize()` * creates the session * initializes the model via initialization ops provided by `Scaffold` * restores variables if a checkpoint exists * launches queue runners Run: When `run()` is called, the monitored session does the following things: * calls `hook.before_run()` * calls TensorFlow `session.run()` with merged fetches and feed_dict * calls `hook.after_run()` * returns the result of `session.run()` asked for by the user * if `AbortedError` occurs, it recovers or reinitializes the session before executing the `run()` call again Exit: At `close()`, the monitored session does the following things in order: * calls `hook.end()` * closes the queue runners and the session * suppresses the `OutOfRange` error, which indicates that all inputs have been processed, if the monitored_session is used as a context How to set `tf.Session` arguments: * In most cases you can set session arguments as follows: ```python MonitoredSession( session_creator=ChiefSessionCreator(master=..., config=...)) ``` * In a distributed setting, for a non-chief worker, you can use the following: ```python MonitoredSession( session_creator=WorkerSessionCreator(master=..., config=...)) ``` See `MonitoredTrainingSession` for an example usage based on chief or worker. Args: session_creator: A factory object to create the session. Typically a `ChiefSessionCreator` which is the default one. hooks: An iterable of `SessionRunHook` objects. Returns: A MonitoredSession object.
6259905f2ae34c7f260ac763
class ReaderAbstract(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.consumer = None <NEW_LINE> <DEDENT> def read(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._connect() <NEW_LINE> for msg in self._handle_read(): <NEW_LINE> <INDENT> yield msg <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> logging.info("Performing cleanup before stopping.") <NEW_LINE> self._shutdown() <NEW_LINE> <DEDENT> <DEDENT> def _connect(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def _shutdown(self): <NEW_LINE> <INDENT> if self.consumer: <NEW_LINE> <INDENT> self.consumer.close() <NEW_LINE> <DEDENT> <DEDENT> def _handle_read(self): <NEW_LINE> <INDENT> raise NotImplementedError
Abstract consumer
6259905fb7558d5895464a6b
class Port(Net): <NEW_LINE> <INDENT> def __init__(self, attrs): <NEW_LINE> <INDENT> Net.__init__(self, attrs) <NEW_LINE> self.__dir = self.get("direction") <NEW_LINE> if self.__dir != "in" and self.__dir != "out": <NEW_LINE> <INDENT> raise Exception("Bad port direction") <NEW_LINE> <DEDENT> self.__net = None <NEW_LINE> <DEDENT> direction = property(lambda self: self.__dir) <NEW_LINE> net = property(lambda self: self.__net) <NEW_LINE> def link(self, net): <NEW_LINE> <INDENT> self.__net = net
Defines a Verilog Module Port
6259905f8e71fb1e983bd147
class PostDetail(RetrieveAPIView): <NEW_LINE> <INDENT> queryset = Post.objects.all() <NEW_LINE> serializer_class = PostDetailSerializer <NEW_LINE> permission_classes = [AllowAny]
This view handles API GET requests for the details of posts. Attributes: queryset: Query that holds all of the Post objects serializer_class: The PostDetailSerializer is used permission_classes: Anyone is allowed to access Post details, even unauthenticated users
6259905ff548e778e596cc05
class NotFoundResponse(Response): <NEW_LINE> <INDENT> status = 51 <NEW_LINE> def __init__(self, reason=None): <NEW_LINE> <INDENT> if not reason: <NEW_LINE> <INDENT> reason = "NOT FOUND" <NEW_LINE> <DEDENT> self.reason = reason <NEW_LINE> <DEDENT> def __meta__(self): <NEW_LINE> <INDENT> meta = f"{self.status} {self.reason}" <NEW_LINE> return bytes(meta, encoding="utf-8")
Not Found Error response. Status code: 51.
6259905f097d151d1a2c26eb
class Solution: <NEW_LINE> <INDENT> def isSubsequence(self, s: str, t: str) -> bool: <NEW_LINE> <INDENT> position = [-1] <NEW_LINE> for i in s: <NEW_LINE> <INDENT> newPosition = t.find(i, position[-1] + 1) <NEW_LINE> if newPosition == -1 or newPosition >= len(t): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> position.append(newPosition) <NEW_LINE> <DEDENT> <DEDENT> if position == sorted(position): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
Grinding a couple of easy problems to blow off steam. Basic idea: look up the position of each character of s in t; if every character is found, the positions must be in increasing order, otherwise return False. Runtime beats: 82% Memory beats: 88% Time spent: less than 6 mins
6259905f8da39b475be04864
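A quick check of the `str.find`-based approach the note above describes, using the Solution class from this row:

```python
sol = Solution()
print(sol.isSubsequence("abc", "ahbgdc"))  # True:  matched at positions 0, 2, 5, which are increasing
print(sol.isSubsequence("axc", "ahbgdc"))  # False: 'x' is never found after position 0
```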
class RateCardOperations(object): <NEW_LINE> <INDENT> models = _models <NEW_LINE> def __init__(self, client, config, serializer, deserializer): <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self._config = config <NEW_LINE> <DEDENT> def get( self, filter, **kwargs ): <NEW_LINE> <INDENT> cls = kwargs.pop('cls', None) <NEW_LINE> error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } <NEW_LINE> error_map.update(kwargs.pop('error_map', {})) <NEW_LINE> api_version = "2015-06-01-preview" <NEW_LINE> accept = "application/json, text/json" <NEW_LINE> url = self.get.metadata['url'] <NEW_LINE> path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') <NEW_LINE> query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') <NEW_LINE> header_parameters = {} <NEW_LINE> header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') <NEW_LINE> request = self._client.get(url, query_parameters, header_parameters) <NEW_LINE> pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) <NEW_LINE> response = pipeline_response.http_response <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> map_error(status_code=response.status_code, response=response, error_map=error_map) <NEW_LINE> error = self._deserialize(_models.ErrorResponse, response) <NEW_LINE> raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) <NEW_LINE> <DEDENT> deserialized = self._deserialize('ResourceRateCardInfo', pipeline_response) <NEW_LINE> if cls: <NEW_LINE> <INDENT> return cls(pipeline_response, deserialized, {}) <NEW_LINE> <DEDENT> return deserialized <NEW_LINE> <DEDENT> get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Commerce/RateCard'}
RateCardOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.commerce.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer.
6259905f4a966d76dd5f0570
class Spirit(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=200, unique=True) <NEW_LINE> slug = models.SlugField(max_length=200, unique=True) <NEW_LINE> description = models.TextField("Kommentar", blank=True) <NEW_LINE> age = models.PositiveSmallIntegerField("Alter", blank=True, null=True, help_text='In Jahren') <NEW_LINE> web = models.URLField("Website", max_length=255, blank=True) <NEW_LINE> volume = models.PositiveSmallIntegerField("Vol. %", help_text="Alkoholgehalt") <NEW_LINE> distillery = models.ForeignKey( Distillery, on_delete=models.SET_NULL, verbose_name=u'Destille', null=True ) <NEW_LINE> spirittype = models.ForeignKey( Spirittype, on_delete=models.SET_NULL, verbose_name=u'Typ', null=True ) <NEW_LINE> public = models.BooleanField(default=True) <NEW_LINE> created = models.DateTimeField(auto_now_add=True, editable=False) <NEW_LINE> modified = models.DateTimeField(auto_now=True, editable=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Whisky' <NEW_LINE> verbose_name_plural = 'Whiskies' <NEW_LINE> ordering = ['name', 'age'] <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> if self.age: <NEW_LINE> <INDENT> age = ' (' + str(self.age) + ' Jahre)' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> age = '' <NEW_LINE> <DEDENT> return self.distillery.name + ' ' + self.name + age
A specific spirit, e.g. Ardbeg Ten, Balvenie Doublewood, etc.
6259905f7cff6e4e811b70c2
class ir_model_relation(Model): <NEW_LINE> <INDENT> _name = 'ir.model.relation' <NEW_LINE> _columns = { 'name': fields.char('Relation Name', required=True, select=1, help="PostgreSQL table name implementing a many2many relation."), 'model': fields.many2one('ir.model', string='Model', required=True, select=1), 'module': fields.many2one('ir.module.module', string='Module', required=True, select=1), 'date_update': fields.datetime('Update Date'), 'date_init': fields.datetime('Initialization Date') } <NEW_LINE> def _module_data_uninstall(self, cr, uid, ids, context=None): <NEW_LINE> <INDENT> if uid != SUPERUSER_ID and not self.pool['ir.model.access'].check_groups(cr, uid, "base.group_system"): <NEW_LINE> <INDENT> raise AccessError(_('Administrator access is required to uninstall a module')) <NEW_LINE> <DEDENT> ids_set = set(ids) <NEW_LINE> to_drop_table = [] <NEW_LINE> ids.sort() <NEW_LINE> ids.reverse() <NEW_LINE> for data in self.browse(cr, uid, ids, context): <NEW_LINE> <INDENT> model = data.model <NEW_LINE> name = gce.tools.ustr(data.name) <NEW_LINE> cr.execute("""SELECT id from ir_model_relation where name = %s""", (data.name,)) <NEW_LINE> external_ids = [x[0] for x in cr.fetchall()] <NEW_LINE> if set(external_ids)-ids_set: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> cr.execute("SELECT 1 FROM information_schema.tables WHERE table_name=%s", (name,)) <NEW_LINE> if cr.fetchone() and not name in to_drop_table: <NEW_LINE> <INDENT> to_drop_table.append(name) <NEW_LINE> <DEDENT> <DEDENT> self.unlink(cr, uid, ids, context) <NEW_LINE> for table in to_drop_table: <NEW_LINE> <INDENT> cr.execute('DROP TABLE %s CASCADE'% table,) <NEW_LINE> _logger.info('Dropped table %s', table) <NEW_LINE> <DEDENT> cr.commit()
This model tracks PostgreSQL tables used to implement gce many2many relations.
6259905fa8ecb03325872895
class JsonEncoder(json.JSONEncoder): <NEW_LINE> <INDENT> def default(self, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> get_state = value.__getstate__ <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return super(JsonEncoder, self).default(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return get_state()
Customizable JSON encoder. If the object implements __getstate__, then that method is invoked, and its result is serialized instead of the object itself.
6259905f16aa5153ce401b5a
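As the docstring above says, any object exposing `__getstate__` is serialized through that method. A minimal sketch; the `Point` class here is hypothetical:

```python
import json

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

    def __getstate__(self):
        # JsonEncoder.default() calls this and serializes the returned dict
        return {"x": self.x, "y": self.y}

print(json.dumps({"origin": Point(0, 0)}, cls=JsonEncoder))
# -> {"origin": {"x": 0, "y": 0}}
```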
class Grid(object): <NEW_LINE> <INDENT> def __init__(self, nx=10, ny=10, xmin=0, xmax=10, ymin=0, ymax=10): <NEW_LINE> <INDENT> super(Grid, self).__init__() <NEW_LINE> self.nx = nx <NEW_LINE> self.ny = ny <NEW_LINE> self.xmin = xmin <NEW_LINE> self.xmax = xmax <NEW_LINE> self.ymin = ymin <NEW_LINE> self.ymax = ymax <NEW_LINE> self.dx = (xmax - xmin) / (nx - 1) <NEW_LINE> self.dy = (ymax - ymin) / (ny - 1) <NEW_LINE> self.u = np.zeros((nx, ny)) <NEW_LINE> self.old_u = self.u.copy() <NEW_LINE> <DEDENT> def compute_error(self): <NEW_LINE> <INDENT> v = (self.u - self.old_u).flat <NEW_LINE> return np.sqrt(np.dot(v, v)) <NEW_LINE> <DEDENT> def laplace(self): <NEW_LINE> <INDENT> dx2, dy2 = self.dx**2, self.dy**2 <NEW_LINE> dnr = 2 * (dx2 + dy2) <NEW_LINE> u = self.u <NEW_LINE> self.old_u = u.copy() <NEW_LINE> u[1:-1, 1:-1] = np.where(u[1:-1, 1:-1] < 1, ((u[0:-2, 1:-1] + u[2:, 1:-1]) * dy2 + (u[1:-1, 0:-2] + u[1:-1, 2:]) * dx2) * 1/dnr, 1) <NEW_LINE> return self.compute_error() <NEW_LINE> <DEDENT> def solve(self, n_iter=0, eps=1.0e-3): <NEW_LINE> <INDENT> err = self.laplace() <NEW_LINE> c = 0 <NEW_LINE> while err > eps: <NEW_LINE> <INDENT> if n_iter and c >= n_iter: <NEW_LINE> <INDENT> return err <NEW_LINE> <DEDENT> err = self.laplace() <NEW_LINE> c += 1 <NEW_LINE> <DEDENT> return c <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.u.fill(0) <NEW_LINE> return self
docstring for Grid
6259905f56b00c62f0fb3f49
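The Grid row above implements an iterative Laplace solver on a regular grid. A minimal usage sketch, assuming `numpy` is imported as `np` in the module; the boundary condition and tolerance are hypothetical:

```python
g = Grid(nx=50, ny=50)
g.u[0, :] = 1.0             # fix one edge of the domain to 1
sweeps = g.solve(eps=1e-4)  # iterate laplace() until the update norm drops below eps
print(sweeps, g.compute_error())
```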
class ReplayBuffer: <NEW_LINE> <INDENT> def __init__(self, action_size, buffer_size, batch_size, seed): <NEW_LINE> <INDENT> self.action_size = action_size <NEW_LINE> self.memory = deque(maxlen=buffer_size) <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.seed = random.seed(seed) <NEW_LINE> self.experience = namedtuple("Experience", field_names=["state", "action", "reward", "next_state", "done"]) <NEW_LINE> <DEDENT> def add(self, state, action, reward, next_state, done): <NEW_LINE> <INDENT> e = self.experience(state, action, reward, next_state, done) <NEW_LINE> self.memory.append(e) <NEW_LINE> <DEDENT> def sample(self): <NEW_LINE> <INDENT> experiences = random.sample(self.memory, k=self.batch_size) <NEW_LINE> states = torch.from_numpy(np.vstack([e.state for e in experiences if e is not None])).float().to(device) <NEW_LINE> actions = torch.from_numpy(np.vstack([e.action for e in experiences if e is not None])).float().to(device) <NEW_LINE> rewards = torch.from_numpy(np.vstack([e.reward for e in experiences if e is not None])).float().to(device) <NEW_LINE> next_states = torch.from_numpy(np.vstack([e.next_state for e in experiences if e is not None])).float().to(device) <NEW_LINE> dones = torch.from_numpy(np.vstack([e.done for e in experiences if e is not None]).astype(np.uint8)).float().to(device) <NEW_LINE> return (states, actions, rewards, next_states, dones) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.memory)
Fixed-size buffer to store experience tuples.
6259905fbaa26c4b54d5091e
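Typical use of the fixed-size buffer above: push transitions during interaction and sample random minibatches as tensors. A minimal sketch, assuming the surrounding module already imports `random`, `torch`, `deque`/`namedtuple`, and defines `device` (e.g. `torch.device('cpu')`); the state/action shapes are hypothetical:

```python
import numpy as np

buf = ReplayBuffer(action_size=2, buffer_size=10000, batch_size=4, seed=0)
for _ in range(16):
    buf.add(state=np.random.rand(8), action=np.random.rand(2),
            reward=1.0, next_state=np.random.rand(8), done=False)

states, actions, rewards, next_states, dones = buf.sample()
print(states.shape)  # torch.Size([4, 8])
```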
class JSONCLI(CLI): <NEW_LINE> <INDENT> name = 'json' <NEW_LINE> description = "JSON output options for 'run' command" <NEW_LINE> def configure(self, parser): <NEW_LINE> <INDENT> run_subcommand_parser = parser.subcommands.choices.get('run', None) <NEW_LINE> if run_subcommand_parser is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> settings.add_argparser_to_option( namespace='job.run.result.json.output', action=FileOrStdoutAction, metavar='FILE', parser=run_subcommand_parser, long_arg='--json') <NEW_LINE> settings.add_argparser_to_option( namespace='job.run.result.json.enabled', choices=('on', 'off'), parser=run_subcommand_parser, long_arg='--json-job-result') <NEW_LINE> <DEDENT> def run(self, config): <NEW_LINE> <INDENT> pass
JSON output
6259905f99cbb53fe683255e
class Customer(object): <NEW_LINE> <INDENT> def __init__(self, first_name, last_name, email, password): <NEW_LINE> <INDENT> self.first_name = first_name <NEW_LINE> self.last_name = last_name <NEW_LINE> self.email = email <NEW_LINE> self.hash_password = hash(password) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Customer: {first}, {last}, {email}, {password}>".format( first=self.first_name, last=self.last_name, email=self.email, password=self.hash_password) <NEW_LINE> <DEDENT> def is_correct_password(self, password): <NEW_LINE> <INDENT> return hash(password) == self.hash_password
Ubermelon customer.
6259905f07f4c71912bb0abb
class ApplicationGatewayHttpListener(SubResource): <NEW_LINE> <INDENT> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'frontend_ip_configuration': {'key': 'properties.frontendIPConfiguration', 'type': 'SubResource'}, 'frontend_port': {'key': 'properties.frontendPort', 'type': 'SubResource'}, 'protocol': {'key': 'properties.protocol', 'type': 'str'}, 'host_name': {'key': 'properties.hostName', 'type': 'str'}, 'ssl_certificate': {'key': 'properties.sslCertificate', 'type': 'SubResource'}, 'require_server_name_indication': {'key': 'properties.requireServerNameIndication', 'type': 'bool'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'custom_error_configurations': {'key': 'properties.customErrorConfigurations', 'type': '[ApplicationGatewayCustomError]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(ApplicationGatewayHttpListener, self).__init__(**kwargs) <NEW_LINE> self.name = kwargs.get('name', None) <NEW_LINE> self.etag = kwargs.get('etag', None) <NEW_LINE> self.type = kwargs.get('type', None) <NEW_LINE> self.frontend_ip_configuration = kwargs.get('frontend_ip_configuration', None) <NEW_LINE> self.frontend_port = kwargs.get('frontend_port', None) <NEW_LINE> self.protocol = kwargs.get('protocol', None) <NEW_LINE> self.host_name = kwargs.get('host_name', None) <NEW_LINE> self.ssl_certificate = kwargs.get('ssl_certificate', None) <NEW_LINE> self.require_server_name_indication = kwargs.get('require_server_name_indication', None) <NEW_LINE> self.provisioning_state = kwargs.get('provisioning_state', None) <NEW_LINE> self.custom_error_configurations = kwargs.get('custom_error_configurations', None)
Http listener of an application gateway. :param id: Resource ID. :type id: str :param name: Name of the HTTP listener that is unique within an Application Gateway. :type name: str :param etag: A unique read-only string that changes whenever the resource is updated. :type etag: str :param type: Type of the resource. :type type: str :param frontend_ip_configuration: Frontend IP configuration resource of an application gateway. :type frontend_ip_configuration: ~azure.mgmt.network.v2019_06_01.models.SubResource :param frontend_port: Frontend port resource of an application gateway. :type frontend_port: ~azure.mgmt.network.v2019_06_01.models.SubResource :param protocol: Protocol of the HTTP listener. Possible values include: "Http", "Https". :type protocol: str or ~azure.mgmt.network.v2019_06_01.models.ApplicationGatewayProtocol :param host_name: Host name of HTTP listener. :type host_name: str :param ssl_certificate: SSL certificate resource of an application gateway. :type ssl_certificate: ~azure.mgmt.network.v2019_06_01.models.SubResource :param require_server_name_indication: Applicable only if protocol is https. Enables SNI for multi-hosting. :type require_server_name_indication: bool :param provisioning_state: Provisioning state of the HTTP listener resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :type provisioning_state: str :param custom_error_configurations: Custom error configurations of the HTTP listener. :type custom_error_configurations: list[~azure.mgmt.network.v2019_06_01.models.ApplicationGatewayCustomError]
6259905f2ae34c7f260ac764
class HipChatApi(object): <NEW_LINE> <INDENT> def __init__(self, auth_token, name=None, gets=GETS, posts=POSTS, base_url=BASE_URL, api_version=API_VERSION): <NEW_LINE> <INDENT> self._auth_token = auth_token <NEW_LINE> self._name = name <NEW_LINE> self._gets = gets <NEW_LINE> self._posts = posts <NEW_LINE> self._base_url = base_url <NEW_LINE> self._api_version = api_version <NEW_LINE> <DEDENT> def _request(self, method, params={}): <NEW_LINE> <INDENT> if 'auth_token' not in params: <NEW_LINE> <INDENT> params['auth_token'] = self._auth_token <NEW_LINE> <DEDENT> url = self._base_url % { 'version': self._api_version, 'section': self._name, 'method': method } <NEW_LINE> if method in self._gets[self._name]: <NEW_LINE> <INDENT> r = requests.get(url, params=params) <NEW_LINE> <DEDENT> elif method in self._posts[self._name]: <NEW_LINE> <INDENT> r = requests.post(url, data=params) <NEW_LINE> <DEDENT> return json.loads(r.content) <NEW_LINE> <DEDENT> def __getattr__(self, attr_name): <NEW_LINE> <INDENT> if self._name is None: <NEW_LINE> <INDENT> return super(HipChatApi, self).__self_class__( auth_token=self._auth_token, name=attr_name ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> return self._request(attr_name, *args, **kwargs) <NEW_LINE> <DEDENT> return wrapper
Lightweight Hipchat.com REST API wrapper
6259905f4e4d562566373a85
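The wrapper above builds requests dynamically: the first attribute access picks the API section and the second becomes the method name passed to `_request`. A minimal sketch, assuming the module-level `GETS` route table lists `'list'` under `'rooms'` and that the token is valid (both hypothetical here):

```python
api = HipChatApi(auth_token="my-secret-token")
rooms = api.rooms.list()   # __getattr__ -> section 'rooms', then method 'list' -> GET request
print(rooms)
```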
class FileSystemAccess: <NEW_LINE> <INDENT> def exists(self, path): <NEW_LINE> <INDENT> return os.path.exists(str(path)) <NEW_LINE> <DEDENT> def isfile(self, path): <NEW_LINE> <INDENT> isFile = os.path.isfile(str(path)) <NEW_LINE> isLink = os.path.islink(str(path)) <NEW_LINE> return isFile and not isLink <NEW_LINE> <DEDENT> def isdir(self, path): <NEW_LINE> <INDENT> return os.path.isdir(str(path)) <NEW_LINE> <DEDENT> def mkdir(self, path): <NEW_LINE> <INDENT> return os.mkdir(str(path)) <NEW_LINE> <DEDENT> def mkdirs(self, path): <NEW_LINE> <INDENT> return os.makedirs(str(path), 0o777, True) <NEW_LINE> <DEDENT> def listdir(self, path): <NEW_LINE> <INDENT> return os.listdir(str(path)) <NEW_LINE> <DEDENT> def rmtree(self, path): <NEW_LINE> <INDENT> system = platform.system() <NEW_LINE> if system == 'Windows': <NEW_LINE> <INDENT> for root, dirs, files in os.walk(str(path), topdown=False): <NEW_LINE> <INDENT> for name in files: <NEW_LINE> <INDENT> filename = os.path.join(root, name) <NEW_LINE> os.chmod(filename, stat.S_IWUSR) <NEW_LINE> os.remove(filename) <NEW_LINE> <DEDENT> for name in dirs: <NEW_LINE> <INDENT> os.rmdir(os.path.join(root, name)) <NEW_LINE> <DEDENT> <DEDENT> os.rmdir(str(path)) <NEW_LINE> <DEDENT> elif system == 'Linux': <NEW_LINE> <INDENT> shutil.rmtree(str(path)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception('Unknown OS') <NEW_LINE> <DEDENT> <DEDENT> def copyfile(self, path_from, path_to): <NEW_LINE> <INDENT> shutil.copyfile(str(path_from), str(path_to)) <NEW_LINE> <DEDENT> def move(self, path_from, path_to): <NEW_LINE> <INDENT> shutil.move(str(path_from), str(path_to)) <NEW_LINE> <DEDENT> def remove(self, path): <NEW_LINE> <INDENT> os.remove(str(path)) <NEW_LINE> <DEDENT> def copytree(self, src, dst, symlinks = False, ignore = None): <NEW_LINE> <INDENT> dst = str(dst) <NEW_LINE> src = str(src) <NEW_LINE> if not os.path.exists(dst): <NEW_LINE> <INDENT> os.makedirs(dst) <NEW_LINE> shutil.copystat(src, dst) <NEW_LINE> <DEDENT> lst = os.listdir(src) <NEW_LINE> if ignore: <NEW_LINE> <INDENT> excl = ignore(src, lst) <NEW_LINE> lst = [x for x in lst if x not in excl] <NEW_LINE> <DEDENT> for item in lst: <NEW_LINE> <INDENT> s = os.path.join(src, item) <NEW_LINE> d = os.path.join(dst, item) <NEW_LINE> if symlinks and os.path.islink(s): <NEW_LINE> <INDENT> if os.path.lexists(d): <NEW_LINE> <INDENT> os.remove(d) <NEW_LINE> <DEDENT> os.symlink(os.readlink(s), d) <NEW_LINE> try: <NEW_LINE> <INDENT> st = os.lstat(s) <NEW_LINE> mode = stat.S_IMODE(st.st_mode) <NEW_LINE> os.lchmod(d, mode) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> elif os.path.isdir(s): <NEW_LINE> <INDENT> self.copytree(s, d, symlinks, ignore) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> shutil.copy2(s, d) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def touch_file(self, file_path): <NEW_LINE> <INDENT> file_string = str(file_path) <NEW_LINE> with open(file_string, 'a'): <NEW_LINE> <INDENT> os.utime(file_string, None) <NEW_LINE> <DEDENT> <DEDENT> def addfile(self, path, content): <NEW_LINE> <INDENT> with open(path, 'w') as f: <NEW_LINE> <INDENT> f.write(content)
This class wraps file-system access so that a fake file system can be substituted in tests.
6259905f379a373c97d9a6a2
class BibCatalogSystemDummy(object): <NEW_LINE> <INDENT> def check_system(self, uid=None): <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def ticket_search(self, uid, recordid=-1, subject="", text="", creator="", owner="", date_from="", date_until="", status="", priority="", queue=""): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def ticket_submit(self, uid=None, subject="", recordid=-1, text="", queue="", priority="", owner="", requestor=""): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ticket_assign(self, uid, ticketid, to_user): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ticket_steal(self, uid, ticketid): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ticket_set_attribute(self, uid, ticketid, attribute, new_value): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ticket_get_attribute(self, uid, ticketid, attribute): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ticket_get_info(self, uid, ticketid, attributes = None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_queues(self, uid): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ticket_comment(self, uid, ticketid, comment): <NEW_LINE> <INDENT> pass
A dummy class for ticket support.
6259905f2ae34c7f260ac765
class MozillaURLProvider(Processor): <NEW_LINE> <INDENT> description = __doc__ <NEW_LINE> input_variables = { "product_name": { "required": True, "description": "Product to fetch URL for. One of 'firefox', 'thunderbird'.", }, "release": { "required": False, "default": 'esr-latest', "description": ( "Which release to download. Examples: 'latest', " "'esr-latest', 'beta-latest'. Defaults to 'latest'"), }, "locale": { "required": False, "default": 'en-US', "description": "Which localization to download, default is 'en-US'.", }, "base_url": { "required": False, "description": "Default is '%s." % MOZ_BASE_URL, }, } <NEW_LINE> output_variables = { "url": { "description": "URL to the latest Mozilla product release.", }, } <NEW_LINE> def get_mozilla_dmg_url(self, base_url, product_name, release, locale): <NEW_LINE> <INDENT> locale = locale.replace("_", "-") <NEW_LINE> if release == 'latest-esr': <NEW_LINE> <INDENT> release = 'esr-latest' <NEW_LINE> <DEDENT> if release == 'latest-beta': <NEW_LINE> <INDENT> release = 'beta-latest' <NEW_LINE> <DEDENT> return base_url % (product_name, release, locale) <NEW_LINE> <DEDENT> def main(self): <NEW_LINE> <INDENT> product_name = self.env["product_name"] <NEW_LINE> release = self.env.get("release", "esr-latest") <NEW_LINE> locale = self.env.get("locale", "fr-FR") <NEW_LINE> base_url = self.env.get("base_url", MOZ_BASE_URL) <NEW_LINE> self.env["url"] = self.get_mozilla_dmg_url( base_url, product_name, release, locale) <NEW_LINE> self.output("Found URL %s" % self.env["url"])
Provides URL to the latest Firefox release.
6259905f8a43f66fc4bf380c
class GuestCommentForm(forms.ModelForm): <NEW_LINE> <INDENT> captcha = CaptchaField(label='Введите текст с картинки', error_messages={'invalid': 'Неправильный текст'}) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Comment <NEW_LINE> exclude = ('is_active',) <NEW_LINE> widgets = {'bb': forms.HiddenInput}
Form for adding a comment as a guest.
6259905f3617ad0b5ee077cb
class Dataset(object): <NEW_LINE> <INDENT> def __init__(self, dim, contents=None): <NEW_LINE> <INDENT> assert type(dim) == int and dim > 0 <NEW_LINE> assert is_point_list(contents) <NEW_LINE> self._dimension = dim <NEW_LINE> if contents is None: <NEW_LINE> <INDENT> self._contents = [] <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> self._contents = [] <NEW_LINE> for x in contents: <NEW_LINE> <INDENT> self._contents.append(x[:]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def getDimension(self): <NEW_LINE> <INDENT> return self._dimension <NEW_LINE> <DEDENT> def getSize(self): <NEW_LINE> <INDENT> return len(self._contents) <NEW_LINE> <DEDENT> def getContents(self): <NEW_LINE> <INDENT> return self._contents <NEW_LINE> <DEDENT> def getPoint(self, i): <NEW_LINE> <INDENT> assert type(i)==int and 0 <= i <= self.getSize()-1 <NEW_LINE> copypoint = self._contents[i][:] <NEW_LINE> return copypoint <NEW_LINE> <DEDENT> def addPoint(self,point): <NEW_LINE> <INDENT> assert is_point(point) <NEW_LINE> newpoint = point[:] <NEW_LINE> self._contents.append(newpoint) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self._contents) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self.__class__) + str(self)
Instance is a dataset for k-means clustering. The data is stored as a list of list of numbers (ints or floats). Each component list is a data point. Instance Attributes: _dimension: the point dimension for this dataset [int > 0. Value never changes after initialization] _contents: the dataset contents [a 2D list of numbers (float or int), possibly empty]: ADDITIONAL INVARIANT: The number of columns in _contents is equal to _dimension. That is, for every item _contents[i] in the list _contents, len(_contents[i]) == dimension. None of the attributes should be accessed directly outside of the class Dataset (e.g. in the methods of class Cluster or KMeans). Instead, this class has getter and setter style methods (with the appropriate preconditions) for modifying these values.
6259905f8e71fb1e983bd149
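Basic use of the k-means Dataset above, assuming the module's `is_point`/`is_point_list` helpers accept plain lists of floats (an assumption; they are not shown in this row):

```python
ds = Dataset(2, [[0.0, 1.0], [2.5, 3.5]])
ds.addPoint([4.0, 5.0])
print(ds.getSize())    # 3
print(ds.getPoint(0))  # [0.0, 1.0], returned as a copy rather than an alias
```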
class User(models.Model): <NEW_LINE> <INDENT> username = models.CharField(max_length=50, primary_key=True) <NEW_LINE> first_name = models.CharField(max_length=30) <NEW_LINE> last_name = models.CharField(max_length=40) <NEW_LINE> gonzaga_email = models.EmailField(unique=True) <NEW_LINE> pref_email = models.EmailField(null=True,blank=True) <NEW_LINE> phone = models.CharField(max_length=11,null=True,blank=True) <NEW_LINE> recent_post_deletion = models.BooleanField(default=False)
User schema class model. Holds information on the user and the user's contact information.
6259905f4f6381625f199fe2
class _NumericSubject(_ComparableSubject): <NEW_LINE> <INDENT> def IsZero(self): <NEW_LINE> <INDENT> if self._actual != 0: <NEW_LINE> <INDENT> self._FailWithProposition('is zero') <NEW_LINE> <DEDENT> <DEDENT> def IsNonZero(self): <NEW_LINE> <INDENT> if self._actual == 0: <NEW_LINE> <INDENT> self._FailWithProposition('is non-zero') <NEW_LINE> <DEDENT> <DEDENT> def IsFinite(self): <NEW_LINE> <INDENT> if math.isinf(self._actual) or math.isnan(self._actual): <NEW_LINE> <INDENT> self._FailWithSubject('should have been finite') <NEW_LINE> <DEDENT> <DEDENT> def IsPositiveInfinity(self): <NEW_LINE> <INDENT> self.IsEqualTo(POSITIVE_INFINITY) <NEW_LINE> <DEDENT> def IsNegativeInfinity(self): <NEW_LINE> <INDENT> self.IsEqualTo(NEGATIVE_INFINITY) <NEW_LINE> <DEDENT> def IsNan(self): <NEW_LINE> <INDENT> if not math.isnan(self._actual): <NEW_LINE> <INDENT> self._FailComparingValues('is equal to', NAN) <NEW_LINE> <DEDENT> <DEDENT> def IsNotNan(self): <NEW_LINE> <INDENT> if math.isnan(self._actual): <NEW_LINE> <INDENT> self._FailWithSubject('should not have been <{0}>'.format(NAN)) <NEW_LINE> <DEDENT> <DEDENT> def IsWithin(self, tolerance): <NEW_LINE> <INDENT> return _TolerantNumericSubject(self._actual, tolerance, True) <NEW_LINE> <DEDENT> def IsNotWithin(self, tolerance): <NEW_LINE> <INDENT> return _TolerantNumericSubject(self._actual, tolerance, False)
Subject for all types of numbers--int, long, float, and complex.
6259905fcc0a2c111447c60e
class VariableSocket(bpy.types.NodeSocket, UMOGSocket): <NEW_LINE> <INDENT> bl_idname = 'umog_VariableSocketType' <NEW_LINE> bl_label = 'Variable Socket' <NEW_LINE> dataType = "Variable" <NEW_LINE> allowedInputTypes = ["All"] <NEW_LINE> text = "" <NEW_LINE> useIsUsedProperty = True <NEW_LINE> defaultDrawType = "TEXT_PROPERTY" <NEW_LINE> drawColor = (1, 1, 1, 0.5) <NEW_LINE> def textChanged(self, context): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> text : StringProperty(default = "", update = textChanged) <NEW_LINE> value : StringProperty(default = "", update = textChanged) <NEW_LINE> socketCreationType : StringProperty(default = "") <NEW_LINE> def drawProperty(self, context, layout, layoutParent, text, node): <NEW_LINE> <INDENT> layout.label(text=text) <NEW_LINE> row = layout.row() <NEW_LINE> self.invokeFunction(row, node, "addIntegerNode", icon = "PLUS", emboss = False, description = "Create a new node node") <NEW_LINE> self.invokeFunction(row, node, "addIntegerNode", icon = "PLUS", emboss = False, description = "Create a new node node") <NEW_LINE> <DEDENT> def addIntegerNode(self): <NEW_LINE> <INDENT> node = newNodeAtCursor("umog_IntegerNode") <NEW_LINE> self.linkWith(node.outputs[0]) <NEW_LINE> <DEDENT> def getProperty(self): <NEW_LINE> <INDENT> return self.value
Variable socket type
6259905fa8ecb03325872897
class feval(object): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def calleval(self): <NEW_LINE> <INDENT> return _gnuradio_core_general.feval_calleval(self) <NEW_LINE> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> if self.__class__ == feval: <NEW_LINE> <INDENT> _self = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _self = self <NEW_LINE> <DEDENT> this = _gnuradio_core_general.new_feval(_self, ) <NEW_LINE> try: self.this.append(this) <NEW_LINE> except: self.this = this <NEW_LINE> <DEDENT> __swig_destroy__ = _gnuradio_core_general.delete_feval <NEW_LINE> __del__ = lambda self : None; <NEW_LINE> def __disown__(self): <NEW_LINE> <INDENT> self.this.disown() <NEW_LINE> _gnuradio_core_general.disown_feval(self) <NEW_LINE> return weakref_proxy(self) <NEW_LINE> <DEDENT> def eval(self): <NEW_LINE> <INDENT> return _gnuradio_core_general.feval_eval(self)
Proxy of C++ gr_py_feval class
6259905fdd821e528d6da4c0
class Multiplier(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def calc(operand_1, operand_2): <NEW_LINE> <INDENT> return operand_1 * operand_2
Provide a class responsible for multiplying two numbers.
6259905fd486a94d0ba2d647
class CPP(LoadableMembers): <NEW_LINE> <INDENT> pass
defines classRef
6259905fa219f33f346c7e85
class Story: <NEW_LINE> <INDENT> def __init__(self, words, text): <NEW_LINE> <INDENT> self.prompts = words <NEW_LINE> self.template = text <NEW_LINE> <DEDENT> def generate(self, answers): <NEW_LINE> <INDENT> text = self.template <NEW_LINE> for (key, val) in answers.items(): <NEW_LINE> <INDENT> text = text.replace("{" + key + "}", val) <NEW_LINE> <DEDENT> return text
Madlibs story. To make a story, pass a list of prompts, and the text of the template. >>> s = Story(["noun", "verb"], ... "I love to {verb} a good {noun}.") To generate text from a story, pass in a dictionary-like thing of {prompt: answer, prompt: answer}: >>> ans = {"verb": "eat", "noun": "mango"} >>> s.generate(ans) 'I love to eat a good mango.'
6259905fcb5e8a47e493ccc5
class tck2connectome(BaseMtrixCLI): <NEW_LINE> <INDENT> class Flags(BaseMtrixCLI.Flags): <NEW_LINE> <INDENT> assignment_radial_search = "-assignment_radial_search" <NEW_LINE> assignment_end_voxels = "-assignment_end_voxels" <NEW_LINE> scale_length = "-scale_length" <NEW_LINE> stat_edge = "-stat_edge" <NEW_LINE> <DEDENT> class stat_edge(enum.Enum): <NEW_LINE> <INDENT> sum = "sum" <NEW_LINE> mean = "mean" <NEW_LINE> min = "min" <NEW_LINE> max = "max" <NEW_LINE> <DEDENT> exe = 'tck2connectome'
The tck2connectome command from the mtrix package.
6259905f435de62698e9d486
class Taggable: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.tags = None <NEW_LINE> self.tag_string = "" <NEW_LINE> self.tag_seperator = "," <NEW_LINE> self.spliter = re.compile(u'[,, ]') <NEW_LINE> <DEDENT> def get_tags(self): <NEW_LINE> <INDENT> return Tag.get_tags_for(self.key()) <NEW_LINE> <DEDENT> def get_tags_as_string(self): <NEW_LINE> <INDENT> tags = self.get_tags() <NEW_LINE> return self.tag_seperator.join(tags) if tags else '' <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if (self.tag_string == None) or (len(self.tag_string) == 0): <NEW_LINE> <INDENT> self.get_tags_as_string() <NEW_LINE> <DEDENT> return self.tag_string <NEW_LINE> <DEDENT> def set_tags(self, tag_list): <NEW_LINE> <INDENT> for each_tag in tag_list: <NEW_LINE> <INDENT> each_tag = string.strip(each_tag) <NEW_LINE> if len(each_tag) > 0: <NEW_LINE> <INDENT> existing_tag = Tag.get_by_tag_value(each_tag) <NEW_LINE> if existing_tag == None: <NEW_LINE> <INDENT> new_tag = Tag(tag = each_tag, tagged = [self.key(),]) <NEW_LINE> new_tag.put() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if (self.key() not in existing_tag.tagged): <NEW_LINE> <INDENT> existing_tag.tagged.append(self.key()) <NEW_LINE> existing_tag.num_tagged += 1 <NEW_LINE> existing_tag.put() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def set_tags_from_string(self, tag_list): <NEW_LINE> <INDENT> tags = self.spliter.split(tag_list) <NEW_LINE> self.set_tags(tags)
A mixin class that is used for making Google AppEngine Model classes taggable. Usage: class Post(db.Model, taggable.Taggable): body = db.TextProperty(required = True) title = db.StringProperty() added = db.DateTimeProperty(auto_now_add=True) edited = db.DateTimeProperty() def __init__(self, parent=None, key_name=None, app=None, **entity_values): db.Model.__init__(self, parent, key_name, app, **entity_values) taggable.Taggable.__init__(self)
6259905f4e4d562566373a87
class GAE(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self, encoder, decoder=None): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.encoder = encoder <NEW_LINE> self.decoder = InnerProductDecoder() if decoder is None else decoder <NEW_LINE> GAE.reset_parameters(self) <NEW_LINE> <DEDENT> def reset_parameters(self): <NEW_LINE> <INDENT> reset(self.encoder) <NEW_LINE> reset(self.decoder) <NEW_LINE> <DEDENT> def encode(self, *args, **kwargs): <NEW_LINE> <INDENT> return self.encoder(*args, **kwargs) <NEW_LINE> <DEDENT> def decode(self, *args, **kwargs): <NEW_LINE> <INDENT> return self.decoder(*args, **kwargs) <NEW_LINE> <DEDENT> def recon_loss(self, z, pos_edge_index, neg_edge_index=None): <NEW_LINE> <INDENT> pos_loss = -torch.log( self.decoder(z, pos_edge_index, sigmoid=True) + EPS).mean() <NEW_LINE> pos_edge_index, _ = remove_self_loops(pos_edge_index) <NEW_LINE> pos_edge_index, _ = add_self_loops(pos_edge_index) <NEW_LINE> if neg_edge_index is None: <NEW_LINE> <INDENT> neg_edge_index = negative_sampling(pos_edge_index, z.size(0)) <NEW_LINE> <DEDENT> neg_loss = -torch.log(1 - self.decoder(z, neg_edge_index, sigmoid=True) + EPS).mean() <NEW_LINE> return pos_loss + neg_loss <NEW_LINE> <DEDENT> def test(self, z, pos_edge_index, neg_edge_index): <NEW_LINE> <INDENT> from sklearn.metrics import average_precision_score, roc_auc_score <NEW_LINE> pos_y = z.new_ones(pos_edge_index.size(1)) <NEW_LINE> neg_y = z.new_zeros(neg_edge_index.size(1)) <NEW_LINE> y = torch.cat([pos_y, neg_y], dim=0) <NEW_LINE> pos_pred = self.decoder(z, pos_edge_index, sigmoid=True) <NEW_LINE> neg_pred = self.decoder(z, neg_edge_index, sigmoid=True) <NEW_LINE> pred = torch.cat([pos_pred, neg_pred], dim=0) <NEW_LINE> y, pred = y.detach().cpu().numpy(), pred.detach().cpu().numpy() <NEW_LINE> return roc_auc_score(y, pred), average_precision_score(y, pred)
The Graph Auto-Encoder model from the `"Variational Graph Auto-Encoders" <https://arxiv.org/abs/1611.07308>`_ paper based on user-defined encoder and decoder models. Args: encoder (Module): The encoder module. decoder (Module, optional): The decoder module. If set to :obj:`None`, will default to the :class:`torch_geometric.nn.models.InnerProductDecoder`. (default: :obj:`None`)
6259905f9c8ee82313040cca
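A minimal training sketch for the GAE wrapper above, pairing it with a small GCN encoder; the feature size, graph, and optimizer settings are hypothetical, and `GCNConv` is assumed to come from `torch_geometric.nn`:

```python
import torch
from torch_geometric.nn import GCNConv

class Encoder(torch.nn.Module):
    def __init__(self, in_channels, out_channels):
        super().__init__()
        self.conv = GCNConv(in_channels, out_channels)

    def forward(self, x, edge_index):
        return self.conv(x, edge_index)

model = GAE(Encoder(16, 8))
x = torch.randn(100, 16)                      # node features
edge_index = torch.randint(0, 100, (2, 400))  # positive edges of a random graph
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)

z = model.encode(x, edge_index)
loss = model.recon_loss(z, edge_index)        # negative edges are sampled internally
loss.backward()
optimizer.step()
```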
class FeedbackThreadModel(base_models.BaseModel): <NEW_LINE> <INDENT> exploration_id = ndb.StringProperty(required=True, indexed=True) <NEW_LINE> state_name = ndb.StringProperty(indexed=True) <NEW_LINE> original_author_id = ndb.StringProperty(indexed=True) <NEW_LINE> status = ndb.StringProperty( default=STATUS_CHOICES_OPEN, choices=STATUS_CHOICES, required=True, indexed=True, ) <NEW_LINE> subject = ndb.StringProperty(indexed=False) <NEW_LINE> summary = ndb.TextProperty(indexed=False) <NEW_LINE> has_suggestion = ndb.BooleanProperty(indexed=True, default=False) <NEW_LINE> message_count = ndb.IntegerProperty(indexed=True) <NEW_LINE> last_updated = ndb.DateTimeProperty(indexed=True) <NEW_LINE> def put(self, update_last_updated_time=True): <NEW_LINE> <INDENT> if update_last_updated_time: <NEW_LINE> <INDENT> self.last_updated = datetime.datetime.utcnow() <NEW_LINE> <DEDENT> return super(FeedbackThreadModel, self).put() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def generate_new_thread_id(cls, exploration_id): <NEW_LINE> <INDENT> for _ in range(_MAX_RETRIES): <NEW_LINE> <INDENT> thread_id = ( utils.base64_from_int(utils.get_current_time_in_millisecs()) + utils.base64_from_int(utils.get_random_int(_RAND_RANGE))) <NEW_LINE> if not cls.get_by_exp_and_thread_id(exploration_id, thread_id): <NEW_LINE> <INDENT> return thread_id <NEW_LINE> <DEDENT> <DEDENT> raise Exception( 'New thread id generator is producing too many collisions.') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def generate_full_thread_id(cls, exploration_id, thread_id): <NEW_LINE> <INDENT> return '.'.join([exploration_id, thread_id]) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_exploration_and_thread_ids(cls, full_thread_ids): <NEW_LINE> <INDENT> exploration_and_thread_ids = ( [thread_id.split('.') for thread_id in full_thread_ids]) <NEW_LINE> return zip(*exploration_and_thread_ids) <NEW_LINE> <DEDENT> @property <NEW_LINE> def thread_id(self): <NEW_LINE> <INDENT> return self.id.split('.')[1] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create(cls, exploration_id, thread_id): <NEW_LINE> <INDENT> instance_id = cls.generate_full_thread_id(exploration_id, thread_id) <NEW_LINE> if cls.get_by_id(instance_id): <NEW_LINE> <INDENT> raise Exception('Feedback thread ID conflict on create.') <NEW_LINE> <DEDENT> return cls(id=instance_id) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_by_exp_and_thread_id(cls, exploration_id, thread_id): <NEW_LINE> <INDENT> return cls.get_by_id(cls.generate_full_thread_id( exploration_id, thread_id)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_threads(cls, exploration_id, limit=feconf.DEFAULT_QUERY_LIMIT): <NEW_LINE> <INDENT> return cls.get_all().filter( cls.exploration_id == exploration_id).order( cls.last_updated).fetch(limit)
Threads for each exploration. The id of instances of this class has the form [EXPLORATION_ID].[THREAD_ID]
6259905f498bea3a75a5913e
class TestPatchUser: <NEW_LINE> <INDENT> resource = 'users' <NEW_LINE> @pytest.allure.severity(pytest.allure.severity_level.CRITICAL) <NEW_LINE> @pytest.mark.parametrize('data', [ {'job': 'worker'} ]) <NEW_LINE> @pytest.mark.parametrize('resource_id', [1]) <NEW_LINE> def test_update(self, base_request, data, resource_id): <NEW_LINE> <INDENT> response = base_request.method(method_name='PATCH', resource=self.resource, data=data, msg='User is updated (200)') <NEW_LINE> assert response.status_code == 200 <NEW_LINE> assert response.json()['job'] == data['job'] and 'updatedAt' in response.json()
Testing PATCH method.
6259905f1f037a2d8b9e53ab
class Action: <NEW_LINE> <INDENT> APPLY = 1 <NEW_LINE> REVERT = 2 <NEW_LINE> CLEAR_CACHE = 3 <NEW_LINE> @staticmethod <NEW_LINE> def choices(): <NEW_LINE> <INDENT> choices = {} <NEW_LINE> for choice in Action.__dict__: <NEW_LINE> <INDENT> if hasattr(Action, choice): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = int(getattr(Action, choice)) <NEW_LINE> choices[value] = choice <NEW_LINE> <DEDENT> except (TypeError, ValueError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return choices
Different possible actions
6259905f1b99ca4002290076
class ExplorationMigrationJobManager(jobs.BaseMapReduceOneOffJobManager): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def entity_classes_to_map_over(cls): <NEW_LINE> <INDENT> return [exp_models.ExplorationModel] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def map(item): <NEW_LINE> <INDENT> if item.deleted: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> old_exploration = exp_services.get_exploration_by_id(item.id) <NEW_LINE> try: <NEW_LINE> <INDENT> old_exploration.validate() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error( 'Exploration %s failed non-strict validation: %s' % (item.id, e)) <NEW_LINE> return <NEW_LINE> <DEDENT> if (item.states_schema_version != feconf.CURRENT_EXPLORATION_STATES_SCHEMA_VERSION): <NEW_LINE> <INDENT> commit_cmds = [exp_domain.ExplorationChange({ 'cmd': exp_domain.CMD_MIGRATE_STATES_SCHEMA_TO_LATEST_VERSION, 'from_version': str(item.states_schema_version), 'to_version': str( feconf.CURRENT_EXPLORATION_STATES_SCHEMA_VERSION) })] <NEW_LINE> exp_services.update_exploration( feconf.MIGRATION_BOT_USERNAME, item.id, commit_cmds, 'Update exploration states from schema version %d to %d.' % ( item.states_schema_version, feconf.CURRENT_EXPLORATION_STATES_SCHEMA_VERSION)) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def reduce(key, values): <NEW_LINE> <INDENT> yield (key, values)
A reusable one-time job that may be used to migrate exploration schema versions. This job will load all existing explorations from the data store and immediately store them back into the data store. The loading process of an exploration in exp_services automatically performs schema updating. This job persists that conversion work, keeping explorations up-to-date and improving the load time of new explorations.
6259905fa79ad1619776b5fd
class DiceCoefficient: <NEW_LINE> <INDENT> def __init__(self, epsilon=1e-6, **kwargs): <NEW_LINE> <INDENT> self.epsilon = epsilon <NEW_LINE> <DEDENT> def __call__(self, input, target): <NEW_LINE> <INDENT> if isinstance(input, torch.Tensor): <NEW_LINE> <INDENT> input = (input > 0.0).long() <NEW_LINE> target = (target > 0.0).long() <NEW_LINE> return torch.mean(compute_per_channel_dice(input, target, epsilon=self.epsilon)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> target = (target > 0.0).long() <NEW_LINE> alldice = [] <NEW_LINE> for i, aseg in enumerate(input): <NEW_LINE> <INDENT> aseg = (aseg > 0).long() <NEW_LINE> alldice.append(compute_per_channel_dice( aseg, target[:, i:i+1, :, :, :], epsilon=self.epsilon)) <NEW_LINE> <DEDENT> return torch.mean(torch.stack(alldice, -1))
Computes Dice Coefficient. Generalized to multiple channels by computing per-channel Dice Score (as described in https://arxiv.org/pdf/1707.03237.pdf) and then simply taking the average. Input is expected to be probabilities instead of logits. This metric is mostly useful when channels contain the same semantic class (e.g. affinities computed with different offsets). DO NOT USE this metric when training with DiceLoss, otherwise the results will be biased towards the loss.
6259905ff7d966606f7493f9
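The metric above relies on a helper, `compute_per_channel_dice`, that is not included in this record. A minimal sketch of what such a helper typically computes is shown below; the flattening strategy and exact numerics are assumptions, not the original implementation.

```python
import torch

def compute_per_channel_dice(input, target, epsilon=1e-6):
    # Assumed helper (not from the record above): flatten each channel to a
    # vector and apply the standard Dice formula 2*|A∩B| / (|A| + |B|).
    assert input.size() == target.size()
    n_channels = input.size(1)
    input = input.transpose(0, 1).reshape(n_channels, -1).float()
    target = target.transpose(0, 1).reshape(n_channels, -1).float()
    intersect = (input * target).sum(-1)
    denominator = input.sum(-1) + target.sum(-1)
    return 2 * intersect / denominator.clamp(min=epsilon)

# Identical binary masks of shape (batch, channels, D, H, W) give a perfect score.
pred = torch.ones(2, 3, 4, 4, 4)
print(compute_per_channel_dice(pred, pred))   # tensor([1., 1., 1.]) — one score per channel
```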
class NoiseOnlyPosterior(Posterior): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(NoiseOnlyPosterior, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def header(self): <NEW_LINE> <INDENT> header = [] <NEW_LINE> for d in self.detectors: <NEW_LINE> <INDENT> for i in range(self.npsdfit): <NEW_LINE> <INDENT> header.append('{0:s}PSD{1:02d}'.format(d,i)) <NEW_LINE> <DEDENT> <DEDENT> return ' '.join(header) <NEW_LINE> <DEDENT> @property <NEW_LINE> def no_nparams(self): <NEW_LINE> <INDENT> return self.ndetectors*self.npsdfit <NEW_LINE> <DEDENT> def to_params(self, params): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return params.view([('psdfit', np.float, (self.ndetectors, self.npsdfit))]) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return super(NoiseOnlyPosterior, self).to_params(params) <NEW_LINE> <DEDENT> <DEDENT> def generate_waveform(self, params): <NEW_LINE> <INDENT> if params.view(float).shape[0] == self.no_nparams: <NEW_LINE> <INDENT> hs = [] <NEW_LINE> for d in self.data: <NEW_LINE> <INDENT> hs.append(0.0*d) <NEW_LINE> <DEDENT> return hs <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super(NoiseOnlyPosterior, self).generate_waveform(params) <NEW_LINE> <DEDENT> <DEDENT> def log_prior(self, params): <NEW_LINE> <INDENT> return np.sum(u.norm_logpdf(params)) <NEW_LINE> <DEDENT> def draw_prior(self, shape=(1,)): <NEW_LINE> <INDENT> return self.to_params(np.random.normal(size=shape+(self.ndetectors*self.npsdfit,)))
Represents the posterior for a noise-only model.
6259905f462c4b4f79dbd086
class UserDefinedField(atom_core.XmlElement): <NEW_LINE> <INDENT> _qname = CONTACTS_TEMPLATE % 'userDefinedField' <NEW_LINE> key = 'key' <NEW_LINE> value = 'value'
Represents an arbitrary key-value pair attached to the contact.
6259905f38b623060ffaa390
class ElementError(SpecterError): <NEW_LINE> <INDENT> pass
Error raised when Specter is unable to find an element.
6259905fd53ae8145f919ae3
class Player(): <NEW_LINE> <INDENT> def __init__(self, name:str, initial_hand:list): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.hand = initial_hand <NEW_LINE> <DEDENT> def draw(self, deck:list): <NEW_LINE> <INDENT> self.hand.append(deck.pop())
Player object containing a name and a hand (list of cards). The draw function adds another card to the hand
6259905fd268445f2663a69d
class Expander(object): <NEW_LINE> <INDENT> def __init__(self, searcher, fieldname, model = Bo1Model): <NEW_LINE> <INDENT> self.fieldname = fieldname <NEW_LINE> if callable(model): <NEW_LINE> <INDENT> model = model(searcher, fieldname) <NEW_LINE> <DEDENT> self.model = model <NEW_LINE> term_reader = searcher.term_reader <NEW_LINE> self.collection_freq = dict((word, freq) for word, _, freq in term_reader.iter_field(fieldname)) <NEW_LINE> self.topN_weight = defaultdict(float) <NEW_LINE> self.top_total = 0 <NEW_LINE> <DEDENT> def add(self, vector): <NEW_LINE> <INDENT> total_weight = 0 <NEW_LINE> topN_weight = self.topN_weight <NEW_LINE> for word, weight in vector: <NEW_LINE> <INDENT> total_weight += weight <NEW_LINE> topN_weight[word] += weight <NEW_LINE> <DEDENT> self.top_total += total_weight <NEW_LINE> <DEDENT> def expanded_terms(self, number, normalize = True): <NEW_LINE> <INDENT> model = self.model <NEW_LINE> tlist = [] <NEW_LINE> maxweight = 0 <NEW_LINE> collection_freq = self.collection_freq <NEW_LINE> for word, weight in self.topN_weight.iteritems(): <NEW_LINE> <INDENT> score = model.score(weight, collection_freq[word], self.top_total) <NEW_LINE> if score > maxweight: maxweight = score <NEW_LINE> tlist.append((score, word)) <NEW_LINE> <DEDENT> if normalize: <NEW_LINE> <INDENT> norm = model.normalizer(maxweight, self.top_total) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> norm = maxweight <NEW_LINE> <DEDENT> tlist = [(weight / norm, t) for weight, t in tlist] <NEW_LINE> tlist.sort(reverse = True) <NEW_LINE> return [(t, weight) for weight, t in tlist[:number]]
Uses an ExpansionModel to expand the set of query terms based on the top N result documents.
6259905ff548e778e596cc0a
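A hedged usage sketch of the expander above. The `searcher`, `Bo1Model`, and the shape of the per-document term vectors are assumptions: the code only requires that `searcher.term_reader.iter_field(fieldname)` yields `(word, _, frequency)` tuples and that each vector passed to `add()` iterates as `(word, weight)` pairs.

```python
# Hypothetical usage against a Whoosh-style searcher (names are placeholders).
expander = Expander(searcher, "content", model=Bo1Model)

# Feed the term vectors of the top-N documents returned for the original query.
for doc_vector in top_document_vectors:      # each: iterable of (word, weight)
    expander.add(doc_vector)

# Retrieve the five best expansion terms, normalized to the top score.
for term, weight in expander.expanded_terms(5):
    print(term, weight)
```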
class Continuous_Encoder(nn.Module): <NEW_LINE> <INDENT> def __init__(self, cont_dim, out_dim): <NEW_LINE> <INDENT> self.name = 'Continuous_Encoder' <NEW_LINE> super(Continuous_Encoder, self).__init__() <NEW_LINE> self.cont_dim = cont_dim <NEW_LINE> self.h_dim = 10 <NEW_LINE> self.out_dim = out_dim <NEW_LINE> self.h = FC_Block(self.cont_dim, self.h_dim) <NEW_LINE> self.fc = FC_Block(self.h_dim, self.out_dim) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = self.h(x) <NEW_LINE> x = self.fc(x) <NEW_LINE> return x
Model for encoding continuous features Args: cont_dim (int): dimension of input continuous features out_dim (int): output dimension of the encoded features
6259905f442bda511e95d89a
class Department(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=200) <NEW_LINE> address = models.CharField(max_length=400) <NEW_LINE> website = models.URLField(max_length=200, blank=True) <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('hr:department-detail', kwargs={'pk': self.pk}) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
A department or subdivision of the enterprise
6259905f99cbb53fe6832563
class GalleryImageList(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'value': {'required': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': '[GalleryImage]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: List["GalleryImage"], next_link: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(GalleryImageList, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = next_link
The List Gallery Images operation response. All required parameters must be populated in order to send to Azure. :ivar value: Required. A list of Shared Image Gallery images. :vartype value: list[~azure.mgmt.compute.v2020_09_30.models.GalleryImage] :ivar next_link: The uri to fetch the next page of Image Definitions in the Shared Image Gallery. Call ListNext() with this to fetch the next page of gallery image definitions. :vartype next_link: str
6259905f3539df3088ecd91e
class TreatmentEncoder(json.JSONEncoder): <NEW_LINE> <INDENT> def default(self, obj): <NEW_LINE> <INDENT> if isinstance(obj, AttributeTreatment): <NEW_LINE> <INDENT> return AttributeTreatmentEncoder().default(obj) <NEW_LINE> <DEDENT> if isinstance(obj, EntityTreatment): <NEW_LINE> <INDENT> return EntityTreatmentEncoder().default(obj) <NEW_LINE> <DEDENT> return super().default(obj)
A JSONEncoder to serialize an instance of {Treatment}.
6259905fcb5e8a47e493ccc6
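A short usage note for the dispatching encoder above: it is passed to `json.dumps` via `cls`, and `default()` is only invoked for objects the standard encoder cannot serialize natively. The `some_treatment` name below is a placeholder for an `AttributeTreatment` or `EntityTreatment` instance.

```python
import json

# Hypothetical usage; some_treatment stands in for a treatment instance
# from the surrounding codebase.
payload = json.dumps(some_treatment, cls=TreatmentEncoder)
```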
class BaseRegistry(object): <NEW_LINE> <INDENT> def __init__(self, storage={}): <NEW_LINE> <INDENT> self._collectors = {} <NEW_LINE> self._storage = storage <NEW_LINE> <DEDENT> @property <NEW_LINE> def storage(self): <NEW_LINE> <INDENT> return self._storage <NEW_LINE> <DEDENT> def register(self, collector): <NEW_LINE> <INDENT> if collector.uid in self._collectors: <NEW_LINE> <INDENT> raise RuntimeError(u"Collector {0} already registered".format(collector.uid)) <NEW_LINE> <DEDENT> self._collectors[collector.uid] = collector <NEW_LINE> <DEDENT> def unregister(self, collector): <NEW_LINE> <INDENT> self._collectors.pop(collector.uid, None) <NEW_LINE> <DEDENT> def collect(self, clean=True): <NEW_LINE> <INDENT> data = dict(self._storage.items()) <NEW_LINE> for uid, collector in self.collectors(): <NEW_LINE> <INDENT> if clean and hasattr(collector, 'clear_samples'): <NEW_LINE> <INDENT> collector.clear_samples() <NEW_LINE> <DEDENT> if hasattr(collector, 'collect'): <NEW_LINE> <INDENT> for item in collector.collect(): <NEW_LINE> <INDENT> yield item <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> yield collector.build_samples(data.get(collector.name, [])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def collectors(self): <NEW_LINE> <INDENT> return self._collectors.items() <NEW_LINE> <DEDENT> def is_registered(self, collector): <NEW_LINE> <INDENT> return collector.uid in self._collectors <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._collectors) <NEW_LINE> <DEDENT> def get_samples(self): <NEW_LINE> <INDENT> for collector in self.collect(): <NEW_LINE> <INDENT> yield collector, collector.get_samples()
Link with metrics collectors
6259905f29b78933be26ac05
class FileSystemList(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'filesystems': {'key': 'filesystems', 'type': '[FileSystem]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(FileSystemList, self).__init__(**kwargs) <NEW_LINE> self.filesystems = kwargs.get('filesystems', None)
FileSystemList. :ivar filesystems: :vartype filesystems: list[~azure.storage.filedatalake.models.FileSystem]
6259905fadb09d7d5dc0bbec
class TestXyzToPlottingColourspace(unittest.TestCase): <NEW_LINE> <INDENT> def test_XYZ_to_plotting_colourspace(self): <NEW_LINE> <INDENT> XYZ = np.random.random(3) <NEW_LINE> np.testing.assert_almost_equal( XYZ_to_sRGB(XYZ), XYZ_to_plotting_colourspace(XYZ), decimal=7 )
Define :func:`colour.plotting.common.XYZ_to_plotting_colourspace` definition unit tests methods.
6259905f3c8af77a43b68a82
class Bassoon(Instrument): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> def __init__( self, instrument_name='bassoon', short_instrument_name='bsn.', instrument_name_markup=None, short_instrument_name_markup=None, allowable_clefs=('bass', 'tenor'), pitch_range='[Bb1, Eb5]', sounding_pitch_of_written_middle_c=None, ): <NEW_LINE> <INDENT> Instrument.__init__( self, instrument_name=instrument_name, short_instrument_name=short_instrument_name, instrument_name_markup=instrument_name_markup, short_instrument_name_markup=short_instrument_name_markup, allowable_clefs=allowable_clefs, pitch_range=pitch_range, sounding_pitch_of_written_middle_c= sounding_pitch_of_written_middle_c, ) <NEW_LINE> self._performer_names.extend([ 'wind player', 'reed player', 'double reed player', 'bassoonist', ]) <NEW_LINE> self._starting_clefs = indicatortools.ClefInventory(['bass']) <NEW_LINE> self._is_primary_instrument = True <NEW_LINE> <DEDENT> @property <NEW_LINE> def allowable_clefs(self): <NEW_LINE> <INDENT> return Instrument.allowable_clefs.fget(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def instrument_name(self): <NEW_LINE> <INDENT> return Instrument.instrument_name.fget(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def instrument_name_markup(self): <NEW_LINE> <INDENT> return Instrument.instrument_name_markup.fget(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def pitch_range(self): <NEW_LINE> <INDENT> return Instrument.pitch_range.fget(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def short_instrument_name(self): <NEW_LINE> <INDENT> return Instrument.short_instrument_name.fget(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def short_instrument_name_markup(self): <NEW_LINE> <INDENT> return Instrument.short_instrument_name_markup.fget(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def sounding_pitch_of_written_middle_c(self): <NEW_LINE> <INDENT> return Instrument.sounding_pitch_of_written_middle_c.fget(self)
A bassoon. :: >>> staff = Staff("c'4 d'4 e'4 fs'4") >>> clef = Clef(name='bass') >>> attach(clef, staff) >>> bassoon = instrumenttools.Bassoon() >>> attach(bassoon, staff) >>> show(staff) # doctest: +SKIP .. doctest:: >>> print(format(staff)) \new Staff { \clef "bass" \set Staff.instrumentName = \markup { Bassoon } \set Staff.shortInstrumentName = \markup { Bsn. } c'4 d'4 e'4 fs'4 }
6259905f3cc13d1c6d466dc3
class TestLbSourceIpPersistenceProfile(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testLbSourceIpPersistenceProfile(self): <NEW_LINE> <INDENT> pass
LbSourceIpPersistenceProfile unit test stubs
6259905f7d847024c075da56
class EventHandlers: <NEW_LINE> <INDENT> def process_review_event(self, request: ReviewEvent) -> EventResponse: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def process_push_event(self, request: PushEvent) -> EventResponse: <NEW_LINE> <INDENT> raise NotImplementedError
Interface of the classes which process Lookout gRPC events.
6259905f4428ac0f6e659bbb
class ScheduleTicketSchema(Schema): <NEW_LINE> <INDENT> schema = [{ "$schema": "http://json-schema.org/draft-04/schema#", "title": "schedule_tickets", "description": "Запись на прием", "type": "object", "properties": { "schedule_id": { "description": "id рабочего промежутка", "type": "string" }, "schedule_time_begin": { "description": "Время начала промежутка", "type": "string", "pattern": "^([0-9]|0[0-9]|1[0-9]|2[0-3]):([0-5][0-9])$" }, "schedule_ticket_id": { "description": "id записи на прием", "type": "string" }, "hospital": { "description": "ЛПУ (код ЛПУ)", "type": "string" }, "doctor": { "description": "Врач (код врача)", "type": "string" }, "patient": { "description": "Пациент (id пациента)", "type": "string" }, "date": { "description": "Дата приема", "type": "string", "format": "date" }, "time_begin": { "description": "Время приема", "type": "string", "pattern": "^([0-9]|0[0-9]|1[0-9]|2[0-3]):([0-5][0-9])$" }, "time_end": { "description": "Время приема", "type": "string", "pattern": "^([0-9]|0[0-9]|1[0-9]|2[0-3]):([0-5][0-9])$" }, "schedule_ticket_type": { "description": "Тип записи на прием", "type": "string" } }, "required": ["hospital","doctor","patient","date","schedule_ticket_type"] }]
Schemas for validating the data of organizations and healthcare facilities (LPU)
6259905f1f037a2d8b9e53ac
class ProcessDataThread(Thread): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Thread.__init__(self) <NEW_LINE> self.name = "Binary Codec Process Data Thread" <NEW_LINE> self.alive = True <NEW_LINE> self.MAX_TIMEOUT = 5 <NEW_LINE> self.timeout = 0 <NEW_LINE> self.DELIMITER = b'\x80' * 16 <NEW_LINE> <DEDENT> def shutdown(self): <NEW_LINE> <INDENT> self.alive = False <NEW_LINE> with global_condition: <NEW_LINE> <INDENT> global_condition.notify() <NEW_LINE> <DEDENT> if self.is_alive(): <NEW_LINE> <INDENT> self.join() <NEW_LINE> <DEDENT> <DEDENT> @event <NEW_LINE> def ensemble_event(self, ens): <NEW_LINE> <INDENT> if ens.IsEnsembleData: <NEW_LINE> <INDENT> logging.debug(str(ens.EnsembleData.EnsembleNumber)) <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> global buffer <NEW_LINE> while self.alive: <NEW_LINE> <INDENT> with global_condition: <NEW_LINE> <INDENT> if self.DELIMITER in buffer: <NEW_LINE> <INDENT> chunks = buffer.split(self.DELIMITER) <NEW_LINE> buffer = chunks.pop() <NEW_LINE> for chunk in chunks: <NEW_LINE> <INDENT> self.verify_and_decode(self.DELIMITER + chunk) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def verify_and_decode(self, ens_bin): <NEW_LINE> <INDENT> if BinaryCodec.verify_ens_data(ens_bin): <NEW_LINE> <INDENT> ens = BinaryCodec.decode_data_sets(ens_bin) <NEW_LINE> if ens: <NEW_LINE> <INDENT> self.ensemble_event(ens)
Process the incoming data. This will take the shared buffer. The AddDataThread will wake up this thread with "condition". It will then process the incoming data in the buffer and look for ensemble data. When ensemble data is decoded it will be passed to the subscribers of the event "ensemble_event".
6259905fa79ad1619776b5fe
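The heart of the run loop above is splitting the shared byte buffer on a 16-byte delimiter, keeping the incomplete tail for the next read, and re-attaching the delimiter to each complete chunk before decoding. A standalone sketch of that pattern (the buffer contents below are made up):

```python
DELIMITER = b'\x80' * 16

# Simulated shared buffer: two complete ensembles followed by a partial one.
buffer = (DELIMITER + b'ensemble-1' +
          DELIMITER + b'ensemble-2' +
          DELIMITER + b'partial')

chunks = buffer.split(DELIMITER)
buffer = chunks.pop()                              # keep the incomplete tail
complete = [DELIMITER + c for c in chunks if c]    # re-attach the header bytes

print(complete)   # [DELIMITER + b'ensemble-1', DELIMITER + b'ensemble-2']
print(buffer)     # b'partial'
```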
class NetworkRuleCondition(FirewallPolicyRuleCondition): <NEW_LINE> <INDENT> _validation = { 'rule_condition_type': {'required': True}, } <NEW_LINE> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'rule_condition_type': {'key': 'ruleConditionType', 'type': 'str'}, 'ip_protocols': {'key': 'ipProtocols', 'type': '[str]'}, 'source_addresses': {'key': 'sourceAddresses', 'type': '[str]'}, 'destination_addresses': {'key': 'destinationAddresses', 'type': '[str]'}, 'destination_ports': {'key': 'destinationPorts', 'type': '[str]'}, } <NEW_LINE> def __init__( self, *, name: Optional[str] = None, description: Optional[str] = None, ip_protocols: Optional[List[Union[str, "FirewallPolicyRuleConditionNetworkProtocol"]]] = None, source_addresses: Optional[List[str]] = None, destination_addresses: Optional[List[str]] = None, destination_ports: Optional[List[str]] = None, **kwargs ): <NEW_LINE> <INDENT> super(NetworkRuleCondition, self).__init__(name=name, description=description, **kwargs) <NEW_LINE> self.rule_condition_type = 'NetworkRuleCondition' <NEW_LINE> self.ip_protocols = ip_protocols <NEW_LINE> self.source_addresses = source_addresses <NEW_LINE> self.destination_addresses = destination_addresses <NEW_LINE> self.destination_ports = destination_ports
Rule condition of type network. All required parameters must be populated in order to send to Azure. :param name: Name of the rule condition. :type name: str :param description: Description of the rule condition. :type description: str :param rule_condition_type: Required. Rule Condition Type.Constant filled by server. Possible values include: "ApplicationRuleCondition", "NetworkRuleCondition". :type rule_condition_type: str or ~azure.mgmt.network.v2019_11_01.models.FirewallPolicyRuleConditionType :param ip_protocols: Array of FirewallPolicyRuleConditionNetworkProtocols. :type ip_protocols: list[str or ~azure.mgmt.network.v2019_11_01.models.FirewallPolicyRuleConditionNetworkProtocol] :param source_addresses: List of source IP addresses for this rule. :type source_addresses: list[str] :param destination_addresses: List of destination IP addresses or Service Tags. :type destination_addresses: list[str] :param destination_ports: List of destination ports. :type destination_ports: list[str]
6259905f32920d7e50bc76c8
class PinPWMUnsupported(PinPWMError, AttributeError): <NEW_LINE> <INDENT> pass
Error raised when attempting to activate PWM on unsupported pins
6259905f8da39b475be0486a
class Super4DigitArbitraryFarhiModel1(Super4DigitShareFrontQFCModel1): <NEW_LINE> <INDENT> class QLayer(Super4DigitShareFrontQFCModel1.QLayer): <NEW_LINE> <INDENT> def __init__(self, arch: dict = None): <NEW_LINE> <INDENT> super().__init__(arch=arch) <NEW_LINE> <DEDENT> def build_super_layers(self): <NEW_LINE> <INDENT> super_layers_all = tq.QuantumModuleList() <NEW_LINE> for k in range(self.arch['n_blocks']): <NEW_LINE> <INDENT> super_layers_all.append( tq.Super2QAllLayer( op=tq.RZX, n_wires=self.n_wires, has_params=True, trainable=True, jump=1, circular=True)) <NEW_LINE> super_layers_all.append( tq.Super2QAllLayer( op=tq.RXX, n_wires=self.n_wires, has_params=True, trainable=True, jump=1, circular=True)) <NEW_LINE> <DEDENT> return super_layers_all
ZX and XX blocks with an arbitrary number of gates, from the Farhi paper https://arxiv.org/pdf/1802.06002.pdf
6259905f0a50d4780f706900
@base.ReleaseTracks(base.ReleaseTrack.BETA) <NEW_LINE> class CreateBeta(Create): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def Args(parser): <NEW_LINE> <INDENT> Create.disks_arg = disks_flags.MakeDiskArg(plural=True) <NEW_LINE> _CommonArgs(parser, disks_flags.SOURCE_SNAPSHOT_ARG) <NEW_LINE> labels_util.AddCreateLabelsFlags(parser)
Create Google Compute Engine persistent disks.
6259905f462c4b4f79dbd088
class PytPingPortConfigError(PytPingError): <NEW_LINE> <INDENT> def __init__(self, port): <NEW_LINE> <INDENT> super().__init__("Port \"{}\" not valid!".format(port))
Error: the port is not valid.
6259905f0fa83653e46f656a
class ScalarResult: <NEW_LINE> <INDENT> def __init__(self, result, analysis_case): <NEW_LINE> <INDENT> self.result = result <NEW_LINE> self.analysis_case = analysis_case
Class for storing a scalar result for a specific analysis case. :cvar float result: Scalar result :cvar analysis_case: Analysis case relating to the result :vartype analysis_case: :class:`~feastruct.fea.cases.AnalysisCase`
6259905f56b00c62f0fb3f4f
class AuthenticationBackend(BasePlugin): <NEW_LINE> <INDENT> def authenticate(self, username, password): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def member_of(self): <NEW_LINE> <INDENT> user = User.from_session() <NEW_LINE> if user: <NEW_LINE> <INDENT> return user.groups <NEW_LINE> <DEDENT> return [] <NEW_LINE> <DEDENT> def is_logged_in(self): <NEW_LINE> <INDENT> user = User.from_session() <NEW_LINE> if user: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def login(self, user): <NEW_LINE> <INDENT> session['user'] = user.as_json() <NEW_LINE> <DEDENT> def logout(self): <NEW_LINE> <INDENT> session.clear() <NEW_LINE> <DEDENT> def get_forbidden_url(self): <NEW_LINE> <INDENT> return url_for('auth.forbidden_view') <NEW_LINE> <DEDENT> def get_login_url(self, path): <NEW_LINE> <INDENT> base = url_for('auth.login_view') <NEW_LINE> if path: <NEW_LINE> <INDENT> return '{}?{}'.format(base, urllib.urlencode({'url': path})) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return base
All authentication backends should subclass this. Only one important method needs to be overridden: authenticate(username, password) -> returns either a user details dictionary containing a 'username' key and possibly other info, or None in case authentication failed.
6259905f8e71fb1e983bd14e
class SMEAdminGroup(ModelAdminGroup): <NEW_LINE> <INDENT> menu_label = "SMEAdmin" <NEW_LINE> items = (CarnetDAdresseAdmin,EnveloppeView, LettreView)
SME Admin menu
6259905fa8370b77170f1a51
class PermissionDeniedError(BookIOErrors): <NEW_LINE> <INDENT> pass
The operation is not permitted
6259905f7b25080760ed8822
class bit_field_wrapper_t( code_creator.code_creator_t , declaration_based.declaration_based_t ): <NEW_LINE> <INDENT> indent = code_creator.code_creator_t.indent <NEW_LINE> GET_TEMPLATE =os.linesep.join([ 'static %(type)s get_%(name)s(%(cls_type)s inst ){' , indent( 'return inst.%(name)s;' ) , '}' , '' ]) <NEW_LINE> SET_TEMPLATE = os.linesep.join([ 'static void set_%(name)s( %(cls_type)s inst, %(type)s new_value ){ ' , indent( 'inst.%(name)s = new_value;' ) , '}' , '' ]) <NEW_LINE> def __init__(self, variable ): <NEW_LINE> <INDENT> code_creator.code_creator_t.__init__( self ) <NEW_LINE> declaration_based.declaration_based_t.__init__( self, declaration=variable) <NEW_LINE> <DEDENT> def _get_getter_full_name(self): <NEW_LINE> <INDENT> return self.parent.full_name + '::' + 'get_' + self.declaration.name <NEW_LINE> <DEDENT> getter_full_name = property( _get_getter_full_name ) <NEW_LINE> def inst_arg_type( self, has_const ): <NEW_LINE> <INDENT> inst_arg_type = declarations.declarated_t( self.declaration.parent ) <NEW_LINE> if has_const: <NEW_LINE> <INDENT> inst_arg_type = declarations.const_t(inst_arg_type) <NEW_LINE> <DEDENT> inst_arg_type = declarations.reference_t(inst_arg_type) <NEW_LINE> return inst_arg_type <NEW_LINE> <DEDENT> def _get_getter_type(self): <NEW_LINE> <INDENT> return declarations.free_function_type_t.create_decl_string( return_type=self.declaration.decl_type , arguments_types=[ self.inst_arg_type(True) ] , with_defaults=False) <NEW_LINE> <DEDENT> getter_type = property( _get_getter_type ) <NEW_LINE> def _get_setter_full_name(self): <NEW_LINE> <INDENT> return self.parent.full_name + '::' + 'set_' + self.declaration.name <NEW_LINE> <DEDENT> setter_full_name = property(_get_setter_full_name) <NEW_LINE> def _get_setter_type(self): <NEW_LINE> <INDENT> return declarations.free_function_type_t.create_decl_string( return_type=declarations.void_t() , arguments_types=[ self.inst_arg_type(False), self.declaration.decl_type ] , with_defaults=False) <NEW_LINE> <DEDENT> setter_type = property( _get_setter_type ) <NEW_LINE> def _get_has_setter( self ): <NEW_LINE> <INDENT> return not declarations.is_const( self.declaration.decl_type ) <NEW_LINE> <DEDENT> has_setter = property( _get_has_setter ) <NEW_LINE> def _create_impl(self): <NEW_LINE> <INDENT> answer = [] <NEW_LINE> answer.append( self.GET_TEMPLATE % { 'type' : self.declaration.decl_type.decl_string , 'name' : self.declaration.name , 'cls_type' : self.inst_arg_type( has_const=True ) }) <NEW_LINE> if self.has_setter: <NEW_LINE> <INDENT> answer.append( self.SET_TEMPLATE % { 'type' : self.declaration.decl_type.decl_string , 'name' : self.declaration.name , 'cls_type' : self.inst_arg_type( has_const=False ) }) <NEW_LINE> <DEDENT> return os.linesep.join( answer ) <NEW_LINE> <DEDENT> def _get_system_files_impl( self ): <NEW_LINE> <INDENT> return []
creates get/set accessors for bit fields
6259905f1f5feb6acb16426e
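To see what the wrapper above emits, its class-level templates can be rendered by hand. The type, member, and class names below are made up for illustration; in practice the values come from the pygccxml declarations, and the exact indentation depends on `code_creator_t.indent`.

```python
# Hypothetical rendering of the getter template for a bit field `flag` of
# type `unsigned int` declared inside a class `packet`.
print(bit_field_wrapper_t.GET_TEMPLATE % {
    'type': 'unsigned int',
    'name': 'flag',
    'cls_type': 'packet const &',
})
# Produces C++ along the lines of:
#   static unsigned int get_flag(packet const & inst ){
#       return inst.flag;
#   }
```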
class Button(Drawable): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def __init__(self, position, width, height, text): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> super().__init__(position, width, height, True, "Button.png") <NEW_LINE> self.text = text <NEW_LINE> font = pygame.font.Font("freesansbold.ttf", 32) <NEW_LINE> txtColor = (150,150,150) <NEW_LINE> self.surface.blit(font.render(text, True,txtColor), (13,8)) <NEW_LINE> <DEDENT> def event(self, coord): <NEW_LINE> <INDENT> if self.surface.get_rect().collidepoint(coord[0]-self.x, coord[1]-self.y): <NEW_LINE> <INDENT> self.eventAction() <NEW_LINE> <DEDENT> <DEDENT> @abc.abstractmethod <NEW_LINE> def eventAction(self): <NEW_LINE> <INDENT> pass
*Creates a button at a given position with a given size *Button has the text passed in written on it (centered)
6259905f4428ac0f6e659bbd
class Experience(database.Model): <NEW_LINE> <INDENT> def __init__(self, id_, name_): <NEW_LINE> <INDENT> self.id = id_ <NEW_LINE> self.name = name_ <NEW_LINE> <DEDENT> __tablename__ = 'experiences' <NEW_LINE> id = database.Column(database.Integer, primary_key=True) <NEW_LINE> name = database.Column(database.String(64), unique=True) <NEW_LINE> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, self.__class__): <NEW_LINE> <INDENT> return self.id == other.id and self.name == other.name <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Experience \'{}\', id: {}>'.format(self.name, self.id)
Values of this table: Könnyű (easy), Közepes (medium), Közepesen Nehéz (moderately hard), Nehéz (hard)
6259905f23e79379d538db80
class SaltApi: <NEW_LINE> <INDENT> def __init__(self, url): <NEW_LINE> <INDENT> self.url = url <NEW_LINE> self.username = "saltapi" <NEW_LINE> self.password = "Xdhg002539" <NEW_LINE> self.headers = { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36", "Content-type": "application/json" } <NEW_LINE> self.params = {'client': 'local', 'fun': '', 'tgt': ''} <NEW_LINE> self.login_url = salt_api + "login" <NEW_LINE> self.login_params = {'username': self.username, 'password': self.password, 'eauth': 'pam'} <NEW_LINE> self.token = self.get_data(self.login_url, self.login_params)['token'] <NEW_LINE> self.headers['X-Auth-Token'] = self.token <NEW_LINE> <DEDENT> def get_data(self, url, params): <NEW_LINE> <INDENT> send_data = json.dumps(params) <NEW_LINE> request = requests.post(url, data=send_data, headers=self.headers, verify=False) <NEW_LINE> if request.status_code != 200: <NEW_LINE> <INDENT> logger.error("salt服务" + salt_api + "连接失败。 http code: " + str(request.status_code)) <NEW_LINE> print("salt服务" + salt_api + "连接失败。 http code: " + str(request.status_code)) <NEW_LINE> <DEDENT> assert request.status_code == 200 <NEW_LINE> response = request.json() <NEW_LINE> result = dict(response) <NEW_LINE> return result['return'][0] <NEW_LINE> <DEDENT> def salt_command(self, tgt, method, arg=None): <NEW_LINE> <INDENT> if arg: <NEW_LINE> <INDENT> params = {'client': 'local', 'fun': method, 'tgt': tgt, 'arg': arg} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> params = {'client': 'local', 'fun': method, 'tgt': tgt} <NEW_LINE> <DEDENT> result = self.get_data(self.url, params) <NEW_LINE> return result
Class that defines the Salt API interface; a token is obtained on initialization
6259905f91af0d3eaad3b4ac
class Listener(Thread): <NEW_LINE> <INDENT> instance = None <NEW_LINE> def __new__(cls, *args, **kwargs): <NEW_LINE> <INDENT> if cls.instance is None: <NEW_LINE> <INDENT> cls.instance = object.__new__(cls, *args) <NEW_LINE> Thread.__init__(cls.instance) <NEW_LINE> cls.instance.running = True <NEW_LINE> <DEDENT> return cls.instance <NEW_LINE> <DEDENT> def __init__(self, service, matchMessageType=None, matchSchemaClass=None, matchSchemaType=None): <NEW_LINE> <INDENT> Thread.__init__(self) <NEW_LINE> self.service = service <NEW_LINE> self.running = True <NEW_LINE> self.matchMessageType = matchMessageType <NEW_LINE> self.matchSchemaClass = matchSchemaClass <NEW_LINE> self.matchSchemaType = matchSchemaType <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> logging.debug("xpllistener: thread started") <NEW_LINE> while self.running: <NEW_LINE> <INDENT> data = self.service.net.read() <NEW_LINE> logging.debug("xpllistener: message received") <NEW_LINE> msg = Message() <NEW_LINE> msg.parse(data) <NEW_LINE> if str(msg.source) != str(self.service.source): <NEW_LINE> <INDENT> if ((msg.type == self.matchMessageType) or (self.matchMessageType == MsgType.xPL_ANY) or (self.matchMessageType is None)) and ((msg.schema.sclass == self.matchSchemaClass) or (self.matchSchemaClass is None)) and ((msg.schema.stype == self.matchSchemaType) or (self.matchSchemaType is None)): <NEW_LINE> <INDENT> self.service.receive(msg) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.running = False <NEW_LINE> self.close()
Asynchronous listener. Sends filtered messages to parent service.
6259905f24f1403a92686410
class Post(db.Model): <NEW_LINE> <INDENT> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> title = db.Column(db.Text, nullable=False) <NEW_LINE> content = db.Column(db.Text) <NEW_LINE> date = db.Column(db.DateTime, default=datetime.utcnow) <NEW_LINE> @staticmethod <NEW_LINE> def page(number=10, offset=0): <NEW_LINE> <INDENT> p = Post.query.order_by(desc(Post.date)).limit(number).offset(offset) <NEW_LINE> result = [i.serialize for i in p.all()] <NEW_LINE> return result <NEW_LINE> <DEDENT> @property <NEW_LINE> def serialize(self): <NEW_LINE> <INDENT> return { 'id': self.id, 'title': self.title, 'content': self.content, 'date': int(self.date.timestamp()*1000) } <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Post: {} - {}".format(self.id, self.title)
Our Post Model
6259905f0fa83653e46f656c
class BaseElement(object): <NEW_LINE> <INDENT> def __set__(self, obj, value): <NEW_LINE> <INDENT> driver = obj.driver <NEW_LINE> if self.iframe_locator: <NEW_LINE> <INDENT> iframe=WebDriverWait(driver, 100).until(EC.presence_of_element_located(self.iframe_locator)) <NEW_LINE> driver.switch_to.frame(iframe) <NEW_LINE> driver.find_element_by_name(self.locator).clear() <NEW_LINE> driver.find_element_by_name(self.locator).send_keys(value) <NEW_LINE> driver.switch_to_default_content() <NEW_LINE> <DEDENT> <DEDENT> def __get__(self, obj, owner): <NEW_LINE> <INDENT> driver = obj.driver <NEW_LINE> if self.iframe_locator: <NEW_LINE> <INDENT> iframe=WebDriverWait(driver, 100).until(EC.presence_of_element_located(self.iframe_locator)) <NEW_LINE> driver.switch_to.frame(iframe) <NEW_LINE> element=WebDriverWait(driver, 100).until(EC.presence_of_element_located(self.locator)) <NEW_LINE> driver.switch_to_default_content() <NEW_LINE> <DEDENT> return element.get_attribute("value")
Base page class that is initialized on every page object class.
6259905ff548e778e596cc0e
class Recorder(object): <NEW_LINE> <INDENT> def __init__(self, channels=1, rate=44100, frames_per_buffer=1024): <NEW_LINE> <INDENT> self.channels = channels <NEW_LINE> self.rate = rate <NEW_LINE> self.frames_per_buffer = frames_per_buffer <NEW_LINE> <DEDENT> def open(self, fname, input_device_index=0, mode='wb'): <NEW_LINE> <INDENT> return RecordingFile(fname, mode, self.channels, self.rate, input_device_index, self.frames_per_buffer)
A recorder class for recording audio to a WAV file. Records in mono by default.
6259905fd486a94d0ba2d64d
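A hedged usage sketch. `RecordingFile` is referenced by `open()` but not shown in this record, so the context-manager protocol and the blocking `record(duration)` call below are assumptions about its interface rather than documented API.

```python
# Hypothetical usage; see the caveat above about RecordingFile's interface.
rec = Recorder(channels=2, rate=44100, frames_per_buffer=1024)
with rec.open('demo.wav', mode='wb') as recfile:   # assumed context manager
    recfile.record(5.0)                            # assumed: record ~5 seconds
```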
class CreateNotificationConfigurationRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.AutoScalingGroupId = None <NEW_LINE> self.NotificationTypes = None <NEW_LINE> self.NotificationUserGroupIds = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.AutoScalingGroupId = params.get("AutoScalingGroupId") <NEW_LINE> self.NotificationTypes = params.get("NotificationTypes") <NEW_LINE> self.NotificationUserGroupIds = params.get("NotificationUserGroupIds")
CreateNotificationConfiguration request parameter structure
6259905f627d3e7fe0e08510
class Entry: <NEW_LINE> <INDENT> def __init__( self, start_dt: str = None, end_dt: str = None, title: str = None, body: str = None ): <NEW_LINE> <INDENT> now = pendulum.now() <NEW_LINE> self.start_dt = parse_dt_local_tz(start_dt) if start_dt else now <NEW_LINE> self.end_dt = parse_dt_local_tz(end_dt) if end_dt else now <NEW_LINE> self.created_dt = now <NEW_LINE> self.modified_dt = now <NEW_LINE> self.title = title if title else self.start_dt.to_day_datetime_string() <NEW_LINE> self.body = body <NEW_LINE> self.uuid = str(uuid()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f'<Entry uuid: {self.uuid} start_dt: {self.start_dt.to_datetime_string()} body: "{self.body}">'
An entry on the timeline; where all content is stored and linked.
6259905f7b25080760ed8823
class DayRecord(models.Model): <NEW_LINE> <INDENT> date_reference = models.DateField() <NEW_LINE> day_in_advance = models.IntegerField() <NEW_LINE> source = models.CharField(max_length=6) <NEW_LINE> max_temp = models.IntegerField() <NEW_LINE> min_temp = models.IntegerField() <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return ', '.join([ str(self.date_reference), str(self.day_in_advance), self.source, str(self.max_temp), str(self.min_temp) ]) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'DayRecord(date={!r}, day in advance={!r}, source={!r}, ' 'max temp={!r}, min temp={!r}'.format( self.date_reference, self.day_in_advance, self.source, self.max_temp, self.min_temp )
Record (forecasted) for an individual day. date_reference is the date a temperature forecast applies to. day_in_advance is the number of days in advance the forecast was made (0-7) source identifies the forecaster; it is a member of SOURCES
6259905f3539df3088ecd922
class DescribeUsgRuleRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.SgIds = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.SgIds = params.get("SgIds")
DescribeUsgRule request parameter structure
6259905f29b78933be26ac07
class Field(object): <NEW_LINE> <INDENT> def __init__(self, name=None, key=None, description=None, default=None, type=None, pk=False): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.key = key <NEW_LINE> self.description = description <NEW_LINE> self.value = default <NEW_LINE> self.temp = None <NEW_LINE> self.type = type <NEW_LINE> self.pk = pk <NEW_LINE> <DEDENT> @property <NEW_LINE> def xml_key(self): <NEW_LINE> <INDENT> return './' + str(self.key) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return '%s' % self.value.encode('utf8') <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return str(self.value)
The class defining each field in the model. Important instance variables: - name - The name of the field - key - The key used for the field in the original data - description - the description for the field - temp - used for holding temp variables in the cleanup process - type - type of the field, e.g. string, list, etc.
6259905f2ae34c7f260ac76d
class calculate_result(object): <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.I32, 'success', None, None, ), (1, TType.STRUCT, 'ouch', (InvalidOperation, InvalidOperation.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, success=None, ouch=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> self.ouch = ouch <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.success = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.ouch = InvalidOperation() <NEW_LINE> self.ouch.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('calculate_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.I32, 0) <NEW_LINE> oprot.writeI32(self.success) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.ouch is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('ouch', TType.STRUCT, 1) <NEW_LINE> self.ouch.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - success - ouch
6259905f23e79379d538db82
class UserLoginSerializer(ModelSerializer): <NEW_LINE> <INDENT> token = CharField(allow_blank=True, read_only=True) <NEW_LINE> username = CharField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = [ "username", "password", "token", ] <NEW_LINE> extra_kwargs = {"password": {"write_only": True,}} <NEW_LINE> <DEDENT> def validate(self, data): <NEW_LINE> <INDENT> username = data["username"] <NEW_LINE> password = data["password"] <NEW_LINE> user = User.objects.filter(username=username).first() <NEW_LINE> if not user: <NEW_LINE> <INDENT> raise ValidationError("This username is not valid") <NEW_LINE> <DEDENT> if not user.check_password(password): <NEW_LINE> <INDENT> raise ValidationError("Incorrect credential, please try again") <NEW_LINE> <DEDENT> data["token"] = "SOMERANDOMTOKEN" <NEW_LINE> return data
Serializer for user login
6259905ff548e778e596cc0f
class DWord(Encoder): <NEW_LINE> <INDENT> def bytes_length(self, values): <NEW_LINE> <INDENT> return len(values) * 4
constant word size = 32 bits
6259905f32920d7e50bc76cd
class BlogAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> search_fields = ('title','entry',) <NEW_LINE> list_display = ('id','title','published') <NEW_LINE> list_filter = ('published',)
Blog Admin
6259905fd6c5a102081e37aa
class FWWorker(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, observers): <NEW_LINE> <INDENT> super(FWWorker, self).__init__() <NEW_LINE> self.l = logging.getLogger(__name__+"."+self.__class__.__name__) <NEW_LINE> self._stopit = threading.Event() <NEW_LINE> self.l.info("Initialized FileWatch worker") <NEW_LINE> self.observers = observers <NEW_LINE> self.activeTasks = [] <NEW_LINE> self.queue = [] <NEW_LINE> self.lock = threading.Lock() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while not self.isStopped(): <NEW_LINE> <INDENT> now = time.time() <NEW_LINE> for key, obj in self.observers.items(): <NEW_LINE> <INDENT> if now - obj.get("lastcheck") > obj.get("interval"): <NEW_LINE> <INDENT> obj["lastcheck"]=time.time(); <NEW_LINE> self.checkFile(key) <NEW_LINE> <DEDENT> <DEDENT> for aT in self.activeTasks: <NEW_LINE> <INDENT> if not aT.is_alive(): <NEW_LINE> <INDENT> self.activeTasks.remove(aT) <NEW_LINE> <DEDENT> <DEDENT> if len(self.queue) > 0 and len(self.activeTasks) < multiprocessing.cpu_count(): <NEW_LINE> <INDENT> job = self.queue.pop(0) <NEW_LINE> self.activeTasks.append(job) <NEW_LINE> job.daemon = True <NEW_LINE> job.start() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> time.sleep(0.05) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def checkFile(self, filepath): <NEW_LINE> <INDENT> fPI = threading.Thread(target=self.__checkFile, args=(filepath,)) <NEW_LINE> self.queue.append(fPI) <NEW_LINE> <DEDENT> def __checkFile(self, filepath): <NEW_LINE> <INDENT> if not os.path.exists(filepath): <NEW_LINE> <INDENT> self.l.info("File "+filepath+" doesn't exist anymore - removing observers") <NEW_LINE> self.lock.acquire(True) <NEW_LINE> detached=self.observers.pop(filepath) <NEW_LINE> self.lock.release() <NEW_LINE> if detached.get("ondestroy"): <NEW_LINE> <INDENT> self.l.info("calling gone callback") <NEW_LINE> detached.get("ondestroy")() <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> mt = os.path.getmtime(filepath) <NEW_LINE> if self.observers.get(filepath).get("modified") != mt: <NEW_LINE> <INDENT> self.l.info("File "+filepath+" changed - calling observer") <NEW_LINE> self.lock.acquire(True) <NEW_LINE> self.observers.get(filepath)["modified"] = mt <NEW_LINE> self.observers.get(filepath).get("observer")() <NEW_LINE> self.lock.release() <NEW_LINE> <DEDENT> <DEDENT> def stopIt(self): <NEW_LINE> <INDENT> self._stopit.set() <NEW_LINE> <DEDENT> def isStopped(self): <NEW_LINE> <INDENT> return self._stopit.isSet()
Do the work within a thread to not block anything else.
6259905f38b623060ffaa393
class NatronBreakdownSceneResource(str): <NEW_LINE> <INDENT> def __new__(cls, node, parameter): <NEW_LINE> <INDENT> text = "%s" % node <NEW_LINE> obj = str.__new__(cls, text) <NEW_LINE> obj.parameter = parameter <NEW_LINE> return obj
Helper class to store metadata per update item. tk-multi-breakdown requires item['node'] to be a str; this is what is displayed in the list of recognized items to update. We want to add metadata to each item, because what we want to pass to the update is the parameter and not the node itself. Python-friendly object + __repr__ magic method.
6259905f2c8b7c6e89bd4e76
class ArgusAuthException(ArgusException): <NEW_LINE> <INDENT> pass
An exception type that is thrown for Argus authentication errors.
6259905fa8ecb0332587289f
class OneRoomMaze(Maze): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.currentCell = Cell() <NEW_LINE> self.contents = self.currentCell
A one-room maze contains only one cell
6259905fd486a94d0ba2d64f
class GuruMeditation(object): <NEW_LINE> <INDENT> timestamp_fmt = "%Y%m%d%H%M%S" <NEW_LINE> def __init__(self, version_obj, sig_handler_tb=None, *args, **kwargs): <NEW_LINE> <INDENT> self.version_obj = version_obj <NEW_LINE> self.traceback = sig_handler_tb <NEW_LINE> super(GuruMeditation, self).__init__(*args, **kwargs) <NEW_LINE> self.start_section_index = len(self.sections) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def register_section(cls, section_title, generator): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cls.persistent_sections.append([section_title, generator]) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> cls.persistent_sections = [[section_title, generator]] <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def setup_autorun(cls, version, service_name=None, log_dir=None, signum=None): <NEW_LINE> <INDENT> if not signum and hasattr(signal, 'SIGUSR1'): <NEW_LINE> <INDENT> signum = signal.SIGUSR1 <NEW_LINE> <DEDENT> if signum: <NEW_LINE> <INDENT> signal.signal(signum, lambda sn, tb: cls.handle_signal( version, service_name, log_dir, tb)) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def handle_signal(cls, version, service_name, log_dir, traceback): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> res = cls(version, traceback).run() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> print("Unable to run Guru Meditation Report!", file=sys.stderr) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if log_dir: <NEW_LINE> <INDENT> service_name = service_name or os.path.basename( inspect.stack()[-1][1]) <NEW_LINE> filename = "%s_gurumeditation_%s" % ( service_name, timeutils.strtime(fmt=cls.timestamp_fmt)) <NEW_LINE> filepath = os.path.join(log_dir, filename) <NEW_LINE> try: <NEW_LINE> <INDENT> with open(filepath, "w") as dumpfile: <NEW_LINE> <INDENT> dumpfile.write(res) <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> print("Unable to dump Guru Meditation Report to file %s" % (filepath,), file=sys.stderr) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print(res, file=sys.stderr) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _readd_sections(self): <NEW_LINE> <INDENT> del self.sections[self.start_section_index:] <NEW_LINE> self.add_section('Package', pgen.PackageReportGenerator(self.version_obj)) <NEW_LINE> self.add_section('Threads', tgen.ThreadReportGenerator(self.traceback)) <NEW_LINE> self.add_section('Green Threads', tgen.GreenThreadReportGenerator()) <NEW_LINE> self.add_section('Processes', prgen.ProcessReportGenerator()) <NEW_LINE> self.add_section('Configuration', cgen.ConfigReportGenerator()) <NEW_LINE> try: <NEW_LINE> <INDENT> for section_title, generator in self.persistent_sections: <NEW_LINE> <INDENT> self.add_section(section_title, generator) <NEW_LINE> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> self._readd_sections() <NEW_LINE> return super(GuruMeditation, self).run()
A Guru Meditation Report Mixin/Base Class This class is a base class for Guru Meditation Reports. It provides facilities for registering sections and setting up functionality to auto-run the report on a certain signal. This class should always be used in conjunction with a Report class via multiple inheritance. It should always come first in the class list to ensure the MRO is correct.
6259905f8e7ae83300eea715
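A hedged sketch of how the mixin above is meant to be wired up, following its docstring: combine it with a concrete report class (the mixin listed first so the MRO is correct), optionally register extra sections, and enable the signal-triggered auto-run. `TextReport`, `my_state_generator`, and `version_object` are placeholders, not names from this record.

```python
# Hypothetical wiring (placeholder names, per the note above).
class MyGuruReport(GuruMeditation, TextReport):   # mixin first in the MRO
    pass

# Add a custom section that is re-rendered on every report.
MyGuruReport.register_section('My App State', my_state_generator)

# Dump a report to log_dir (or stderr) whenever the signal is received
# (SIGUSR1 by default, where available).
MyGuruReport.setup_autorun(version_object,
                           service_name='my-service',
                           log_dir='/var/log/my-service')
```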
class GoogleSpider(RedisSpider): <NEW_LINE> <INDENT> name = 'google-spider' <NEW_LINE> allowed_domains = ['google.com.ua'] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(GoogleSpider, self).__init__() <NEW_LINE> <DEDENT> def parse(self, response): <NEW_LINE> <INDENT> quantity = response.meta.get('quantity', 0) <NEW_LINE> for td in response.css('.images_table tr td'): <NEW_LINE> <INDENT> if quantity < settings.QUANTITY_IMAGES: <NEW_LINE> <INDENT> item = ImageItem() <NEW_LINE> item['image_url'] = td.xpath('.//a/img/@src').extract()[0] <NEW_LINE> item['rank'] = 1 <NEW_LINE> item['site'] = 1 <NEW_LINE> item['keyword'] = response.meta['keyword'] <NEW_LINE> quantity += 1 <NEW_LINE> yield item <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Task.objects.filter(keywords=response.meta['keyword']).update( google_status='done') <NEW_LINE> r = redis.StrictRedis(host='localhost', port=6379, db=0) <NEW_LINE> r.publish('google', response.meta['keyword']) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> next_href = response.css('#nav td.b a.fl') <NEW_LINE> if next_href: <NEW_LINE> <INDENT> url = response.urljoin(next_href.xpath('@href').extract()[0]) <NEW_LINE> yield scrapy.Request(url, self.parse, meta={'keyword': response.meta['keyword'], 'quantity': quantity}) <NEW_LINE> <DEDENT> <DEDENT> def make_request_from_data(self, data): <NEW_LINE> <INDENT> new_url = 'https://www.google.com.ua/search?q=%s&tbm=isch' % data <NEW_LINE> if '://' in new_url: <NEW_LINE> <INDENT> return Request(new_url, dont_filter=True, meta={'keyword': data}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.error("Unexpected URL from '%s': %r", self.redis_key, new_url)
Spider for scraping image-search pages on google.com. It is based on RedisSpider. A keyword comes in and a search link is formed; the spider performs the request and returns the response. It then parses the page and finally sends a message to a channel on the Redis server. Attributes: name: the name of the spider. allowed_domains: allowed domains. quantity: image counter.
6259905f627d3e7fe0e08512
class Refs(dict): <NEW_LINE> <INDENT> def __setitem__(self, i, y): <NEW_LINE> <INDENT> if i in self and self[i] is not y: <NEW_LINE> <INDENT> raise ValueError('You must not set the same id twice!!') <NEW_LINE> <DEDENT> return dict.__setitem__(self, i, y) <NEW_LINE> <DEDENT> def gen_default_name(self, obj): <NEW_LINE> <INDENT> base_name = obj.obj_type.split('.')[-1] <NEW_LINE> num_of_same_type = [v for v in self.values() if v.obj_type == obj.obj_type] <NEW_LINE> return base_name + num_of_same_type + 1
Class to store and handle references during saving/loading. Provides some convenience functions
6259905fbaa26c4b54d50928
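A short demonstration of the guard in `__setitem__` above: re-binding an existing id to a different object raises, while re-binding the same object is allowed. (Separately, `gen_default_name` in the record appears to add a string to a list; a working version would presumably use `len(...)` for the counter.)

```python
refs = Refs()
obj_a, obj_b = object(), object()

refs['node-1'] = obj_a
refs['node-1'] = obj_a        # same object under the same id: allowed

try:
    refs['node-1'] = obj_b    # different object under the same id: rejected
except ValueError as err:
    print(err)                # "You must not set the same id twice!!"
```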
class DetectGameMaster(Engine): <NEW_LINE> <INDENT> def __init__(self, client): <NEW_LINE> <INDENT> Engine.__init__(self, client) <NEW_LINE> for item in client.world.iter_items(): <NEW_LINE> <INDENT> self._check_item(item) <NEW_LINE> <DEDENT> <DEDENT> def _panic(self, entity): <NEW_LINE> <INDENT> print("\x1b[41m ____ __ __ _ _ _ _ \x1b[0m") <NEW_LINE> print("\x1b[41m / ___| \/ | __| | ___| |_ ___ ___| |_ ___ __| |\x1b[0m") <NEW_LINE> print("\x1b[41m| | _| |\/| | / _` |/ _ \ __/ _ \/ __| __/ _ \/ _` |\x1b[0m") <NEW_LINE> print("\x1b[41m| |_| | | | | | (_| | __/ || __/ (__| || __/ (_| |\x1b[0m") <NEW_LINE> print("\x1b[41m \____|_| |_| \__,_|\___|\__\___|\___|\__\___|\__,_|\x1b[0m") <NEW_LINE> all_entities = self._client.world.entities <NEW_LINE> while True: <NEW_LINE> <INDENT> print(entity) <NEW_LINE> if not isinstance(entity, Item): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> parent = entity.parent_serial <NEW_LINE> if parent is None or parent not in all_entities: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> entity = all_entities[parent] <NEW_LINE> <DEDENT> reactor.crash() <NEW_LINE> <DEDENT> def _check_item(self, item): <NEW_LINE> <INDENT> if item.item_id == ITEM_GM_ROBE: <NEW_LINE> <INDENT> self._panic(item) <NEW_LINE> <DEDENT> <DEDENT> def on_world_item(self, item): <NEW_LINE> <INDENT> self._check_item(item) <NEW_LINE> <DEDENT> def on_container_item(self, item): <NEW_LINE> <INDENT> self._check_item(item) <NEW_LINE> <DEDENT> def on_equip_item(self, item): <NEW_LINE> <INDENT> self._check_item(item) <NEW_LINE> <DEDENT> def on_mobile_incoming(self, mobile): <NEW_LINE> <INDENT> if mobile.body == 0x3db or mobile.serial in GM_SERIALS: <NEW_LINE> <INDENT> self._panic(mobile)
Detect the presence of a GameMaster, and stop the macro immediately.
6259905f99cbb53fe6832569
class LinkedList: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.__root = None <NEW_LINE> <DEDENT> def get_root(self): <NEW_LINE> <INDENT> return self.__root <NEW_LINE> <DEDENT> def add_to_list(self, node): <NEW_LINE> <INDENT> if self.__root: <NEW_LINE> <INDENT> node.set_next(self.__root) <NEW_LINE> <DEDENT> self.__root = node <NEW_LINE> <DEDENT> def print_list(self): <NEW_LINE> <INDENT> marker = self.__root <NEW_LINE> while marker: <NEW_LINE> <INDENT> marker.print_details() <NEW_LINE> marker = marker.get_next() <NEW_LINE> <DEDENT> <DEDENT> def find(self, name): <NEW_LINE> <INDENT> marker = self.__root <NEW_LINE> while marker: <NEW_LINE> <INDENT> if marker.name == name: <NEW_LINE> <INDENT> return marker <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> marker = marker.get_next() <NEW_LINE> <DEDENT> <DEDENT> raise LookupError("{} wasn't found".format(name))
This class is the one you should be modifying! Don't change the name of the class or any of the methods. Implement those methods that currently raise a NotImplementedError
6259905f07f4c71912bb0ac4
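A hedged usage sketch of the list above. The node objects it stores are not part of this record, so the minimal `Node` below (with `name`, `set_next`/`get_next`, and `print_details`) is an assumption inferred from the methods the list calls on its elements.

```python
class Node:
    # Assumed node shape, inferred from what LinkedList calls on its elements.
    def __init__(self, name):
        self.name = name
        self._next = None

    def set_next(self, node):
        self._next = node

    def get_next(self):
        return self._next

    def print_details(self):
        print(self.name)


lst = LinkedList()
lst.add_to_list(Node("alice"))
lst.add_to_list(Node("bob"))
lst.print_list()                  # bob, alice — new nodes are prepended at the root
print(lst.find("alice").name)     # alice
```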
class ModelerTests(unittest.TestCase): <NEW_LINE> <INDENT> maxDiff = None
Dynamic modelers test case. Test methods are automatically added by the add_checks function below. See the module's docstring for details.
6259905f67a9b606de5475e5
class NonClusterableLayer(layers.Layer): <NEW_LINE> <INDENT> def __init__(self, units=10): <NEW_LINE> <INDENT> super(NonClusterableLayer, self).__init__() <NEW_LINE> self.add_weight(shape=(1, units), initializer='uniform', name='kernel') <NEW_LINE> <DEDENT> def call(self, inputs): <NEW_LINE> <INDENT> return tf.matmul(inputs, self.weights)
"A custom layer with weights that is not clusterable.
6259905fe5267d203ee6cf03