code: string, 4–4.48k characters
docstring: string, 1–6.45k characters
_id: string, 24 characters
class BlogTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> APP = create_app(config_name="testing") <NEW_LINE> APP.testing = True <NEW_LINE> self.app = APP.test_client() <NEW_LINE> create_tables() <NEW_LINE> self.test_user = test_user <NEW_LINE> payload = { "user_name": self.test_user['user_name'], "exp": datetime.datetime.utcnow() + datetime.timedelta(minutes=60) } <NEW_LINE> token = jwt.encode( payload=payload, key=secret, algorithm='HS256') <NEW_LINE> self.headers = {'Content-Type': 'application/json', 'token': token } <NEW_LINE> self.headers_invalid = { 'Content-Type': 'application/json', 'token': 'Tokenmbaya'} <NEW_LINE> self.blog = blog_data <NEW_LINE> <DEDENT> def test_get_all_questions_no_token(self): <NEW_LINE> <INDENT> response = self.app.get("/api/v2/blogs") <NEW_LINE> result = json.loads(response.data) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertEqual(result['message'], "Token is missing") <NEW_LINE> <DEDENT> def test_post_blog(self): <NEW_LINE> <INDENT> response = self.app.post( "/api/v2/blogs", headers=self.headers, data=json.dumps(self.blog)) <NEW_LINE> result = json.loads(response.data) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertEqual(result['status'], 201) <NEW_LINE> <DEDENT> def test_delete_blog(self): <NEW_LINE> <INDENT> self.app.post("/api/v2/blogs/1", headers=self.headers, data=json.dumps(self.blog)) <NEW_LINE> response = self.app.delete("/api/v2/blogs/1", headers=self.headers) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> <DEDENT> def test_delete_nonExistingBlog(self): <NEW_LINE> <INDENT> self.app.post("/api/v2/blogs/1", headers=self.headers, data=json.dumps(self.blog)) <NEW_LINE> response = self.app.delete("/api/v2/blogs/18", headers=self.headers) <NEW_LINE> self.assertEqual(response.status_code, 200)
This class represents the blog test cases
625990804428ac0f6e659fea
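The setUp() above builds an HS256 token with PyJWT and passes it in a custom 'token' header to a Flask test client. A minimal standalone sketch of that pattern (create_app, the signing secret and the /api/v2/blogs route are assumptions taken from the row, not a verified API):

import datetime
import jwt  # PyJWT

secret = "change-me"  # assumed signing key; the row imports its own `secret`
payload = {
    "user_name": "tester",
    "exp": datetime.datetime.utcnow() + datetime.timedelta(minutes=60),
}
token = jwt.encode(payload=payload, key=secret, algorithm="HS256")
headers = {"Content-Type": "application/json", "token": token}
# client = create_app(config_name="testing").test_client()
# response = client.get("/api/v2/blogs", headers=headers)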
class mesh(np.ndarray): <NEW_LINE> <INDENT> def __new__(cls, init, val=0.0, offset=0, buffer=None, strides=None, order=None): <NEW_LINE> <INDENT> if isinstance(init, mesh): <NEW_LINE> <INDENT> obj = np.ndarray.__new__(cls, shape=init.shape, dtype=init.dtype, buffer=buffer, offset=offset, strides=strides, order=order) <NEW_LINE> obj[:] = init[:] <NEW_LINE> obj._comm = init._comm <NEW_LINE> <DEDENT> elif isinstance(init, tuple) and (init[1] is None or isinstance(init[1], MPI.Intracomm)) and isinstance(init[2], np.dtype): <NEW_LINE> <INDENT> obj = np.ndarray.__new__(cls, init[0], dtype=init[2], buffer=buffer, offset=offset, strides=strides, order=order) <NEW_LINE> obj.fill(val) <NEW_LINE> obj._comm = init[1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError(type(init)) <NEW_LINE> <DEDENT> return obj <NEW_LINE> <DEDENT> @property <NEW_LINE> def comm(self): <NEW_LINE> <INDENT> return self._comm <NEW_LINE> <DEDENT> def __array_finalize__(self, obj): <NEW_LINE> <INDENT> if obj is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._comm = getattr(obj, '_comm', None) <NEW_LINE> <DEDENT> def __array_ufunc__(self, ufunc, method, *inputs, out=None, **kwargs): <NEW_LINE> <INDENT> args = [] <NEW_LINE> comm = None <NEW_LINE> for _, input_ in enumerate(inputs): <NEW_LINE> <INDENT> if isinstance(input_, mesh): <NEW_LINE> <INDENT> args.append(input_.view(np.ndarray)) <NEW_LINE> comm = input_.comm <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> args.append(input_) <NEW_LINE> <DEDENT> <DEDENT> results = super(mesh, self).__array_ufunc__(ufunc, method, *args, **kwargs).view(mesh) <NEW_LINE> if not method == 'reduce': <NEW_LINE> <INDENT> results._comm = comm <NEW_LINE> <DEDENT> return results <NEW_LINE> <DEDENT> def __abs__(self): <NEW_LINE> <INDENT> local_absval = float(np.amax(np.ndarray.__abs__(self))) <NEW_LINE> if self.comm is not None: <NEW_LINE> <INDENT> if self.comm.Get_size() > 1: <NEW_LINE> <INDENT> global_absval = 0.0 <NEW_LINE> global_absval = max(self.comm.allreduce(sendobj=local_absval, op=MPI.MAX), global_absval) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> global_absval = local_absval <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> global_absval = local_absval <NEW_LINE> <DEDENT> return float(global_absval) <NEW_LINE> <DEDENT> def isend(self, dest=None, tag=None, comm=None): <NEW_LINE> <INDENT> return comm.Issend(self[:], dest=dest, tag=tag) <NEW_LINE> <DEDENT> def irecv(self, source=None, tag=None, comm=None): <NEW_LINE> <INDENT> return comm.Irecv(self[:], source=source, tag=tag) <NEW_LINE> <DEDENT> def bcast(self, root=None, comm=None): <NEW_LINE> <INDENT> comm.Bcast(self[:], root=root) <NEW_LINE> return self
Numpy-based datatype for serial or parallel meshes. Can include a communicator and expects a dtype to allow complex data. Attributes: _comm: MPI communicator or None
625990807c178a314d78e948
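The mesh class follows NumPy's standard subclassing recipe: allocate with np.ndarray.__new__ and propagate extra attributes in __array_finalize__ so they survive views and ufunc results. A stripped-down, MPI-free sketch of the same pattern:

import numpy as np

class Tagged(np.ndarray):
    def __new__(cls, shape, tag=None, dtype=float):
        obj = np.ndarray.__new__(cls, shape, dtype=dtype)
        obj.fill(0.0)
        obj.tag = tag          # plays the role of mesh._comm
        return obj

    def __array_finalize__(self, obj):
        if obj is None:
            return
        self.tag = getattr(obj, "tag", None)

a = Tagged((4,), tag="rank-0")
print((a + 1).tag)             # attribute survives arithmetic: "rank-0"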
class Thermal(base.ResourceBase): <NEW_LINE> <INDENT> identity = base.Field('Id') <NEW_LINE> name = base.Field('Name') <NEW_LINE> status = common.StatusField('Status') <NEW_LINE> fans = FansListField('Fans', default=[]) <NEW_LINE> temperatures = TemperaturesListField('Temperatures', default=[])
This class represents a Thermal resource.
6259908063b5f9789fe86c23
class CloudForgotPasswordView(HomeAssistantView): <NEW_LINE> <INDENT> url = "/api/cloud/forgot_password" <NEW_LINE> name = "api:cloud:forgot_password" <NEW_LINE> @_handle_cloud_errors <NEW_LINE> @RequestDataValidator(vol.Schema({vol.Required("email"): str})) <NEW_LINE> async def post(self, request, data): <NEW_LINE> <INDENT> hass = request.app["hass"] <NEW_LINE> cloud = hass.data[DOMAIN] <NEW_LINE> with async_timeout.timeout(REQUEST_TIMEOUT): <NEW_LINE> <INDENT> await cloud.auth.async_forgot_password(data["email"]) <NEW_LINE> <DEDENT> return self.json_message("ok")
View to start the Forgot Password flow.
625990805fcc89381b266eba
class ConfigurationManipulator(ConfigurationManipulatorBase): <NEW_LINE> <INDENT> def __init__(self, params=None, config_type=dict, seed_config=None, **kwargs): <NEW_LINE> <INDENT> if params is None: <NEW_LINE> <INDENT> params = [] <NEW_LINE> <DEDENT> self.params = list(params) <NEW_LINE> self.config_type = config_type <NEW_LINE> self.search_driver = None <NEW_LINE> self._seed_config = seed_config <NEW_LINE> super(ConfigurationManipulator, self).__init__(**kwargs) <NEW_LINE> for p in self.params: <NEW_LINE> <INDENT> p.parent = self <NEW_LINE> <DEDENT> <DEDENT> def add_parameter(self, p): <NEW_LINE> <INDENT> p.set_parent(self) <NEW_LINE> self.params.append(p) <NEW_LINE> sub_params = p.sub_parameters() <NEW_LINE> for sp in sub_params: <NEW_LINE> <INDENT> sp.set_parent(p) <NEW_LINE> <DEDENT> self.params.extend(sub_params) <NEW_LINE> <DEDENT> def set_search_driver(self, search_driver): <NEW_LINE> <INDENT> self.search_driver = search_driver <NEW_LINE> <DEDENT> def seed_config(self): <NEW_LINE> <INDENT> if self._seed_config: <NEW_LINE> <INDENT> cfg = copy.deepcopy(self._seed_config) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cfg = self.config_type() <NEW_LINE> for p in self.params: <NEW_LINE> <INDENT> if not isinstance(p.name, str) or '/' not in p.name: <NEW_LINE> <INDENT> cfg[p.name] = p.seed_value() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return cfg <NEW_LINE> <DEDENT> def random(self): <NEW_LINE> <INDENT> cfg = self.seed_config() <NEW_LINE> for p in self.parameters(cfg): <NEW_LINE> <INDENT> p.randomize(cfg) <NEW_LINE> <DEDENT> return cfg <NEW_LINE> <DEDENT> def parameters(self, config): <NEW_LINE> <INDENT> if type(config) is not self.config_type: <NEW_LINE> <INDENT> log.error("wrong type, expected %s got %s", str(self.config_type), str(type(config))) <NEW_LINE> raise TypeError() <NEW_LINE> <DEDENT> return self.params <NEW_LINE> <DEDENT> def hash_config(self, config): <NEW_LINE> <INDENT> m = hashlib.sha256() <NEW_LINE> params = list(self.parameters(config)) <NEW_LINE> params.sort(key=_.name) <NEW_LINE> for i, p in enumerate(params): <NEW_LINE> <INDENT> m.update(str(p.name)) <NEW_LINE> m.update(p.hash_value(config)) <NEW_LINE> m.update(str(i)) <NEW_LINE> m.update("|") <NEW_LINE> <DEDENT> return m.hexdigest() <NEW_LINE> <DEDENT> def search_space_size(self): <NEW_LINE> <INDENT> return reduce(_ * _, [x.search_space_size() for x in self.params]) <NEW_LINE> <DEDENT> def difference(self, cfg1, cfg2): <NEW_LINE> <INDENT> cfg = self.copy(cfg1) <NEW_LINE> for param in self.parameters(cfg1): <NEW_LINE> <INDENT> if param.is_primitive(cfg1): <NEW_LINE> <INDENT> param.set_value(cfg, param.get_value(cfg1)-param.get_value(cfg2)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return cfg <NEW_LINE> <DEDENT> def applySVs(self, cfg, sv_map, args): <NEW_LINE> <INDENT> param_dict = self.parameters_dict(cfg) <NEW_LINE> for param in sv_map: <NEW_LINE> <INDENT> getattr(param, sv_map[param])(cfg, *args[param])
a configuration manipulator using a fixed set of parameters and storing configs in a dict-like object
625990801f5feb6acb1646b5
class QueueDepthByPriority(models.Model): <NEW_LINE> <INDENT> priority = models.IntegerField() <NEW_LINE> depth_time = models.DateTimeField(db_index=True) <NEW_LINE> depth = models.IntegerField() <NEW_LINE> objects = QueueDepthByPriorityManager() <NEW_LINE> class Meta(object): <NEW_LINE> <INDENT> db_table = 'queue_depth_priority'
Represents the queue depth counts for each priority level at various points in time :keyword priority: The priority level :type priority: :class:`django.db.models.IntegerField` :keyword depth_time: When the depth was measured :type depth_time: :class:`django.db.models.DateTimeField` :keyword depth: The queue depth for this priority at this time :type depth: :class:`django.db.models.IntegerField`
6259908092d797404e3898ba
class NotFound(Exception): <NEW_LINE> <INDENT> pass
Thrown when the Resource could not be found.
625990802c8b7c6e89bd52a1
class HttpRequest(object): <NEW_LINE> <INDENT> def __init__(self, http_method, query_url, headers = None, query_parameters = None, parameters = None, files = None, username = None, password = None): <NEW_LINE> <INDENT> self.http_method = http_method <NEW_LINE> self.query_url = query_url <NEW_LINE> self.headers = headers <NEW_LINE> self.query_parameters = query_parameters <NEW_LINE> self.parameters = parameters <NEW_LINE> self.files = files <NEW_LINE> self.username = username <NEW_LINE> self.password = password
Information about an HTTP Request including its method, headers, parameters, URL, and Basic Auth details Attributes: http_method (HttpMethodEnum): The HTTP Method that this request should perform when called. headers (dict): A dictionary of headers (key : value) that should be sent along with the request. query_url (string): The URL that the request should be sent to. parameters (dict): A dictionary of parameters that are to be sent along with the request in the form body of the request username (string): If this request is to use Basic Authentication for authentication then this property represents the corresponding username. password (string): If this request is to use Basic Authentication for authentication then this property represents the corresponding password.
62599080d486a94d0ba2da73
class PersonTelephone(BaseModel): <NEW_LINE> <INDENT> person_telephone_id = models.AutoField( 'person_telephone_id', primary_key=True ) <NEW_LINE> person = models.ForeignKey("Person", verbose_name='person') <NEW_LINE> telephone_number = models.CharField('telephone_number', max_length=24, null=False, blank=False) <NEW_LINE> ref_person_telephone_number_type = models.ForeignKey( "RefPersonTelephoneNumberType", verbose_name='ref_person_telephone_number_type_id', blank=True, null=True, on_delete=models.SET_NULL ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'person_telephone' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.person.__str__() + ': ' + self.telephone_number + ' (' + self.ref_person_telephone_number_type.__str__() + ')'
A person may be linked to zero or more telephone numbers.
625990804527f215b58eb6fe
class Motocicleta(Vehiculo): <NEW_LINE> <INDENT> def __init__(self, arg): <NEW_LINE> <INDENT> self.cilindradas = "" <NEW_LINE> <DEDENT> def set_cilindradas(self, cilindradas): <NEW_LINE> <INDENT> self.cilindradas = cilindradas <NEW_LINE> <DEDENT> def get_cilindradas(self): <NEW_LINE> <INDENT> return self.cilindradas
docstring for Motocicleta.
62599080f548e778e596d04e
class GeoPXRecord(PXRecord): <NEW_LINE> <INDENT> def __init__(self, label=None, ttl=None, *args, **kwargs): <NEW_LINE> <INDENT> kwargs['create'] = False <NEW_LINE> super(GeoPXRecord, self).__init__(*args, **kwargs) <NEW_LINE> self.label = label <NEW_LINE> self._ttl = ttl
An :class:`PXRecord` object which is able to store additional data for use by a :class:`Geo` service.
62599080656771135c48ad8e
class JceTransformationError(DynamodbEncryptionSdkError): <NEW_LINE> <INDENT> pass
Otherwise undifferentiated errors encountered when attempting to read a JCE transformation.
625990803346ee7daa3383c0
class FormComponentViewset(viewsets.ReadOnlyModelViewSet): <NEW_LINE> <INDENT> serializer_class = FormComponentSerializer <NEW_LINE> filterset_class = FormComponentFilter <NEW_LINE> queryset = ( FormComponent.objects .all() .order_by('area__area_num', 'component_num', 'component_letter') .select_related('area') )
PER Form Components Viewset
6259908071ff763f4b5e9269
class MAVLink_filt_rot_vel_message(MAVLink_message): <NEW_LINE> <INDENT> def __init__(self, rotVel): <NEW_LINE> <INDENT> MAVLink_message.__init__(self, MAVLINK_MSG_ID_FILT_ROT_VEL, 'FILT_ROT_VEL') <NEW_LINE> self._fieldnames = ['rotVel'] <NEW_LINE> self.rotVel = rotVel <NEW_LINE> <DEDENT> def pack(self, mav): <NEW_LINE> <INDENT> return MAVLink_message.pack(self, mav, 79, struct.pack('<3f', self.rotVel[0], self.rotVel[1], self.rotVel[2]))
filtered rotational velocity
6259908066673b3332c31ebd
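pack() above serialises the three-component vector as little-endian float32 values via the format string '<3f'. A standalone illustration with the standard struct module:

import struct

rot_vel = (0.1, -0.2, 0.3)
payload = struct.pack('<3f', *rot_vel)      # 12 bytes of packed float32
x, y, z = struct.unpack('<3f', payload)     # values round-trip at float32 precision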
class SetLampStrikes(TestMixins.SetUInt32Mixin, OptionalParameterTestFixture): <NEW_LINE> <INDENT> CATEGORY = TestCategory.POWER_LAMP_SETTINGS <NEW_LINE> PID = 'LAMP_STRIKES' <NEW_LINE> EXPECTED_FIELD = 'strikes' <NEW_LINE> PROVIDES = ['set_lamp_strikes_supported'] <NEW_LINE> REQUIRES = ['lamp_strikes'] <NEW_LINE> def OldValue(self): <NEW_LINE> <INDENT> return self.Property('lamp_strikes') <NEW_LINE> <DEDENT> def VerifyResult(self, response, fields): <NEW_LINE> <INDENT> if response.command_class == PidStore.RDM_SET: <NEW_LINE> <INDENT> self.SetProperty('set_lamp_strikes_supported', response.WasAcked())
Attempt to SET the lamp strikes.
62599080796e427e53850238
class RtreeContainer(Rtree): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if args: <NEW_LINE> <INDENT> if isinstance(args[0], rtree.index.string_types) or isinstance(args[0], bytes) or isinstance(args[0], rtree.index.ICustomStorage): <NEW_LINE> <INDENT> raise ValueError('%s supports only in-memory indexes' % self.__class__) <NEW_LINE> <DEDENT> <DEDENT> self._objects = {} <NEW_LINE> return super(RtreeContainer, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def insert(self, obj, coordinates): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> count = self._objects[id(obj)] + 1 <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> count = 1 <NEW_LINE> <DEDENT> self._objects[id(obj)] = (count, obj) <NEW_LINE> return super(RtreeContainer, self).insert(id(obj), coordinates, None) <NEW_LINE> <DEDENT> add = insert <NEW_LINE> def intersection(self, coordinates, bbox=False): <NEW_LINE> <INDENT> if bbox == False: <NEW_LINE> <INDENT> for id in super(RtreeContainer, self).intersection(coordinates, bbox): <NEW_LINE> <INDENT> yield self._objects[id][1] <NEW_LINE> <DEDENT> <DEDENT> elif bbox == True: <NEW_LINE> <INDENT> for value in super(RtreeContainer, self).intersection(coordinates, bbox): <NEW_LINE> <INDENT> value.object = self._objects[value.id][1] <NEW_LINE> value.id = None <NEW_LINE> yield value <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( "valid values for the bbox argument are True and False") <NEW_LINE> <DEDENT> <DEDENT> def nearest(self, coordinates, num_results = 1, bbox=False): <NEW_LINE> <INDENT> if bbox == False: <NEW_LINE> <INDENT> for id in super(RtreeContainer, self).nearest(coordinates, num_results, bbox): <NEW_LINE> <INDENT> yield self._objects[id][1] <NEW_LINE> <DEDENT> <DEDENT> elif bbox == True: <NEW_LINE> <INDENT> for value in super(RtreeContainer, self).nearest(coordinates, num_results, bbox): <NEW_LINE> <INDENT> value.object = self._objects[value.id][1] <NEW_LINE> value.id = None <NEW_LINE> yield value <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( "valid values for the bbox argument are True and False") <NEW_LINE> <DEDENT> <DEDENT> def delete(self, obj, coordinates): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> count = self._objects[id(obj)] - 1 <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise IndexError('object is not in the index') <NEW_LINE> <DEDENT> if count == 0: <NEW_LINE> <INDENT> del self._objects[obj] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._objects[id(obj)] = (count, obj) <NEW_LINE> <DEDENT> return super(RtreeContainer, self).delete(id, coordinates) <NEW_LINE> <DEDENT> def leaves(self): <NEW_LINE> <INDENT> return [(self._objects[id][1], [self._objects[child_id][1] for child_id in child_ids], bounds) for id, child_ids, bounds in super(RtreeContainer, self).leaves()]
An R-Tree, MVR-Tree, or TPR-Tree indexed container for python objects
625990805fdd1c0f98e5fa3d
class SMB2CreateEABuffer(Structure): <NEW_LINE> <INDENT> NAME = CreateContextName.SMB2_CREATE_EA_BUFFER <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.fields = OrderedDict([ ('next_entry_offset', IntField(size=4)), ('flags', FlagField( size=1, flag_type=EAFlags )), ('ea_name_length', IntField( size=1, default=lambda s: len(s['ea_name']) - 1 )), ('ea_value_length', IntField( size=2, default=lambda s: len(s['ea_value']) )), ('ea_name', BytesField( size=lambda s: s['ea_name_length'].get_value() + 1 )), ('ea_value', BytesField( size=lambda s: s['ea_value_length'].get_value() )), ('padding', BytesField( size=lambda s: self._padding_size(s), default=lambda s: b"\x00" * self._padding_size(s) )) ]) <NEW_LINE> super(SMB2CreateEABuffer, self).__init__() <NEW_LINE> <DEDENT> def _padding_size(self, structure): <NEW_LINE> <INDENT> if structure['next_entry_offset'].get_value() == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> data_length = len(structure['ea_name']) + len(structure['ea_value']) <NEW_LINE> mod = data_length % 4 <NEW_LINE> return mod if mod == 0 else 4 - mod <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def pack_multiple(messages): <NEW_LINE> <INDENT> data = b"" <NEW_LINE> msg_count = len(messages) <NEW_LINE> for i, msg in enumerate(messages): <NEW_LINE> <INDENT> if i == msg_count - 1: <NEW_LINE> <INDENT> msg['next_entry_offset'] = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg['next_entry_offset'] = 1 <NEW_LINE> msg['next_entry_offset'] = len(msg) <NEW_LINE> <DEDENT> data += msg.pack() <NEW_LINE> <DEDENT> return data
[MS-SMB2] 2.2.13.2.1 SMB2_CREATE_EA_BUFFER [MS-FSCC] 2.4.15 FileFullEaInformation Used to apply extended attributes as part of creating a new file.
625990804f88993c371f1281
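_padding_size() aligns each extended-attribute entry to a 4-byte boundary (only when it is not the last entry). The same alignment rule as a standalone helper, shown purely to illustrate the arithmetic:

def pad_to_4(length):
    mod = length % 4
    return 0 if mod == 0 else 4 - mod

assert [pad_to_4(n) for n in range(1, 6)] == [3, 2, 1, 0, 3]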
class MicrosoftAzureTestUrl(object): <NEW_LINE> <INDENT> def __init__( self, credentials, subscription_id, base_url=None): <NEW_LINE> <INDENT> self.config = MicrosoftAzureTestUrlConfiguration(credentials, subscription_id, base_url) <NEW_LINE> self._client = ServiceClient(self.config.credentials, self.config) <NEW_LINE> client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} <NEW_LINE> self.api_version = '2014-04-01-preview' <NEW_LINE> self._serialize = Serializer(client_models) <NEW_LINE> self._deserialize = Deserializer(client_models) <NEW_LINE> self.group = GroupOperations( self._client, self.config, self._serialize, self._deserialize)
Some cool documentation. :ivar config: Configuration for client. :vartype config: MicrosoftAzureTestUrlConfiguration :ivar group: Group operations :vartype group: .operations.GroupOperations :param credentials: Credentials needed for the client to connect to Azure. :type credentials: :mod:`A msrestazure Credentials object<msrestazure.azure_active_directory>` :param subscription_id: Subscription Id. :type subscription_id: str :param str base_url: Service URL
625990803346ee7daa3383c1
class Smmry(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.__stemmer = Stemmer() <NEW_LINE> self.__splitter = SentenceSplitter() <NEW_LINE> <DEDENT> def __create_counter(self, words): <NEW_LINE> <INDENT> stemmed = [] <NEW_LINE> for word in words: <NEW_LINE> <INDENT> stemmed.append(self.__stemmer.stem(word)) <NEW_LINE> <DEDENT> stemmed = filter(lambda x: len(x) > 2, stemmed) <NEW_LINE> counter = Counter(stemmed) <NEW_LINE> return counter <NEW_LINE> <DEDENT> def summarize(self, text, top_sentences=5, top_words=5): <NEW_LINE> <INDENT> text = text.replace(u'“', '"').replace( u'„', '"').replace(u'–', '-').replace('\n', '') <NEW_LINE> cleaned = re.sub(r'[?|$|.|!|-|,|"|:|;]', r'', text).lower() <NEW_LINE> words = cleaned.split(' ') <NEW_LINE> tokens_counter = self.__create_counter(words) <NEW_LINE> sentences = self.__splitter.split(text) <NEW_LINE> sentences_counter = Counter() <NEW_LINE> for idx, sentence in enumerate(sentences): <NEW_LINE> <INDENT> cleaned_sentence = re.sub( r'[?|$|.|!|-|,|"|:|;]', r'', sentence).lower() <NEW_LINE> splitted_sentence = cleaned_sentence.split(' ') <NEW_LINE> total = 0 <NEW_LINE> for word in splitted_sentence: <NEW_LINE> <INDENT> total += tokens_counter[self.__stemmer.stem(word)] <NEW_LINE> <DEDENT> sentences_counter[idx] = total <NEW_LINE> <DEDENT> top_items = sentences_counter.most_common(top_sentences) <NEW_LINE> in_order = sorted(top_items, key=lambda x: x[0]) <NEW_LINE> common_sentences = [] <NEW_LINE> for item in in_order: <NEW_LINE> <INDENT> common_sentences.append((sentences[item[0]], item[1])) <NEW_LINE> <DEDENT> common_words = tokens_counter.most_common(top_words) <NEW_LINE> return common_sentences, common_words
Notes: Class to summarize given text. Returns most popular sentences and words Attributes: __stemmer: Stemmer object to tokenize words in given text __splitter: SentenceSplitter object to split given text into sentences
625990805fc7496912d48fca
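summarize() ranks each sentence by the summed corpus frequency of its (stemmed) words and returns the top ones in document order. A stemmer-free sketch of the scoring idea, not the class's actual API:

import re
from collections import Counter

def score_sentences(text):
    def clean(s):
        return re.sub(r'[?.!,":;-]', '', s).lower().split()
    freq = Counter(w for w in clean(text) if len(w) > 2)
    sentences = re.split(r'(?<=[.!?])\s+', text)
    return sorted(((sum(freq[w] for w in clean(s)), s) for s in sentences),
                  reverse=True)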
class BaseModel(type): <NEW_LINE> <INDENT> def __new__(cls, name, bases, attrs): <NEW_LINE> <INDENT> if name == _METACLASS_ or bases[0].__name__ == _METACLASS_: <NEW_LINE> <INDENT> return super(BaseModel, cls).__new__(cls, name, bases, attrs) <NEW_LINE> <DEDENT> meta_options = {} <NEW_LINE> meta = attrs.pop('Meta', None) <NEW_LINE> if meta: <NEW_LINE> <INDENT> for k, v in meta.__dict__.items(): <NEW_LINE> <INDENT> if not k.startswith('_'): <NEW_LINE> <INDENT> meta_options[k] = v <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> cls = super(BaseModel, cls).__new__(cls, name, bases, attrs) <NEW_LINE> cls._meta = ModelOptions(cls, **meta_options) <NEW_LINE> if cls._meta.many_to_many: <NEW_LINE> <INDENT> links = [] <NEW_LINE> if cls._meta.order: <NEW_LINE> <INDENT> for attr in cls._meta.order: <NEW_LINE> <INDENT> if attr in attrs: <NEW_LINE> <INDENT> links.append((attr, attrs[attr])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for key, value in attrs.items(): <NEW_LINE> <INDENT> if not key.startswith('_'): <NEW_LINE> <INDENT> links.append((key, value)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> links[0][1].related_name = links[1][0] <NEW_LINE> links[0][1].add_to_model(cls, links[0][0]) <NEW_LINE> links[1][1].related_name = links[0][0] <NEW_LINE> links[1][1].add_to_model(cls, links[1][0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if cls._meta.primary_key: <NEW_LINE> <INDENT> cls.id = fields.PrimaryKeyField() <NEW_LINE> cls.id.add_to_model(cls, PrimaryKeyField.name) <NEW_LINE> <DEDENT> if cls._meta.order: <NEW_LINE> <INDENT> for attr in cls._meta.order: <NEW_LINE> <INDENT> if attr in attrs: <NEW_LINE> <INDENT> attrs[attr].add_to_model(cls, attr) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for key, value in attrs.items(): <NEW_LINE> <INDENT> if not key.startswith('_'): <NEW_LINE> <INDENT> value.add_to_model(cls, key) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return cls
Metaclass for all models.
62599080099cdd3c63676159
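The metaclass above pops the nested Meta class from the attribute dict and turns it into per-model options before wiring fields up. The core of that trick, reduced to a self-contained sketch:

class ModelMeta(type):
    def __new__(mcls, name, bases, attrs):
        meta = attrs.pop('Meta', None)
        cls = super().__new__(mcls, name, bases, attrs)
        cls._meta = ({k: v for k, v in vars(meta).items() if not k.startswith('_')}
                     if meta else {})
        return cls

class Person(metaclass=ModelMeta):
    class Meta:
        primary_key = True
        order = ('name',)

print(Person._meta)   # {'primary_key': True, 'order': ('name',)}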
class DecomposeAssembly(BaseAssembly): <NEW_LINE> <INDENT> def _set_mapping(self): <NEW_LINE> <INDENT> self.l2g = numpy.empty(self.nElems, dtype=numpy.ndarray) <NEW_LINE> cnt = self.nElems + 1 <NEW_LINE> for i, e in enumerate(self.elems): <NEW_LINE> <INDENT> self.l2g[i] = numpy.array( [i] + list(range(cnt, cnt+e.n_nodes-2)) + [i+1], dtype=numpy.int) <NEW_LINE> cnt += (e.n_nodes - 2) <NEW_LINE> <DEDENT> assert cnt == self.nModes, "cnt={0}, self.nModes={1}".format(cnt, self.nModes)
1D global assembly that decomposes boundary and interior nodes
6259908097e22403b383c9bf
class DLL: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.head = None <NEW_LINE> self.tail = None <NEW_LINE> <DEDENT> def insert(self, val): <NEW_LINE> <INDENT> node = Node(val, self.head, None) <NEW_LINE> if not self.head: <NEW_LINE> <INDENT> self.head = self.tail = node <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.head.prev = node <NEW_LINE> self.head = node <NEW_LINE> <DEDENT> <DEDENT> def remove(self, val): <NEW_LINE> <INDENT> curr = self.head <NEW_LINE> while curr: <NEW_LINE> <INDENT> if curr.value == val: <NEW_LINE> <INDENT> if curr.prev: <NEW_LINE> <INDENT> curr.prev.next = curr.next <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.head = curr.next <NEW_LINE> <DEDENT> if curr.next: <NEW_LINE> <INDENT> curr.next.prev = curr.prev <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.tail = curr.prev <NEW_LINE> <DEDENT> <DEDENT> curr = curr.next <NEW_LINE> <DEDENT> <DEDENT> def show(self): <NEW_LINE> <INDENT> dll = [] <NEW_LINE> curr = self.head <NEW_LINE> while curr: <NEW_LINE> <INDENT> dll.append(str(curr.value)) <NEW_LINE> curr = curr.next <NEW_LINE> <DEDENT> print(" --> ".join(dll))
Doubly linked list.
62599080aad79263cf430279
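A usage sketch for the list above (the Node class is not part of the row, so a minimal one matching the call Node(val, next, prev) is assumed):

class Node:
    def __init__(self, value, next=None, prev=None):
        self.value, self.next, self.prev = value, next, prev

dll = DLL()
for v in (3, 2, 1):
    dll.insert(v)        # insert() pushes at the head
dll.show()               # 1 --> 2 --> 3
dll.remove(2)
dll.show()               # 1 --> 3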
class TaskTemplateRequirementsSerializer(serializers.Serializer): <NEW_LINE> <INDENT> trigger_conditions = serializers.SerializerMethodField() <NEW_LINE> end_conditions = serializers.SerializerMethodField() <NEW_LINE> task_templates = serializers.SerializerMethodField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> fields = ['trigger_conditions', 'end_conditions', 'task_templates'] <NEW_LINE> <DEDENT> def get_task_templates(self, obj): <NEW_LINE> <INDENT> templates = Task.objects.filter(is_template=True) <NEW_LINE> serializer = TemplateDetailsSerializer(templates, many=True) <NEW_LINE> return serializer.data <NEW_LINE> <DEDENT> def get_trigger_conditions(self, obj): <NEW_LINE> <INDENT> trigger_fields = FieldGroup.objects.get(name=TRIGGER_CONDITIONS).field_set.all() <NEW_LINE> serializer = FieldRequirementsSerializer(trigger_fields, many=True) <NEW_LINE> return serializer.data <NEW_LINE> <DEDENT> def get_end_conditions(self, obj): <NEW_LINE> <INDENT> end_fields = FieldGroup.objects.get(name=END_CONDITIONS).field_set.all() <NEW_LINE> return FieldRequirementsSerializer(end_fields, many=True).data
Task template requirements serializer.
625990807c178a314d78e94a
class SteinerHandler(Handler): <NEW_LINE> <INDENT> def __init__(self, city: str, path: str): <NEW_LINE> <INDENT> super().__init__(path) <NEW_LINE> self.radar = CAPPI(city) <NEW_LINE> <DEDENT> def save_steiner(self): <NEW_LINE> <INDENT> old_name = self.radar.file_name + "" <NEW_LINE> new_name = old_name.replace("raw.gz", "npy.gz") <NEW_LINE> new_name = new_name.replace("Radar", "Steiner") <NEW_LINE> np.savetxt(new_name, self.radar.steiner_mask) <NEW_LINE> <DEDENT> def populate_dirs(self): <NEW_LINE> <INDENT> for path, _, _ in os.walk(self.path): <NEW_LINE> <INDENT> os.makedirs(path.replace('Radar', 'Steiner'), exist_ok=True) <NEW_LINE> <DEDENT> <DEDENT> def process(self): <NEW_LINE> <INDENT> self.populate_dirs() <NEW_LINE> self.list_files() <NEW_LINE> present = 0 <NEW_LINE> final = len(self.files) <NEW_LINE> for file in self.files: <NEW_LINE> <INDENT> self.radar.file_name = file <NEW_LINE> self.radar.open() <NEW_LINE> self.radar.steiner_filter() <NEW_LINE> self.save_steiner() <NEW_LINE> present += 1 <NEW_LINE> percentage = (present * 100) // final <NEW_LINE> sys.stdout.write("\r%d%% - %s" % (percentage, file)) <NEW_LINE> sys.stdout.flush()
The SteinerHandler class is a subclass of Handler that deals with the Steiner method. While it is almost the same, it implements the iterator differently, and also saves the output as a file.
6259908060cbc95b06365acc
class SDLError(Exception): <NEW_LINE> <INDENT> pass
Generic SDL error.
625990805fcc89381b266ebc
class AR1Image(MathsCommand): <NEW_LINE> <INDENT> input_spec = AR1ImageInput <NEW_LINE> _suffix = "_ar1"
Use fslmaths to generate an AR1 coefficient image across a given dimension. (Should use -odt float and probably demean first)
6259908023849d37ff852b79
class TestApi(TestPyZWave): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(self): <NEW_LINE> <INDENT> super(TestApi, self).setUpClass() <NEW_LINE> self.options = ZWaveOption(device=self.device, user_path=self.userpath) <NEW_LINE> self.options.set_log_file("OZW_Log.log") <NEW_LINE> self.options.set_append_log_file(False) <NEW_LINE> self.options.set_console_output(self.ozwout) <NEW_LINE> self.options.set_save_log_level(self.ozwlog) <NEW_LINE> self.options.set_logging(True) <NEW_LINE> self.options.lock() <NEW_LINE> self.network = ZWaveNetwork(self.options) <NEW_LINE> self.node_result = None <NEW_LINE> self.ctrl_command_result = None <NEW_LINE> self.ctrl_command_signal = None <NEW_LINE> time.sleep(1.0) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(self): <NEW_LINE> <INDENT> self.network.stop() <NEW_LINE> time.sleep(2.0) <NEW_LINE> self.network.destroy() <NEW_LINE> time.sleep(1.0) <NEW_LINE> super(TestApi, self).tearDownClass() <NEW_LINE> self.network = None <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> dispatcher.connect(self.node_update, ZWaveNetwork.SIGNAL_NODE) <NEW_LINE> self.node_result = None <NEW_LINE> self.ctrl_command_result = None <NEW_LINE> self.ctrl_command_signal = None <NEW_LINE> self.wait_for_network_state(self.network.STATE_AWAKED, 1) <NEW_LINE> time.sleep(2.0) <NEW_LINE> self.wait_for_queue() <NEW_LINE> self.active_nodes = {} <NEW_LINE> for node in self.network.nodes: <NEW_LINE> <INDENT> if self.network.nodes[node].is_info_received: <NEW_LINE> <INDENT> self.active_nodes[node] = self.network.nodes[node] <NEW_LINE> <DEDENT> <DEDENT> print('active nodes : %s' % self.active_nodes) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> dispatcher.disconnect(self.node_update, ZWaveNetwork.SIGNAL_NODE) <NEW_LINE> <DEDENT> def wait_for_queue(self): <NEW_LINE> <INDENT> for i in range(0,60): <NEW_LINE> <INDENT> if self.network.controller.send_queue_count <= 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> time.sleep(0.5) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def wait_for_network_state(self, state, multiply=1): <NEW_LINE> <INDENT> for i in range(0,SLEEP*multiply): <NEW_LINE> <INDENT> if self.network.state>=state: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> time.sleep(1.0) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def ctrl_message(self, network, controller, node, node_id, state_int, state, state_full, error_int, error, error_full,): <NEW_LINE> <INDENT> self.ctrl_state_result = state <NEW_LINE> self.ctrl_command_signal = { 'network':network, 'controller':controller, 'node':node, 'node_id':node_id, 'state_int':state_int, 'state':state, 'state_full':state_full, 'error_int':error_int, 'error':error, 'error_full':error_full, } <NEW_LINE> <DEDENT> def ctrl_waiting(self, network, controller, state_int, state, state_full): <NEW_LINE> <INDENT> self.ctrl_state_result = state <NEW_LINE> <DEDENT> def node_update(self, network, node): <NEW_LINE> <INDENT> self.node_result = node
Parent test class for api
6259908097e22403b383c9c0
class Benktander(MethodBase): <NEW_LINE> <INDENT> def __init__(self, apriori=1.0, n_iters=1): <NEW_LINE> <INDENT> self.apriori = apriori <NEW_LINE> self.n_iters = n_iters <NEW_LINE> <DEDENT> def fit(self, X, y=None, sample_weight=None): <NEW_LINE> <INDENT> super().fit(X, y, sample_weight) <NEW_LINE> self.sample_weight_ = sample_weight <NEW_LINE> latest = self.X_.latest_diagonal.triangle <NEW_LINE> apriori = sample_weight.triangle * self.apriori <NEW_LINE> obj = copy.deepcopy(self.X_) <NEW_LINE> obj.triangle = self.X_.cdf_.triangle[..., :obj.shape[-1]]*(obj.triangle*0+1) <NEW_LINE> cdf = obj.latest_diagonal.triangle <NEW_LINE> cdf = np.expand_dims(1-1/cdf, 0) <NEW_LINE> exponents = np.arange(self.n_iters+1) <NEW_LINE> exponents = np.reshape(exponents, tuple([len(exponents)]+[1]*4)) <NEW_LINE> cdf = cdf**exponents <NEW_LINE> obj.triangle = np.sum(cdf[:-1, ...], 0)*latest+cdf[-1, ...]*apriori <NEW_LINE> obj.ddims = ['Ultimate'] <NEW_LINE> obj.valuation = pd.DatetimeIndex([pd.to_datetime('2262-04-11')]*obj.shape[-2]) <NEW_LINE> self.ultimate_ = obj <NEW_LINE> self.full_triangle_ = self._get_full_triangle_() <NEW_LINE> return self
The Benktander (or iterated Bornhuetter-Ferguson) IBNR model Parameters ---------- apriori : float, optional (default=1.0) Multiplier for the sample_weight used in the Benktander method. If sample_weight is already an apriori measure of ultimate, then use 1.0 n_iters : int, optional (default=1) Number of iterations of the Benktander method to apply. n_iters=1 reproduces the Bornhuetter-Ferguson method, while larger values converge toward the chain-ladder ultimate. Attributes ---------- ultimate_ : Triangle The ultimate losses per the method ibnr_ : Triangle The IBNR per the method References ---------- .. [2] Benktander, G. (1976) An Approach to Credibility in Calculating IBNR for Casualty Excess Reinsurance. In The Actuarial Review, April 1976, p.7
625990805fc7496912d48fcb
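fit() above implements the Benktander recursion in triangle form. In scalar terms, with q = 1 - 1/CDF, the ultimate after n_iters iterations is latest * (1 + q + ... + q**(n_iters-1)) + apriori_ultimate * q**n_iters; n_iters=1 gives Bornhuetter-Ferguson and large n_iters approaches the chain-ladder ultimate. A scalar sketch of that formula, not the library's API:

def benktander_ultimate(latest, apriori_ultimate, cdf, n_iters=1):
    q = 1.0 - 1.0 / cdf
    return latest * sum(q ** k for k in range(n_iters)) + apriori_ultimate * q ** n_iters

print(benktander_ultimate(latest=600.0, apriori_ultimate=1000.0, cdf=2.0, n_iters=1))  # 1100.0 (= BF)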
class Pathways(DynamicDocument): <NEW_LINE> <INDENT> pathwayname = StringField(max_length=20, unique=True) <NEW_LINE> hyperlink = StringField(max_length=100) <NEW_LINE> source1 = StringField(max_length=20) <NEW_LINE> source2 = StringField(max_length=20) <NEW_LINE> source3 = StringField(max_length=20) <NEW_LINE> source4 = StringField(max_length=20) <NEW_LINE> category = StringField(max_length=20) <NEW_LINE> description = StringField(max_length=1024)
Basic pathway information.
62599080099cdd3c6367615a
class NewMailEvent(TimestampEvent): <NEW_LINE> <INDENT> ELEMENT_NAME = "NewMailEvent"
MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/newmailevent
625990808a349b6b43687d1e
class ParseLimitsTest(BaseLimitTestSuite): <NEW_LINE> <INDENT> def test_invalid(self): <NEW_LINE> <INDENT> self.assertRaises(ValueError, limits.Limiter.parse_limits, ';;;;;') <NEW_LINE> <DEDENT> def test_bad_rule(self): <NEW_LINE> <INDENT> self.assertRaises(ValueError, limits.Limiter.parse_limits, 'GET, *, .*, 20, minute') <NEW_LINE> <DEDENT> def test_missing_arg(self): <NEW_LINE> <INDENT> self.assertRaises(ValueError, limits.Limiter.parse_limits, '(GET, *, .*, 20)') <NEW_LINE> <DEDENT> def test_bad_value(self): <NEW_LINE> <INDENT> self.assertRaises(ValueError, limits.Limiter.parse_limits, '(GET, *, .*, foo, minute)') <NEW_LINE> <DEDENT> def test_bad_unit(self): <NEW_LINE> <INDENT> self.assertRaises(ValueError, limits.Limiter.parse_limits, '(GET, *, .*, 20, lightyears)') <NEW_LINE> <DEDENT> def test_multiple_rules(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> l = limits.Limiter.parse_limits('(get, *, .*, 20, minute);' '(PUT, /foo*, /foo.*, 10, hour);' '(POST, /bar*, /bar.*, 5, second);' '(Say, /derp*, /derp.*, 1, day)') <NEW_LINE> <DEDENT> except ValueError as e: <NEW_LINE> <INDENT> assert False, str(e) <NEW_LINE> <DEDENT> self.assertEqual(len(l), 4) <NEW_LINE> expected = ['GET', 'PUT', 'POST', 'SAY'] <NEW_LINE> self.assertEqual([t.verb for t in l], expected) <NEW_LINE> expected = ['*', '/foo*', '/bar*', '/derp*'] <NEW_LINE> self.assertEqual([t.uri for t in l], expected) <NEW_LINE> expected = ['.*', '/foo.*', '/bar.*', '/derp.*'] <NEW_LINE> self.assertEqual([t.regex for t in l], expected) <NEW_LINE> expected = [20, 10, 5, 1] <NEW_LINE> self.assertEqual([t.value for t in l], expected) <NEW_LINE> expected = [utils.TIME_UNITS['MINUTE'], utils.TIME_UNITS['HOUR'], utils.TIME_UNITS['SECOND'], utils.TIME_UNITS['DAY']] <NEW_LINE> self.assertEqual([t.unit for t in l], expected)
Tests for the default limits parser in the in-memory `limits.Limiter` class.
62599080442bda511e95dab7
class DefaultInitApi(InitApi): <NEW_LINE> <INDENT> def __init__(self, api_url: str, connect_retries: int, backoff_factor: float, context: EndpointContext): <NEW_LINE> <INDENT> session = requests.Session() <NEW_LINE> retry = Retry(connect=connect_retries, backoff_factor=backoff_factor) <NEW_LINE> adapter = HTTPAdapter(max_retries=retry) <NEW_LINE> session.mount(api_url, adapter) <NEW_LINE> self.api_url = api_url <NEW_LINE> self.context = context <NEW_LINE> self.state_session = session <NEW_LINE> <DEDENT> def state(self): <NEW_LINE> <INDENT> response = self.state_session.request( method='get', url='{}/init'.format(self.api_url), verify=self.context.verify ) <NEW_LINE> if response.ok: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return response.json() <NEW_LINE> <DEDENT> except JSONDecodeError: <NEW_LINE> <INDENT> logging.error( 'Response was [{}] but content is not JSON: [{}]'.format( response.status_code, response.content ) ) <NEW_LINE> raise click.Abort() from None <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> logging.error('Request failed: [{} - {}]'.format(response.status_code, response.reason)) <NEW_LINE> if response.text: <NEW_LINE> <INDENT> logging.error(response.text) <NEW_LINE> <DEDENT> raise click.Abort() <NEW_LINE> <DEDENT> <DEDENT> def provide_credentials(self, username, password): <NEW_LINE> <INDENT> response = requests.request( method='post', url='{}/init'.format(self.api_url), data={'username': username, 'password': password}, verify=self.context.verify ) <NEW_LINE> if response.ok: <NEW_LINE> <INDENT> return {'successful': True} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.error('Request failed: [{} - {}]'.format(response.status_code, response.reason)) <NEW_LINE> if response.text: <NEW_LINE> <INDENT> logging.error(response.text) <NEW_LINE> <DEDENT> raise click.Abort()
Default, HTTP/JSON based :class:`InitApi` implementation.
6259908097e22403b383c9c1
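__init__ above mounts an HTTPAdapter with a urllib3 Retry policy so that connection failures are retried with exponential backoff. The same wiring with the public requests/urllib3 API, standalone (the URL is a placeholder):

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

session = requests.Session()
retry = Retry(connect=3, backoff_factor=0.5)
session.mount('https://example.invalid', HTTPAdapter(max_retries=retry))
# session.get('https://example.invalid/init', timeout=5)  # retried on connection errors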
class Species: <NEW_LINE> <INDENT> def __init__(self, genome: np.ndarray, fitness: float): <NEW_LINE> <INDENT> self.fitness = fitness <NEW_LINE> self.genome = genome <NEW_LINE> self.len = len(self.genome) <NEW_LINE> six_hex_digits = hex(int(self.fitness * 16 ** 8))[2:8] <NEW_LINE> self._name = 'V' + six_hex_digits <NEW_LINE> self.color = '#' + six_hex_digits <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return np.array_equal(self.genome, other.genome) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(str(self.genome)) <NEW_LINE> <DEDENT> def mutated_genome(self): <NEW_LINE> <INDENT> loc = random.randrange(0, self.len) <NEW_LINE> new_genome = self.genome.copy() <NEW_LINE> new_genome[loc] = not new_genome[loc] <NEW_LINE> return new_genome
A vehicle is a set of Replicants, fighting for survival together. They are represented in a numpy vector. 0 -> gene not contained. 1 -> gene contained.
62599080aad79263cf43027c
class SimpleCOAtomAbund(ChemicalAbund): <NEW_LINE> <INDENT> def __init__(self, *sizes): <NEW_LINE> <INDENT> atom_ids = ['C', 'O', 'Si'] <NEW_LINE> masses = [ 12., 16., 28. ] <NEW_LINE> super(SimpleCOAtomAbund, self).__init__(atom_ids, masses, *sizes) <NEW_LINE> <DEDENT> def set_solar_abundances(self, muH=1.28): <NEW_LINE> <INDENT> self._data[:] = np.outer(np.array([12*2.7e-4, 16*4.9e-4, 28*3.2e-5]), np.ones(self.size)) / muH
Class to hold the raw atomic abundances of C/O/Si for the CO chemistry
625990804c3428357761bd7c
class CalculatorTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_add(self): <NEW_LINE> <INDENT> result = calculator.add(10, 20) <NEW_LINE> self.assertEqual(result, 30) <NEW_LINE> <DEDENT> def test_subtract(self): <NEW_LINE> <INDENT> result = calculator.subtract(10, 20) <NEW_LINE> self.assertEqual(result, -10) <NEW_LINE> <DEDENT> def test_multiply(self): <NEW_LINE> <INDENT> result = calculator.multiply(10, 20) <NEW_LINE> self.assertEqual(result, 200) <NEW_LINE> <DEDENT> def test_divide(self): <NEW_LINE> <INDENT> result = calculator.divide(20, 10) <NEW_LINE> self.assertEqual(result, 2)
Testcase for calculator functions
6259908066673b3332c31ec1
class BinaryManager(object): <NEW_LINE> <INDENT> def __init__(self, configs): <NEW_LINE> <INDENT> self._dependency_manager = dependency_manager.DependencyManager(configs) <NEW_LINE> <DEDENT> def FetchPath(self, binary_name, arch, os_name, os_version=None): <NEW_LINE> <INDENT> platform = '%s_%s' % (os_name, arch) <NEW_LINE> if os_version: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> versioned_platform = '%s_%s_%s' % (os_name, os_version, arch) <NEW_LINE> return self._dependency_manager.FetchPath( binary_name, versioned_platform) <NEW_LINE> <DEDENT> except dependency_manager.NoPathFoundError: <NEW_LINE> <INDENT> logging.warning( 'Cannot find path for %s on platform %s. Falling back to %s.', binary_name, versioned_platform, platform) <NEW_LINE> <DEDENT> <DEDENT> return self._dependency_manager.FetchPath(binary_name, platform) <NEW_LINE> <DEDENT> def LocalPath(self, binary_name, arch, os_name, os_version=None): <NEW_LINE> <INDENT> platform = '%s_%s' % (os_name, arch) <NEW_LINE> if os_version: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> versioned_platform = '%s_%s_%s' % (os_name, os_version, arch) <NEW_LINE> return self._dependency_manager.LocalPath( binary_name, versioned_platform) <NEW_LINE> <DEDENT> except dependency_manager.NoPathFoundError: <NEW_LINE> <INDENT> logging.warning( 'Cannot find local path for %s on platform %s. Falling back to %s.', binary_name, versioned_platform, platform) <NEW_LINE> <DEDENT> <DEDENT> return self._dependency_manager.LocalPath(binary_name, platform)
This class is effectively a subclass of dependency_manager, but uses a different number of arguments for FetchPath and LocalPath.
62599080796e427e5385023c
class LazyLoaderReloadingTest(TestCase): <NEW_LINE> <INDENT> module_name = 'loadertest' <NEW_LINE> module_key = 'loadertest.test' <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.opts = _config = minion_config(None) <NEW_LINE> self.tmp_dir = tempfile.mkdtemp(dir=tests.integration.TMP) <NEW_LINE> self.count = 0 <NEW_LINE> dirs = _module_dirs(self.opts, 'modules', 'module') <NEW_LINE> dirs.append(self.tmp_dir) <NEW_LINE> self.loader = LazyLoader(dirs, self.opts, tag='module') <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> shutil.rmtree(self.tmp_dir) <NEW_LINE> <DEDENT> def update_module(self): <NEW_LINE> <INDENT> self.count += 1 <NEW_LINE> with open(self.module_path, 'wb') as fh: <NEW_LINE> <INDENT> fh.write(module_template.format(count=self.count)) <NEW_LINE> fh.flush() <NEW_LINE> os.fsync(fh.fileno()) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> os.unlink(self.module_path + 'c') <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def rm_module(self): <NEW_LINE> <INDENT> os.unlink(self.module_path) <NEW_LINE> os.unlink(self.module_path + 'c') <NEW_LINE> <DEDENT> @property <NEW_LINE> def module_path(self): <NEW_LINE> <INDENT> return os.path.join(self.tmp_dir, '{0}.py'.format(self.module_name)) <NEW_LINE> <DEDENT> def test_alias(self): <NEW_LINE> <INDENT> self.assertNotIn(self.module_key, self.loader) <NEW_LINE> self.update_module() <NEW_LINE> self.assertNotIn('{0}.test_alias'.format(self.module_name), self.loader) <NEW_LINE> self.assertTrue(inspect.isfunction(self.loader['{0}.working_alias'.format(self.module_name)])) <NEW_LINE> <DEDENT> def test_clear(self): <NEW_LINE> <INDENT> self.assertTrue(inspect.isfunction(self.loader['test.ping'])) <NEW_LINE> self.update_module() <NEW_LINE> self.loader.clear() <NEW_LINE> self.assertTrue(inspect.isfunction(self.loader[self.module_key])) <NEW_LINE> for k, v in six.iteritems(self.loader._dict): <NEW_LINE> <INDENT> self.assertTrue(k.startswith(self.module_name)) <NEW_LINE> <DEDENT> <DEDENT> def test_load(self): <NEW_LINE> <INDENT> self.assertNotIn(self.module_key, self.loader) <NEW_LINE> self.update_module() <NEW_LINE> self.assertTrue(inspect.isfunction(self.loader[self.module_key])) <NEW_LINE> <DEDENT> def test__load__(self): <NEW_LINE> <INDENT> self.update_module() <NEW_LINE> self.assertNotIn(self.module_key + '2', self.loader) <NEW_LINE> <DEDENT> def test__load__and_depends(self): <NEW_LINE> <INDENT> self.update_module() <NEW_LINE> self.assertNotIn(self.module_key + '3', self.loader) <NEW_LINE> self.assertNotIn(self.module_key + '4', self.loader) <NEW_LINE> <DEDENT> def test_reload(self): <NEW_LINE> <INDENT> self.assertNotIn(self.module_key, self.loader) <NEW_LINE> for x in range(1, 3): <NEW_LINE> <INDENT> self.update_module() <NEW_LINE> self.loader.clear() <NEW_LINE> self.assertEqual(self.loader[self.module_key](), self.count) <NEW_LINE> <DEDENT> self.rm_module() <NEW_LINE> self.assertEqual(self.loader[self.module_key](), self.count) <NEW_LINE> self.loader.clear() <NEW_LINE> self.assertNotIn(self.module_key, self.loader)
Test the loader of salt with changing modules
625990805fcc89381b266ebd
class AumSortedConcatenate(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ans = {} <NEW_LINE> <DEDENT> def step(self, ndx, author, sort, link): <NEW_LINE> <INDENT> if author is not None: <NEW_LINE> <INDENT> self.ans[ndx] = ':::'.join((author, sort, link)) <NEW_LINE> <DEDENT> <DEDENT> def finalize(self): <NEW_LINE> <INDENT> keys = list(self.ans.keys()) <NEW_LINE> l = len(keys) <NEW_LINE> if l == 0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if l == 1: <NEW_LINE> <INDENT> return self.ans[keys[0]] <NEW_LINE> <DEDENT> return ':#:'.join([self.ans[v] for v in sorted(keys)])
String concatenation aggregator for the author sort map
625990805fdd1c0f98e5fa41
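A class exposing step()/finalize() like this is an SQLite custom aggregate. Assuming the class above is importable, registration with the standard sqlite3 module would look roughly like this (the SQL below is illustrative only; the real table and column names are not given in the row):

import sqlite3

con = sqlite3.connect(':memory:')
con.create_aggregate('sortconcat', 4, AumSortedConcatenate)
# ... then in SQL: SELECT sortconcat(id, author, sort, link) FROM some_table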
class ClassificationGBDTModelAnalysis(GBDTModelAnalysis, ClassificationTreeModelAnalysis): <NEW_LINE> <INDENT> pass
Class for gradient boosting classification model analysis
625990801f5feb6acb1646bb
class HourOfDayField(metaclass=IntFieldMeta, seg_y_type='int16', min_value=0, max_value=24): <NEW_LINE> <INDENT> pass
Hour of day. (24 hour clock).
6259908092d797404e3898bd
class BotBeepCMD(Command): <NEW_LINE> <INDENT> key = "botbeep" <NEW_LINE> def parse(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def func(self): <NEW_LINE> <INDENT> self.caller.location.msg_contents("ROBOT SAYS BEEP.")
Test command that makes the constructor bot make a noise. Should only be accessible in the room where the ConstructorBot is currently residing.
625990803d592f4c4edbc8c0
class ConnectionError(Error): <NEW_LINE> <INDENT> pass
Base class for any socket-level connection issues
625990804f88993c371f1283
class TrustStore: <NEW_LINE> <INDENT> def __init__( self, platform: PlatformEnum, version: Optional[str], url: str, date_fetched: datetime.date, trusted_certificates: Set[RootCertificateRecord], blocked_certificates: Set[RootCertificateRecord]=None, ) -> None: <NEW_LINE> <INDENT> if blocked_certificates is None: <NEW_LINE> <INDENT> blocked_certificates = set() <NEW_LINE> <DEDENT> self.platform = platform <NEW_LINE> if version is not None: <NEW_LINE> <INDENT> version = version.strip() <NEW_LINE> <DEDENT> self.version = version <NEW_LINE> self.url = url.strip() <NEW_LINE> self.date_fetched = date_fetched <NEW_LINE> self.trusted_certificates = trusted_certificates <NEW_LINE> self.blocked_certificates = blocked_certificates <NEW_LINE> <DEDENT> def __eq__(self, other: object) -> bool: <NEW_LINE> <INDENT> if not isinstance(other, TrustStore): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> self_dict = self.__dict__.copy() <NEW_LINE> self_dict.pop('date_fetched') <NEW_LINE> other_dict = other.__dict__.copy() <NEW_LINE> other_dict.pop('date_fetched') <NEW_LINE> return other_dict == self_dict <NEW_LINE> <DEDENT> @property <NEW_LINE> def trusted_certificates_count(self) -> int: <NEW_LINE> <INDENT> return len(self.trusted_certificates) <NEW_LINE> <DEDENT> @property <NEW_LINE> def blocked_certificates_count(self) -> int: <NEW_LINE> <INDENT> return len(self.blocked_certificates) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_default_for_platform(cls, platform: PlatformEnum) -> 'TrustStore': <NEW_LINE> <INDENT> module_path = Path(os.path.abspath(os.path.dirname(__file__))) <NEW_LINE> store_yaml_path = module_path / '..' / 'trust_stores' / f'{platform.name.lower()}.yaml' <NEW_LINE> return cls.from_yaml(store_yaml_path) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_yaml(cls, yaml_file_path: Path) -> 'TrustStore': <NEW_LINE> <INDENT> with open(yaml_file_path, mode='r') as store_file: <NEW_LINE> <INDENT> store_dict = yaml.load(store_file) <NEW_LINE> <DEDENT> trusted_certificates = [RootCertificateRecord(entry['subject_name'], unhexlify(entry['fingerprint'])) for entry in store_dict['trusted_certificates']] <NEW_LINE> blocked_certificates = [RootCertificateRecord(entry['subject_name'], unhexlify(entry['fingerprint'])) for entry in store_dict['blocked_certificates']] <NEW_LINE> return cls( PlatformEnum[store_dict['platform']], store_dict['version'], store_dict['url'], store_dict['date_fetched'], set(trusted_certificates), set(blocked_certificates) ) <NEW_LINE> <DEDENT> def export_trusted_certificates_as_pem(self, certs_repository: RootCertificatesRepository) -> str: <NEW_LINE> <INDENT> all_certs_as_pem = [] <NEW_LINE> for cert_record in self.trusted_certificates: <NEW_LINE> <INDENT> cert = certs_repository.lookup_certificate_with_fingerprint(cert_record.fingerprint) <NEW_LINE> all_certs_as_pem.append(cert.public_bytes(Encoding.PEM).decode('ascii')) <NEW_LINE> <DEDENT> return '\n'.join(all_certs_as_pem)
The set of root certificates that compose the trust store of one platform at a specific time.
625990807b180e01f3e49dc6
class Namespace(pulumi.CustomResource): <NEW_LINE> <INDENT> def __init__(__self__, __name__, __opts__=None, location=None, name=None, resource_group_name=None, sku=None, tags=None): <NEW_LINE> <INDENT> if not __name__: <NEW_LINE> <INDENT> raise TypeError('Missing resource name argument (for URN creation)') <NEW_LINE> <DEDENT> if not isinstance(__name__, str): <NEW_LINE> <INDENT> raise TypeError('Expected resource name to be a string') <NEW_LINE> <DEDENT> if __opts__ and not isinstance(__opts__, pulumi.ResourceOptions): <NEW_LINE> <INDENT> raise TypeError('Expected resource options to be a ResourceOptions instance') <NEW_LINE> <DEDENT> __props__ = dict() <NEW_LINE> if not location: <NEW_LINE> <INDENT> raise TypeError('Missing required property location') <NEW_LINE> <DEDENT> __props__['location'] = location <NEW_LINE> __props__['name'] = name <NEW_LINE> if not resource_group_name: <NEW_LINE> <INDENT> raise TypeError('Missing required property resource_group_name') <NEW_LINE> <DEDENT> __props__['resource_group_name'] = resource_group_name <NEW_LINE> if not sku: <NEW_LINE> <INDENT> raise TypeError('Missing required property sku') <NEW_LINE> <DEDENT> __props__['sku'] = sku <NEW_LINE> __props__['tags'] = tags <NEW_LINE> __props__['metric_id'] = None <NEW_LINE> __props__['primary_connection_string'] = None <NEW_LINE> __props__['primary_key'] = None <NEW_LINE> __props__['secondary_connection_string'] = None <NEW_LINE> __props__['secondary_key'] = None <NEW_LINE> super(Namespace, __self__).__init__( 'azure:relay/namespace:Namespace', __name__, __props__, __opts__) <NEW_LINE> <DEDENT> def translate_output_property(self, prop): <NEW_LINE> <INDENT> return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop <NEW_LINE> <DEDENT> def translate_input_property(self, prop): <NEW_LINE> <INDENT> return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
Manages an Azure Relay Namespace.
6259908097e22403b383c9c2
@dataclass <NEW_LINE> class JoplinNotebook(JoplinNodeWithTitle): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def type(cls) -> NodeType: <NEW_LINE> <INDENT> return NodeType.NOTEBOOK
Dataclass for Joplin "notebook" node types.
62599080bf627c535bcb2f94
class Solution: <NEW_LINE> <INDENT> def rotate(self, matrix: List[List[int]]) -> None: <NEW_LINE> <INDENT> self.rotate_side(matrix, 0) <NEW_LINE> <DEDENT> def rotate_side(self, matrix: List[List[int]], start): <NEW_LINE> <INDENT> n = len(matrix) <NEW_LINE> end = n - 1 - start <NEW_LINE> if start >= end: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for i in range(end - start): <NEW_LINE> <INDENT> t = matrix[start][start + i] <NEW_LINE> matrix[start][start + i] = matrix[end - i][start] <NEW_LINE> matrix[end - i][start] = matrix[end][end - i] <NEW_LINE> matrix[end][end - i] = matrix[start + i][end] <NEW_LINE> matrix[start + i][end] = t <NEW_LINE> <DEDENT> self.rotate_side(matrix, start + 1)
20190815
625990807047854f46340e78
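A usage sketch for the in-place 90-degree clockwise rotation above (assuming the Solution class is available):

m = [[1, 2, 3],
     [4, 5, 6],
     [7, 8, 9]]
Solution().rotate(m)
print(m)   # [[7, 4, 1], [8, 5, 2], [9, 6, 3]]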
class Map: <NEW_LINE> <INDENT> def __init__ (self, gridXDimension, gridYDimension, townGridDimension, cdfHouseClasses, ukMap, ukClassBias, densityModifier, lha1, lha2, lha3, lha4): <NEW_LINE> <INDENT> self.towns = [] <NEW_LINE> self.allHouses = [] <NEW_LINE> self.occupiedHouses = [] <NEW_LINE> ukMap = np.array(ukMap) <NEW_LINE> ukMap.resize(int(gridYDimension), int(gridXDimension)) <NEW_LINE> ukClassBias = np.array(ukClassBias) <NEW_LINE> ukClassBias.resize(int(gridYDimension), int(gridXDimension)) <NEW_LINE> lha1 = np.array(lha1) <NEW_LINE> lha1.resize(int(gridYDimension), int(gridXDimension)) <NEW_LINE> lha2 = np.array(lha1) <NEW_LINE> lha2.resize(int(gridYDimension), int(gridXDimension)) <NEW_LINE> lha3 = np.array(lha1) <NEW_LINE> lha3.resize(int(gridYDimension), int(gridXDimension)) <NEW_LINE> lha4 = np.array(lha1) <NEW_LINE> lha4.resize(int(gridYDimension), int(gridXDimension)) <NEW_LINE> for y in range(int(gridYDimension)): <NEW_LINE> <INDENT> for x in range(int(gridXDimension)): <NEW_LINE> <INDENT> newTown = Town(townGridDimension, x, y, cdfHouseClasses, ukMap[y][x], ukClassBias[y][x], densityModifier, lha1[y][x], lha2[y][x], lha3[y][x], lha4[y][x]) <NEW_LINE> self.towns.append(newTown) <NEW_LINE> <DEDENT> <DEDENT> for t in self.towns: <NEW_LINE> <INDENT> for h in t.houses: <NEW_LINE> <INDENT> self.allHouses.append(h)
Contains a collection of towns to make up the whole country being simulated.
62599080442bda511e95dab8
class ExactInference(InferenceModule): <NEW_LINE> <INDENT> def initializeUniformly(self, gameState): <NEW_LINE> <INDENT> self.beliefs = util.Counter() <NEW_LINE> for p in self.legalPositions: self.beliefs[p] = 1.0 <NEW_LINE> self.beliefs.normalize() <NEW_LINE> <DEDENT> def observe(self, observation, gameState): <NEW_LINE> <INDENT> noisyDistance = observation <NEW_LINE> emissionModel = busters.getObservationDistribution(noisyDistance) <NEW_LINE> pacmanPosition = gameState.getPacmanPosition() <NEW_LINE> "*** YOUR CODE HERE ***" <NEW_LINE> allPossible = util.Counter() <NEW_LINE> if noisyDistance is None: <NEW_LINE> <INDENT> allPossible[self.getJailPosition()] = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> legal_positions=self.legalPositions <NEW_LINE> for position in legal_positions: <NEW_LINE> <INDENT> allPossible[position]=self.beliefs[position]*emissionModel[util.manhattanDistance(position, pacmanPosition)] <NEW_LINE> <DEDENT> <DEDENT> "*** END YOUR CODE HERE ***" <NEW_LINE> allPossible.normalize() <NEW_LINE> self.beliefs = allPossible <NEW_LINE> <DEDENT> def elapseTime(self, gameState): <NEW_LINE> <INDENT> "*** YOUR CODE HERE ***" <NEW_LINE> allPossible = util.Counter() <NEW_LINE> for oldPos in self.legalPositions: <NEW_LINE> <INDENT> newPosDist = self.getPositionDistribution(self.setGhostPosition(gameState, oldPos)) <NEW_LINE> for position in newPosDist: <NEW_LINE> <INDENT> allPossible[position] += newPosDist[position]*self.beliefs[oldPos] <NEW_LINE> <DEDENT> <DEDENT> self.beliefs = allPossible <NEW_LINE> <DEDENT> def getBeliefDistribution(self): <NEW_LINE> <INDENT> return self.beliefs
The exact dynamic inference module should use forward-algorithm updates to compute the exact belief function at each time step.
6259908097e22403b383c9c3
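observe() above is the standard forward-algorithm measurement update: the posterior over ghost positions is the prior times the emission probability of the noisy distance, renormalised. A framework-free sketch with plain dicts (util.Counter and the Pacman helpers are not assumed):

def observe(prior, emission_prob, true_distance, noisy_distance):
    post = {p: prior[p] * emission_prob(noisy_distance, true_distance(p)) for p in prior}
    total = sum(post.values())
    return {p: v / total for p, v in post.items()} if total else post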
class AirSimCarEnv(gym.Env): <NEW_LINE> <INDENT> metadata = {'render.modes': ['human']} <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> config = ConfigParser() <NEW_LINE> config.read(join(dirname(dirname(abspath(__file__))), 'config.ini')) <NEW_LINE> self.action_space = spaces.Discrete(int(config['car_agent']['actions'])) <NEW_LINE> self.image_height = int(config['airsim_settings']['image_height']) <NEW_LINE> self.image_width = int(config['airsim_settings']['image_width']) <NEW_LINE> self.image_channels = int(config['airsim_settings']['image_channels']) <NEW_LINE> image_shape = (self.image_height, self.image_width, self.image_channels) <NEW_LINE> self.track_width = float(config['airsim_settings']['track_width']) <NEW_LINE> self.observation_space = spaces.Box(low=0, high=255, shape=image_shape, dtype=np.uint8) <NEW_LINE> self.car_agent = CarAgent() <NEW_LINE> <DEDENT> def step(self, action): <NEW_LINE> <INDENT> self.car_agent.move(action) <NEW_LINE> car_state= self.car_agent.getCarState() <NEW_LINE> reward = self._compute_reward(car_state) <NEW_LINE> car_controls = self.car_agent.getCarControls() <NEW_LINE> done = self._isDone(car_state, car_controls, reward) <NEW_LINE> info = {} <NEW_LINE> observation = self.car_agent.observe() <NEW_LINE> return observation, reward, done, info <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.car_agent.restart() <NEW_LINE> observation = self.car_agent.observe() <NEW_LINE> return observation <NEW_LINE> <DEDENT> def render(self, mode='human'): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def close (self): <NEW_LINE> <INDENT> self.car_agent.reset() <NEW_LINE> return <NEW_LINE> <DEDENT> def _compute_reward(self, car_state): <NEW_LINE> <INDENT> way_point1, way_point2 = self.car_agent.simGet2ClosestWayPoints() <NEW_LINE> car_pos = car_state.kinematics_estimated.position <NEW_LINE> car_point = np.array([car_pos.x_val, car_pos.y_val]) <NEW_LINE> distance_p1_to_p2p3 = lambda p1, p2, p3: abs(np.cross(p2-p3, p3-p1))/norm(p2-p3) <NEW_LINE> distance_to_center = distance_p1_to_p2p3(car_point, way_point1, way_point2) <NEW_LINE> reward = utils.compute_reward_distance_to_center(distance_to_center, self.track_width) <NEW_LINE> return reward <NEW_LINE> <DEDENT> def _isDone(self, car_state, car_controls, reward): <NEW_LINE> <INDENT> if reward < 0: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> car_pos = car_state.kinematics_estimated.position <NEW_LINE> car_point = ([car_pos.x_val, car_pos.y_val]) <NEW_LINE> destination = self.car_agent.simGetWayPoints()[-1] <NEW_LINE> distance = norm(car_point-destination) <NEW_LINE> if distance < 5: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
Custom environment that follows the gym interface
62599080283ffb24f3cf5364
class ProxyCloner(object): <NEW_LINE> <INDENT> def __init__(self, proxy_class, session): <NEW_LINE> <INDENT> for name, _ in inspect.getmembers(proxy_class, predicate=inspect.ismethod): <NEW_LINE> <INDENT> setattr(self, name, _ProxyMethod(name, session)) <NEW_LINE> <DEDENT> self.session = session
Class that mimics an object's methods but in fact sends the calls over the network
625990807c178a314d78e94c
class VideoPlaceholderNode(PlaceholderNode): <NEW_LINE> <INDENT> widget = VideoWidget <NEW_LINE> def render(self, context): <NEW_LINE> <INDENT> content = self.get_content_from_context(context) <NEW_LINE> if not content: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> if content: <NEW_LINE> <INDENT> video_url, w, h = content.split('\\') <NEW_LINE> m = re.search('youtube\.com\/watch\?v=([^&]+)', content) <NEW_LINE> if m: <NEW_LINE> <INDENT> video_url = 'http://www.youtube.com/v/' + m.group(1) <NEW_LINE> <DEDENT> if not w: <NEW_LINE> <INDENT> w = 425 <NEW_LINE> <DEDENT> if not h: <NEW_LINE> <INDENT> h = 344 <NEW_LINE> <DEDENT> context = {'video_url': video_url, 'w': w, 'h': h} <NEW_LINE> renderer = render_to_string('pages/embed.html', context) <NEW_LINE> return mark_safe(renderer) <NEW_LINE> <DEDENT> return ''
A YouTube `PlaceholderNode`, just here as an example.
625990801f5feb6acb1646bd
class Display: <NEW_LINE> <INDENT> def showImage(self, image): <NEW_LINE> <INDENT> print("图片大小:%d x %d, 图片内容:%s" % (image.getWidth(), image.getHeight(), image.getPix()))
Display (monitor)
625990805166f23b2e244e9c
class VariableRenamer(ast.Transformer): <NEW_LINE> <INDENT> def visit_Variable(self, node): <NEW_LINE> <INDENT> return ast.Variable(node.location, '_' + node.name)
Prepend an underscore to the names of all variables in an AST.
62599080d268445f2663a8c0
class DataGeneration(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.transaction_count = AppConfig.get('transaction_count') <NEW_LINE> self.data_set = AppConfig.get('data_set') <NEW_LINE> <DEDENT> def get_transaction_data_cardinality(self): <NEW_LINE> <INDENT> return random.randint(1, len(self.data_set)) <NEW_LINE> <DEDENT> def generate_data_item(self): <NEW_LINE> <INDENT> return random.sample(self.data_set, 1)[0] <NEW_LINE> <DEDENT> def make_transaction_data_sample(self): <NEW_LINE> <INDENT> data_sample = [] <NEW_LINE> data_cardinality = self.get_transaction_data_cardinality() <NEW_LINE> while len(data_sample) < data_cardinality: <NEW_LINE> <INDENT> data = self.generate_data_item() <NEW_LINE> if data not in data_sample: <NEW_LINE> <INDENT> data_sample.append(data) <NEW_LINE> <DEDENT> <DEDENT> return data_sample <NEW_LINE> <DEDENT> def generate_transactions(self): <NEW_LINE> <INDENT> transaction_cardinality = self.get_transaction_cardinality() <NEW_LINE> transactions = [] <NEW_LINE> for idx in range(transaction_cardinality): <NEW_LINE> <INDENT> tx = Transaction(idx+1) <NEW_LINE> for x in self.generate_tx_data_operations(tx, self.make_transaction_data_sample()): <NEW_LINE> <INDENT> tx.add_data_operation(x) <NEW_LINE> <DEDENT> transactions.append(tx) <NEW_LINE> <DEDENT> return transactions <NEW_LINE> <DEDENT> def get_transaction_cardinality(self): <NEW_LINE> <INDENT> return random.randint( self.transaction_count.get("min"), self.transaction_count.get("max") ) <NEW_LINE> <DEDENT> def generate_tx_data_operations(self, transaction, data_items): <NEW_LINE> <INDENT> if len(data_items) < 1: <NEW_LINE> <INDENT> raise ValueError('data_items list must have at least one item') <NEW_LINE> <DEDENT> data_operations = [] <NEW_LINE> for item in data_items: <NEW_LINE> <INDENT> data_operations = data_operations + list(map(lambda type: DataOperation(type, transaction, item), self.generate_read_writes_types())) <NEW_LINE> <DEDENT> random.shuffle(data_operations) <NEW_LINE> data_operations.append(DataOperation(self.generate_commit_abort_type(), transaction, None)) <NEW_LINE> return data_operations <NEW_LINE> <DEDENT> def generate_read_writes_types(self): <NEW_LINE> <INDENT> count = random.randint(1, 2) <NEW_LINE> if count is 1: <NEW_LINE> <INDENT> return [type_switcher.get(random.randint(0,1))] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [OperationType.READ, OperationType.WRITE] <NEW_LINE> <DEDENT> <DEDENT> def generate_commit_abort_type(self): <NEW_LINE> <INDENT> return type_switcher.get(random.randint(2,3))
DataGeneration is a helper class with several utility methods for generating random data sets for transactions and data operations
625990803346ee7daa3383c4
class CMEErrorSIMPINRequired(CMEError): <NEW_LINE> <INDENT> pass
Exception raised with +CME ERROR: SIM PIN required
6259908097e22403b383c9c4
class Gather(tile.Operation): <NEW_LINE> <INDENT> def __init__(self, value, indicies): <NEW_LINE> <INDENT> outshape = tile.Shape(value.shape.dtype, list(indicies.shape.dims) + list(value.shape.dims[1:])) <NEW_LINE> super(Gather, self).__init__('function (V, I) -> (O) { O = gather(V, I); }', [('V', value), ('I', indicies)], [('O', outshape)])
Gathers elements of a tensor.
62599080656771135c48ad92
class Game: <NEW_LINE> <INDENT> def __init__(self, board_size=3, save_history=True): <NEW_LINE> <INDENT> self.board_size = board_size <NEW_LINE> self.state = np.zeros((board_size, board_size), dtype=int) <NEW_LINE> self.save_history = save_history <NEW_LINE> self.history = [self.state.copy()] <NEW_LINE> self.last_play = None <NEW_LINE> self.sums = np.array([]) <NEW_LINE> self.players = [Player(name='A', value=1, display='O'), Player(name='B', value=-1, display='X')] <NEW_LINE> self.players_values = list([p.value for p in self.players]) <NEW_LINE> self.players_gen = cycle(self.players) <NEW_LINE> self.current_player = next(self.players_gen) <NEW_LINE> <DEDENT> def legal_plays(self): <NEW_LINE> <INDENT> legal_plays = [] <NEW_LINE> if self.winner() is None: <NEW_LINE> <INDENT> free_spaces = np.isin(self.state, self.players_values, invert=True) <NEW_LINE> legal_plays = np.argwhere(free_spaces) <NEW_LINE> legal_plays = list(map(tuple, legal_plays)) <NEW_LINE> <DEDENT> logging.debug('Legal plays: %s', legal_plays) <NEW_LINE> return legal_plays <NEW_LINE> <DEDENT> def winner(self): <NEW_LINE> <INDENT> for player in self.players: <NEW_LINE> <INDENT> if self.board_size * player.value in self.sums: <NEW_LINE> <INDENT> logging.debug('Winner: %s', player.display) <NEW_LINE> return player <NEW_LINE> <DEDENT> <DEDENT> logging.debug('Winner: None') <NEW_LINE> return None <NEW_LINE> <DEDENT> def show_board(self, state_number=-1, return_string=False): <NEW_LINE> <INDENT> lines = [] <NEW_LINE> no_player_display = '.' <NEW_LINE> for line in self.history[state_number]: <NEW_LINE> <INDENT> elements = [] <NEW_LINE> for element in line: <NEW_LINE> <INDENT> if element in self.players_values: <NEW_LINE> <INDENT> for player in self.players: <NEW_LINE> <INDENT> if element == player.value: <NEW_LINE> <INDENT> elements.append(player.display) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> elements.append(no_player_display) <NEW_LINE> <DEDENT> <DEDENT> lines.append('|'.join(elements)) <NEW_LINE> <DEDENT> board_representation = '\n'.join(lines) <NEW_LINE> if return_string: <NEW_LINE> <INDENT> return board_representation <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(board_representation) <NEW_LINE> <DEDENT> <DEDENT> def play(self, move=None): <NEW_LINE> <INDENT> legal_plays = self.legal_plays() <NEW_LINE> if move is not None: <NEW_LINE> <INDENT> if move in legal_plays: <NEW_LINE> <INDENT> selected_move = move <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Selected move is illegal') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> selected_move = legal_plays[np.random.choice(len(legal_plays), 1)[0]] <NEW_LINE> <DEDENT> logging.debug('Selected move: %s', move) <NEW_LINE> self.state[selected_move] = self.current_player.value <NEW_LINE> if self.save_history: <NEW_LINE> <INDENT> self.history.append(self.state.copy()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.history = [self.state.copy()] <NEW_LINE> <DEDENT> self.current_player = next(self.players_gen) <NEW_LINE> self.last_play = selected_move <NEW_LINE> self.sums = np.concatenate( (np.sum(self.state, axis=0), np.sum(self.state, axis=1), np.array([np.sum(np.diag(self.state)), np.sum(np.diag(self.state[::-1]))])))
TicTacToe game implementation to be used by Monte Carlo Tree Search https://en.wikipedia.org/wiki/Tic-tac-toe
625990805fc7496912d48fcd
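A short usage sketch for the Game record above, assuming the Game and Player classes (with their numpy, itertools, and logging imports) are already in scope; the play-until-done loop and the printed summary are illustrative only.

    # Hypothetical usage of the Game class defined above.
    game = Game(board_size=3)

    # Play random legal moves until the board is full or someone wins.
    while game.legal_plays():
        game.play()            # no argument -> a random legal move is chosen

    game.show_board()          # prints the final board with O/X markers
    winner = game.winner()
    print("winner:", winner.name if winner else "draw")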
class PerlMoose(PerlPackage): <NEW_LINE> <INDENT> homepage = "http://search.cpan.org/~ether/Moose-2.2006/lib/Moose.pm" <NEW_LINE> url = "http://search.cpan.org/CPAN/authors/id/E/ET/ETHER/Moose-2.2006.tar.gz" <NEW_LINE> version('2.2010', '636238ac384818ee1e92eff6b9ecc50a') <NEW_LINE> version('2.2009', '5527b1a5abc29b5c57fc488447e76ccd') <NEW_LINE> version('2.2007', 'de487ae226003f7e7f22c0fd8f0074e6') <NEW_LINE> version('2.2006', '929c6b3877a6054ef617cf7ef1e220b5') <NEW_LINE> depends_on('perl-cpan-meta-check', type=('build', 'run')) <NEW_LINE> depends_on('perl-test-cleannamespaces', type=('build', 'run')) <NEW_LINE> depends_on('perl-devel-overloadinfo', type=('build', 'run')) <NEW_LINE> depends_on('perl-class-load-xs', type=('build', 'run')) <NEW_LINE> depends_on('perl-devel-stacktrace', type=('build', 'run')) <NEW_LINE> depends_on('perl-eval-closure', type=('build', 'run')) <NEW_LINE> depends_on('perl-sub-name', type=('build', 'run')) <NEW_LINE> depends_on('perl-module-runtime-conflicts', type=('build', 'run')) <NEW_LINE> depends_on('perl-devel-globaldestruction', type=('build', 'run')) <NEW_LINE> depends_on('perl-package-deprecationmanager', type=('build', 'run')) <NEW_LINE> depends_on('perl-package-stash-xs', type=('build', 'run'))
A postmodern object system for Perl 5
62599080f9cc0f698b1c602e
class EspressoCzar(MP): <NEW_LINE> <INDENT> def __init__(self, max_coffee_bean_units): <NEW_LINE> <INDENT> MP.__init__(self) <NEW_LINE> self.sema_empty = self.Semaphore("machine empty", max_coffee_bean_units) <NEW_LINE> self.sema_full = self.Semaphore("machine full", 0) <NEW_LINE> self.queue = list() <NEW_LINE> self.lock = self.Lock("queue lock") <NEW_LINE> <DEDENT> def refill_espresso_machine(self, refill_unit): <NEW_LINE> <INDENT> self.sema_empty.procure() <NEW_LINE> with self.lock: <NEW_LINE> <INDENT> self.queue.insert(0, refill_unit) <NEW_LINE> <DEDENT> self.sema_full.vacate() <NEW_LINE> <DEDENT> def make_espresso(self): <NEW_LINE> <INDENT> self.sema_full.procure() <NEW_LINE> with self.lock: <NEW_LINE> <INDENT> used_unit = self.queue.pop() <NEW_LINE> <DEDENT> self.sema_empty.vacate() <NEW_LINE> return used_unit
Three years into the future... You are a PhD student at Cornell University and have recently been elected to manage the CS Department coffee supply as the Espresso Czar. In other words, you are now the go-to person whenever the Gates Hall espresso machine is empty. Congrats! With the CS Department's shared love of coffee and 'round-the-clock work ethic, your colleagues will enter the kitchen all throughout the day and (rather quickly) deplete the coffee supply. In keeping up with the demand, you order small coffee bean shipments throughout the day and get notified upon delivery from DeliveryGuys. When notified, you immediately let him try to refill the coffee machine. To keep track of coffee usage, you rigged the machine with a bell to notify you when a CoffeeDrinker tries to make a cup of espresso. The fill capacity of the machine is specified by the constant FILL_CAPACITY and it uses up the coffee beans in a FIFO manner. In technical terms, "refilling the machine" means to add 1 unit of coffee beans to the machine, and "making a cup of espresso" means to use up 1 unit of coffee beans. The fill tank of the espresso machine may be represented by a data structure. Your task is to apply your CS 4410 mastery of synchronization to efficiently prevent CoffeeDrinkers from making coffee when the machine is empty and block DeliveryGuys from refilling the machine when it is already full. Clarification points: - 1 unit of coffee beans makes 1 cup of espresso. - Each shipment contains 1 unit of coffee beans (also written as "refill unit") - Your colleagues are civilized and only make 1 cup at a time. - Choose your data structure carefully! Keep in mind that general correctness is just as important as the synchronization-safety. - The goal of this assignment is to ensure you understand the internals of thread-safe data structures, so be explicit with protection!
6259908044b2445a339b76bf
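The EspressoCzar record above is a classic bounded-buffer problem; the sketch below reimplements the same semaphore-plus-lock pattern with the standard threading module instead of the course's MP framework, so the names and capacity are illustrative only.

    # Bounded-buffer sketch of the same synchronization pattern (illustrative names).
    import threading
    from collections import deque

    CAPACITY = 4
    empty = threading.Semaphore(CAPACITY)   # free slots in the machine
    full = threading.Semaphore(0)            # coffee-bean units available
    lock = threading.Lock()
    machine = deque()

    def refill(unit):
        empty.acquire()                       # block if the machine is already full
        with lock:
            machine.appendleft(unit)
        full.release()

    def make_espresso():
        full.acquire()                        # block if the machine is empty
        with lock:
            unit = machine.pop()              # FIFO: oldest beans are used first
        empty.release()
        return unit

    refill("beans-1")
    print(make_espresso())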
class CouchDBStorageSettingsForm(SiteSettingsForm): <NEW_LINE> <INDENT> couchdb_default_server = forms.CharField( label=_('Default server'), help_text=_('For example, "http://couchdb.local:5984"'), required=True) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(CouchDBStorageSettingsForm, self).__init__(*args, **kwargs) <NEW_LINE> can_use_couchdb, reason = get_can_use_couchdb() <NEW_LINE> if not can_use_couchdb: <NEW_LINE> <INDENT> self.disabled_fields['couchdb_default_server'] = True <NEW_LINE> self.disabled_reasons['couchdb_default_server'] = reason <NEW_LINE> <DEDENT> <DEDENT> class Meta: <NEW_LINE> <INDENT> title = _('CouchDB Settings') <NEW_LINE> fieldsets = ( (None, { 'classes': ('wide', 'hidden'), 'fields': ('couchdb_default_server',), }), )
Settings subform for CouchDB-based file storage. Note that this is currently unused. It's here for legacy reasons and future support.
62599080aad79263cf430280
class TestV1ClusterRole(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testV1ClusterRole(self): <NEW_LINE> <INDENT> pass
V1ClusterRole unit test stubs
6259908063b5f9789fe86c2d
class AntlrObjectiveCLexer(DelegatingLexer): <NEW_LINE> <INDENT> name = 'ANTLR With ObjectiveC Target' <NEW_LINE> aliases = ['antlr-objc'] <NEW_LINE> filenames = ['*.G', '*.g'] <NEW_LINE> def __init__(self, **options): <NEW_LINE> <INDENT> super(AntlrObjectiveCLexer, self).__init__(ObjectiveCLexer, AntlrLexer, **options) <NEW_LINE> <DEDENT> def analyse_text(text): <NEW_LINE> <INDENT> return AntlrLexer.analyse_text(text) and re.search(r'^\s*language\s*=\s*ObjC\s*;', text)
`ANTLR`_ with Objective-C Target *New in Pygments 1.1.*
6259908126068e7796d4e406
class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure): <NEW_LINE> <INDENT> _fields_ = [ ("dwSize", COORD), ("dwCursorPosition", COORD), ("wAttributes", WORD), ("srWindow", SMALL_RECT), ("dwMaximumWindowSize", COORD) ]
Mirrors the CONSOLE_SCREEN_BUFFER_INFO struct in wincon.h.
625990817d847024c075dea3
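The record above only defines the struct layout; the Windows-only sketch below shows how such a struct is typically filled in via GetConsoleScreenBufferInfo. The COORD and SMALL_RECT definitions are re-declared here so the snippet stands alone; none of this is part of the original source.

    # Windows-only sketch: querying the console buffer into the struct above.
    import ctypes
    from ctypes import wintypes

    class COORD(ctypes.Structure):
        _fields_ = [("X", ctypes.c_short), ("Y", ctypes.c_short)]

    class SMALL_RECT(ctypes.Structure):
        _fields_ = [("Left", ctypes.c_short), ("Top", ctypes.c_short),
                    ("Right", ctypes.c_short), ("Bottom", ctypes.c_short)]

    class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
        _fields_ = [("dwSize", COORD),
                    ("dwCursorPosition", COORD),
                    ("wAttributes", wintypes.WORD),
                    ("srWindow", SMALL_RECT),
                    ("dwMaximumWindowSize", COORD)]

    STD_OUTPUT_HANDLE = -11
    handle = ctypes.windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
    csbi = CONSOLE_SCREEN_BUFFER_INFO()
    if ctypes.windll.kernel32.GetConsoleScreenBufferInfo(handle, ctypes.byref(csbi)):
        print("buffer size:", csbi.dwSize.X, "x", csbi.dwSize.Y)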
class LineDLayer(DLayer): <NEW_LINE> <INDENT> def __init__(self, name, dmap): <NEW_LINE> <INDENT> super(LineDLayer, self).__init__(name, dmap) <NEW_LINE> <DEDENT> def addFeature(self, feature, feature_id = None): <NEW_LINE> <INDENT> if feature_id: feature_id = str(feature_id) <NEW_LINE> line_shp = shapeObj(MS_SHAPE_LINE) <NEW_LINE> line = lineObj() <NEW_LINE> p0 = pointObj(feature['gx0'], feature['gy0']) <NEW_LINE> line.add(p0) <NEW_LINE> p1 = pointObj(feature['gx1'], feature['gy1']) <NEW_LINE> line.add(p1) <NEW_LINE> line_shp.add(line) <NEW_LINE> line_shp.index = self.shape_index <NEW_LINE> if feature_id in self.selected: <NEW_LINE> <INDENT> line_shp.classindex = 1 <NEW_LINE> <DEDENT> self.ms_layer.addFeature(line_shp) <NEW_LINE> if not feature_id: <NEW_LINE> <INDENT> feature_id = str(self.shape_index) <NEW_LINE> <DEDENT> self.features[feature_id] = feature <NEW_LINE> self.shape_index += 1 <NEW_LINE> <DEDENT> def drawLine(self, x0, y0, x1, y1, from_pixel_coords = True): <NEW_LINE> <INDENT> if from_pixel_coords: <NEW_LINE> <INDENT> p0 = pix2geo(self.dmap, x0, y0) <NEW_LINE> p1 = pix2geo(self.dmap, x1, y1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> p0 = pointObj(x0, y0) <NEW_LINE> p1 = pointObj(x1, y1) <NEW_LINE> <DEDENT> self.addFeature({'gx0':p0.x, 'gy0':p0.y, 'gx1':p1.x, 'gy1':p1.y, 'is_sel':False})
A line-specialized DLayer subclass.
625990813d592f4c4edbc8c2
class PointTnf(object): <NEW_LINE> <INDENT> def __init__(self, use_cuda=True): <NEW_LINE> <INDENT> self.use_cuda=use_cuda <NEW_LINE> self.tpsTnf = TpsGridGen(use_cuda=self.use_cuda) <NEW_LINE> <DEDENT> def tpsPointTnf(self,theta,points): <NEW_LINE> <INDENT> points=points.unsqueeze(3).transpose(1,3) <NEW_LINE> warped_points = self.tpsTnf.apply_transformation(theta,points) <NEW_LINE> warped_points=warped_points.transpose(3,1).contiguous().squeeze(3) <NEW_LINE> return warped_points <NEW_LINE> <DEDENT> def affPointTnf(self,theta,points): <NEW_LINE> <INDENT> theta_mat = theta.view(-1,2,3) <NEW_LINE> warped_points = torch.bmm(theta_mat[:,:,:2],points) <NEW_LINE> warped_points += theta_mat[:,:,2].unsqueeze(2).expand_as(warped_points) <NEW_LINE> return warped_points
Class with functions for transforming a set of points with affine/TPS transformations
625990814f88993c371f1285
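A standalone sketch of the affine point transform used in affPointTnf above: a batch of 2-D points is multiplied by the 2x3 theta via torch.bmm and the translation column is added. The tensor shapes are assumptions inferred from the code, not documented by the original record.

    # Standalone sketch of the affine branch of PointTnf (shapes are inferred).
    import torch

    batch, n_points = 2, 5
    theta = torch.rand(batch, 6)                 # flattened 2x3 affine parameters
    points = torch.rand(batch, 2, n_points)      # (B, 2, N) point coordinates

    theta_mat = theta.view(-1, 2, 3)
    warped = torch.bmm(theta_mat[:, :, :2], points)        # linear part
    warped = warped + theta_mat[:, :, 2].unsqueeze(2)       # add translation column
    print(warped.shape)                                     # torch.Size([2, 2, 5])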
class CertificateManager( CertificateManagerMixin["Certificate", "CertificateQuerySet"], CertificateManagerBase ): <NEW_LINE> <INDENT> if TYPE_CHECKING: <NEW_LINE> <INDENT> def expired(self) -> "CertificateQuerySet": <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> def not_yet_valid(self) -> "CertificateQuerySet": <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> def revoked(self) -> "CertificateQuerySet": <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> <DEDENT> def create_cert( self, ca: "CertificateAuthority", csr: x509.CertificateSigningRequest, profile: Optional[Profile] = None, autogenerated: Optional[bool] = None, **kwargs: Any, ) -> "Certificate": <NEW_LINE> <INDENT> if profile is None: <NEW_LINE> <INDENT> profile = profiles[None] <NEW_LINE> <DEDENT> elif not isinstance(profile, Profile): <NEW_LINE> <INDENT> raise TypeError("profile must be of type django_ca.profiles.Profile.") <NEW_LINE> <DEDENT> cert = profile.create_cert(ca, csr, **kwargs) <NEW_LINE> obj = self.model(ca=ca, csr=LazyCertificateSigningRequest(csr), profile=profile.name) <NEW_LINE> obj.update_certificate(cert) <NEW_LINE> if autogenerated is None: <NEW_LINE> <INDENT> obj.autogenerated = profile.autogenerated <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> obj.autogenerated = autogenerated <NEW_LINE> <DEDENT> obj.save() <NEW_LINE> post_issue_cert.send(sender=self.model, cert=obj) <NEW_LINE> return obj
Model manager for the Certificate model.
625990817b180e01f3e49dc8
class PathInfo: <NEW_LINE> <INDENT> def __init__(self, params) : <NEW_LINE> <INDENT> self.params = params <NEW_LINE> <DEDENT> def getStatsPath(self) : <NEW_LINE> <INDENT> return os.path.join(self.params.workDir,"alignmentStats.txt") <NEW_LINE> <DEDENT> def getChromDepth(self) : <NEW_LINE> <INDENT> return os.path.join(self.params.workDir,"chromDepth.txt") <NEW_LINE> <DEDENT> def getGraphPath(self) : <NEW_LINE> <INDENT> return os.path.join(self.params.workDir,"svLocusGraph.bin") <NEW_LINE> <DEDENT> def getHyGenDir(self) : <NEW_LINE> <INDENT> return os.path.join(self.params.workDir,"svHyGen") <NEW_LINE> <DEDENT> def getHyGenCandidatePath(self, binStr) : <NEW_LINE> <INDENT> return os.path.join(self.getHyGenDir(),"candidateSV.%s.vcf" % (binStr)) <NEW_LINE> <DEDENT> def getSortedCandidatePath(self) : <NEW_LINE> <INDENT> return os.path.join(self.params.variantsDir,"candidateSV.vcf.gz") <NEW_LINE> <DEDENT> def getHyGenSomaticPath(self, binStr) : <NEW_LINE> <INDENT> return os.path.join(self.getHyGenDir(),"somaticSV.%s.vcf" % (binStr)) <NEW_LINE> <DEDENT> def getSortedSomaticPath(self) : <NEW_LINE> <INDENT> return os.path.join(self.params.variantsDir,"somaticSV.vcf.gz") <NEW_LINE> <DEDENT> def getGraphStatsPath(self) : <NEW_LINE> <INDENT> return os.path.join(self.params.statsDir,"svLocusGraphStats.tsv")
object to centralize shared workflow path names
6259908197e22403b383c9c6
class MissingParameterError(Exception): <NEW_LINE> <INDENT> pass
Proxy-related error. Raised when a compulsory parameter is missing both from the proxy's attributes and from the arguments of the method invocation.
6259908163b5f9789fe86c2f
class Image(object): <NEW_LINE> <INDENT> def __init__(self, width, height): <NEW_LINE> <INDENT> self.iterations = 0 <NEW_LINE> self.width = width <NEW_LINE> self.height = height <NEW_LINE> self.image = [[0 for i in xrange(width*3)] for j in xrange(height)] <NEW_LINE> self.writer = png.Writer(width = self.width, height = self.height, bitdepth = 8) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Total {} iterations done.".format(self.iterations) <NEW_LINE> <DEDENT> def AddFromString(self, data, iterations, fmt): <NEW_LINE> <INDENT> WIDTH = 0 <NEW_LINE> HEIGHT = 1 <NEW_LINE> PIXELS = 2 <NEW_LINE> META = 3 <NEW_LINE> new_img = png.Reader(bytes=data).read() <NEW_LINE> if new_img[WIDTH] != self.width or new_img[HEIGHT] != self.height: <NEW_LINE> <INDENT> raise InvalidImage <NEW_LINE> <DEDENT> for h, row in enumerate(new_img[PIXELS]): <NEW_LINE> <INDENT> for i, value in enumerate(row): <NEW_LINE> <INDENT> if (fmt == Socket.PNG_8BIT): <NEW_LINE> <INDENT> value = value / 255 * 1023 <NEW_LINE> <DEDENT> self.image[h][i] += value*iterations <NEW_LINE> <DEDENT> <DEDENT> self.iterations += iterations <NEW_LINE> <DEDENT> def Clamp16(self, v): <NEW_LINE> <INDENT> if v > 1023: <NEW_LINE> <INDENT> v = 1023 <NEW_LINE> <DEDENT> return v / 1023 * 255 <NEW_LINE> <DEDENT> def Write(self,filename): <NEW_LINE> <INDENT> iterations = self.iterations if self.iterations > 0 else 1 <NEW_LINE> scaled = [[self.Clamp16(value / iterations) for value in row] for row in self.image] <NEW_LINE> with open(filename, 'wb') as fp: <NEW_LINE> <INDENT> self.writer.write(fp, scaled)
The Image class takes care of turning incoming binary data into images. It averages the images from multiple passes into one and writes PNG files.
625990817cff6e4e811b7508
class Laplace(Distribution): <NEW_LINE> <INDENT> arg_constraints = {'loc': constraints.real, 'scale': constraints.positive} <NEW_LINE> support = constraints.real <NEW_LINE> has_rsample = True <NEW_LINE> @property <NEW_LINE> def mean(self): <NEW_LINE> <INDENT> return self.loc <NEW_LINE> <DEDENT> @property <NEW_LINE> def variance(self): <NEW_LINE> <INDENT> return 2 * self.scale.pow(2) <NEW_LINE> <DEDENT> @property <NEW_LINE> def stddev(self): <NEW_LINE> <INDENT> return (2 ** 0.5) * self.scale <NEW_LINE> <DEDENT> def __init__(self, loc, scale, validate_args=None): <NEW_LINE> <INDENT> self.loc, self.scale = broadcast_all(loc, scale) <NEW_LINE> if isinstance(loc, Number) and isinstance(scale, Number): <NEW_LINE> <INDENT> batch_shape = torch.Size() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> batch_shape = self.loc.size() <NEW_LINE> <DEDENT> super(Laplace, self).__init__(batch_shape, validate_args=validate_args) <NEW_LINE> <DEDENT> def expand(self, batch_shape, _instance=None): <NEW_LINE> <INDENT> new = self._get_checked_instance(Laplace, _instance) <NEW_LINE> batch_shape = torch.Size(batch_shape) <NEW_LINE> new.loc = self.loc.expand(batch_shape) <NEW_LINE> new.scale = self.scale.expand(batch_shape) <NEW_LINE> super(Laplace, new).__init__(batch_shape, validate_args=False) <NEW_LINE> new._validate_args = self._validate_args <NEW_LINE> return new <NEW_LINE> <DEDENT> def rsample(self, sample_shape=torch.Size()): <NEW_LINE> <INDENT> shape = self._extended_shape(sample_shape) <NEW_LINE> u = self.loc.new(shape).uniform_(_finfo(self.loc).eps - 1, 1) <NEW_LINE> return self.loc - self.scale * u.sign() * torch.log1p(-u.abs()) <NEW_LINE> <DEDENT> def log_prob(self, value): <NEW_LINE> <INDENT> if self._validate_args: <NEW_LINE> <INDENT> self._validate_sample(value) <NEW_LINE> <DEDENT> return -torch.log(2 * self.scale) - torch.abs(value - self.loc) / self.scale <NEW_LINE> <DEDENT> def cdf(self, value): <NEW_LINE> <INDENT> if self._validate_args: <NEW_LINE> <INDENT> self._validate_sample(value) <NEW_LINE> <DEDENT> return 0.5 - 0.5 * (value - self.loc).sign() * torch.expm1(-(value - self.loc).abs() / self.scale) <NEW_LINE> <DEDENT> def icdf(self, value): <NEW_LINE> <INDENT> if self._validate_args: <NEW_LINE> <INDENT> self._validate_sample(value) <NEW_LINE> <DEDENT> term = value - 0.5 <NEW_LINE> return self.loc - self.scale * (term).sign() * torch.log1p(-2 * term.abs()) <NEW_LINE> <DEDENT> def entropy(self): <NEW_LINE> <INDENT> return 1 + torch.log(2 * self.scale)
Creates a Laplace distribution parameterized by :attr:`loc` and :attr:`scale`. Example:: >>> m = Laplace(torch.tensor([0.0]), torch.tensor([1.0])) >>> m.sample() # Laplace distributed with loc=0, scale=1 tensor([ 0.1046]) Args: loc (float or Tensor): mean of the distribution scale (float or Tensor): scale of the distribution
625990815fcc89381b266ec0
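A brief usage sketch of the distribution above, run against the released torch.distributions.Laplace (which this record mirrors); sampled values will of course vary.

    # Usage sketch against torch.distributions.Laplace.
    import torch
    from torch.distributions import Laplace

    m = Laplace(torch.tensor([0.0]), torch.tensor([1.0]))
    x = m.sample()                       # one draw, e.g. tensor([0.1046])
    print(m.log_prob(x))                 # log density at the sample
    print(m.mean, m.stddev)              # tensor([0.]), tensor([1.4142])
    print(m.cdf(torch.tensor([0.0])))    # tensor([0.5000])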
class Schedule(object): <NEW_LINE> <INDENT> def __init__(self, schedule_id, name=None, description=None, entity_ids=None, days=None): <NEW_LINE> <INDENT> self.schedule_id = schedule_id <NEW_LINE> self.name = name <NEW_LINE> self.description = description <NEW_LINE> self.entity_ids = entity_ids or [] <NEW_LINE> self.days = days or [0, 1, 2, 3, 4, 5, 6] <NEW_LINE> self.__event_listeners = [] <NEW_LINE> <DEDENT> def add_event_listener(self, event_listener): <NEW_LINE> <INDENT> self.__event_listeners.append(event_listener) <NEW_LINE> <DEDENT> def schedule(self, hass): <NEW_LINE> <INDENT> for event in self.__event_listeners: <NEW_LINE> <INDENT> event.schedule(hass)
A Schedule
625990815166f23b2e244ea0
class TestPrimitive_C_CANCEL(unittest.TestCase): <NEW_LINE> <INDENT> def test_assignment(self): <NEW_LINE> <INDENT> primitive = C_CANCEL() <NEW_LINE> primitive.MessageIDBeingRespondedTo = 13 <NEW_LINE> self.assertEqual(primitive.MessageIDBeingRespondedTo, 13) <NEW_LINE> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> primitive.MessageIDBeingRespondedTo = 100000 <NEW_LINE> <DEDENT> with self.assertRaises(TypeError): <NEW_LINE> <INDENT> primitive.MessageIDBeingRespondedTo = 'test'
Test DIMSE C-CANCEL operations.
6259908192d797404e3898c0
class RemovingSetWrapper(set): <NEW_LINE> <INDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if len(self): <NEW_LINE> <INDENT> return self.pop() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise StopIteration
RemovingSetWrapper removes elements on iteration. **Note:** Usually used internally only. When iterating over RemovingSetWrapper, each iterated element is removed. This basically keeps track of which points still need to be iterated over in the viability algorithm.
625990813d592f4c4edbc8c3
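A tiny demonstration of the destructive iteration described above, assuming the RemovingSetWrapper class from this record is in scope.

    # Demonstration of destructive iteration.
    points = RemovingSetWrapper({(0, 0), (0, 1), (1, 1)})
    for p in points:
        print("processing", p)
    print(len(points))   # 0 -- every iterated element was popped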
@StatusCodes.EBADF <NEW_LINE> class BadFileDescriptorError(UVError, common.builtins.IOError): <NEW_LINE> <INDENT> pass
Bad file descriptor.
625990814f88993c371f1286
class Echo(ProtoMessage): <NEW_LINE> <INDENT> message = pmessages.StringField(1, default='Hello, world!')
I am rubber and you are glue...
62599081d268445f2663a8c2
class TestTopology(TopologyBuilder): <NEW_LINE> <INDENT> def __init__(self, input_file, logger_config): <NEW_LINE> <INDENT> TopologyBuilder.__init__(self, input_file, logger_config) <NEW_LINE> self.resolved_topology = [] <NEW_LINE> self.topology = None <NEW_LINE> with open(self.input_file, 'r') as file_desc: <NEW_LINE> <INDENT> doc = yaml.load(file_desc) <NEW_LINE> <DEDENT> for key, val in doc.iteritems(): <NEW_LINE> <INDENT> setattr(self, key, val) <NEW_LINE> <DEDENT> self.populate_topology() <NEW_LINE> <DEDENT> def get_all_nodes(self): <NEW_LINE> <INDENT> return self.resolved_topology <NEW_LINE> <DEDENT> def get_random_node(self): <NEW_LINE> <INDENT> if CONFIG_KEY in self.topology: <NEW_LINE> <INDENT> if SKIP_NODE_FLAG in self.topology[CONFIG_KEY] and self.topology[CONFIG_KEY][SKIP_NODE_FLAG] is True: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return random.choice(self.resolved_topology) <NEW_LINE> <DEDENT> def populate_topology(self): <NEW_LINE> <INDENT> for node in self.topology['nodes']: <NEW_LINE> <INDENT> self.resolved_topology.append(node)
Test topology class
625990817b180e01f3e49dc9
class AlcatelAosSSH(CiscoSSHConnection): <NEW_LINE> <INDENT> def session_preparation(self): <NEW_LINE> <INDENT> self._test_channel_read(pattern=r"[>#]") <NEW_LINE> self.set_base_prompt() <NEW_LINE> time.sleep(0.3 * self.global_delay_factor) <NEW_LINE> self.clear_buffer() <NEW_LINE> <DEDENT> def check_enable_mode(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def enable(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def exit_enable_mode(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def check_config_mode(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def config_mode(self, *args, **kwargs): <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def exit_config_mode(self, *args, **kwargs): <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def save_config( self, cmd="write memory flash-synchro", confirm=False, confirm_response="" ): <NEW_LINE> <INDENT> return super().save_config( cmd=cmd, confirm=confirm, confirm_response=confirm_response )
Alcatel-Lucent Enterprise AOS support (AOS6 and AOS8).
625990815fc7496912d48fcf
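A hedged usage sketch of the driver above via Netmiko's ConnectHandler; the device_type string "alcatel_aos" is assumed to be the registered name for this class, and the host and credentials are made up for illustration.

    # Hypothetical Netmiko session against an AOS switch (host/credentials made up).
    from netmiko import ConnectHandler

    device = {
        "device_type": "alcatel_aos",   # assumed registered name for AlcatelAosSSH
        "host": "192.0.2.10",
        "username": "admin",
        "password": "secret",
    }
    conn = ConnectHandler(**device)
    print(conn.send_command("show system"))
    conn.save_config()                  # issues "write memory flash-synchro" per the class
    conn.disconnect()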
class Status : <NEW_LINE> <INDENT> node2com = {} <NEW_LINE> total_weight = 0 <NEW_LINE> internals = {} <NEW_LINE> degrees = {} <NEW_LINE> gdegrees = {} <NEW_LINE> def __init__(self) : <NEW_LINE> <INDENT> self.node2com = dict([]) <NEW_LINE> self.total_weight = 0 <NEW_LINE> self.degrees = dict([]) <NEW_LINE> self.gdegrees = dict([]) <NEW_LINE> self.internals = dict([]) <NEW_LINE> self.loops = dict([]) <NEW_LINE> <DEDENT> def __str__(self) : <NEW_LINE> <INDENT> return ("node2com : " + str(self.node2com) + " degrees : " + str(self.degrees) + " internals : " + str(self.internals) + " total_weight : " + str(self.total_weight)) <NEW_LINE> <DEDENT> def copy(self) : <NEW_LINE> <INDENT> new_status = Status() <NEW_LINE> new_status.node2com = self.node2com.copy() <NEW_LINE> new_status.internals = self.internals.copy() <NEW_LINE> new_status.degrees = self.degrees.copy() <NEW_LINE> new_status.gdegrees = self.gdegrees.copy() <NEW_LINE> new_status.total_weight = self.total_weight <NEW_LINE> <DEDENT> def init(self, graph, part = None) : <NEW_LINE> <INDENT> count = 0 <NEW_LINE> self.node2com = dict([]) <NEW_LINE> self.total_weight = 0 <NEW_LINE> self.degrees = dict([]) <NEW_LINE> self.gdegrees = dict([]) <NEW_LINE> self.internals = dict([]) <NEW_LINE> self.total_weight = graph.size(weight = 'weight') <NEW_LINE> if part == None : <NEW_LINE> <INDENT> for node in graph.nodes() : <NEW_LINE> <INDENT> self.node2com[node] = count <NEW_LINE> deg = float(graph.degree(node, weight = 'weight')) <NEW_LINE> if deg < 0 : <NEW_LINE> <INDENT> raise ValueError("Bad graph type, use positive weights") <NEW_LINE> <DEDENT> self.degrees[count] = deg <NEW_LINE> self.gdegrees[node] = deg <NEW_LINE> self.loops[node] = float(graph.get_edge_data(node, node, {"weight":0}).get("weight", 1)) <NEW_LINE> self.internals[count] = self.loops[node] <NEW_LINE> count = count + 1 <NEW_LINE> <DEDENT> <DEDENT> else : <NEW_LINE> <INDENT> for node in graph.nodes() : <NEW_LINE> <INDENT> com = part[node] <NEW_LINE> self.node2com[node] = com <NEW_LINE> deg = float(graph.degree(node, weigh = 'weight')) <NEW_LINE> self.degrees[com] = self.degrees.get(com, 0) + deg <NEW_LINE> self.gdegrees[node] = deg <NEW_LINE> inc = 0. <NEW_LINE> for neighbor, datas in graph[node].items() : <NEW_LINE> <INDENT> weight = datas.get("weight", 1) <NEW_LINE> if weight <= 0 : <NEW_LINE> <INDENT> raise ValueError("Bad graph type, use positive weights") <NEW_LINE> <DEDENT> if part[neighbor] == com : <NEW_LINE> <INDENT> if neighbor == node : <NEW_LINE> <INDENT> inc += float(weight) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> inc += float(weight) / 2. <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.internals[com] = self.internals.get(com, 0) + inc
Handles several pieces of data in one struct. Could be replaced by a named tuple, but we don't want to depend on Python 2.6
62599081bf627c535bcb2f9a
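A small sketch of initializing the Status record above from a NetworkX graph, assuming the class is in scope; the karate-club graph is just a convenient example input, with every node starting in its own community.

    # Sketch: filling the Status bookkeeping from a NetworkX graph.
    import networkx as nx

    graph = nx.karate_club_graph()
    status = Status()
    status.init(graph)               # part=None -> one community per node
    print(status.total_weight)       # number of edges for an unweighted graph
    print(status.node2com[0])        # community index assigned to node 0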
class BestNewswireFile(_BaseBestFile): <NEW_LINE> <INDENT> data_type = 'news' <NEW_LINE> def _build_source_fname(self, dir="source"): <NEW_LINE> <INDENT> return os.path.join(self.data_root, dir, '{}.xml'.format(self.doc_id))
Newswire data
62599081283ffb24f3cf5369
class Cancel(Message): <NEW_LINE> <INDENT> MESSAGE_TYPE = 49 <NEW_LINE> SKIP = u'skip' <NEW_LINE> ABORT = u'abort' <NEW_LINE> KILL = u'kill' <NEW_LINE> def __init__(self, request, mode=None): <NEW_LINE> <INDENT> assert(type(request) in six.integer_types) <NEW_LINE> assert(mode is None or type(mode) == six.text_type) <NEW_LINE> assert(mode in [None, self.SKIP, self.ABORT, self.KILL]) <NEW_LINE> Message.__init__(self) <NEW_LINE> self.request = request <NEW_LINE> self.mode = mode <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def parse(wmsg): <NEW_LINE> <INDENT> assert(len(wmsg) > 0 and wmsg[0] == Cancel.MESSAGE_TYPE) <NEW_LINE> if len(wmsg) != 3: <NEW_LINE> <INDENT> raise ProtocolError("invalid message length {0} for CANCEL".format(len(wmsg))) <NEW_LINE> <DEDENT> request = check_or_raise_id(wmsg[1], u"'request' in CANCEL") <NEW_LINE> options = check_or_raise_extra(wmsg[2], u"'options' in CANCEL") <NEW_LINE> mode = None <NEW_LINE> if u'mode' in options: <NEW_LINE> <INDENT> option_mode = options[u'mode'] <NEW_LINE> if type(option_mode) != six.text_type: <NEW_LINE> <INDENT> raise ProtocolError("invalid type {0} for 'mode' option in CANCEL".format(type(option_mode))) <NEW_LINE> <DEDENT> if option_mode not in [Cancel.SKIP, Cancel.ABORT, Cancel.KILL]: <NEW_LINE> <INDENT> raise ProtocolError("invalid value '{0}' for 'mode' option in CANCEL".format(option_mode)) <NEW_LINE> <DEDENT> mode = option_mode <NEW_LINE> <DEDENT> obj = Cancel(request, mode=mode) <NEW_LINE> return obj <NEW_LINE> <DEDENT> def marshal(self): <NEW_LINE> <INDENT> options = {} <NEW_LINE> if self.mode is not None: <NEW_LINE> <INDENT> options[u'mode'] = self.mode <NEW_LINE> <DEDENT> return [Cancel.MESSAGE_TYPE, self.request, options] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return u"Cancel(request={0}, mode={1})".format(self.request, self.mode)
A WAMP ``CANCEL`` message. Format: ``[CANCEL, CALL.Request|id, Options|dict]``
625990817cff6e4e811b750a
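To make the wire format above concrete, the sketch below builds a CANCEL message and parses it back, assuming the Cancel class from this record and its WAMP helper functions are importable.

    # Wire-format round trip (assumes the Cancel class above is importable).
    msg = Cancel(12345, mode=Cancel.KILL)
    wire = msg.marshal()
    print(wire)                      # [49, 12345, {'mode': 'kill'}]

    roundtrip = Cancel.parse(wire)
    print(roundtrip)                 # Cancel(request=12345, mode=kill)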
class Consuming(MonitorTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(Consuming, self).setUp() <NEW_LINE> self.controller = controller.HttpConsuming() <NEW_LINE> self.controller.respond = lambda x, y: None <NEW_LINE> <DEDENT> def test_get(self): <NEW_LINE> <INDENT> self.controller.get() <NEW_LINE> self.assertInCommand(("--action list", "--consuming")) <NEW_LINE> <DEDENT> def test_post_blank(self): <NEW_LINE> <INDENT> self.assertEqual(None, self.controller.post()) <NEW_LINE> <DEDENT> def test_post_put_set(self): <NEW_LINE> <INDENT> with self.assertRaises(cherrypy.HTTPRedirect) as context: <NEW_LINE> <INDENT> self.controller.post(put="a") <NEW_LINE> <DEDENT> self.assertEqual(['http://127.0.0.1:8080/consuming'], context.exception[0]) <NEW_LINE> self.assertInCommand(("--action add", "--consuming", "--key a")) <NEW_LINE> <DEDENT> def test_post_delete_set(self): <NEW_LINE> <INDENT> with self.assertRaises(cherrypy.HTTPRedirect) as context: <NEW_LINE> <INDENT> self.controller.post(delete="a") <NEW_LINE> <DEDENT> self.assertEqual(['http://127.0.0.1:8080/consuming'], context.exception[0]) <NEW_LINE> self.assertInCommand(("--action remove", "--consuming", "--key a")) <NEW_LINE> <DEDENT> def test_post_put_delete_set(self): <NEW_LINE> <INDENT> self.assertRaises(HTTPSConnectorInvalidCombinationError, self.controller.post, put="a", delete="a") <NEW_LINE> <DEDENT> def test_put(self): <NEW_LINE> <INDENT> self.controller.put("a") <NEW_LINE> self.assertInCommand(("--action add", "--consuming", "--key a")) <NEW_LINE> <DEDENT> def test_delete(self): <NEW_LINE> <INDENT> self.controller.delete("a") <NEW_LINE> self.assertInCommand(("--action remove", "--consuming", "--key a"))
Test the "HttpConsuming" controller.
62599081a8370b77170f1e9b
class ScaledOrthogonalAlignment(Alignment): <NEW_LINE> <INDENT> def __init__(self, scaling=True): <NEW_LINE> <INDENT> self.scaling = scaling <NEW_LINE> self.scale = 1 <NEW_LINE> <DEDENT> def fit(self, X, Y): <NEW_LINE> <INDENT> R, sc = scaled_procrustes(X, Y, scaling=self.scaling) <NEW_LINE> self.scale = sc <NEW_LINE> self.R = sc * R <NEW_LINE> return self <NEW_LINE> <DEDENT> def transform(self, X): <NEW_LINE> <INDENT> return X.dot(self.R)
Compute a mixing matrix R and a scaling sc such that the Frobenius norm ||sc RX - Y||^2 is minimized and R is an orthogonal matrix. Parameters ---------- scaling : boolean, optional Determines whether a scaling parameter is applied to improve the transform. R : ndarray (n_features, n_features) Optimal orthogonal transform
6259908123849d37ff852b83
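The scaled_procrustes helper is not shown in the record; the sketch below works the closed-form solution it presumably implements: with M = X^T Y = U S V^T, the optimal rotation is R = U V^T and the optimal scale is trace(S) / ||X||_F^2, minimizing ||sc X R - Y||_F. This is an assumption about the helper, stated only for illustration.

    # Sketch of the scaled orthogonal Procrustes solution (assumed, not the real helper).
    import numpy as np

    rng = np.random.default_rng(0)
    X = rng.standard_normal((20, 4))
    true_R, _ = np.linalg.qr(rng.standard_normal((4, 4)))   # random orthogonal matrix
    Y = 2.5 * X @ true_R                                     # scaled, rotated copy of X

    M = X.T @ Y
    U, S, Vt = np.linalg.svd(M)
    R = U @ Vt                                   # optimal orthogonal map
    sc = S.sum() / np.linalg.norm(X) ** 2        # optimal scaling factor

    print(np.allclose(sc * X @ R, Y))            # True: alignment recovered
    print(round(sc, 3))                          # ~2.5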
class Child(Father, Mother): <NEW_LINE> <INDENT> pass
Implementation class: must not appear in specifications
625990814527f215b58eb705
class Zcertstore(object): <NEW_LINE> <INDENT> def __init__(self, location): <NEW_LINE> <INDENT> p = utils.lib.zcertstore_new(utils.to_bytes(location)) <NEW_LINE> if p == utils.ffi.NULL: <NEW_LINE> <INDENT> raise MemoryError("Could not allocate person") <NEW_LINE> <DEDENT> self._p = utils.ffi.gc(p, libczmq_destructors.zcertstore_destroy_py) <NEW_LINE> <DEDENT> def set_loader(self, loader, destructor, state): <NEW_LINE> <INDENT> utils.lib.zcertstore_set_loader(self._p, loader, destructor, state) <NEW_LINE> <DEDENT> def lookup(self, public_key): <NEW_LINE> <INDENT> return utils.lib.zcertstore_lookup(self._p, utils.to_bytes(public_key)) <NEW_LINE> <DEDENT> def insert(self, cert_p): <NEW_LINE> <INDENT> utils.lib.zcertstore_insert(self._p, cert_p._p) <NEW_LINE> <DEDENT> def empty(self): <NEW_LINE> <INDENT> utils.lib.zcertstore_empty(self._p) <NEW_LINE> <DEDENT> def print_py(self): <NEW_LINE> <INDENT> utils.lib.zcertstore_print(self._p) <NEW_LINE> <DEDENT> def certs(self): <NEW_LINE> <INDENT> return utils.lib.zcertstore_certs(self._p) <NEW_LINE> <DEDENT> def test(verbose): <NEW_LINE> <INDENT> utils.lib.zcertstore_test(verbose)
work with CURVE security certificate stores
625990815fc7496912d48fd0
class NamedLink(object): <NEW_LINE> <INDENT> def __init__(self, url, name): <NEW_LINE> <INDENT> self.url = url <NEW_LINE> self.name = name <NEW_LINE> <DEDENT> def get_path(self): <NEW_LINE> <INDENT> return urlparse.urlparse(self.url).path[1:] <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.url) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, NamedLink): <NEW_LINE> <INDENT> return self.url == other.url <NEW_LINE> <DEDENT> return False
Holds a url with an additional name attribute. Used when scraping all the urls from the main page into a single set of urls.
62599081f9cc0f698b1c6031
class StupidCrypto(cryptosystem.Pkc): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def get_name(cls): <NEW_LINE> <INDENT> return "StupidPkc" <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_priority(cls): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def encrypt_public(self, message): <NEW_LINE> <INDENT> return message <NEW_LINE> <DEDENT> def decrypt_public(self, message): <NEW_LINE> <INDENT> return message <NEW_LINE> <DEDENT> def encrypt_private(self, message): <NEW_LINE> <INDENT> return message <NEW_LINE> <DEDENT> def decrypt_private(self, message): <NEW_LINE> <INDENT> return message <NEW_LINE> <DEDENT> def get_key_pair(self): <NEW_LINE> <INDENT> return "pub_key", "priv_key" <NEW_LINE> <DEDENT> def copy_with_public_key(self, pub_key): <NEW_LINE> <INDENT> return StupidCrypto()
A cryptosystem that makes no changes to the plaintext.
625990814a966d76dd5f09b0
class Glyph(QtWidgets.QGraphicsItem): <NEW_LINE> <INDENT> def __init__(self, pixmap, char, leftMargin=0, charWidth=0, fullWidth=0): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.char = char <NEW_LINE> self.leftMargin = leftMargin <NEW_LINE> self.charWidth = charWidth <NEW_LINE> self.fullWidth = fullWidth <NEW_LINE> self.pixmap = pixmap <NEW_LINE> self.boundingRect = QtCore.QRectF(0,0,pixmap.width(),pixmap.height()) <NEW_LINE> self.selectionRect = QtCore.QRectF(0,0,pixmap.width()-1,pixmap.height()-1) <NEW_LINE> self.setFlag(self.ItemIsMovable, False) <NEW_LINE> self.setFlag(self.ItemIsSelectable, True) <NEW_LINE> self.setFlag(self.ItemIsFocusable, True) <NEW_LINE> <DEDENT> def value(self, encoding): <NEW_LINE> <INDENT> if encoding.lower() == 'ucs-2': <NEW_LINE> <INDENT> return ord(self.char) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return int.from_bytes(self.char.encode(encoding, 'replace'), 'big') <NEW_LINE> <DEDENT> <DEDENT> def updateToolTip(self, encoding): <NEW_LINE> <INDENT> if self.char is None: <NEW_LINE> <INDENT> name = '<p>Unknown glyph</p>' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> name = ( ('<p style="font-size: 24pt;">&#%d;</p>' % ord(self.char)) + ('<p>Value: 0x%X</p>' % self.value(encoding)) ) <NEW_LINE> <DEDENT> text = '<p>Character:</p>' + name <NEW_LINE> self.setToolTip(text) <NEW_LINE> <DEDENT> def boundingRect(self): <NEW_LINE> <INDENT> return self.boundingRect <NEW_LINE> <DEDENT> def contextMenuEvent(self, e): <NEW_LINE> <INDENT> QtWidgets.QGraphicsItem.contextMenuEvent(self, e) <NEW_LINE> menu = QtWidgets.QMenu() <NEW_LINE> menu.addAction('Import...', self.handleImport) <NEW_LINE> menu.addAction('Export...', self.handleExport) <NEW_LINE> menu.exec_(e.screenPos()) <NEW_LINE> <DEDENT> def handleExport(self): <NEW_LINE> <INDENT> fn = QtWidgets.QFileDialog.getSaveFileName(window, 'Choose a PNG file', '', 'PNG image file (*.png);;All Files(*)')[0] <NEW_LINE> if not fn: return <NEW_LINE> self.pixmap.save(fn) <NEW_LINE> <DEDENT> def handleImport(self): <NEW_LINE> <INDENT> fn = QtWidgets.QFileDialog.getOpenFileName(window, 'Choose a PNG file', '', 'PNG image file (*.png);;All Files(*)')[0] <NEW_LINE> if not fn: return <NEW_LINE> try: pix = QtGui.QPixmap(fn) <NEW_LINE> except: return <NEW_LINE> tooWide = pix.width() > self.pixmap.width() <NEW_LINE> tooTall = pix.height() > self.pixmap.height() <NEW_LINE> if tooWide and tooTall: <NEW_LINE> <INDENT> pix = pix.scaled(self.pixmap.width(), self.pixmap.height()) <NEW_LINE> <DEDENT> elif tooWide: <NEW_LINE> <INDENT> pix = pix.scaledToWidth(self.pixmap.width()) <NEW_LINE> <DEDENT> elif tooTall: <NEW_LINE> <INDENT> pix = pix.scaledToHeight(self.pixmap.height()) <NEW_LINE> <DEDENT> self.pixmap = pix <NEW_LINE> self.update() <NEW_LINE> window.prevDock.updatePreview() <NEW_LINE> <DEDENT> def paint(self, painter, option, widget): <NEW_LINE> <INDENT> painter.drawPixmap(0, 0, self.pixmap) <NEW_LINE> if self.isSelected(): <NEW_LINE> <INDENT> painter.setPen(QtGui.QPen(Qt.blue, 1, Qt.SolidLine)) <NEW_LINE> painter.drawRect(self.selectionRect) <NEW_LINE> painter.fillRect(self.selectionRect, QtGui.QColor.fromRgb(255, 255, 255, 64))
Class for a character glyph
62599081bf627c535bcb2f9c
class Zeus_1d_file(Zeus_file): <NEW_LINE> <INDENT> def open_density(self): <NEW_LINE> <INDENT> super(Zeus_1d_file,self).open_density() <NEW_LINE> self.rho = self.rho[0,0,:] <NEW_LINE> <DEDENT> def open_energy(self): <NEW_LINE> <INDENT> super(Zeus_1d_file,self).open_energy() <NEW_LINE> self.e = self.e[0,0,:] <NEW_LINE> <DEDENT> def open_i_vel(self): <NEW_LINE> <INDENT> super(Zeus_1d_file,self).open_i_vel() <NEW_LINE> self.v_i = self.v_i[0,0,:] <NEW_LINE> <DEDENT> def plot_density(self): <NEW_LINE> <INDENT> self.open_density() <NEW_LINE> self.rho_v_r = self._lin_plot(self.rho, r'$\rho$') <NEW_LINE> return self.rho_v_r <NEW_LINE> <DEDENT> def plot_vel(self): <NEW_LINE> <INDENT> self.open_i_vel() <NEW_LINE> self.v_i_v_r = self._lin_plot(self.v_i, r'$v$') <NEW_LINE> return self.v_i_v_r <NEW_LINE> <DEDENT> def _lin_plot(self,var,var_y_label): <NEW_LINE> <INDENT> var_v_r = Zeus_f_linplot(self.x,var,self.fnum_str) <NEW_LINE> var_v_r.plot_init(xlabel=r'$x$',ylabel=var_y_label) <NEW_LINE> var_v_r.plot() <NEW_LINE> var_v_r.time_title(self.t, unit='') <NEW_LINE> return var_v_r <NEW_LINE> <DEDENT> def plot_velocity(self): <NEW_LINE> <INDENT> self.open_i_vel()
assumes x-direction variation
6259908155399d3f05627fdf
class DeleteUser(command.Command): <NEW_LINE> <INDENT> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(DeleteUser, self).get_parser(prog_name) <NEW_LINE> parser.add_argument( 'users', metavar='<user>', nargs="+", help='User(s) to delete (name or ID)', ) <NEW_LINE> parser.add_argument( '--domain', metavar='<domain>', help='Domain owning <user> (name or ID)', ) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> identity_client = self.app.client_manager.identity <NEW_LINE> domain = None <NEW_LINE> if parsed_args.domain: <NEW_LINE> <INDENT> domain = common.find_domain(identity_client, parsed_args.domain) <NEW_LINE> <DEDENT> for user in parsed_args.users: <NEW_LINE> <INDENT> if domain is not None: <NEW_LINE> <INDENT> user_obj = utils.find_resource(identity_client.users, user, domain_id=domain.id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> user_obj = utils.find_resource(identity_client.users, user) <NEW_LINE> <DEDENT> identity_client.users.delete(user_obj.id)
Delete user(s)
625990815fcc89381b266ec2
class Movie(object): <NEW_LINE> <INDENT> VALID_RATINGS = ['G', 'PG', 'PG-13', 'R'] <NEW_LINE> def __init__(self, movie_title, movie_storyline, poster_url, trailer_url): <NEW_LINE> <INDENT> self.title = movie_title <NEW_LINE> self.storyline = movie_storyline <NEW_LINE> self.poster_image_url = poster_url <NEW_LINE> self.trailer_youtube_url = trailer_url <NEW_LINE> <DEDENT> def show_trailer(self): <NEW_LINE> <INDENT> webbrowser.open(self.trailer_youtube_url)
This class provides a way to store movie-related information.
62599081091ae3566870670b
class TsObject(object): <NEW_LINE> <INDENT> def __init__(self, client, table, rows=[], columns=[]): <NEW_LINE> <INDENT> if not isinstance(table, Table): <NEW_LINE> <INDENT> raise ValueError('table must be an instance of Table.') <NEW_LINE> <DEDENT> self.client = client <NEW_LINE> self.table = table <NEW_LINE> self.rows = rows <NEW_LINE> if not isinstance(self.rows, list): <NEW_LINE> <INDENT> raise RiakError("TsObject requires a list of rows") <NEW_LINE> <DEDENT> self.columns = columns <NEW_LINE> if self.columns is not None and not isinstance(self.columns, list): <NEW_LINE> <INDENT> raise RiakError("TsObject columns must be a list") <NEW_LINE> <DEDENT> <DEDENT> def store(self): <NEW_LINE> <INDENT> return self.client.ts_put(self)
The TsObject holds information about Timeseries data, plus the data itself.
6259908199fddb7c1ca63b3f
class StatusResourceView(ViewSet): <NEW_LINE> <INDENT> @never_cache <NEW_LINE> def status(self, request, *args, **kwargs): <NEW_LINE> <INDENT> plugins = sorted(( plugin_class(**copy.deepcopy(options)) for plugin_class, options in plugin_dir._registry ), key=lambda plugin: plugin.identifier()) <NEW_LINE> errors = _run_checks(plugins) <NEW_LINE> status_code = status.HTTP_500_INTERNAL_SERVER_ERROR if errors else status.HTTP_200_OK <NEW_LINE> components, system_status = _build_components_status(plugins) <NEW_LINE> return JsonResponse( { "components": components, "status": system_status }, status=status_code )
status: Check the status of the system and its dependencies.
6259908123849d37ff852b85
class SaltExecutionFunction(SaltTargettedStep): <NEW_LINE> <INDENT> @property <NEW_LINE> def function(self): <NEW_LINE> <INDENT> return self.step_dict['name'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def args(self): <NEW_LINE> <INDENT> args = [] <NEW_LINE> if 'm_name' in self.step_dict: <NEW_LINE> <INDENT> args.append(self.step_dict['m_name']) <NEW_LINE> <DEDENT> elif 'arg' in self.step_dict: <NEW_LINE> <INDENT> args.extend(self.step_dict['arg']) <NEW_LINE> <DEDENT> if 'kwargs' in self.step_dict: <NEW_LINE> <INDENT> for k, v in self.step_dict['kwargs'].items(): <NEW_LINE> <INDENT> args.append("{}={}".format(k, v)) <NEW_LINE> <DEDENT> <DEDENT> return args <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "SaltExecFunc(desc: {}, fun: {}, args: {}, target: {})" .format(self.desc, self.function, self.args, self.target) <NEW_LINE> <DEDENT> def pretty_string(self): <NEW_LINE> <INDENT> if self.args: <NEW_LINE> <INDENT> return "{}({})".format(self.function, ", ".join(self.args)) <NEW_LINE> <DEDENT> return self.function
Class to represent a Salt module.run step
625990813346ee7daa3383c8
class ImageDatasetColor(torch.utils.data.Dataset): <NEW_LINE> <INDENT> def __init__(self, size, text_function, map_function, fonts, device = None): <NEW_LINE> <INDENT> self.text_function = text_function <NEW_LINE> self.fonts = fonts <NEW_LINE> self.size = size <NEW_LINE> self.device = device <NEW_LINE> token_to_index, index_to_token = map_function() <NEW_LINE> self.token_to_index = token_to_index <NEW_LINE> self.index_to_token = index_to_token <NEW_LINE> self.color_map = {k : tuple([i + 1] + list(np.random.randint(0, 255, (2,)))) for i, k in enumerate(sorted(token_to_index.keys()))} <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.size <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> sample = {} <NEW_LINE> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> input, target, weights, font, text = generate_color_data_point(text = self.text_function(), font = random.choice(self.fonts), color_map = self.color_map) <NEW_LINE> break <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("Error") <NEW_LINE> <DEDENT> <DEDENT> input, target = input[:, :, :1], target[:, :, 0] <NEW_LINE> sample['input'] = torch.tensor(input.astype(np.float32) / 255, device = self.device) <NEW_LINE> sample['target'] = torch.tensor(target / 255, dtype = torch.long, device = self.device) <NEW_LINE> sample['target_text'] = torch.tensor([self.token_to_index[c] for c in text], dtype = torch.long, device = self.device) <NEW_LINE> sample['weights'] = torch.tensor(weights.astype(np.float32), device = self.device) <NEW_LINE> sample['input_length'] = torch.tensor(2048, dtype = torch.long, device = self.device) <NEW_LINE> sample['target_length'] = torch.tensor(len(text), dtype = torch.long, device = self.device) <NEW_LINE> sample['font'] = font <NEW_LINE> sample['text'] = text <NEW_LINE> return sample
Generated dataset for a mono masking process. The dataset distribution is customizable
62599081f548e778e596d05e
class AuthPluginOptionsMissing(AuthorizationFailure): <NEW_LINE> <INDENT> def __init__(self, opt_names): <NEW_LINE> <INDENT> super(AuthPluginOptionsMissing, self).__init__( "Authentication failed. Missing options: %s" % ", ".join(opt_names)) <NEW_LINE> self.opt_names = opt_names
Auth plugin misses some options.
62599081ad47b63b2c5a931d
class CheckerRouter: <NEW_LINE> <INDENT> def db_for_read(self, model, **hints): <NEW_LINE> <INDENT> if model._meta.app_label == 'college': <NEW_LINE> <INDENT> return 'college' <NEW_LINE> <DEDENT> elif model._meta.app_label == 'school': <NEW_LINE> <INDENT> return 'school' <NEW_LINE> <DEDENT> return 'default' <NEW_LINE> <DEDENT> def db_for_write(self, model, **hints): <NEW_LINE> <INDENT> if model._meta.app_label == 'college': <NEW_LINE> <INDENT> return 'college' <NEW_LINE> <DEDENT> elif model._meta.app_label == 'school': <NEW_LINE> <INDENT> return 'school' <NEW_LINE> <DEDENT> return 'default' <NEW_LINE> <DEDENT> def allow_relation(self, obj1, obj2, **hints): <NEW_LINE> <INDENT> if obj1._meta.app_label == 'college' or obj2._meta.app_label == 'college': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif 'college' not in [obj1._meta.app_label, obj2._meta.app_label]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif obj1._meta.app_label == 'school' or obj2._meta.app_label == 'school': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif 'school' not in [obj1._meta.app_label, obj2._meta.app_label]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def allow_migrate(self, db, app_label, model_name=None, **hints): <NEW_LINE> <INDENT> if app_label == 'college': <NEW_LINE> <INDENT> return db == 'college' <NEW_LINE> <DEDENT> elif app_label == 'school': <NEW_LINE> <INDENT> return db == 'school' <NEW_LINE> <DEDENT> return None
A router to control all database operations on models in the college and school applications.
62599081f548e778e596d05f
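To activate a router like the one above, Django needs it listed in settings alongside the per-app database aliases it routes to; the dotted module path below is hypothetical.

    # settings.py sketch -- the dotted path to CheckerRouter is hypothetical.
    DATABASES = {
        "default": {"ENGINE": "django.db.backends.sqlite3", "NAME": "default.sqlite3"},
        "college": {"ENGINE": "django.db.backends.sqlite3", "NAME": "college.sqlite3"},
        "school": {"ENGINE": "django.db.backends.sqlite3", "NAME": "school.sqlite3"},
    }
    DATABASE_ROUTERS = ["myproject.routers.CheckerRouter"]   # hypothetical module path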
class Shared_Term_Do_Construct(Base): <NEW_LINE> <INDENT> subclass_names = ['Outer_Shared_Do_Construct', 'Inner_Shared_Do_Construct']
<shared-term-do-construct> = <outer-shared-do-construct> | <inner-shared-do-construct>
62599081aad79263cf430287
class K8SNamespaceInfo(StructuredRel, models.Relation): <NEW_LINE> <INDENT> adopted_since = IntegerProperty()
k8s namespace information model (for relationship)
625990814c3428357761bd88
class MessageBusBackend: <NEW_LINE> <INDENT> def produce(self, value: dict, key: str = None, **kwargs) -> None: <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def get_consumer(self): <NEW_LINE> <INDENT> raise NotImplementedError()
Message bus interface.
62599081be7bc26dc9252bbc
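Since the record above is only an interface, a minimal in-memory subclass may help illustrate the contract; the names and behavior here are assumptions for illustration, not the project's real backend, and the MessageBusBackend class above is assumed to be in scope.

    # Minimal in-memory backend sketch (illustrative only; not the real backend).
    from typing import Iterator, Tuple

    class InMemoryBackend(MessageBusBackend):
        def __init__(self) -> None:
            self._messages: list = []

        def produce(self, value: dict, key: str = None, **kwargs) -> None:
            # Store the message instead of publishing it to a real bus.
            self._messages.append((key, value))

        def get_consumer(self) -> Iterator[Tuple[str, dict]]:
            # Yield whatever has been produced so far.
            return iter(self._messages)

    bus = InMemoryBackend()
    bus.produce({"event": "user_created"}, key="user-1")
    for key, value in bus.get_consumer():
        print(key, value)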
class IsBusinessAdmin(permissions.BasePermission): <NEW_LINE> <INDENT> def has_permission(self, request, view): <NEW_LINE> <INDENT> print('entro a has perm') <NEW_LINE> try: <NEW_LINE> <INDENT> if request.user: <NEW_LINE> <INDENT> print(request.user) <NEW_LINE> try: <NEW_LINE> <INDENT> empleado=Empleado.objects.get(usuario__user=request.user, is_admin=True, active=True) <NEW_LINE> print(empleado) <NEW_LINE> return True <NEW_LINE> <DEDENT> except Empleado.DoesNotExist: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return False
Permission that only allows active business-admin employees (an Empleado with is_admin=True) to access the view.
6259908197e22403b383c9cd