code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class LogRaptorConfigError(LogRaptorException):
    """Error in a configuration file or a misconfiguration of the package."""

    def __init__(self, message):
        # NOTE(review): calls Exception.__init__ directly rather than
        # super()/LogRaptorException.__init__ — presumably intentional to
        # keep plain-message behaviour; confirm before changing.
        Exception.__init__(self, message)
        # Trace the configuration error for debugging purposes.
        logger.debug('!ConfigError: {0}'.format(message))
Error in a configuration file or a misconfiguration of the package.
6259904282261d6c52730833
class Proxy:
    """A 'relatively less resource-intensive' stand-in that only creates the
    real Producer when the slot is free (Proxy design pattern demo)."""

    def __init__(self):
        # 'No' marks the producer slot as free.
        self.occupied = 'No'
        self.producer = None

    def produce(self):
        """Meet the producer when available; otherwise report he is busy."""
        print("Artist checking if producer is available...")
        if self.occupied != 'No':
            # Busy path: simulate a wait, then report.
            time.sleep(2)
            print("Producer is busy!")
            return
        # Free path: instantiate the heavyweight object only now.
        self.producer = Producer()
        time.sleep(2)
        self.producer.meet()
Define the 'relatively less resource-intensive' proxy to instantiate as a middleman
625990426e29344779b01931
class AreaInfoHandler(BaseHandler):
    """Serve the area list, from the redis cache when possible, else MySQL."""

    def get(self):
        # Try the cache first; a cache failure must not break the request.
        try:
            ret = self.redis.get("area_info")
        except Exception as e:
            logging.error(e)
            ret = None
        if ret:
            logging.debug(ret)
            logging.info("hit redis")
            # The cached value is already a JSON fragment; splice it verbatim.
            return self.write('{"errno":%s,"errmsg":"ok","data":%s}' %(RET.OK,ret))
        # Cache miss: fall back to the database.
        try:
            ret = self.db.query("select ai_area_id,ai_name from ih_area_info")
        except Exception as e:
            logging.error(e)
            return self.write(dict(errno=RET.DBERR, errmsg="get user error"))
        if not ret:
            return self.write(dict(errno=RET.NODATA, errmsg="no area data"))
        # BUG FIX: the original read the name via the undefined name ``l``
        # ("name": l["ai_name"]) instead of the loop variable ``i``, which
        # raised NameError whenever the database path was taken.
        areas = [{"area_id": i["ai_area_id"], "name": i["ai_name"]} for i in ret]
        # Refresh the cache best-effort; failures are only logged.
        try:
            self.redis.setex("area_info", constants.REDIS_AREA_INFO_EXPIRES_SECONDES, json.dumps(areas))
        except Exception as e:
            logging.error(e)
        self.write(dict(errno=RET.OK, errmsg="ok", data=areas))
区域选择
62599042e76e3b2f99fd9ce9
class Registro0000(Registro):
    """Record 0000: opening of the digital file and identification of the
    entrepreneur or business company (SPED ECD layout).
    """
    # Field positions and names must match the official record-0000 layout.
    campos = [
        CampoFixo(1, 'REG', '0000'),
        CampoFixo(2, 'LECD', 'LECD'),
        CampoData(3, 'DT_INI'),
        CampoData(4, 'DT_FIN'),
        Campo(5, 'NOME'),
        Campo(6, 'CNPJ'),
        Campo(7, 'UF'),
        Campo(8, 'IE'),
        CampoNumerico(9, 'COD_MUN'),
        Campo(10, 'IM'),
        Campo(11, 'IND_SIT_ESP'),
        Campo(12, 'IND_SIT_INI_PER'),
        Campo(13, 'IND_NIRE'),
        Campo(14, 'IND_FIN_ESC'),
        Campo(15, 'COD_HASH_SUB'),
        Campo(16, 'IND_GRANDE_PORTE'),
        Campo(17, 'TIP_ECD'),
        Campo(18, 'COD_SCP'),
        Campo(19, 'IDENT_MF'),
        Campo(20, 'IND_ESC_CONS')
    ]
ABERTURA DO ARQUIVO DIGITAL E IDENTIFICAÇÃO DO EMPRESÁRIO OU DA SOCIEDADE EMPRESÁRIA
625990421f5feb6acb163ed2
class ModePlugin():
    """Parser plugin that queries Google image search and extracts results."""

    def get_match_results(
            self, search_term=None, page=1, text=None, response=None,
            session=None, url=None):
        """Fetch one page of image-search results for *search_term*.

        ``text``, ``response`` and ``url`` are accepted for interface
        compatibility but are not used directly by this method.
        """
        parsed_url = urlparse('https://www.google.com/search')
        url_query = {
            'asearch': 'ichunk',
            'async': '_id:rg_s,_pms:s,_fmt:pc',
            'ijn': str(page - 1),
            'q': search_term,
            # Results are paginated in steps of 100.
            'start': str(int(page - 1) * 100),
            'tbm': 'isch',
            'yv': '3',
        }
        query_url = parsed_url._replace(query=urlencode(url_query)).geturl()
        log.debug('query url', url=query_url)
        resp_model = models.Response.create(query_url, method='get', session=session)
        # NOTE(review): the search term is passed as ``url`` here — confirm
        # this is intentional and not a leftover from an earlier signature.
        mr_dict = self.get_match_results_dict(
            text=resp_model.text, session=session, url=search_term)
        match_results = self.match_results_models_from_dict(mr_dict, session)
        return match_results

    @classmethod
    def get_match_results_dict(self, text=None, response=None, session=None, url=None):
        """Parse raw result HTML *text* into ``{'url': {...}, 'tag': [...]}``.

        Declared as a classmethod although its first parameter is named
        ``self`` (kept byte-identical in this doc-only pass).
        """
        # Drop everything before the first <style> tag; the payload starts there.
        text = '<style>{}'.format(text.split('<style>', 1)[1])
        soup = BeautifulSoup(text, 'html.parser')
        res = {'url': {}, 'tag': []}
        rg_bx = soup.select('.rg_bx')
        for html_tag in rg_bx:
            # Each result carries its metadata as JSON inside div.rg_meta.
            rg_meta = json.loads(html_tag.select_one('div.rg_meta').text)
            url_tags = [
                ('gi {}'.format(key), str(value))
                for key, value in rg_meta.items() if str(value)]
            url = rg_meta['ou']
            thumbnail = rg_meta['tu']
            if url in res['url']:
                # Duplicate image URL: merge its tags and thumbnails.
                res['url'][url]['tag'].extend(url_tags)
                res['url'][url]['thumbnail'].append(thumbnail)
            else:
                res['url'][url] = {'thumbnail': [thumbnail], 'tag': url_tags}
        return res
Base class for parser plugin.
6259904223e79379d538d7dc
class memoize(object):
    """Cache a method's return value on the instance it is invoked on.

    Use as a decorator on methods. Results are stored per instance, keyed by
    (function, positional args, keyword args); all arguments must therefore
    be hashable. When the decorated method is looked up on the class itself
    (not an instance) the raw function is returned and nothing is cached.
    """

    def __init__(self, func):
        self.func = func

    def __get__(self, obj, objtype=None):
        # Class-level access: behave like the undecorated function.
        if obj is None:
            return self.func
        # Instance access: bind the instance so __call__ sees it as args[0].
        return partial(self, obj)

    def __call__(self, *args, **kw):
        instance = args[0]
        # Lazily create the per-instance cache dict (name-mangled to
        # _memoize__cache, same attribute as the original implementation).
        try:
            cache = instance.__cache
        except AttributeError:
            cache = instance.__cache = {}
        key = (self.func, args[1:], frozenset(kw.items()))
        if key not in cache:
            cache[key] = self.func(*args, **kw)
        return cache[key]
Cache the return value of a method. This class is meant to be used as a decorator of methods. The return value from a given method invocation will be cached on the instance whose method was invoked. All arguments passed to a method decorated with memoize must be hashable. If a memoized method is invoked directly on its class the result will not be cached. Instead the method will be invoked like a static method: class Obj(object): @memoize def add_to(self, arg): return self + arg Obj.add_to(1) # not enough arguments Obj.add_to(1, 2) # returns 3, result is not cached Recipe from: http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
62599042dc8b845886d54897
class data_generator(DataGenerator):
    """Batch generator that encodes (question, equation) pairs for training."""

    def __iter__(self, random=False):
        batch_token_ids, batch_segment_ids = [], []
        for is_end, (question, equation, answer) in self.sample(random):
            # Encode question and equation as one sequence pair; ``answer``
            # is not fed to the model here.
            token_ids, segment_ids = tokenizer.encode(
                question, equation, maxlen=maxlen
            )
            batch_token_ids.append(token_ids)
            batch_segment_ids.append(segment_ids)
            if len(batch_token_ids) == self.batch_size or is_end:
                # Pad every sequence in the batch to a common length.
                batch_token_ids = sequence_padding(batch_token_ids)
                batch_segment_ids = sequence_padding(batch_segment_ids)
                # Targets are None: the model computes its own loss internally.
                yield [batch_token_ids, batch_segment_ids], None
                batch_token_ids, batch_segment_ids = [], []
数据生成器
6259904207f4c71912bb0711
class Criteria:
    """One criteria used to evaluate net performance.

    Instances register themselves in the module-level ``_all_criterias``
    mapping under their key; creating a second criteria with an existing
    key is rejected.
    """

    def __init__(self, key: str, format_func, acc_mean: bool):
        self._key = key
        self._format_func = format_func
        self._acc_mean = acc_mean
        global _all_criterias
        # Register exactly once; duplicate keys are a programming error.
        if self._key in _all_criterias:
            raise ValueError("The criteria '{}' does already exist.".format(self._key))
        _all_criterias[self._key] = self

    @property
    def acc_mean(self):
        """Whether values of this criteria are accumulated as a mean."""
        return self._acc_mean

    def format(self, value):
        """Render *value* using this criteria's formatting function."""
        return self._format_func(value)
An object of this class represents one criteria that can be used to evaluate net performance.
62599042d4950a0f3b1117b0
class QuizQuestion(models.Model):
    """A question belonging to a quiz, auto-numbered sequentially.

    BUG FIX: the original class body began with a bare ``u`` expression
    (leftover prefix of a stripped ``u\"\"\"...\"\"\"`` docstring), which raises
    NameError the moment the class body executes; it is replaced by this
    docstring.
    """
    quiz = models.ForeignKey('quiz.Quiz', verbose_name="Quiz")
    question = models.ForeignKey('quiz.Question', verbose_name="Questão")
    number = models.IntegerField(verbose_name="Número")

    def save(self, *args, **kwargs):
        # New questions get the next sequential number within their quiz.
        if not self.id:
            num = QuizQuestion.objects.filter(quiz=self.quiz).count()
            self.number = num + 1
        super(QuizQuestion, self).save(*args, **kwargs)
Classe que representa as questões de um quiz.
625990421d351010ab8f4dfd
class TestIsIterable(unittest.TestCase):
    """Unit tests for :func:`colour.algebra.common.is_iterable`."""

    def test_is_iterable(self):
        """Assert iterables are accepted and scalars are rejected."""
        for iterable in ('', (), [], dict(), np.array([])):
            self.assertTrue(is_iterable(iterable))
        for scalar in (1, 2):
            self.assertFalse(is_iterable(scalar))
Defines :func:`colour.algebra.common.is_iterable` definition unit tests methods.
6259904250485f2cf55dc264
class AgencyESGActivity(models.Model):
    """External quality assurance activity in the scope of the ESG."""
    id = models.AutoField(primary_key=True)
    agency = models.ForeignKey('Agency', on_delete=models.CASCADE)
    activity = models.CharField(max_length=500)
    # Denormalised label, rebuilt on every save by set_activity_display().
    activity_display = models.CharField(max_length=500, blank=True, null=True)
    activity_local_identifier = models.CharField(max_length=100, blank=True)
    activity_description = models.CharField(max_length=300, blank=True)
    activity_type = models.ForeignKey('AgencyActivityType', on_delete=models.PROTECT)
    reports_link = models.URLField(blank=True, null=True)
    activity_valid_from = models.DateField(default=date.today)
    activity_valid_to = models.DateField(blank=True, null=True)

    def __str__(self):
        return self.activity_display

    def set_activity_display(self):
        """Rebuild the '<acronym> -> <activity> (<type>)' display label."""
        self.activity_display = "%s -> %s (%s)" % (self.agency.acronym_primary, self.activity, self.activity_type)

    def validate_local_identifier(self):
        """Raise ValidationError if another activity of the same agency
        already uses this local identifier (excluding self on update)."""
        if self.activity_local_identifier != '':
            conflicting_instance = AgencyESGActivity.objects.filter(agency=self.agency, activity_local_identifier=self.activity_local_identifier)
            if self.id:
                # Updating an existing row: don't report a conflict with itself.
                conflicting_instance = conflicting_instance.exclude(pk=self.id)
            if conflicting_instance.exists():
                raise ValidationError('ESG Activity with this name and parent already exists.')

    def save(self, *args, **kwargs):
        # Keep the display label fresh and enforce identifier uniqueness.
        self.set_activity_display()
        self.validate_local_identifier()
        super(AgencyESGActivity, self).save(*args, **kwargs)

    class Meta:
        db_table = 'deqar_agency_esg_activities'
        verbose_name = 'Agency ESG Activity'
        ordering = ('agency', 'activity')
        indexes = [
            models.Index(fields=['activity_display']),
            models.Index(fields=['activity_valid_to'])
        ]
External quality assurance activities in the scope of the ESG.
6259904221a7993f00c67247
class BoreHole(object):
    """Load, store and plot layer data from a borehole file.

    Each data row holds a start depth [m], an end depth and a classification,
    separated by whitespace. The first line holds the inline position (x, z)
    and a 'BOREHOLEID_TEXTOFFSET' field.
    """

    def __init__(self, fname):
        self._fname = fname
        self._load()

    def __repr__(self):
        return self.__class__.__name__ + '("{}")'.format(self._fname)

    def __str__(self):
        out = 'Borehole id: {}\n Inline position (x, z): {}\n Layers:'.format(
            self.borehole_id, self.inline_pos)
        for layer in self.data[1:]:
            out = ''.join([out, '\n ', str(layer)])
        return out

    def _load(self):
        # dtype=None lets genfromtxt infer per-column types (structured rows).
        self.data = np.genfromtxt(self._fname, dtype=None)
        if self.data.size > 1:
            # Header row: x, z and 'BOREHOLEID_TEXTOFFSET'.
            header = self.data[0][2].split('_')
            self.borehole_id = header[0]
            self._textoffset = float(header[1])
            self.inline_pos = (self.data[0][0], self.data[0][1])
            # Remaining rows: last column is the layer classification.
            self.classes = [d[-1] for d in self.data[1:]]
            # class_id maps each layer to an index into unique_classes.
            self.unique_classes, rev_idx = np.unique(self.classes,
                                                     return_inverse=True)
            self.class_id = rev_idx
        else:
            raise Warning('File "{}" contains no layers!'.format(self._fname))

    def plot(self, ax, plot_thickness=1.0, cmin=None, cmax=None, cm=None,
             do_legend=True, **legend_kwargs):
        """Draw the borehole as a coloured 1D column on *ax*.

        ``cmin``/``cmax`` default to the class-id range; ``cm`` defaults to a
        'jet' colormap with one colour per unique class.
        """
        start_depths = np.asarray([d[0] for d in self.data[1:]])
        end_depths = np.asarray([d[1] for d in self.data[1:]])
        thickness = end_depths - start_depths
        if cmin is None or cmax is None:
            cmin = min(self.class_id)
            cmax = max(self.class_id)
        if cm is None:
            cm = plt.get_cmap('jet', len(self.unique_classes))
        draw1DColumn(ax, self.inline_pos[0], self.class_id, thickness,
                     ztopo=self.inline_pos[1], width=plot_thickness,
                     cmin=cmin, cmax=cmax, name=self.borehole_id, cmap=cm,
                     textoffset=self._textoffset)
        if do_legend:
            self.add_legend(ax, cm, **legend_kwargs)

    def add_legend(self, ax, cmap, **legend_kwargs):
        """Attach a class-colour legend for this borehole to *ax*."""
        leg = create_legend(ax, cmap, self.class_id, self.unique_classes)
        ax.legend(handles=leg, **legend_kwargs)
Class to load and store data from a borehole. Each row in the data file must contain a start depth [m], end depth and a classification. The values should be separated by whitespace. The first line should contain the inline position (x and z), text ID and an offset for the text (for plotting). The format is very simple, and a sample file could look like this: 16.0 0.0 BOREHOLEID_TEXTOFFSET 0.0 1.6 clti 1.6 10.0 shale
6259904210dbd63aa1c71eb7
class WorkerMiddleware(MiddlewareMixin if django_version_ge('1.10.0') else object):
    """Attach ``request.worker`` to every request.

    The base class is chosen at import time because Django >= 1.10 requires
    the new-style ``MiddlewareMixin`` middleware API.
    """

    def process_request(self, request):
        assert hasattr(request, "user"), "Worker middleware requires authentication middleware to be installed. Also make sure the database is set and writable."
        # NOTE(review): assigned on the request *class*, not the instance —
        # presumably LazyWorker is a lazy object/descriptor resolved on
        # attribute access; confirm against LazyWorker's implementation.
        request.__class__.worker = LazyWorker()
        return None
Sets a request.worker. - Worker instance if username exists in database - None otherwise
62599042d6c5a102081e3405
class PeekingIterator(object):
    """Wrap an iterator exposing next()/hasNext() to also support peek().

    A one-element lookahead is kept in ``self.temp``; an exhausted source is
    represented by a ``None`` lookahead, so ``None`` must never be a valid
    element of the underlying iterator.
    """

    def __init__(self, iterator):
        self.iter = iterator
        self.temp = self._advance()

    def _advance(self):
        # Pull the next element, or None once the source is exhausted.
        return self.iter.next() if self.iter.hasNext() else None

    def peek(self):
        """Return the next element without consuming it."""
        return self.temp

    def next(self):
        """Consume and return the next element, refilling the lookahead."""
        current, self.temp = self.temp, self._advance()
        return current

    def hasNext(self):
        return self.temp is not None
题意是求构造一个peek迭代的类对象 Runtime: 24 ms, faster than 28.60% of Python online submissions for Peeking Iterator. Memory Usage: 11.9 MB, less than 46.67% of Python online submissions for Peeking Iterator.
6259904273bcbd0ca4bcb56a
class Dataloader:
    """Base class for dataset loaders; subclasses must implement every hook.

    Subclasses should additionally define:
      1. ``self.classes`` — class-name strings indexed by class id.
      2. ``self.data_transform`` — a transform pipeline of the form
         ``transforms.Compose([transforms.ToTensor(), transforms.Normalize(...)])``.
    """

    TRAIN = 'train'
    TEST = 'test'

    def __init__(self):
        pass

    def length(self, portion):
        """Number of samples in *portion* ('train' or 'test')."""
        raise NotImplementedError("Implement length in child class!")

    def num_classes(self):
        """Total number of distinct class labels."""
        raise NotImplementedError("Implement num_classes in child class!")

    def get_image(self, portion, idx):
        """Image at index *idx* within *portion*."""
        raise NotImplementedError("Implement get_image in child class!")

    def get_image_label(self, portion, idx):
        """Class label of the image at index *idx* within *portion*."""
        raise NotImplementedError("Implement get_image_label in child class!")
Base class for loading data; any other dataset loading class should inherit from this to ensure consistency In addition to these functions, also define 1. self.classes which contains strings of all class labels by id, ie self.classes[0] should give class name of class id 0. See Adience/UTKFace/CIFAR10 classes below 2. self.data_transform: this should be of the form transforms.Compose([ transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) ])
625990428e05c05ec3f6f7cb
class BetaAssetServicer(object):
    """Generated Beta-API servicer stub for the Asset service.

    The Beta API is deprecated for grpcio 0.15.0 and later; this class only
    eases transition from grpcio<0.15.0 to grpcio>=0.15.0. The stray
    ``pass`` statements are generator artefacts and are kept verbatim.
    """
    pass

    def GetAsset(self, request, context):
        # Stub: reports UNIMPLEMENTED until a subclass overrides it.
        pass
        context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
The Beta API is deprecated for 0.15.0 and later. It is recommended to use the GA API (classes and functions in this file not marked beta) for all further purposes. This class was generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.
62599042baa26c4b54d50589
class TestPuppetProvisioner(object):
    """Run Puppet provisioner tests against a bundled offline manifest."""

    @pytest.fixture
    def manifest_dir(self) -> str:
        # Path to the offline Puppet repository shipped with the test assets.
        __dirname = os.path.dirname(os.path.realpath(__file__))
        return os.path.join(__dirname, "assets/puppet-offline-repo")

    def test_can_provision_from_local_puppet_manifest(
        self,
        existing_jail: 'libioc.Jail.Jail',
        manifest_dir: str,
        pkg: 'libioc.Pkg.Pkg'
    ) -> None:
        """Provision a stopped jail from a local manifest and verify results."""
        existing_jail.config.set_dict(dict(
            provision=dict(
                method="puppet",
                source=manifest_dir
            )
        ))
        assert existing_jail.config["provision.method"] == "puppet"
        assert existing_jail.config["provision.source"] == manifest_dir
        assert existing_jail.running is False
        # Provisioning is event-driven; every yielded item must be an IocEvent.
        for event in existing_jail.provisioner.provision():
            assert isinstance(event, libioc.events.IocEvent) is True
        # The jail must be left stopped, and the manifest's marker file created.
        assert existing_jail.running is False
        assert os.path.exists(
            os.path.join(existing_jail.root_path, "puppet.test")
        )
Run Puppet Provisioner tests.
6259904229b78933be26aa33
class FunctionClassFactory(FunctionClassFactoryBase):
    """Class factory for the XACML 1.0 *-equal function classes.

    @cvar FUNCTION_NAMES: equal function URNs
    @type FUNCTION_NAMES: tuple
    @cvar FUNCTION_NS_SUFFIX: generic suffix for equal function URNs
    @type FUNCTION_NS_SUFFIX: string
    @cvar FUNCTION_BASE_CLASS: base class for all equal function classes
    @type FUNCTION_BASE_CLASS: ndg.xacml.core.functions.v1.EqualBase
    """
    FUNCTION_NAMES = (
        'urn:oasis:names:tc:xacml:1.0:function:string-equal',
        'urn:oasis:names:tc:xacml:1.0:function:boolean-equal',
        'urn:oasis:names:tc:xacml:1.0:function:integer-equal',
        'urn:oasis:names:tc:xacml:1.0:function:double-equal',
        'urn:oasis:names:tc:xacml:1.0:function:date-equal',
        'urn:oasis:names:tc:xacml:1.0:function:time-equal',
        'urn:oasis:names:tc:xacml:1.0:function:dateTime-equal',
        'urn:oasis:names:tc:xacml:1.0:function:dayTimeDuration-equal',
        'urn:oasis:names:tc:xacml:1.0:function:yearMonthDuration-equal',
        'urn:oasis:names:tc:xacml:1.0:function:anyURI-equal',
        'urn:oasis:names:tc:xacml:1.0:function:x500Name-equal',
        'urn:oasis:names:tc:xacml:1.0:function:rfc822Name-equal',
        'urn:oasis:names:tc:xacml:1.0:function:hexBinary-equal',
        'urn:oasis:names:tc:xacml:1.0:function:base64Binary-equal',
        'urn:oasis:names:tc:xacml:1.0:function:xpath-node-equal'
    )
    FUNCTION_NS_SUFFIX = '-equal'
    FUNCTION_BASE_CLASS = EqualBase
Class Factory for *-equal XACML function classes @cvar FUNCTION_NAMES: equal function URNs @type FUNCTION_NAMES: tuple @cvar FUNCTION_NS_SUFFIX: generic suffix for equal function URNs @type FUNCTION_NS_SUFFIX: string @cvar FUNCTION_BASE_CLASS: base class for all equal function classes @type FUNCTION_BASE_CLASS: ndg.xacml.core.functions.v1.EqualBase
6259904223e79379d538d7df
class TestOmerNoInConfigFile(TestOmerFalseInConfigFile):
    """Test "omer = no" in the configuration file.

    Only the raw config text differs; all test methods are inherited from
    TestOmerFalseInConfigFile.
    """
    config_data = "omer = no"
Test "omer = no" in configuration file.
62599042b830903b9686edea
@export
class AmbiguousDataRequest(Exception):
    """Raised when more than one piece of data matches a user's request."""

    def __init__(self, found, message=''):
        super().__init__(message)
        # The candidate matches that made the request ambiguous, so callers
        # can present them to the user.
        self.found = found
Raised when more than one piece of data match a users' request
6259904221a7993f00c67249
class ShapelinkAccumulator:
    """Callable proxy for a single Shapelink API method.

    The Shapelink API object creates one accumulator per method name;
    calling the accumulator forwards straight to ``call_method`` on the
    owning API object with the bound name.
    """

    def __init__(self, shapelink_obj, name):
        self.shapelink_obj = shapelink_obj
        self.name = name

    def __repr__(self):
        return self.name

    def __call__(self, *args, **kw):
        # Delegate to the owning API object, keeping the method name bound.
        return self.shapelink_obj.call_method(self.name, *args, **kw)
Used by Shapelink API object to generate methods for all Shapelink API calls
6259904221bff66bcd723f4c
class Die:
    """A die that shows a random face from *possible_values*.

    The face is chosen at construction and changed only via :meth:`roll`;
    direct assignment to ``value`` is rejected.
    """

    def __init__(self, possible_values: Sequence) -> None:
        self._all_values = possible_values
        # Delegate the initial throw to roll() so there is one choice path.
        self.roll()

    @property
    def value(self):
        """The currently showing face (read-only)."""
        return self._value

    @value.setter
    def value(self, _):
        raise ValueError("You must roll the die to change its value")

    def __str__(self):
        return str(self._value)

    def roll(self):
        """Pick a new random face, store it and return it."""
        self._value = random.choice(self._all_values)
        return self._value
Class Die
62599042c432627299fa4272
class OsDelCommand(TemareCommand):
    """Temare subcommand that removes an operating system type."""

    def __init__(self, base):
        TemareCommand.__init__(self, base)
        # Metadata consumed by the command-line help machinery.
        self.names = ['osdel']
        self.usage = 'OSTYPE'
        self.summary = 'Remove an operating system type'
        self.description = ' OSTYPE Name of the operating system type'

    def do_command(self, args):
        """Delete the OS type named in *args* from the database."""
        ostypeops = dbops.OsTypes()
        ostypeops.delete(args)
Remove an operating system type
6259904230c21e258be99ae8
class TestLexerSimple(BaseTestLexer):
    """Simple testcases for the lexical analyzer."""

    def test_empty(self):
        # Empty input yields no tokens.
        self.run_assert_lexer("", [])

    def test_simple(self):
        input_data = "a 42"
        exp_tokens = [
            self.lex_token('IDENTIFIER', 'a', 1, 0),
            self.lex_token('decimalValue', 42, 1, 2),
        ]
        self.run_assert_lexer(input_data, exp_tokens)

    def test_no_ws_delimiter(self):
        # Tokens need not be whitespace-separated: "0f" splits into 0 and f.
        input_data = "0f"
        exp_tokens = [
            self.lex_token('decimalValue', 0, 1, 0),
            self.lex_token('IDENTIFIER', 'f', 1, 1),
        ]
        self.run_assert_lexer(input_data, exp_tokens)

    def test_ignore_space(self):
        input_data = "a b"
        exp_tokens = [
            self.lex_token('IDENTIFIER', 'a', 1, 0),
            self.lex_token('IDENTIFIER', 'b', 1, 2),
        ]
        self.run_assert_lexer(input_data, exp_tokens)

    def test_ignore_cr(self):
        # Carriage return is treated as ignorable whitespace.
        input_data = "a\rb"
        exp_tokens = [
            self.lex_token('IDENTIFIER', 'a', 1, 0),
            self.lex_token('IDENTIFIER', 'b', 1, 2),
        ]
        self.run_assert_lexer(input_data, exp_tokens)

    def test_ignore_tab(self):
        input_data = "a\tb"
        exp_tokens = [
            self.lex_token('IDENTIFIER', 'a', 1, 0),
            self.lex_token('IDENTIFIER', 'b', 1, 2),
        ]
        self.run_assert_lexer(input_data, exp_tokens)

    def test_invalid_token(self):
        # '%' is not a legal token: the lexer emits an error token and
        # resumes scanning after it.
        input_data = "a%b cd"
        exp_tokens = [
            self.lex_token('IDENTIFIER', 'a', 1, 0),
            self.lex_token('error', '%b cd', 1, 1),
            self.lex_token('IDENTIFIER', 'b', 1, 2),
            self.lex_token('IDENTIFIER', 'cd', 1, 4),
        ]
        self.run_assert_lexer(input_data, exp_tokens)
Simple testcases for the lexical analyzer.
6259904266673b3332c316db
class WSGIGateway_10(WSGIGateway):
    """A Gateway class to interface HTTPServer with WSGI 1.0.x."""

    def get_environ(self):
        """Build the WSGI 1.0 environ dict for the current request."""
        req = self.req
        env = {
            'ACTUAL_SERVER_PROTOCOL': req.server.protocol,
            'PATH_INFO': bton(req.path),
            'QUERY_STRING': bton(req.qs),
            'REMOTE_ADDR': req.conn.remote_addr or '',
            'REMOTE_PORT': str(req.conn.remote_port or ''),
            'REQUEST_METHOD': bton(req.method),
            'REQUEST_URI': bton(req.uri),
            'SCRIPT_NAME': '',
            'SERVER_NAME': req.server.server_name,
            'SERVER_PROTOCOL': bton(req.request_protocol),
            'SERVER_SOFTWARE': req.server.software,
            'wsgi.errors': sys.stderr,
            'wsgi.input': req.rfile,
            'wsgi.multiprocess': False,
            'wsgi.multithread': True,
            'wsgi.run_once': False,
            'wsgi.url_scheme': bton(req.scheme),
            'wsgi.version': (1, 0),
        }
        # A string bind address denotes a UNIX socket: no port, but the peer
        # uid can be recovered from the socket credentials.
        if isinstance(req.server.bind_addr, six.string_types):
            env['SERVER_PORT'] = ''
            env['X_REMOTE_UID'] = self._get_peer_uid()
        else:
            env['SERVER_PORT'] = str(req.server.bind_addr[1])
        # CGI-style header names: 'X-Foo' -> 'HTTP_X_FOO'.
        env.update(
            ('HTTP_' + bton(k).upper().replace('-', '_'), bton(v))
            for k, v in req.inheaders.items()
        )
        # Content-Type/Length are special-cased by WSGI (no HTTP_ prefix).
        ct = env.pop('HTTP_CONTENT_TYPE', None)
        if ct is not None:
            env['CONTENT_TYPE'] = ct
        cl = env.pop('HTTP_CONTENT_LENGTH', None)
        if cl is not None:
            env['CONTENT_LENGTH'] = cl
        if req.conn.ssl_env:
            env.update(req.conn.ssl_env)
        return env

    def _get_peer_uid(self):
        """Return the uid of the peer on a connected UNIX socket
        via the SO_PEERCRED (pid, uid, gid) credentials."""
        creds = self.req.conn.socket.getsockopt(
            socket.SOL_SOCKET, socket.SO_PEERCRED, struct.calcsize('3i')
        )
        pid, uid, gid = struct.unpack('3i', creds)
        return uid
A Gateway class to interface HTTPServer with WSGI 1.0.x.
6259904273bcbd0ca4bcb56c
class Newsletter(models.Model):
    """A basic newsletter with sending defaults, approvers and subscriptions."""
    title = models.CharField(max_length=255, blank=False)
    active = models.BooleanField(blank=False)
    from_email = models.CharField(max_length=255, blank=False, default=getattr(settings, 'NOVA_FROM_EMAIL', ''), help_text=_("The address that issues of this newsletter will be sent from."))
    reply_to_email = models.CharField(max_length=255, blank=True, help_text=_("The reply to address that will be set for all issues of this newsletter."))
    # Whitespace-separated email addresses allowed to approve issues.
    approvers = models.TextField(blank=True, help_text=_("A whitespace separated list of email addresses."))
    default_template = models.CharField(max_length=255, blank=True, help_text=_("The name of a default template to use for issues of this newsletter."))
    default_tracking_domain = models.CharField(max_length=255, blank=True, help_text=_("A domain for which links should be tracked. Used as the default value for the tracking domain field on an issue of this newsletter."))
    created_at = models.DateTimeField(auto_now_add=True)
    subscriptions = models.ManyToManyField(EmailAddress, through='Subscription')

    def save(self, *args, **kwargs):
        # Fall back to the from address when no explicit reply-to is given.
        if not self.reply_to_email:
            self.reply_to_email = self.from_email
        super(Newsletter, self).save(*args, **kwargs)

    @property
    def subscribers(self):
        """Only confirmed subscriptions count as subscribers."""
        return self.subscriptions.filter(confirmed=True)

    def __unicode__(self):
        return u'%s' % self.title
A basic newsletter model. :todo: Change default_template to a TextField?
62599042b57a9660fecd2d5d
class ConcentratorS02(ConcentratorWithMetersWithConcentratorName):
    """Class for a concentrator of report S02."""

    @property
    def meter_class(self):
        # S02 reports use the S02-specific meter implementation.
        return MeterS02
Class for a concentrator of report S02.
6259904282261d6c52730835
class CompactLatticeEncodeMapper(_EncodeMapper, _encode.CompactLatticeEncodeMapper):
    """Arc encoder for an FST over the compact lattice semiring.

    All behaviour comes from the two mixin bases; this class only fixes the
    semiring specialisation.
    """
    pass
Arc encoder for an FST over the compact lattice semiring.
62599042a8ecb033258724f3
class MultiAgentDialogWorld(World):
    """Round-robin world: each agent acts in turn, and every other agent
    observes that action.
    """

    def __init__(self, opt, agents=None, shared=None):
        super().__init__(opt)
        if shared:
            # Rebuild agents from the shared state of another world instance.
            self.agents = create_agents_from_shared(shared['agents'])
        else:
            self.agents = agents
            self.acts = [None] * len(agents)
        # NOTE(review): super().__init__ is invoked a second time here with
        # more arguments — looks redundant with the call above; confirm which
        # signature the base World actually expects before removing either.
        super().__init__(opt, agents, shared)

    def parley(self):
        """Let each agent act once, broadcasting its act to all other agents."""
        acts = self.acts
        for index, agent in enumerate(self.agents):
            acts[index] = agent.act()
            for other_agent in self.agents:
                if other_agent != agent:
                    other_agent.observe(validate(acts[index]))
        self.update_counters()

    def epoch_done(self):
        # The epoch is done as soon as any single agent reports done.
        done = False
        for a in self.agents:
            if a.epoch_done():
                done = True
        return done

    def episode_done(self):
        # Likewise, any agent finishing its episode ends the episode.
        done = False
        for a in self.agents:
            if a.episode_done():
                done = True
        return done

    def report(self, compute_time=False):
        """Report metrics from the first agent, optionally adding timing."""
        metrics = self.agents[0].report()
        if compute_time:
            self.total_exs += metrics['total']
            time_metrics = compute_time_metrics(self, self.opt['max_train_time'])
            metrics.update(time_metrics)
        return metrics

    def shutdown(self):
        """Shut down every agent in the world."""
        for a in self.agents:
            a.shutdown()
Basic world where each agent gets a turn in a round-robin fashion, receiving as input the actions of all other agents since that agent last acted.
6259904229b78933be26aa34
class NewSNRStatistic(Stat):
    """Calculate the NewSNR coincident detection statistic."""

    def single(self, trigs):
        """Single-detector NewSNR values for the given triggers."""
        return get_newsnr(trigs)

    def coinc(self, s0, s1, slide, step):
        """Quadrature sum of two single-detector statistics."""
        return (s0 ** 2. + s1 ** 2.) ** 0.5

    def coinc_multiifo(self, s, slide, step, ):
        """Quadrature sum over the statistics of all detectors in *s*."""
        return sum(x ** 2. for x in s.values()) ** 0.5
Calculate the NewSNR coincident detection statistic
62599042596a897236128f20
class AuthorizationException(TokenException):
    """Thrown when a user has tried to access a page for which they need a
    token.

    BUG FIX: the original ``__init__`` accepted ``*args``/``**kwargs`` but
    called the base constructor with no arguments, silently discarding any
    message passed by the raiser; the arguments are now forwarded.
    """

    def __init__(self, *args, **kwargs):
        super(AuthorizationException, self).__init__(*args, **kwargs)
Thrown when a user has tried to access a page for which they need a token.
625990421d351010ab8f4e01
class Model(object):
    """Builds an early- or late-fusion network over image pairs for
    training/testing, plus image-grid summaries."""

    def __init__(self, model_type, is_train, grid_size):
        # 'early_fusion' selects the early-fusion net; anything else is late fusion.
        self.model_type = model_type
        self.is_train = is_train
        # grid_size**2 images are shown per summary grid.
        self.grid_size = grid_size

    def __call__(self, img_a, img_b, labels):
        """Instantiate the chosen fusion network on the given input tensors."""
        self.img_a = img_a
        self.img_b = img_b
        self.labels = labels
        if self.model_type == 'early_fusion':
            early_fusion = nets.EarlyFusion(name='early_fusion', is_train=self.is_train)
            self.end_points, self.outputs = early_fusion(inputs_a=self.img_a, inputs_b=self.img_b)
            self.var_list = early_fusion.var_list
        else:
            late_fusion = nets.LateFusion(name='late_fusion', is_train=self.is_train)
            self.end_points, self.outputs = late_fusion(inputs_a=self.img_a, inputs_b=self.img_b)
            self.var_list = late_fusion.var_list

    def build(self, img_a, img_b, labels):
        """Build the network and register image-grid summaries for both inputs."""
        self.__call__(img_a, img_b, labels)
        num_images = self.grid_size ** 2
        # assumes img_a is (batch, H, W, C) with C >= 3 — TODO confirm
        image_shape = self.img_a.get_shape().as_list()[1:3]
        summary.image('image_a', eval_utils.image_grid(self.img_a[:num_images, :, :, :3], grid_shape=(self.grid_size, self.grid_size), image_shape=image_shape, num_channels=3), max_outputs=1)
        summary.image('image_b', eval_utils.image_grid(self.img_b[:num_images, :, :, :3], grid_shape=(self.grid_size, self.grid_size), image_shape=image_shape, num_channels=3), max_outputs=1)
Create a model for training/testing
6259904216aa5153ce4017d0
class SplitProduction(Wizard):
    """Wizard that splits a production into several smaller ones."""
    __name__ = 'production.split'
    start = StateView('production.split.start',
        'production_split.split_start_view_form', [
            Button('Cancel', 'end', 'tryton-cancel'),
            Button('Split', 'split', 'tryton-ok', default=True),
        ])
    split = StateTransition()

    def default_start(self, fields):
        # Pre-fill the form with the production's UoM and its category.
        return {
            'uom': self.record.uom.id,
            'uom_category': self.record.uom.category.id,
        }

    def transition_split(self):
        # Perform the split with the user-chosen quantity/UoM/count, then close.
        self.record.split(
            self.start.quantity, self.start.uom, count=self.start.count)
        return 'end'
Split Production
6259904221a7993f00c6724b
class ValidationError(Exception): <NEW_LINE> <INDENT> def __init__(self, message:str): <NEW_LINE> <INDENT> Exception.__init__(self, message)
This is used for the `validate()` method, indicating that validation failed.
62599042e64d504609df9d42
class GouvFrMetricsTest(DBTestMixin, TestCase): <NEW_LINE> <INDENT> settings = GouvFrSettings <NEW_LINE> def test_public_services(self): <NEW_LINE> <INDENT> ps_badge = Badge(kind=PUBLIC_SERVICE) <NEW_LINE> public_services = [ OrganizationFactory(badges=[ps_badge]) for _ in range(2) ] <NEW_LINE> for _ in range(3): <NEW_LINE> <INDENT> OrganizationFactory() <NEW_LINE> <DEDENT> self.assertEqual(PublicServicesMetric().get_value(), len(public_services))
Check metrics
6259904221bff66bcd723f4e
class Speller(Decoder): <NEW_LINE> <INDENT> def __init__(self, conf, output_dim, name=None): <NEW_LINE> <INDENT> self.sample_prob = float(conf['speller_sample_prob']) <NEW_LINE> self.numlayers = int(conf['speller_numlayers']) <NEW_LINE> self.numunits = int(conf['speller_numunits']) <NEW_LINE> self.dropout = float(conf['speller_dropout']) <NEW_LINE> if conf['speller_cell'] == 'gru': <NEW_LINE> <INDENT> self.cell_fn = tf.contrib.rnn.GRUCell <NEW_LINE> <DEDENT> elif conf['speller_cell'] == 'lstm': <NEW_LINE> <INDENT> self.cell_fn = tf.contrib.rnn.BasicLSTMCell <NEW_LINE> <DEDENT> elif conf['speller_cell'] == 'rnn': <NEW_LINE> <INDENT> self.cell_fn = tf.contrib.rnn.BasicRNNCell <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError(conf['speller_cell']) <NEW_LINE> <DEDENT> if conf['speller_activation'] == 'relu': <NEW_LINE> <INDENT> self.activation = tf.nn.relu <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.activation = tf.nn.tanh <NEW_LINE> <DEDENT> super(Speller,self).__init__(conf,output_dim,name) <NEW_LINE> <DEDENT> def decode(self, hlfeat, encoder_inputs, initial_state, first_step, is_training, time_major=False): <NEW_LINE> <INDENT> one_hot_inputs = tf.one_hot(encoder_inputs, self.output_dim, dtype=tf.float32) <NEW_LINE> time_major_inputs = tf.transpose(one_hot_inputs, [1, 0, 2]) <NEW_LINE> input_list = tf.unstack(time_major_inputs) <NEW_LINE> rnn_cell = self.create_rnn(is_training) <NEW_LINE> lf = partial(loop_function, time_major_inputs, self.sample_prob) <NEW_LINE> if time_major: <NEW_LINE> <INDENT> hlfeat = tf.transpose(hlfeat,[1,0,2]) <NEW_LINE> <DEDENT> logit_list, state = tf.contrib.legacy_seq2seq.attention_decoder( decoder_inputs=input_list, initial_state=initial_state, attention_states=hlfeat, cell=rnn_cell, output_size=self.output_dim, loop_function=lf, scope='attention_decoder', initial_state_attention=not first_step) <NEW_LINE> logits = tf.stack(logit_list) <NEW_LINE> if not time_major: <NEW_LINE> <INDENT> logits = 
tf.transpose(logits, [1, 0, 2]) <NEW_LINE> <DEDENT> return logits, state <NEW_LINE> <DEDENT> def create_rnn(self, is_training=False): <NEW_LINE> <INDENT> rnn_cells = [] <NEW_LINE> for _ in range(int(self.numlayers)): <NEW_LINE> <INDENT> rnn_cell = self.cell_fn(self.numunits, activation=self.activation) <NEW_LINE> if self.dropout < 1 and is_training: <NEW_LINE> <INDENT> rnn_cell = tf.contrib.rnn.DropoutWrapper(rnn_cell, output_keep_prob=self.dropout) <NEW_LINE> <DEDENT> rnn_cells.append(rnn_cell) <NEW_LINE> <DEDENT> rnn_cell = tf.contrib.rnn.MultiRNNCell(rnn_cells) <NEW_LINE> return rnn_cell <NEW_LINE> <DEDENT> def zero_state(self, batch_size): <NEW_LINE> <INDENT> return self.create_rnn().zero_state(batch_size, tf.float32)
a speller decoder for the LAS architecture
62599042507cdc57c63a607f
class Tag(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=255) <NEW_LINE> user = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
Tags to be used for a recipe
62599042287bf620b6272eca
class S3EventNeedModel(S3Model): <NEW_LINE> <INDENT> names = ("event_event_need", ) <NEW_LINE> def model(self): <NEW_LINE> <INDENT> if current.deployment_settings.get_event_cascade_delete_incidents(): <NEW_LINE> <INDENT> ondelete = "CASCADE" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ondelete = "SET NULL" <NEW_LINE> <DEDENT> tablename = "event_event_need" <NEW_LINE> self.define_table(tablename, self.event_event_id(ondelete = ondelete), self.event_incident_id(ondelete = "CASCADE"), self.req_need_id(empty = False, ondelete = "CASCADE", ), *s3_meta_fields()) <NEW_LINE> self.configure(tablename, onaccept = lambda form: set_event_from_incident(form, "event_event_need"), ) <NEW_LINE> return {}
Link Events &/or Incidents with Needs
6259904230c21e258be99aea
class __metaclass__(type): <NEW_LINE> <INDENT> def __new__(cls, name, bases, dic): <NEW_LINE> <INDENT> if name == 'mk_obj': <NEW_LINE> <INDENT> return type.__new__(cls, name, bases, dic) <NEW_LINE> <DEDENT> del dic['__module__'] <NEW_LINE> mangling = '_%s__' % name <NEW_LINE> msize = len(mangling) <NEW_LINE> for arg_name in dic.keys(): <NEW_LINE> <INDENT> if arg_name.startswith(mangling): <NEW_LINE> <INDENT> dic[arg_name[msize:] + '?'] = dic.pop(arg_name) <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> descr = dic['__doc__'] <NEW_LINE> return Object(dic, name=name, descr=descr) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return Object(dic, name=name)
Optional fields should start with double underscores Examples -------- Schema initialized using the class inheritance notation >>> from pyson.schema import * >>> class Date(mk_obj): ... year = Int() ... month = Int() ... day = Int() ... __is_end_of_the_world = Bool() >>> Date().is_valid({'year': 2012, 'month': 12, 'day': 12}) True
6259904215baa72349463276
class OneOf(DetectAugment): <NEW_LINE> <INDENT> def __init__(self, transforms, **kwargs): <NEW_LINE> <INDENT> super(OneOf, self).__init__(**kwargs) <NEW_LINE> self.p = 1 <NEW_LINE> if isinstance(transforms[0], DetectAugment): <NEW_LINE> <INDENT> prob = float(1 / len(transforms)) <NEW_LINE> transforms = [(prob, transform) for transform in transforms] <NEW_LINE> <DEDENT> probs, transforms = zip(*transforms) <NEW_LINE> probs, transforms = list(probs), list(transforms) <NEW_LINE> self.probs = probs <NEW_LINE> self.transforms = transforms <NEW_LINE> <DEDENT> def img_aug(self, img: np.ndarray) -> np.ndarray: <NEW_LINE> <INDENT> index = np.random.choice(a=range(len(self.probs)), p=self.probs) <NEW_LINE> img = self.transforms[index].img_aug(img) <NEW_LINE> return img <NEW_LINE> <DEDENT> def aug(self, img: np.ndarray, labels: np.ndarray) -> tuple: <NEW_LINE> <INDENT> index = np.random.choice(a=range(len(self.probs)), p=self.probs) <NEW_LINE> img, labels = self.transforms[index](img, labels) <NEW_LINE> return img, labels
随即一个增强方式进行增强
6259904273bcbd0ca4bcb56f
class Logger: <NEW_LINE> <INDENT> def __init__(self, mode, *files): <NEW_LINE> <INDENT> self.mode = mode <NEW_LINE> self.files = files <NEW_LINE> <DEDENT> def __call__(self, func): <NEW_LINE> <INDENT> def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> to_log = [] <NEW_LINE> if self.mode[0] == '1': <NEW_LINE> <INDENT> to_log.append("\tFunction Name -->> " + func.__code__.co_name) <NEW_LINE> <DEDENT> if self.mode[1] == '1': <NEW_LINE> <INDENT> to_log.append("\tArguments of -->> " + str(args) + " KWARGS -->> " + str(kwargs)) <NEW_LINE> <DEDENT> r = func(*args, **kwargs) <NEW_LINE> if self.mode[2] == '1': <NEW_LINE> <INDENT> to_log.append("\tResult of -->> " + str(r)) <NEW_LINE> <DEDENT> if len(to_log) > 0: <NEW_LINE> <INDENT> for i in self.files: <NEW_LINE> <INDENT> file = open(i, "a") <NEW_LINE> file.write('\n') <NEW_LINE> file.write(str(func) + " log begin") <NEW_LINE> for j in to_log: <NEW_LINE> <INDENT> file.write("\n") <NEW_LINE> file.write(j) <NEW_LINE> <DEDENT> file.write('\n') <NEW_LINE> file.write(str(func) + " log end") <NEW_LINE> file.close() <NEW_LINE> <DEDENT> <DEDENT> return r <NEW_LINE> <DEDENT> return wrapper
Los modos para el logger se definen como 3 bit: El primer es para guardar el nombre de la funcion que se invoca El segundo es para guardar los parametros que se le pasan a la funcion El tercero es para guardar el __str__ del resultado de la funcion
625990421f5feb6acb163ed8
class MailerDeferredEmailsProcessorTestCase(TestCase): <NEW_LINE> <INDENT> longMessage = True <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.normal_messages = mixer.cycle(10).blend( 'mailer.Message', priority=PRIORITY_MEDIUM) <NEW_LINE> <DEDENT> def test_deferred_emails(self): <NEW_LINE> <INDENT> self.assertEqual( deferred_emails()['status'], SERVER_STATUS['OK'], msg='Without deferred emails, the status should be OK.' ) <NEW_LINE> mixer.cycle(1).blend('mailer.Message', priority=PRIORITY_DEFERRED) <NEW_LINE> self.assertEqual( deferred_emails()['status'], SERVER_STATUS['WARNING'], msg='With 1 deferred email, the status should be WARNING.' ) <NEW_LINE> mixer.cycle(9).blend('mailer.Message', priority=PRIORITY_DEFERRED) <NEW_LINE> self.assertEqual( deferred_emails()['status'], SERVER_STATUS['DANGER'], msg='With 10 deferred emails, the status should be DANGER.' )
Test case for the ``deferred_emails`` django-mailer processor.
625990428da39b475be044d3
class MyDecorator(type): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def magic_decorator(mcs, arg=None): <NEW_LINE> <INDENT> def decorator(_func): <NEW_LINE> <INDENT> def wrapper(*a, **ka): <NEW_LINE> <INDENT> ffunc = a[0] <NEW_LINE> mcs._wrapper(ffunc, *a[1:], **ka) <NEW_LINE> return ffunc <NEW_LINE> <DEDENT> return wrapper <NEW_LINE> <DEDENT> if callable(arg): <NEW_LINE> <INDENT> _ = decorator(arg) <NEW_LINE> return _ <NEW_LINE> <DEDENT> _ = decorator <NEW_LINE> return _
Metaclass that provides a decorator able to be invoked both with and without parenthesis. The wrapper function logic should be implemented by the client code.
62599042d164cc617582225c
class AboutController(BaseController): <NEW_LINE> <INDENT> def index(self,id=None): <NEW_LINE> <INDENT> template = "about.html" <NEW_LINE> return render(template,{"names":[id,'Souphaphone','Phathitmyxay']})
Generates error documents as and when they are required. The ErrorDocuments middleware forwards to ErrorController when error related status codes are returned from the application. This behaviour can be altered by changing the parameters to the ErrorDocuments middleware in your config/middleware.py file.
62599042711fe17d825e160f
class SampleDiversityRarefaction(Target): <NEW_LINE> <INDENT> def __init__(self, outdir, sample, samdir, sizes, numsampling, indices, avrdir, workdir): <NEW_LINE> <INDENT> Target.__init__(self) <NEW_LINE> self.outdir = outdir <NEW_LINE> self.sample = sample <NEW_LINE> self.samdir = samdir <NEW_LINE> self.sizes = sizes <NEW_LINE> self.numsampling = numsampling <NEW_LINE> self.indices = indices <NEW_LINE> self.avrdir = avrdir <NEW_LINE> self.workdir = workdir <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> for size in self.sizes: <NEW_LINE> <INDENT> sizedir = os.path.join(self.outdir, "%d" % size) <NEW_LINE> system("mkdir -p %s" % sizedir) <NEW_LINE> for i in xrange(self.numsampling): <NEW_LINE> <INDENT> outfile = os.path.join(sizedir, "%d" % i) <NEW_LINE> workdir = os.path.join(self.workdir, str(size), str(i)) <NEW_LINE> system("mkdir -p %s" % workdir) <NEW_LINE> self.addChildTarget(libsample.SampleAnalysis(self.sample, self.samdir, outfile, sample_sampling_diversity, size, self.indices, workdir)) <NEW_LINE> <DEDENT> <DEDENT> self.setFollowOnTarget(AvrSamplingDiversity(self.outdir, self.indices, self.avrdir, self.workdir))
Perform rarefaction analyses on the specified sample "numsampling" times
62599043097d151d1a2c234d
class Logarithmic1D(Fittable1DModel): <NEW_LINE> <INDENT> amplitude = Parameter(default=1) <NEW_LINE> tau = Parameter(default=1) <NEW_LINE> @staticmethod <NEW_LINE> def evaluate(x, amplitude, tau): <NEW_LINE> <INDENT> return amplitude * np.log(x / tau) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def fit_deriv(x, amplitude, tau): <NEW_LINE> <INDENT> d_amplitude = np.log(x / tau) <NEW_LINE> d_tau = np.zeros(x.shape) - (amplitude / tau) <NEW_LINE> return [d_amplitude, d_tau] <NEW_LINE> <DEDENT> @property <NEW_LINE> def inverse(self): <NEW_LINE> <INDENT> new_amplitude = self.tau <NEW_LINE> new_tau = self.amplitude <NEW_LINE> return Exponential1D(amplitude=new_amplitude, tau=new_tau) <NEW_LINE> <DEDENT> @tau.validator <NEW_LINE> def tau(self, val): <NEW_LINE> <INDENT> if val == 0: <NEW_LINE> <INDENT> raise ValueError("0 is not an allowed value for tau") <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def input_units(self): <NEW_LINE> <INDENT> if self.tau.unit is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return {self.inputs[0]: self.tau.unit} <NEW_LINE> <DEDENT> def _parameter_units_for_data_units(self, inputs_unit, outputs_unit): <NEW_LINE> <INDENT> return {'tau': inputs_unit[self.inputs[0]], 'amplitude': outputs_unit[self.outputs[0]]}
One dimensional logarithmic model. Parameters ---------- amplitude : float, optional tau : float, optional See Also -------- Exponential1D, Gaussian1D
62599043be383301e0254afd
class BObject(BCell): <NEW_LINE> <INDENT> def __init__(self, theX, theY, theName, **kwargs): <NEW_LINE> <INDENT> super(BObject, self).__init__(theX, theY, theName, **kwargs) <NEW_LINE> self.attrs = Attributes() <NEW_LINE> self.walkable = False <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> if 'attrs' in self.__dict__ and attr in self.__dict__['attrs']: <NEW_LINE> <INDENT> return self.attrs[attr].now <NEW_LINE> <DEDENT> return AttributeError <NEW_LINE> <DEDENT> def __setattr__(self, attr, value): <NEW_LINE> <INDENT> if 'attrs' in self.__dict__ and attr in self.__dict__['attrs']: <NEW_LINE> <INDENT> raise AttributeError <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(BObject, self).__setattr__(attr, value) <NEW_LINE> <DEDENT> <DEDENT> def add_attr(self, attr): <NEW_LINE> <INDENT> return self.attrs.add_attr(attr) <NEW_LINE> <DEDENT> def is_object(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def is_in_board(self): <NEW_LINE> <INDENT> return True
BObject class derives from BCell and it implements particular functionality for cells in the OBJECT layer.
6259904345492302aabfd7bf
class Image(Context, TemporaryImage): <NEW_LINE> <INDENT> __tablename__ = 'image' <NEW_LINE> id = Column(Integer, ForeignKey('context.id', ondelete='CASCADE'), primary_key=True) <NEW_LINE> document_id = Column(Integer, ForeignKey('document.id', ondelete='CASCADE')) <NEW_LINE> position = Column(Integer, nullable=False) <NEW_LINE> url = Column(String) <NEW_LINE> document = relationship('Document', backref=backref('images', order_by=position, cascade='all, delete-orphan'), foreign_keys=document_id) <NEW_LINE> __table_args__ = ( UniqueConstraint(document_id, position), ) <NEW_LINE> __mapper_args__ = { 'polymorphic_identity': 'image', } <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "Image(Doc: %s, Position: %s, Url: %s)" % (self.document.name.encode('utf-8'), self.position, self.url) <NEW_LINE> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> return self.__repr__() > other.__repr__()
A span of characters, identified by Context id and character-index start, end (inclusive). char_offsets are **relative to the Context start**
625990431d351010ab8f4e03
class StaticSourceNAT(NATElement): <NEW_LINE> <INDENT> typeof = "static_src_nat"
Source NAT defines the available options for configuration. This is typically used for outbound traffic where you need to hide the original source address. Example of changing existing source NAT rule to use a different source NAT address:: for rule in policy.fw_ipv4_nat_rules.all(): if rule.name == 'sourcenat': rule.static_src_nat.translated_value = '10.10.50.50' rule.save()
6259904315baa72349463277
class DQN(nn.Module): <NEW_LINE> <INDENT> def __init__(self, args, state_size, action_size, fc1_units=64, fc2_units=64): <NEW_LINE> <INDENT> super(DQN, self).__init__() <NEW_LINE> self.seed = torch.manual_seed(args.seed) <NEW_LINE> self.fc1 = nn.Linear(state_size, fc1_units) <NEW_LINE> self.fc2 = nn.Linear(fc1_units, fc2_units) <NEW_LINE> self.fc3 = nn.Linear(fc2_units, action_size) <NEW_LINE> <DEDENT> def forward(self, state): <NEW_LINE> <INDENT> x = F.relu(self.fc1(state)) <NEW_LINE> x = F.relu(self.fc2(x)) <NEW_LINE> return self.fc3(x)
Actor (Policy) Model.
62599043507cdc57c63a6081
class DownTriangle(T): <NEW_LINE> <INDENT> def draw(self, can, x, y): <NEW_LINE> <INDENT> can.polygon(self.line_style, self.fill_style, ((x, y-self.size/2.0), (x-self.size/1.6, y+self.size/2.0), (x+self.size/1.6, y+self.size/2.0)))
Draws a triangle pointing down.
62599043d53ae8145f919743
class CodeRepresentationError(Exception): <NEW_LINE> <INDENT> pass
Type of error occurring when there is a problem getting the code-representation of a value.
6259904363b5f9789fe86451
class OutputPluginDescriptor(rdfvalue.RDFProtoStruct): <NEW_LINE> <INDENT> protobuf = output_plugin_pb2.OutputPluginDescriptor <NEW_LINE> def GetPluginClass(self): <NEW_LINE> <INDENT> if self.plugin_name: <NEW_LINE> <INDENT> plugin_cls = OutputPlugin.classes.get(self.plugin_name) <NEW_LINE> if plugin_cls is None: <NEW_LINE> <INDENT> raise KeyError("Unknown output plugin %s" % self.plugin_name) <NEW_LINE> <DEDENT> return plugin_cls <NEW_LINE> <DEDENT> <DEDENT> def GetPluginArgsClass(self): <NEW_LINE> <INDENT> plugin_cls = self.GetPluginClass() <NEW_LINE> if plugin_cls: <NEW_LINE> <INDENT> return plugin_cls.args_type <NEW_LINE> <DEDENT> <DEDENT> def GetPluginForState(self, plugin_state): <NEW_LINE> <INDENT> cls = self.GetPluginClass() <NEW_LINE> return cls(None, state=plugin_state) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> result = self.plugin_name <NEW_LINE> if self.plugin_args: <NEW_LINE> <INDENT> result += " <%s>" % utils.SmartStr(self.plugin_args) <NEW_LINE> <DEDENT> return result
An rdfvalue describing the output plugin to create.
62599043004d5f362081f958
class DeleteProcedureTemplateResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RequestId = params.get("RequestId")
DeleteProcedureTemplate response structure.
625990438e71fb1e983bcdb4
class AsyncChannel: <NEW_LINE> <INDENT> _resolver_configured = False <NEW_LINE> @classmethod <NEW_LINE> def _config_resolver(cls, num_threads=10): <NEW_LINE> <INDENT> import salt.ext.tornado.netutil <NEW_LINE> salt.ext.tornado.netutil.Resolver.configure( "salt.ext.tornado.netutil.ThreadedResolver", num_threads=num_threads ) <NEW_LINE> cls._resolver_configured = True
Parent class for Async communication channels
6259904373bcbd0ca4bcb570
class Auth(Structure): <NEW_LINE> <INDENT> __slots__ = ('data', ) <NEW_LINE> _format = "<128s" <NEW_LINE> op_code = Operations.AUTH <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f"Auth(op_code={self.op_code}, data={self.data})"
Authentication Request from an Agent to server!
6259904382261d6c52730837
class AzureDataLakeHook(BaseHook): <NEW_LINE> <INDENT> def __init__(self, azure_data_lake_conn_id='azure_data_lake_default'): <NEW_LINE> <INDENT> self.conn_id = azure_data_lake_conn_id <NEW_LINE> self.connection = self.get_conn() <NEW_LINE> <DEDENT> def get_conn(self): <NEW_LINE> <INDENT> conn = self.get_connection(self.conn_id) <NEW_LINE> service_options = conn.extra_dejson <NEW_LINE> self.account_name = service_options.get('account_name') <NEW_LINE> adlCreds = lib.auth(tenant_id=service_options.get('tenant'), client_secret=conn.password, client_id=conn.login) <NEW_LINE> adlsFileSystemClient = core.AzureDLFileSystem(adlCreds, store_name=self.account_name) <NEW_LINE> adlsFileSystemClient.connect() <NEW_LINE> return adlsFileSystemClient <NEW_LINE> <DEDENT> def check_for_file(self, file_path): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> files = self.connection.glob(file_path, details=False, invalidate_cache=True) <NEW_LINE> return len(files) == 1 <NEW_LINE> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def upload_file(self, local_path, remote_path, nthreads=64, overwrite=True, buffersize=4194304, blocksize=4194304): <NEW_LINE> <INDENT> multithread.ADLUploader(self.connection, lpath=local_path, rpath=remote_path, nthreads=nthreads, overwrite=overwrite, buffersize=buffersize, blocksize=blocksize) <NEW_LINE> <DEDENT> def download_file(self, local_path, remote_path, nthreads=64, overwrite=True, buffersize=4194304, blocksize=4194304): <NEW_LINE> <INDENT> multithread.ADLDownloader(self.connection, lpath=local_path, rpath=remote_path, nthreads=nthreads, overwrite=overwrite, buffersize=buffersize, blocksize=blocksize) <NEW_LINE> <DEDENT> def list(self, path): <NEW_LINE> <INDENT> if "*" in path: <NEW_LINE> <INDENT> return self.connection.glob(path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.connection.walk(path)
Interacts with Azure Data Lake. Client ID and client secret should be in user and password parameters. Tenant and account name should be extra field as {"tenant": "<TENANT>", "account_name": "ACCOUNT_NAME"}. :param azure_data_lake_conn_id: Reference to the Azure Data Lake connection. :type azure_data_lake_conn_id: str
62599043b57a9660fecd2d61
class PostViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = models.BoastsRoastsModel.objects.all() <NEW_LINE> serializer_class = serializers.PostSerializer <NEW_LINE> @action(detail=False) <NEW_LINE> def boasts(self,request,pk=None): <NEW_LINE> <INDENT> all_boasts = models.BoastsRoastsModel.objects.filter(isboast=True).order_by('-date_created') <NEW_LINE> serializer = self.get_serializer(all_boasts, many=True) <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> @action(detail=False) <NEW_LINE> def roasts(self,request,pk=None): <NEW_LINE> <INDENT> all_roasts = models.BoastsRoastsModel.objects.filter(isboast=False).order_by('-date_created') <NEW_LINE> print(all_roasts) <NEW_LINE> serializer = self.get_serializer(all_roasts, many=True) <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> @action(detail=False) <NEW_LINE> def heighest_rated(self,request,pk=None): <NEW_LINE> <INDENT> all_roasts = models.BoastsRoastsModel.objects.all() <NEW_LINE> sort_post_by_vote = list(all_roasts) <NEW_LINE> sort_post_by_vote = sorted(sort_post_by_vote ,key=lambda a: a.total,reverse=True) <NEW_LINE> serializer = self.get_serializer(sort_post_by_vote, many=True) <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> @action(detail=True,methods=['post']) <NEW_LINE> def upvote(self,request, pk=None): <NEW_LINE> <INDENT> post = models.BoastsRoastsModel.objects.get(id=pk) <NEW_LINE> post.post_upvote = post.post_upvote + 1 <NEW_LINE> post.save() <NEW_LINE> return Response({'status': 'ok'}) <NEW_LINE> <DEDENT> @action(detail=True,methods=['post']) <NEW_LINE> def downvote(self,request ,pk=None): <NEW_LINE> <INDENT> post = models.BoastsRoastsModel.objects.get(id=pk) <NEW_LINE> post.post_downvote = post.post_downvote - 1 <NEW_LINE> post.save() <NEW_LINE> return Response({'status': 'ok'})
API endpoint that allows Posts to be viewed
62599043e76e3b2f99fd9cf1
class Spider(NewsSpider): <NEW_LINE> <INDENT> name = "院系/法政" <NEW_LINE> list_urls = [ "http://www3.ouc.edu.cn/fzxy/xydtmore.aspx?id=1", "http://www3.ouc.edu.cn/fzxy/xydtmore.aspx?id=2", "http://www3.ouc.edu.cn/fzxy/xydtmore.aspx?id=3", "http://www3.ouc.edu.cn/fzxy/xydtmore.aspx?id=4", "http://www3.ouc.edu.cn/fzxy/xydtmore.aspx?id=5", "http://www3.ouc.edu.cn/fzxy/morestu.aspx?id=2", "http://www3.ouc.edu.cn/fzxy/morestu.aspx?id=12", "http://www3.ouc.edu.cn/fzxy/morestu.aspx?id=18", "http://www3.ouc.edu.cn/fzxy/morestu.aspx?id=19", "http://www3.ouc.edu.cn/fzxy/morestu.aspx?id=20", ] <NEW_LINE> list_extract_scope = "//table[@width='95%'][2]//table" <NEW_LINE> list_extract_field = { 'link': ".//@href", 'datetime': ".//td[@align='right']/text()", 'category': "//span[@id='MainContent_Labeltop']//text()", 'title': ".//a/text()", } <NEW_LINE> item_url_pattern = r"http://www3.ouc.edu.cn/fzxy/page(stu)?\.aspx" <NEW_LINE> item_extract_scope = "" <NEW_LINE> item_extract_field = { 'datetime': "//span[contains(@id, 'Labeltime')]//text()", 'title': "//span[contains(@id, 'Labeltitle')]//text()", 'content': "//span[contains(@id, 'Labelcontent')]", } <NEW_LINE> datetime_format = "%Y-%m-%d %H:%M:%S"
法政学院 注意这个网站 page.aspx 和 pagestu.aspx 页面的标签id不完全一样
62599043d53ae8145f919744
class CustomBackend(object): <NEW_LINE> <INDENT> def get_by_secret_key(self,key,password): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(secret_key=key) <NEW_LINE> if password: <NEW_LINE> <INDENT> if user.check_password(password): <NEW_LINE> <INDENT> return user <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return user <NEW_LINE> <DEDENT> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def get_by_email(self,email,password): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(email=email) <NEW_LINE> if password: <NEW_LINE> <INDENT> if user.check_password(password): <NEW_LINE> <INDENT> return user <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return user <NEW_LINE> <DEDENT> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def get_by_phone_number(self,phone_number,password): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(phone_number=phone_number) <NEW_LINE> return (user if user.check_password(password) else None) if password else None <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def authenticate(self, email=None, phone_number=None,secret_key=None,password=None, **kwargs): <NEW_LINE> <INDENT> if email: <NEW_LINE> <INDENT> return self.get_by_email(email, password) <NEW_LINE> <DEDENT> elif phone_number: <NEW_LINE> <INDENT> return self.get_by_phone_number(phone_number,password) <NEW_LINE> <DEDENT> elif secret_key: <NEW_LINE> <INDENT> return self.get_by_secret_key(secret_key, password) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def get_user(self, pk): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return User.objects.get(pk=pk) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> return None
authenticate when given email, phone number or secret key and password
6259904323e79379d538d7e5
class ModuleCreatorWithApps(ModuleCreator): <NEW_LINE> <INDENT> def __init__(self, module_path, area, module_template, **kwargs): <NEW_LINE> <INDENT> if 'app_name' not in kwargs: <NEW_LINE> <INDENT> raise ArgumentError("'app_name' must be provided as keyword " "argument.") <NEW_LINE> <DEDENT> super(ModuleCreatorWithApps, self).__init__( module_path, area, module_template, **kwargs ) <NEW_LINE> self._app_name = kwargs['app_name']
Abstract class for the management of the creation of app-based modules. Attributes: _app_name: The name of the app for the new module. This is a separate folder in each git repository, corresponding to the newly created module. Raises: :class:`~dls_ade.exceptions.ArgumentError`: If 'app_name' not given as a keyword argument
62599043097d151d1a2c234f
class ClassInstance(Base): <NEW_LINE> <INDENT> __tablename__ = 'class_instances' <NEW_LINE> id = sa.Column(sa.Integer, sa.Sequence('class_instance_id_seq'), primary_key=True) <NEW_LINE> class_id = sa.Column(sa.Integer, sa.ForeignKey(Class.id), nullable=False) <NEW_LINE> period_id = sa.Column(sa.Integer, sa.ForeignKey(Period.id), nullable=False) <NEW_LINE> department_id = sa.Column(sa.Integer, sa.ForeignKey(Department.id), nullable=True) <NEW_LINE> year = sa.Column(sa.Integer) <NEW_LINE> information = sa.Column(sa.Text, nullable=True) <NEW_LINE> department = orm.relationship(Department) <NEW_LINE> parent = orm.relationship(Class, back_populates="instances") <NEW_LINE> period = orm.relationship(Period, back_populates="class_instances") <NEW_LINE> file_relations = orm.relationship(ClassFile, back_populates="class_instance") <NEW_LINE> enrollments = orm.relationship("Enrollment", back_populates="class_instance") <NEW_LINE> shifts = orm.relationship("Shift", back_populates="class_instance") <NEW_LINE> files = association_proxy('file_relations', 'file') <NEW_LINE> events = orm.relationship("ClassEvent", back_populates="class_instance") <NEW_LINE> messages = orm.relationship("ClassMessages", order_by="ClassMessages.datetime", back_populates="class_instance") <NEW_LINE> __table_args__ = ( sa.UniqueConstraint('class_id', 'year', 'period_id', name='un_class_instance'),) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{} on period {} of {}".format(self.parent, self.period, self.year) <NEW_LINE> <DEDENT> def serialize(self): <NEW_LINE> <INDENT> data = { 'id': self.id, 'class_id': self.class_id, 'period': self.period_id, 'year': self.year, 'info': None if self.information is None else json.loads(self.information, object_hook=json_util.object_hook), 'department_id': self.department_id, 'events': [event.serialize() for event in self.events], 'enrollments': [enrollment.serialize() for enrollment in self.enrollments], 'shifts': [shift.serialize() for shift in 
self.shifts], 'files': [file.serialize() for file in self.file_relations] } <NEW_LINE> return data
| A ClassInstance is the existence of a :py:class:`Class` with a temporal period associated with it. | There's a lot of redundancy between different ClassInstances of the same :py:class:`Class`, but sometimes the associated information and related teachers change wildly.
625990436fece00bbacccc99
class PageMassChangeForm(SelfHandlingForm): <NEW_LINE> <INDENT> page_id = forms.IntegerField( label=_('Page ID'), widget=forms.widgets.HiddenInput) <NEW_LINE> color_scheme = PageColorSchemeSelectField( label=_('Color Scheme'), required=False ) <NEW_LINE> theme = PageThemeSelectField( label=_('Theme'), required=False ) <NEW_LINE> layout = forms.ChoiceField( label=_('Layout'), choices=BLANK_CHOICE_DASH + list(PAGE_LAYOUT_CHOICES), required=False) <NEW_LINE> depth = forms.IntegerField(label=_('Depth'), initial=1) <NEW_LINE> from_root = forms.BooleanField(label=_('From Root ?'), initial=True) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(PageMassChangeForm, self).__init__(*args, **kwargs) <NEW_LINE> self.helper.layout = Layout( TabHolder( Tab(_('Options'), 'depth', 'page_id', 'from_root', ), Tab(_('Styles'), 'layout', 'theme', 'color_scheme', ), ), ) <NEW_LINE> <DEDENT> def handle(self, request, data): <NEW_LINE> <INDENT> root_page = Page.objects.get(pk=data['page_id']) <NEW_LINE> if data['from_root']: <NEW_LINE> <INDENT> root_page = root_page.get_root() <NEW_LINE> <DEDENT> color_scheme = data.get('color_scheme', None) <NEW_LINE> theme = data.get('theme', None) <NEW_LINE> layout = data.get('layout', None) <NEW_LINE> if color_scheme: <NEW_LINE> <INDENT> root_page.color_scheme = data['color_scheme'] <NEW_LINE> <DEDENT> if layout: <NEW_LINE> <INDENT> root_page.layout = data['layout'] <NEW_LINE> <DEDENT> if theme: <NEW_LINE> <INDENT> root_page.theme = data['theme'] <NEW_LINE> <DEDENT> for page in root_page.get_descendants(): <NEW_LINE> <INDENT> if page.level <= data['depth']: <NEW_LINE> <INDENT> if color_scheme: <NEW_LINE> <INDENT> page.color_scheme = data['color_scheme'] <NEW_LINE> <DEDENT> if layout: <NEW_LINE> <INDENT> page.layout = data['layout'] <NEW_LINE> <DEDENT> if theme: <NEW_LINE> <INDENT> page.theme = data['theme'] <NEW_LINE> <DEDENT> page.save() <NEW_LINE> <DEDENT> <DEDENT> root_page.save() <NEW_LINE> return 
HttpResponseRedirect(root_page.get_absolute_url())
Page Mass Update Form for mass update of page theme, color scheme and layout
625990438c3a8732951f7841
class Gate(UGen): <NEW_LINE> <INDENT> _ordered_input_names = collections.OrderedDict([("source", None), ("trigger", 0)]) <NEW_LINE> _valid_calculation_rates = (CalculationRate.AUDIO, CalculationRate.CONTROL)
Gates or holds. :: >>> source = supriya.ugens.WhiteNoise.ar() >>> trigger = supriya.ugens.Dust.kr(1) >>> gate = supriya.ugens.Gate.ar( ... source=source, ... trigger=trigger, ... ) >>> gate Gate.ar()
6259904382261d6c52730838
class ErrorChainTest(TestCase): <NEW_LINE> <INDENT> def test_direct_loop(self): <NEW_LINE> <INDENT> self.assertEqual( _build_error_chain( "group1", "group1", [], ), ["group1", "group1"], ) <NEW_LINE> <DEDENT> def test_simple_indirect_loop(self): <NEW_LINE> <INDENT> self.assertEqual( _build_error_chain( "group1", "group2", ["group1"], ), ["group1", "group2", "group1"], ) <NEW_LINE> <DEDENT> def test_deep_indirect_loop(self): <NEW_LINE> <INDENT> self.assertEqual( _build_error_chain( "group1", "group3", ["group1", "group2"], ), ["group1", "group2", "group3", "group1"], ) <NEW_LINE> <DEDENT> def test_deep_indirect_inner_loop(self): <NEW_LINE> <INDENT> self.assertEqual( _build_error_chain( "group2", "group3", ["group1", "group2"], ), ["group2", "group3", "group2"], )
Tests blockwart.group._build_error_chain.
6259904373bcbd0ca4bcb572
class MasterLuaHighlighter(MasterHighlighter): <NEW_LINE> <INDENT> extensions = ["lua"] <NEW_LINE> comment = "--" <NEW_LINE> multilineComment = ("--[[", "]]") <NEW_LINE> def getRules(self): <NEW_LINE> <INDENT> return [ [ "[(){}[\]]", QtCore.Qt.darkMagenta, QtCore.Qt.magenta, QtGui.QFont.Bold ], [ r"\b(?:and|break|do|else|elseif|end|for|function|if|in|local|not|or|repeat|return|then|until|while)\b", QtGui.QColor("#4b7029"), QtGui.QColor("#4b7029"), QtGui.QFont.Bold ], [ r"\b(?:true|false|nil|_G|_VERSION)\b", QtGui.QColor("#997500"), QtGui.QColor("#997500"), QtGui.QFont.Bold ], [ r"\b(?:abs|acos|asin|assert|atan|atan2|byte|ceil|char|clock|close|collectgarbage|concat|config|" "coroutine|cos|cosh|cpath|create|date|debug|debug|deg|difftime|dofile|dump|error|execute|exit|exp|" "find|floor|flush|fmod|foreach|foreachi|format|frexp|gcinfo|getenv|getfenv|getfenv|gethook|getinfo|" "getlocal|getmetatable|getmetatable|getn|getregistry|getupvalue|gfind|gmatch|gsub|huge|input|insert|" "io|ipairs|ldexp|len|lines|load|loaded|loaders|loadfile|loadlib|loadstring|log|log10|lower|match|math|" "max|maxn|min|mod|modf|module|newproxy|next|open|os|output|package|pairs|path|pcall|pi|popen|pow|" "preload|print|rad|random|randomseed|rawequal|rawget|rawset|read|remove|remove|rename|rep|require|" "resume|reverse|running|seeall|select|setfenv|setfenv|sethook|setlocal|setlocale|setmetatable|" "setmetatable|setn|setupvalue|sin|sinh|sort|sqrt|status|stderr|stdin|stdout|string|sub|table|tan|tanh|" r"time|tmpfile|tmpname|tonumber|tostring|traceback|type|type|unpack|upper|wrap|write|xpcall|yield)\b", QtGui.QColor("#678CB1"), QtGui.QColor("#678CB1") ], [ r"\b(?:coroutine|debug|io|math|os|package|string|table)\b", QtGui.QColor("#8080FF"), QtGui.QColor("#8080FF") ], [ '(?:[\-+*/%=!<>&|^~]|\.\.)', QtGui.QColor("#990000"), QtGui.QColor("#990000") ], self.ruleNumber, self.ruleDoubleQuote, self.ruleSingleQuote, self.ruleLink, self.ruleComment ]
Lua syntax highlighter.
625990438a349b6b43687531
class RSELimit(BASE, ModelBase): <NEW_LINE> <INDENT> __tablename__ = 'rse_limits' <NEW_LINE> rse_id = Column(GUID()) <NEW_LINE> name = Column(String(255)) <NEW_LINE> value = Column(BigInteger) <NEW_LINE> _table_args = (PrimaryKeyConstraint('rse_id', 'name', name='RSE_LIMITS_PK'), ForeignKeyConstraint(['rse_id'], ['rses.id'], name='RSE_LIMIT_RSE_ID_FK'), )
Represents RSE limits
6259904315baa7234946327b
class Feature(BaseOption): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.saveAsImage = SaveAsImage() <NEW_LINE> self.restore = Restore() <NEW_LINE> self.dataView = DataView() <NEW_LINE> self.dataZoom = DataZoom() <NEW_LINE> self.magicType = MagicType() <NEW_LINE> self.brush = Brush() <NEW_LINE> <DEDENT> @check_args <NEW_LINE> def set_keys(self, *args, **kwargs): <NEW_LINE> <INDENT> pass
This Class Is For ToolBox
62599043d10714528d69f001
@total_ordering <NEW_LINE> @autorepr <NEW_LINE> @autoinit <NEW_LINE> class Card(object): <NEW_LINE> <INDENT> suit_map = ['Clubs','Diamonds', 'Hearts', 'Spades'] <NEW_LINE> rank_map = ['Ace', '2', '3', '4', '5', '6', '7', '8', '9', '10', 'Jack', 'Queen', 'King'] <NEW_LINE> __slots__ = ('suit', 'rank') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return '<%s: %s of %s>' % (self.__class__.__name__, self.rank_map[self.rank], self.suit_map[self.suit]) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Card): return False <NEW_LINE> return self.suit == other.suit and self.rank == other.rank <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Card): <NEW_LINE> <INDENT> raise TypeError("unorderable types, %s < %s" % (type(self).__name__, type(other).__name__)) <NEW_LINE> <DEDENT> if self.rank == other.rank: <NEW_LINE> <INDENT> return self.suit < other.suit <NEW_LINE> <DEDENT> if self.rank == 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if other.rank == 0: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.rank < other.rank
Virtual playing card
6259904373bcbd0ca4bcb574
class JobsHistoryViewSet(generics.ListAPIView, viewsets.GenericViewSet): <NEW_LINE> <INDENT> lookup_field = "uuid" <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> serializer_class = JobsHistorySerializer <NEW_LINE> queryset = JobsHistory.objects.all()
Jobs History ViewSet /api/v1/jobs/history/
6259904307d97122c4217f89
class EventSocketConfig(models.Model): <NEW_LINE> <INDENT> listen_ip = models.IPAddressField() <NEW_LINE> listen_port = models.PositiveIntegerField() <NEW_LINE> password = models.CharField(max_length=25) <NEW_LINE> def form_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> result["listen_ip"] = self.listen_ip <NEW_LINE> result["listen_port"] = self.listen_port <NEW_LINE> result["password"] = self.password <NEW_LINE> return result
<configuration name="event_socket.conf" description="Socket Client"> <settings> <param name="listen-ip" value="127.0.0.1"/> <param name="listen-port" value="8021"/> <param name="password" value="ClueCon"/> </settings> </configuration>
625990438e71fb1e983bcdb7
class UdpReader(UdpHandler): <NEW_LINE> <INDENT> def found_terminator(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def read(self): <NEW_LINE> <INDENT> x = self.ibuffer <NEW_LINE> self.ibuffer = '' <NEW_LINE> return ''.join(x)
This handler does not answer anything. It accumulates data which it receives, i.e. does the same as the original one.
62599043cad5886f8bdc59f2
class NsqLookupdTest(BaseTest): <NEW_LINE> <INDENT> @run_until_complete <NEW_LINE> async def test_ok(self): <NEW_LINE> <INDENT> conn = NsqLookupd('127.0.0.1', 4161, loop=self.loop) <NEW_LINE> res = await conn.ping() <NEW_LINE> self.assertEqual(res, 'OK') <NEW_LINE> await conn.close() <NEW_LINE> <DEDENT> @run_until_complete <NEW_LINE> async def test_info(self): <NEW_LINE> <INDENT> conn = NsqLookupd('127.0.0.1', 4161, loop=self.loop) <NEW_LINE> res = await conn.info() <NEW_LINE> self.assertTrue('version' in res) <NEW_LINE> await conn.close() <NEW_LINE> <DEDENT> @run_until_complete <NEW_LINE> async def test_lookup(self): <NEW_LINE> <INDENT> conn = NsqLookupd('127.0.0.1', 4161, loop=self.loop) <NEW_LINE> res = await conn.lookup('foo') <NEW_LINE> self.assertIn('producers', res) <NEW_LINE> await conn.close() <NEW_LINE> <DEDENT> @run_until_complete <NEW_LINE> async def test_topics(self): <NEW_LINE> <INDENT> conn = NsqLookupd('127.0.0.1', 4161, loop=self.loop) <NEW_LINE> res = await conn.topics() <NEW_LINE> self.assertIn('topics', res) <NEW_LINE> await conn.close() <NEW_LINE> <DEDENT> @run_until_complete <NEW_LINE> async def test_channels(self): <NEW_LINE> <INDENT> conn = NsqLookupd('127.0.0.1', 4161, loop=self.loop) <NEW_LINE> res = await conn.channels('foo') <NEW_LINE> self.assertIn('channels', res) <NEW_LINE> await conn.close() <NEW_LINE> <DEDENT> @run_until_complete <NEW_LINE> async def test_nodes(self): <NEW_LINE> <INDENT> conn = NsqLookupd('127.0.0.1', 4161, loop=self.loop) <NEW_LINE> res = await conn.nodes() <NEW_LINE> self.assertIn('producers', res) <NEW_LINE> await conn.close()
:see: http://nsq.io/components/nsqd.html
625990434e696a045264e796
class PRelu(NeuralLayer): <NEW_LINE> <INDENT> def __init__(self, input_tensor=2): <NEW_LINE> <INDENT> super(PRelu, self).__init__("prelu") <NEW_LINE> self.input_tensor = input_tensor <NEW_LINE> <DEDENT> def prepare(self): <NEW_LINE> <INDENT> self.alphas = self.create_bias(self.output_dim, "alphas") <NEW_LINE> self.register_parameters(self.alphas) <NEW_LINE> if self.input_tensor == 3: <NEW_LINE> <INDENT> self.alphas = self.alphas.dimshuffle('x', 0, 'x') <NEW_LINE> <DEDENT> elif self.input_tensor == 4: <NEW_LINE> <INDENT> self.alphas = self.alphas.dimshuffle('x', 0, 'x', 'x') <NEW_LINE> <DEDENT> <DEDENT> def compute_tensor(self, x): <NEW_LINE> <INDENT> positive_vector = x * (x >= 0) <NEW_LINE> negative_vector = self.alphas * (x * (x < 0)) <NEW_LINE> return positive_vector + negative_vector
Probabilistic ReLU. - http://arxiv.org/pdf/1502.01852v1.pdf
625990430a366e3fb87ddccf
class DiskName(basestring): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_api_name(): <NEW_LINE> <INDENT> return "disk-name"
Disk path name
62599043d164cc6175822261
class SecurityQuestionGateForm(forms.Form): <NEW_LINE> <INDENT> error_css_class = "error" <NEW_LINE> required_css_class = "required" <NEW_LINE> answer_one = forms.CharField( label="", max_length=128 ) <NEW_LINE> answer_two = forms.CharField( label="", max_length=128 ) <NEW_LINE> def __init__(self, user, language, *args, **kwargs): <NEW_LINE> <INDENT> self._user = user <NEW_LINE> language = language <NEW_LINE> question_one = self._user.question_one[language] <NEW_LINE> super(SecurityQuestionGateForm, self).__init__(*args, **kwargs) <NEW_LINE> self.fields["answer_one"].label = question_one <NEW_LINE> if not self._user.answer_two: <NEW_LINE> <INDENT> self.fields["answer_two"].required = False <NEW_LINE> self.fields["answer_two"].widget.is_required = False <NEW_LINE> self.fields["answer_two"].widget = HiddenInput() <NEW_LINE> self.fields["answer_two"].disabled = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> question_two = self._user.question_two[language] <NEW_LINE> self.fields["answer_two"].label = question_two <NEW_LINE> <DEDENT> <DEDENT> def clean(self): <NEW_LINE> <INDENT> cleaned_data = super(SecurityQuestionGateForm, self).clean() <NEW_LINE> answer_one = cleaned_data.get("answer_one") <NEW_LINE> answer_two = cleaned_data.get("answer_two") <NEW_LINE> if not self._user.answer_two: <NEW_LINE> <INDENT> if not self._user.check_answer_one(answer_one): <NEW_LINE> <INDENT> raise ValidationError(_("Incorrect answer provided")) <NEW_LINE> <DEDENT> <DEDENT> elif not all([ self._user.check_answer_one(answer_one), self._user.check_answer_two(answer_two)]): <NEW_LINE> <INDENT> raise ValidationError(_("Incorrect answers provided")) <NEW_LINE> <DEDENT> return cleaned_data
NOTE agreed upon, if user only has one answer, it must be answer_one
62599043b5575c28eb71363e
class Job(metaclass=ABCMeta): <NEW_LINE> <INDENT> def __init__(self, creator: User, command: Command): <NEW_LINE> <INDENT> self._creation_time = Time.now() <NEW_LINE> self._creator = creator <NEW_LINE> self._command = command <NEW_LINE> <DEDENT> @property <NEW_LINE> @abstractmethod <NEW_LINE> def next_execution(self) -> Time: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def __call__(self, *args, **kwargs) -> CommandResult: <NEW_LINE> <INDENT> return self._command(args, kwargs)
This is the baseclass for all Jobs, it specifies some interfaces so business logic can be implemented in the according Subclass
6259904326238365f5fade44
class Solution: <NEW_LINE> <INDENT> def zigzagLevelOrder(self, root: TreeNode) -> List[List[int]]: <NEW_LINE> <INDENT> if root is None: return [] <NEW_LINE> output = [] <NEW_LINE> d = deque([(root, 0)]) <NEW_LINE> while d: <NEW_LINE> <INDENT> current_node, level = d.popleft() <NEW_LINE> if len(output) == level: <NEW_LINE> <INDENT> output.append([]) <NEW_LINE> <DEDENT> output[-1].append(current_node.val) <NEW_LINE> if current_node.left is not None: <NEW_LINE> <INDENT> d.append((current_node.left, level+1)) <NEW_LINE> <DEDENT> if current_node.right is not None: <NEW_LINE> <INDENT> d.append((current_node.right, level+1)) <NEW_LINE> <DEDENT> <DEDENT> for i in range(1, len(output), 2): <NEW_LINE> <INDENT> output[i] = output[i][::-1] <NEW_LINE> <DEDENT> return output
Use DFS to get nodes per level, then reverse lists with odd index. Space : O(log N) ---------------- d : O(log N) Since we use DFS, the deque only carries nodes from at most two levels. The number of nodes per level is O(log N). Time : O(N) ----------- DFS : O(N) All nodes must be visited, which is O(N). Zigzag : O((log N)^2) There are O(log N) lists, and half of them need to be reversed. For each list of length K, O(K) time is needed for list reversal. The biggest list has O(log N) nodes, so O((log N)^2). Runtime: 28 ms / 94.91% Memory Usage: 14.2 MB / 13.00%
6259904323849d37ff8523a5
@admin.register(Referendum) <NEW_LINE> class ReferendumAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ( "title", "question", "creator", "slug", "creation_date", "last_update", "publication_date", "event_start", "duration", "event_end", "is_published", "is_in_progress", "is_over", "nb_votes", "results") <NEW_LINE> list_filter = ("categories", "creation_date", "last_update", "publication_date", "event_start", "duration") <NEW_LINE> search_fields = ("title", "description", "question") <NEW_LINE> inlines = [ChoiceInline, ] <NEW_LINE> def results(self, obj): <NEW_LINE> <INDENT> return format_html("<br>".join([f"{choice.title} : {choice.votes_percentage}%" for choice in sorted( obj.get_results(), key=lambda choice: choice.nb_votes, reverse=True)]))
admin class for Referendum model.
625990431d351010ab8f4e09
class OrgCreateProfileForm(OrgProfileForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = GCIOrganization <NEW_LINE> css_prefix = 'gci_org_page' <NEW_LINE> exclude = PROFILE_EXCLUDE
Django form to create the organization profile.
6259904376d4e153a661dbea
class RoomType(meta.Base, meta.InnoDBMix): <NEW_LINE> <INDENT> __tablename__ = "RoomType" <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> name = Column(String(20)) <NEW_LINE> rooms = relationship("Room", order_by="Room.id", backref="roomType") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "RoomType {0} : {1}".format(self.id, self.name) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.name == other.name <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not(self.name == other.name) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.name < other.name
The Type of Room e.g. bedroom or kitchen :var Integer id: Id :var String name: Name of the Room :var rooms (FK): What Rooms objects are of this type
6259904323e79379d538d7e9
class Identified: <NEW_LINE> <INDENT> __slots__ = 'publicId', 'systemId' <NEW_LINE> def _identified_mixin_init(self, publicId, systemId): <NEW_LINE> <INDENT> self.publicId = publicId <NEW_LINE> self.systemId = systemId <NEW_LINE> <DEDENT> def _get_publicId(self): <NEW_LINE> <INDENT> return self.publicId <NEW_LINE> <DEDENT> def _get_systemId(self): <NEW_LINE> <INDENT> return self.systemId
Mix-in class that supports the publicId and systemId attributes.
625990433eb6a72ae038b94c
class Menu(models.Model): <NEW_LINE> <INDENT> restaurant = models.ForeignKey(Restaurant, related_name='menus', on_delete=models.CASCADE) <NEW_LINE> item = models.ForeignKey(Item, related_name='menus', on_delete=models.CASCADE)
The model class Menu maintains an many to many mapping between Item and Resaurant
62599043287bf620b6272ed2
class LongestIncreasingSubSequenceImplBinarySearch(LongestIncreasingSubSequence): <NEW_LINE> <INDENT> def length_of_lis(self, nums): <NEW_LINE> <INDENT> if not nums: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> lis = [nums[0]] <NEW_LINE> for index, num in enumerate(nums[1:]): <NEW_LINE> <INDENT> if num > lis[-1]: <NEW_LINE> <INDENT> lis.append(num) <NEW_LINE> <DEDENT> elif num <= lis[0]: <NEW_LINE> <INDENT> lis[0] = num <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lis[self.get_first_index_equal_or_larger_than_target(lis, num)] = num <NEW_LINE> <DEDENT> <DEDENT> return len(lis) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_first_index_equal_or_larger_than_target(array, target): <NEW_LINE> <INDENT> start, end = 0, len(array) - 1 <NEW_LINE> while start + 1 < end: <NEW_LINE> <INDENT> mid = (start + end) // 2 <NEW_LINE> if array[mid] < target: <NEW_LINE> <INDENT> start = mid <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> end = mid <NEW_LINE> <DEDENT> <DEDENT> return end
经典的滚动数组应用场景: 因为答案只关心长度而不关心sub-sequence内容,所以可以用滚动数组,永远存储当前lis, 当有新的increasing sub-sequence出现的时候,按大小一个一个插入并覆盖滚动数组,如果lis确实变长了,append到结尾就行 这样就能保证滚动数组的长度一定是lis长度,而内容我们是不关心的 Time: O(NlogN) Space: O(N)
625990436e29344779b0193c
class ReduceLROnPlateauCallback(TrackerCallback): <NEW_LINE> <INDENT> def __init__(self, learn:Learner, monitor:str='valid_loss', mode:str='auto', patience:int=0, factor:float=0.2, min_delta:int=0, min_lr:float=0.001): <NEW_LINE> <INDENT> super().__init__(learn, monitor=monitor, mode=mode) <NEW_LINE> self.patience,self.factor,self.min_delta,self.min_lr = patience,factor,min_delta,min_lr <NEW_LINE> if self.operator == np.less: self.min_delta *= -1 <NEW_LINE> <DEDENT> def on_train_begin(self, **kwargs:Any)->None: <NEW_LINE> <INDENT> self.wait, self.opt = 0, self.learn.opt <NEW_LINE> super().on_train_begin(**kwargs) <NEW_LINE> <DEDENT> def on_epoch_end(self, epoch, **kwargs:Any)->None: <NEW_LINE> <INDENT> current = self.get_monitor_value() <NEW_LINE> if current is None: return <NEW_LINE> if self.operator(current - self.min_delta, self.best): self.best,self.wait = current,0 <NEW_LINE> else: <NEW_LINE> <INDENT> self.wait += 1 <NEW_LINE> if self.wait > self.patience and self.opt.lr > self.min_lr: <NEW_LINE> <INDENT> self.opt.lr *= self.factor <NEW_LINE> self.wait = 0 <NEW_LINE> print(f'Epoch {epoch}: reducing lr to {self.opt.lr}')
A `TrackerCallback` that reduces learning rate when a metric has stopped improving.
62599043d10714528d69f002
@cassiopeia.type.core.common.inheritdocs <NEW_LINE> class Participant(cassiopeia.type.dto.common.CassiopeiaDto): <NEW_LINE> <INDENT> def __init__(self, dictionary): <NEW_LINE> <INDENT> self.championId = dictionary.get("championId", 0) <NEW_LINE> self.highestAchievedSeasonTier = dictionary.get("highestAchievedSeasonTier", "") <NEW_LINE> self.masteries = [(Mastery(m) if not isinstance(m, Mastery) else m) for m in dictionary.get("masteries", []) if m] <NEW_LINE> self.participantId = dictionary.get("participantId", 0) <NEW_LINE> self.runes = [(Rune(r) if not isinstance(r, Rune) else r) for r in dictionary.get("runes", []) if r] <NEW_LINE> self.spell1Id = dictionary.get("spell1Id", 0) <NEW_LINE> self.spell2Id = dictionary.get("spell2Id", 0) <NEW_LINE> val = dictionary.get("stats", None) <NEW_LINE> self.stats = ParticipantStats(val) if val and not isinstance(val, ParticipantStats) else val <NEW_LINE> self.teamId = dictionary.get("teamId", 0) <NEW_LINE> val = dictionary.get("timeline", None) <NEW_LINE> self.timeline = ParticipantTimeline(val) if val and not isinstance(val, ParticipantTimeline) else val
Gets all item IDs contained in this object
6259904373bcbd0ca4bcb576
class printing(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin): <NEW_LINE> <INDENT> optionList = [("cups", "max size (MiB) to collect per cups log file", "", 50)] <NEW_LINE> def setup(self): <NEW_LINE> <INDENT> self.addCopySpecs([ "/etc/cups/*.conf", "/etc/cups/lpoptions", "/etc/cups/ppd/*.ppd"]) <NEW_LINE> self.addCopySpecLimit("/var/log/cups", sizelimit=self.isOptionEnabled("cupslogsize")) <NEW_LINE> self.addCmdOutput("/usr/bin/lpstat -t") <NEW_LINE> self.addCmdOutput("/usr/bin/lpstat -s") <NEW_LINE> self.addCmdOutput("/usr/bin/lpstat -d")
printing related information (cups)
625990438e71fb1e983bcdba
class SoftmaxWithCriterion(Criterion): <NEW_LINE> <INDENT> def __init__(self, ignore_label=None, normalize_mode="VALID", bigdl_type="float"): <NEW_LINE> <INDENT> super(SoftmaxWithCriterion, self).__init__(None, bigdl_type, ignore_label, normalize_mode)
Computes the multinomial logistic loss for a one-of-many classification task, passing real-valued predictions through a softmax to get a probability distribution over classes. It should be preferred over separate SoftmaxLayer + MultinomialLogisticLossLayer as its gradient computation is more numerically stable. :param ignoreLabel: (optional) Specify a label value thatshould be ignored when computing the loss. :param normalizeMode: How to normalize the output loss. >>> softmaxWithCriterion = SoftmaxWithCriterion() creating: createSoftmaxWithCriterion >>> softmaxWithCriterion = SoftmaxWithCriterion(1, "FULL") creating: createSoftmaxWithCriterion
62599043d6c5a102081e3411
class TestChannel(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestChannel, self).setUp() <NEW_LINE> self.test_data_str = open( path.dirname(path.dirname(__file__)) + "/data/search/channel.json" ).read() <NEW_LINE> self.test_data = json.loads(self.test_data_str) <NEW_LINE> <DEDENT> def test_model(self): <NEW_LINE> <INDENT> self.test_data = self.test_data['items'][0] <NEW_LINE> parsed_data = ChannelListItemParser(self.test_data) <NEW_LINE> data = Channel.from_channels_result(parsed_data) <NEW_LINE> self.assertEqual(data.etag, self.test_data['etag']) <NEW_LINE> self.assertEqual(data.kind, self.test_data['kind']) <NEW_LINE> self.assertEqual(data.channel_id, self.test_data['id']['channelId']) <NEW_LINE> self.assertEqual(data.id, self.test_data['id']['channelId']) <NEW_LINE> self.assertEqual(data.title, self.test_data['snippet']['title']) <NEW_LINE> self.assertEqual(data.description, self.test_data['snippet']['description']) <NEW_LINE> self.assertEqual(data.publish_date, self.test_data['snippet']['publishedAt']) <NEW_LINE> self.assertIsNotNone(data.thumbnail) <NEW_LINE> self.assertIsNotNone(data.thumbnail.default, self.test_data['snippet']['thumbnails']['default']) <NEW_LINE> self.assertIsNotNone(data.thumbnail.medium, self.test_data['snippet']['thumbnails']['medium']) <NEW_LINE> self.assertIsNotNone(data.thumbnail.high, self.test_data['snippet']['thumbnails']['high']) <NEW_LINE> <DEDENT> def test_model_search_api_data(self): <NEW_LINE> <INDENT> test_data_str = open( path.dirname(path.dirname(__file__)) + "/data/search/search.json" ).read() <NEW_LINE> test_data = json.loads(test_data_str) <NEW_LINE> self.test_data = filter(lambda item: item['id']['kind'] == KIND_CHANNEL, test_data['items'])[0] <NEW_LINE> parsed_data = SearchResult(self.test_data) <NEW_LINE> data = Channel.from_search_result(parsed_data) <NEW_LINE> self.assertEqual(data.etag, self.test_data['etag']) <NEW_LINE> self.assertEqual(data.kind, self.test_data['id']['kind']) 
<NEW_LINE> self.assertEqual(data.channel_id, self.test_data['id']['channelId']) <NEW_LINE> self.assertEqual(data.id, self.test_data['id']['channelId']) <NEW_LINE> self.assertEqual(data.title, self.test_data['snippet']['title']) <NEW_LINE> self.assertEqual(data.description, self.test_data['snippet']['description']) <NEW_LINE> self.assertEqual(data.publish_date, self.test_data['snippet']['publishedAt']) <NEW_LINE> self.assertIsNotNone(data.thumbnail) <NEW_LINE> self.assertIsNotNone(data.thumbnail.default, self.test_data['snippet']['thumbnails']['default']) <NEW_LINE> self.assertIsNotNone(data.thumbnail.medium, self.test_data['snippet']['thumbnails']['medium']) <NEW_LINE> self.assertIsNotNone(data.thumbnail.high, self.test_data['snippet']['thumbnails']['high'])
Tests for Channels model.
62599043d53ae8145f919749
class ConteudoCreate(CreateView): <NEW_LINE> <INDENT> model = Conteudo <NEW_LINE> form_class = ConteudoForm <NEW_LINE> template_name = 'escola/conteudo/create_conteudo.html' <NEW_LINE> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> if 'pk_parent' in kwargs.keys(): <NEW_LINE> <INDENT> self.parent = kwargs['pk_parent'] <NEW_LINE> <DEDENT> return super(ConteudoCreate, self).dispatch(request, *args, **kwargs) <NEW_LINE> <DEDENT> def get_initial(self, *args, **kwargs): <NEW_LINE> <INDENT> initial = super(ConteudoCreate, self).get_initial() <NEW_LINE> print(kwargs) <NEW_LINE> if hasattr(self, 'parent'): <NEW_LINE> <INDENT> initial['parent'] = self.parent <NEW_LINE> <DEDENT> return initial <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> self.object = form.save(commit=False) <NEW_LINE> self.object.professor = self.request.user.professor <NEW_LINE> logger.debug(form.cleaned_data) <NEW_LINE> self.object.save() <NEW_LINE> return HttpResponseRedirect(self.get_success_url())
View para criar um Conteudo. Dispatch Args: pk_parent : int - opicional
6259904350485f2cf55dc271
class PluginError(RepositoryError): <NEW_LINE> <INDENT> pass
Indicates an error related to a plugin.
62599043b57a9660fecd2d68
class TagsView(ProtectedRequestHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> self.render_to_response(template_file='tags.html', context={'title': 'Tags'})
Returns a panel for monitoring groups of devices & users
62599043596a897236128f25
class Adaptor (base.Base): <NEW_LINE> <INDENT> def __init__ (self) : <NEW_LINE> <INDENT> base.Base.__init__ (self, _ADAPTOR_INFO, _ADAPTOR_OPTIONS) <NEW_LINE> self._default_contexts = [] <NEW_LINE> self._have_defaults = False <NEW_LINE> <DEDENT> def sanity_check (self) : <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _get_default_contexts (self) : <NEW_LINE> <INDENT> if not self._have_defaults : <NEW_LINE> <INDENT> import glob <NEW_LINE> candidate_certs = glob.glob ("%s/.ssh/*" % os.environ['HOME']) <NEW_LINE> for key in candidate_certs : <NEW_LINE> <INDENT> if os.path.isdir (key) : <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> elif key.endswith ('.pub') : <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> elif key.endswith ('.pem') : <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> pub = "%s.pub" % key <NEW_LINE> <DEDENT> if not os.path.exists (key ) or not os.path.isfile (key ) : <NEW_LINE> <INDENT> self._logger.info ("ignore ssh key at %s (no private key: %s)" % (key, key)) <NEW_LINE> continue <NEW_LINE> <DEDENT> if not os.path.exists (pub) or not os.path.isfile (pub) : <NEW_LINE> <INDENT> self._logger.info ("ignore ssh key at %s (no public key: %s)" % (key, pub)) <NEW_LINE> continue <NEW_LINE> <DEDENT> try : <NEW_LINE> <INDENT> fh_key = open (key ) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self._logger.info ("ignore ssh key at %s (key not readable: %s)" % (key, e)) <NEW_LINE> continue <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> fh_key .close () <NEW_LINE> <DEDENT> try : <NEW_LINE> <INDENT> fh_pub = open (pub ) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self._logger.info ("ignore ssh key at %s (public key %s not readable: %s)" % (key, pub, e)) <NEW_LINE> continue <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> fh_pub .close () <NEW_LINE> <DEDENT> import subprocess <NEW_LINE> if not subprocess.call (["sh", "-c", "grep ENCRYPTED %s > /dev/null" % key]) : <NEW_LINE> <INDENT> 
self._logger.warning ("ignore ssh key at %s (requires passphrase)" % key) <NEW_LINE> continue <NEW_LINE> <DEDENT> c = api.Context ('ssh') <NEW_LINE> c.user_key = key <NEW_LINE> c.user_cert = pub <NEW_LINE> self._default_contexts.append (c) <NEW_LINE> self._logger.info ("default ssh key at %s" % key) <NEW_LINE> <DEDENT> self._have_defaults = True <NEW_LINE> <DEDENT> return self._default_contexts
This is the actual adaptor class, which gets loaded by SAGA (i.e. by the SAGA engine), and which registers the CPI implementation classes which provide the adaptor's functionality.
6259904326238365f5fade46
class Filter: <NEW_LINE> <INDENT> def __init__(self, name, window_size=1, precision=None, entity=None): <NEW_LINE> <INDENT> if isinstance(window_size, int): <NEW_LINE> <INDENT> self.states = deque(maxlen=window_size) <NEW_LINE> self.window_unit = WINDOW_SIZE_UNIT_NUMBER_EVENTS <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.states = deque(maxlen=0) <NEW_LINE> self.window_unit = WINDOW_SIZE_UNIT_TIME <NEW_LINE> <DEDENT> self.precision = precision <NEW_LINE> self._name = name <NEW_LINE> self._entity = entity <NEW_LINE> self._skip_processing = False <NEW_LINE> self._window_size = window_size <NEW_LINE> <DEDENT> @property <NEW_LINE> def window_size(self): <NEW_LINE> <INDENT> return self._window_size <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def skip_processing(self): <NEW_LINE> <INDENT> return self._skip_processing <NEW_LINE> <DEDENT> def _filter_state(self, new_state): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def filter_state(self, new_state): <NEW_LINE> <INDENT> filtered = self._filter_state(FilterState(new_state)) <NEW_LINE> filtered.set_precision(self.precision) <NEW_LINE> self.states.append(copy(filtered)) <NEW_LINE> new_state.state = filtered.state <NEW_LINE> return new_state
Filter skeleton. Args: window_size (int): size of the sliding window that holds previous values precision (int): round filtered value to precision value entity (string): used for debugging only
6259904345492302aabfd7c7
class MemoryStorage(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._results = [] <NEW_LINE> <DEDENT> def add_test_result(self, result): <NEW_LINE> <INDENT> required = {'name', 'version', 'env', 'pytest', 'status'} <NEW_LINE> if not required.issubset(result): <NEW_LINE> <INDENT> raise TypeError('Invalid keys given: %s' % result.keys()) <NEW_LINE> <DEDENT> for index, existing_result in enumerate(self._results): <NEW_LINE> <INDENT> if (existing_result['name'] == result['name'] and existing_result['version'] == result['version'] and existing_result['env'] == result['env'] and existing_result['pytest'] == result['pytest']): <NEW_LINE> <INDENT> self._results[index] = result <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._results.append(result) <NEW_LINE> <DEDENT> <DEDENT> def get_all_results(self): <NEW_LINE> <INDENT> return self._results <NEW_LINE> <DEDENT> def get_test_results(self, name, version): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for entry in self._results: <NEW_LINE> <INDENT> if entry['name'] == name and entry['version'] == version: <NEW_LINE> <INDENT> result.append(entry) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def drop_all(self): <NEW_LINE> <INDENT> self._results[:] = []
Mock class that simulates a PlugsStorage instance. This class simply holds the values in memory, and is used by TestView as a mock to the real storage class, allowing the view to be tested without a database.
62599043d4950a0f3b1117b7
class general: <NEW_LINE> <INDENT> __order__ = ['name', 'filename', 'browse_directory'] <NEW_LINE> label = 'General' <NEW_LINE> stock_id = gtk.STOCK_PREFERENCES <NEW_LINE> class name: <NEW_LINE> <INDENT> label = 'Project Name' <NEW_LINE> rtype = types.string <NEW_LINE> default = 'unnamed' <NEW_LINE> <DEDENT> class filename: <NEW_LINE> <INDENT> label = 'Project file' <NEW_LINE> rtype = types.file <NEW_LINE> default = '' <NEW_LINE> <DEDENT> class browse_directory: <NEW_LINE> <INDENT> rtype = types.directory <NEW_LINE> label = 'Last Browsed Directory' <NEW_LINE> default = '/'
General options relating to the project
62599043a79ad1619776b36c
class Meta(BaseTable.Meta): <NEW_LINE> <INDENT> model = CommandToken <NEW_LINE> fields = ("pk", "platform", "token", "comment", "actions") <NEW_LINE> default_columns = ("pk", "platform", "comment", "actions")
Metaclass attributes of CommandTokenTable.
6259904391af0d3eaad3b111
class WidgetAdminRoles(object): <NEW_LINE> <INDENT> widget_name = url.Widget.ROLES
Locators for Roles widget on Admin Dashboard.
6259904315baa7234946327f
class ServerSideParamMatchingTests(TestWithScenarios, TestCase): <NEW_LINE> <INDENT> scenarios = [ ('should work', dict(key='keyname', value='value', result=True)), ('invalid key', dict(key='k e', value='value', result=False)), ('string value', dict(key='key', value='v e', result=True)), ('string value2', dict(key='key', value='v?e', result=True)), ('string value3', dict(key='key', value='1/2."!@#*&^%', result=True)), ('bool value', dict(key='key', value=False, result=True)), ('int value', dict(key='key', value=123, result=True)), ('int value2', dict(key='key', value=-123, result=True)), ('float value', dict(key='key', value=1.0, result=False)), ('dict value', dict(key='key', value={}, result=False)), ('obj value', dict(key='key', value=TestCase, result=False)), ('int overflow 1', dict(key='key', value=-2147483648, result=True)), ('int overflow 2', dict(key='key', value=-2147483649, result=False)), ('int overflow 3', dict(key='key', value=2147483647, result=True)), ('int overflow 4', dict(key='key', value=2147483648, result=False)), ('unicode string', dict(key='key', value='H\u2026i', result=False)), ] <NEW_LINE> def test_valid_server_side_param(self): <NEW_LINE> <INDENT> self.assertEqual( xpathselect._is_valid_server_side_filter_param( self.key, self.value ), self.result )
Tests for the server side matching decision function.
6259904316aa5153ce4017da
class Todo(models.Model): <NEW_LINE> <INDENT> add_date = models.DateTimeField() <NEW_LINE> text = models.CharField(max_length=150) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Todo' <NEW_LINE> verbose_name_plural = 'Todos' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.text
Model definition for Todo.
625990433eb6a72ae038b94e