Dataset columns:

  Column     Type    Length
  code       string  4 to 4.48k characters
  docstring  string  1 to 6.45k characters
  _id        string  24 characters
class Tape:
    def __init__(self):
        self.reset()

    def inc_val(self):
        self.cells[self.pointer] += 1

    def dec_val(self):
        self.cells[self.pointer] -= 1

    def move_right(self):
        self.pointer += 1
        if self.pointer == len(self.cells):
            self.cells.append(0)

    def move_left(self):
        if self.pointer == 0:
            raise IndexError("Cannot move past the start of the tape")
        self.pointer -= 1

    def get_val(self):
        return self.cells[self.pointer]

    def set_val(self, val):
        self.cells[self.pointer] = val

    def reset(self):
        self.cells = [0]
        self.pointer = 0
A generic implementation of a record tape for a Turing Machine. It's bounded on the left side and unbounded on the right side. It stores only Python integers.
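A minimal usage sketch (assuming the Tape class above is importable as-is):

tape = Tape()
tape.inc_val()          # cell 0 -> 1
tape.move_right()       # grows the tape with a fresh 0 cell
tape.set_val(5)
assert tape.get_val() == 5
tape.move_left()
assert tape.get_val() == 1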
6259906529b78933be26ac6b
class Normalize(object):
    def __init__(self, vmin=None, vmax=None, clip=False):
        self.vmin = vmin
        self.vmax = vmax
        self.clip = clip

    @staticmethod
    def process_value(value):
        is_scalar = not cbook.iterable(value)
        if is_scalar:
            value = [value]
        dtype = np.min_scalar_type(value)
        dtype = (np.float32 if dtype.itemsize <= 2
                 else np.promote_types(dtype, float))
        result = np.ma.array(value, dtype=dtype, copy=True)
        return result, is_scalar

    def __call__(self, value, clip=None):
        if clip is None:
            clip = self.clip
        result, is_scalar = self.process_value(value)
        self.autoscale_None(result)
        vmin, vmax = self.vmin, self.vmax
        if vmin == vmax:
            result.fill(0)
        elif vmin > vmax:
            raise ValueError("minvalue must be less than or equal to maxvalue")
        else:
            if clip:
                mask = np.ma.getmask(result)
                result = np.ma.array(np.clip(result.filled(vmax), vmin, vmax),
                                     mask=mask)
            resdat = np.asarray(result.data)
            resdat -= vmin
            resdat /= (vmax - vmin)
            result = np.ma.array(resdat, mask=result.mask, copy=False)
        if is_scalar:
            result = result[0]
        return result

    def inverse(self, value):
        if not self.scaled():
            raise ValueError("Not invertible until scaled")
        vmin, vmax = self.vmin, self.vmax
        if cbook.iterable(value):
            val = np.ma.asarray(value)
            return vmin + val * (vmax - vmin)
        else:
            return vmin + value * (vmax - vmin)

    def autoscale(self, A):
        self.vmin = np.ma.min(A)
        self.vmax = np.ma.max(A)

    def autoscale_None(self, A):
        if self.vmin is None and np.size(A) > 0:
            self.vmin = np.ma.min(A)
        if self.vmax is None and np.size(A) > 0:
            self.vmax = np.ma.max(A)

    def scaled(self):
        return (self.vmin is not None and self.vmax is not None)
A class which, when called, can normalize data into the ``[0.0, 1.0]`` interval.
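A brief usage sketch of the normalization described above (assumes numpy and matplotlib.cbook are available, as the class itself requires):

norm = Normalize(vmin=0.0, vmax=10.0)
print(norm(2.5))                # 0.25
print(norm([0.0, 5.0, 10.0]))   # masked array [0.0, 0.5, 1.0]
print(norm.inverse(0.5))        # 5.0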
625990657cff6e4e811b7195
class Payment(Operation):
    _XDR_OPERATION_TYPE: stellar_xdr.OperationType = stellar_xdr.OperationType.PAYMENT

    def __init__(
        self,
        destination: str,
        asset: Asset,
        amount: Union[str, Decimal],
        source: str = None,
    ) -> None:
        super().__init__(source)
        check_amount(amount)
        check_ed25519_public_key(destination)
        self._destination: str = destination
        self._destination_muxed: Optional[stellar_xdr.MuxedAccount] = None
        self.asset: Asset = asset
        self.amount: Union[str, Decimal] = amount

    @property
    def destination(self) -> str:
        return self._destination

    @destination.setter
    def destination(self, value: str):
        check_ed25519_public_key(value)
        self._destination_muxed = None
        self._destination = value

    def _to_operation_body(self) -> stellar_xdr.OperationBody:
        asset = self.asset.to_xdr_object()
        if self._destination_muxed is not None:
            destination = self._destination_muxed
        else:
            destination = Keypair.from_public_key(self._destination).xdr_muxed_account()
        amount = stellar_xdr.Int64(Operation.to_xdr_amount(self.amount))
        payment_op = stellar_xdr.PaymentOp(destination, asset, amount)
        body = stellar_xdr.OperationBody(
            type=self._XDR_OPERATION_TYPE, payment_op=payment_op
        )
        return body

    @classmethod
    def from_xdr_object(cls, xdr_object: stellar_xdr.Operation) -> "Payment":
        source = Operation.get_source_from_xdr_obj(xdr_object)
        assert xdr_object.body.payment_op is not None
        destination = parse_ed25519_account_id_from_muxed_account_xdr_object(
            xdr_object.body.payment_op.destination
        )
        asset = Asset.from_xdr_object(xdr_object.body.payment_op.asset)
        amount = Operation.from_xdr_amount(xdr_object.body.payment_op.amount.int64)
        op = cls(source=source, destination=destination, asset=asset, amount=amount)
        op._destination_muxed = xdr_object.body.payment_op.destination
        op._source_muxed = Operation.get_source_muxed_from_xdr_obj(xdr_object)
        return op

    def __str__(self):
        return (
            f"<Payment [destination={self.destination}, asset={self.asset}, "
            f"amount={self.amount}, source={self.source}]>"
        )
The :class:`Payment` object, which represents a Payment operation on
Stellar's network. Sends an amount in a specific asset to a destination
account.

Threshold: Medium

:param destination: The destination account ID.
:param asset: The asset to send.
:param amount: The amount to send.
:param source: The source account for the payment. Defaults to the
    transaction's source account.
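A hedged construction sketch in the stellar_sdk style; a freshly generated keypair stands in for a real destination account:

dest = Keypair.random().public_key  # placeholder destination
op = Payment(destination=dest, asset=Asset.native(), amount="100.5")
print(op)  # <Payment [destination=..., asset=..., amount=100.5, source=None]>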
62599065a17c0f6771d5d74d
class GlobalNotification(rdf_structs.RDFProtoStruct):
    protobuf = jobs_pb2.GlobalNotification

    def __init__(self, *args, **kwargs):
        super(GlobalNotification, self).__init__(*args, **kwargs)
        if not self.duration:
            self.duration = rdfvalue.Duration("2w")
        if not self.show_from:
            self.show_from = rdfvalue.RDFDatetime.Now()

    @property
    def hash(self):
        return hash(self)

    @property
    def type_name(self):
        return self.Type.reverse_enum[self.type]
Global notification shown to all the users of GRR.
62599065379a373c97d9a76c
class UnsupportedProxyURI(PublisherError):
    def __str__(self):
        if self.data:
            scheme = urlsplit(self.data, allow_fragments=0)[0]
            return _("The proxy URI '{uri}' uses the unsupported "
                     "scheme '{scheme}'. Currently the only supported "
                     "scheme is http://.").format(
                         uri=self.data, scheme=scheme)
        return _("The specified proxy URI uses an unsupported scheme."
                 " Currently the only supported scheme is: http://.")
Used to indicate that the specified proxy URI is unsupported.
625990659c8ee82313040d2f
class OutputTests(unittest.TestCase):
    def test_mad_dots(self):
        for identifier in ["example", "example.1a", "example.1.2", "example.1-2"]:
            old = SeqRecord(
                Seq("ACGT"),
                id=identifier,
                name=identifier,
                description="mad dots",
                annotations={"molecule_type": "DNA"},
            )
            new = SeqIO.read(StringIO(old.format("gb")), "gb")
            self.assertEqual(old.id, new.id)
            self.assertEqual(old.name, new.name)
            self.assertEqual(old.description, new.description)
            self.assertEqual(old.seq, new.seq)

    def test_seqrecord_default_description(self):
        old = SeqRecord(
            Seq("ACGT"),
            id="example",
            name="short",
            annotations={"molecule_type": "DNA"},
        )
        self.assertEqual(old.description, "<unknown description>")
        txt = old.format("gb")
        self.assertIn("DEFINITION .\n", txt)
        new = SeqIO.read(StringIO(txt), "gb")
        self.assertEqual(old.id, new.id)
        self.assertEqual(old.name, new.name)
        self.assertEqual("", new.description)
        self.assertEqual(old.seq, new.seq)

    def test_000_write_invalid_but_parsed_locus_line(self):
        path = "GenBank/NC_005816.gb"
        with open(path) as handle:
            lines = handle.readlines()
        invalid_line = (
            "LOCUS NC_005816 9609 bp dna circular BCT"
            " 21-JUL-2008\n"
        )
        lines[0] = invalid_line
        fake_handle = StringIO("".join(lines))
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")
            rec = SeqIO.read(fake_handle, "genbank")
            self.assertEqual(len(caught), 1)
            self.assertEqual(caught[0].category, BiopythonParserWarning)
            self.assertEqual(
                str(caught[0].message),
                "Non-upper case molecule type in LOCUS line: dna",
            )
        out_handle = StringIO()
        ret = SeqIO.write([rec], out_handle, "genbank")
        self.assertEqual(ret, 1)
        out_handle.seek(0)
        out_lines = out_handle.readlines()
        self.assertEqual(out_lines[0], invalid_line)

    def test_write_tsa_data_division(self):
        with open("GenBank/tsa_acropora.gb") as infile:
            rec = SeqIO.read(infile, "genbank")
            infile.seek(0)
            first_line = infile.readline()
        outfile = StringIO()
        SeqIO.write([rec], outfile, "genbank")
        outfile.seek(0)
        first_line_written = outfile.readline()
        original_division = first_line.split()[-2]
        written_division = first_line_written.split()[-2]
        self.assertEqual(original_division, written_division)
GenBank output tests.
62599065f548e778e596ccd8
class MixFeatures(FeatureExtractBase):
    def __init__(self, features_list):
        self.features_list = features_list

    def extract(self, instance):
        feature_class_dict = {"ARFeatures": ARFeatures,
                              "FFTFeatures": FFTFeatures,
                              "PLVFeatures": PLVFeatures,
                              "RandomFeatures": RandomFeatures,
                              "SEFeatures": SEFeatures,
                              "LyapunovFeatures": LyapunovFeatures,
                              "StatsFeatures": StatsFeatures}
        extracted_features_list = []
        for feature_string in self.features_list:
            if feature_string['name'] in feature_class_dict:
                kwargs = feature_string['args']
                feature_object = feature_class_dict[feature_string['name']](**kwargs)
                extracted_features_list.append(
                    np.hstack(feature_object.extract(instance)))
            else:
                print("feature not in list !!!")
        return np.hstack(extracted_features_list)
Class to concatenate output of individual feature classes. @author V&J
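A hedged usage sketch: each entry in features_list names one of the feature classes handled in extract() together with its constructor kwargs (the empty args shown are illustrative):

mix = MixFeatures([
    {'name': 'ARFeatures', 'args': {}},
    {'name': 'StatsFeatures', 'args': {}},
])
# features = mix.extract(instance)  # hstacks the individual feature vectors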
625990657047854f46340b02
class SPLexRank(ya_summarizer):
    def __init__(self, q, words):
        self.question = q
        self.words_limit = words
        print(q.get_author())
        nbest_total_words = 0
        nbest = q.get_nbest()
        for ans in nbest:
            content = ans.get_content()
            nbest_total_words += nlp.sentence_length(content)
        # Note: this computed limit is stored as word_limit, while
        # get_sum_sents() reads the words_limit passed to the constructor.
        self.word_limit = min(int(nbest_total_words / 3), 150)
        print("Word limit", self.word_limit)
        print("Question title", self.question.get_title())

    def extract(self):
        title_text = self.question.get_title()
        answer_text = self.question.get_nbest_content()
        print('Ranking sentences by importance; splitting into sentences..')
        self.nlp = NLP()
        sent_tokens = self.nlp.sent_tokenize(answer_text)
        print('Got the sentence list; computing tf-idf..')
        self.N = len(sent_tokens)
        self.tfidf = TFIDF(sent_tokens).matrix
        print('Got the tf-idf matrix; building the graph..')
        nodes = [idx for idx in range(self.N)]
        self.lex_graph = nx.Graph()
        self.lex_graph.add_nodes_from(nodes)
        for i in range(self.N):
            for j in range(self.N):
                sim = self.get_cos(self.tfidf[i], self.tfidf[j])
                self.lex_graph.add_edge(i, j, weight=sim)
        print('Graph built; computing LexRank scores..')
        cal_lexrank = nx.pagerank(self.lex_graph)
        print('Scores computed; building the summary..')
        orders = sorted(cal_lexrank, key=cal_lexrank.get, reverse=True)
        k_th = self.get_sum_sents(sent_tokens, orders)
        str_tmp_list = []
        for sidx in range(k_th):
            str_tmp = sent_tokens[orders[sidx]]
            str_tmp += '[%.4f]' % (cal_lexrank[sidx])
            str_tmp_list.append(str_tmp)
        for i in str_tmp_list:
            print(i)
        self.abstrct_text = ' '.join(
            [sent_tokens[orders[ith]] for ith in range(k_th)])
        print('Summary complete..')
        print("Writing to file")
        wp = self.question.get_author()
        fname = as_res + wp.split("|")[0] + ".res"
        print(fname)
        f = open(fname, "w")
        f.write(self.abstrct_text)
        f.close()
        return fname

    def abstract_output(self, result):
        print('successfully.')

    def get_sum_sents(self, sents, orders):
        total_num = 0
        idx = 0
        while total_num <= self.words_limit and idx < len(sents):
            total_num += len(self.nlp.word_tokenize(sents[orders[idx]]))
            if total_num > self.words_limit:
                break
            idx += 1
        return idx

    def get_cos(self, vec1, vec2):
        return dot(vec1, vec2) / (norm(vec1) * norm(vec2))
tfidf matrix => graph => pagerank => lexrank scores
6259906521bff66bcd7243b4
from typing import Optional


class PavoException(Exception):
    def __init__(self, message: Optional[str] = None):
        super().__init__(message or self.__doc__)
Pavo's BaseException-like Exception class; it uses subclass docstrings to set default error messages.
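A hedged illustration of the docstring-as-message pattern (the FileNotFound subclass is hypothetical):

class FileNotFound(PavoException):
    """The requested file could not be found."""

try:
    raise FileNotFound()
except FileNotFound as err:
    print(err)  # -> The requested file could not be found.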
625990658e71fb1e983bd214
class TestConfiguration(unittest.TestCase):
    def test_defaults(self):
        _, temp_config_path = tempfile.mkstemp()
        self.addCleanup(os.remove, temp_config_path)
        with open(temp_config_path, 'w') as temp_config:
            temp_config.write(yaml.dump({}))
        with mock.patch.dict(
            'os.environ', {'LMODP_CONFIG': temp_config_path}, clear=True
        ):
            import lmod_proxy.config
            imp.reload(lmod_proxy.config)
            import lmod_proxy.web
            imp.reload(lmod_proxy.web)
            self.assertDictContainsSubset(
                lmod_proxy.config.CONFIG_KEYS, dict(lmod_proxy.web.app.config)
            )

    def test_file_config_precedence_(self):
        test_cert = 'testing'
        env_test_cert = 'env_testing'
        _, temp_config_path = tempfile.mkstemp()
        self.addCleanup(os.remove, temp_config_path)
        with open(temp_config_path, 'w') as temp_config:
            temp_config.write(yaml.dump({'LMODP_CERT': test_cert}))
        with mock.patch.dict(
            'os.environ', {'LMODP_CONFIG': temp_config_path}, clear=True
        ):
            import lmod_proxy.config
            imp.reload(lmod_proxy.config)
            self.assertEqual(
                lmod_proxy.config._configure()['LMODP_CERT'], test_cert
            )
        with mock.patch.dict(
            'os.environ',
            {
                'LMODP_CONFIG': temp_config_path,
                'LMODP_CERT': env_test_cert,
            },
            clear=True
        ):
            import lmod_proxy.config
            imp.reload(lmod_proxy.config)
            self.assertEqual(
                lmod_proxy.config._configure()['LMODP_CERT'], env_test_cert
            )

    def test_htpasswd_to_file(self):
        self.addCleanup(os.remove, '.htpasswd')
        with open(get_htpasswd_path()) as htpasswd_file:
            htpasswd = htpasswd_file.read()
        with mock.patch.dict(
            'os.environ', {'LMODP_HTPASSWD': htpasswd}, clear=True
        ):
            import lmod_proxy.config
            imp.reload(lmod_proxy.config)
            self.assertTrue(os.path.isfile('.htpasswd'))
            with open('.htpasswd') as htpasswd_file:
                self.assertEqual(htpasswd, htpasswd_file.read())
            self.assertEqual(
                lmod_proxy.config._configure()['LMODP_HTPASSWD_PATH'],
                os.path.abspath('.htpasswd')
            )

    def test_cert_string_to_file(self):
        self.addCleanup(os.remove, '.cert.pem')
        cert_string = 'hello cert!'
        with mock.patch.dict(
            'os.environ', {'LMODP_CERT_STRING': cert_string}, clear=True
        ):
            import lmod_proxy.config
            imp.reload(lmod_proxy.config)
            self.assertTrue(os.path.isfile('.cert.pem'))
            with open('.cert.pem') as cert_file:
                self.assertEqual(cert_string, cert_file.read())
            self.assertEqual(
                lmod_proxy.config._configure()['LMODP_CERT'],
                os.path.abspath('.cert.pem')
            )
Test out configuration defaults, loading yaml/environ config, etc.
62599065b7558d5895464ad6
class _DirectoryBase(object):
    def walk(self, top=None, class_pattern=None):
        return utils.walk(self, top, class_pattern=class_pattern)

    def __getattr__(self, attr):
        return self.Get(attr)

    def __getitem__(self, name):
        return self.Get(name)

    @wrap_path_handling
    def Get(self, name):
        thing = asrootpy(self.__class__.__bases__[-1].Get(self, name))
        if not thing:
            raise DoesNotExist
        return thing

    @wrap_path_handling
    def GetDirectory(self, name):
        dir = asrootpy(self.__class__.__bases__[-1].GetDirectory(self, name))
        if not dir:
            raise DoesNotExist
        return dir
A mixin (can't stand alone). To be improved.
6259906597e22403b383c65c
class Histogram(object):
    def __init__(self, data, scale=20, formatter=None):
        self.data = data
        self.scale = scale
        self.formatter = formatter or str
        self.max_key_len = max([len(str(key)) for key, count in self.data])
        self.total = sum([count for key, count in self.data])

    @staticmethod
    def FromCountDict(count_dict, scale=20, formatter=None, key_names=None):
        namer = None
        if key_names:
            namer = lambda key: key_names[key]
        else:
            namer = lambda key: key
        hist = [(namer(key), count) for key, count in count_dict.items()]
        return Histogram(hist, scale, formatter)

    @staticmethod
    def FromKeyList(key_list, scale=20, formatter=None, key_names=None):
        count_dict = defaultdict(int)
        for key in key_list:
            count_dict[key] += 1
        return Histogram.FromCountDict(count_dict, scale, formatter, key_names)

    def __str__(self):
        hist_lines = []
        hist_bar = '|'
        for key, count in self.data:
            if self.total:
                bar_len = count * self.scale // self.total
                hist_bar = '|%s|' % ('#' * bar_len).ljust(self.scale)
            line = '%s %s %s' % (
                str(key).ljust(self.max_key_len),
                hist_bar,
                self.formatter(count))
            percent_str = format_utils.NumToPercent(count, self.total)
            if percent_str:
                line += ' (%s)' % percent_str
            hist_lines.append(line)
        return '\n'.join(hist_lines)

    def GetKeys(self):
        return [key for key, _ in self.data]
A histogram generating object.

This object serves the sole purpose of formatting (key, val) pairs as an
ASCII histogram, including bars and percentage markers, and taking care of
label alignment, scaling, etc. In addition to the standard __init__
interface, two static methods are provided for conveniently converting data
in different formats into a histogram. Histogram generation is exported via
its __str__ method, and looks as follows:

  Yes |################    | 5 (83.3%)
  No  |###                 | 1 (16.6%)

TODO(garnold) we may want to add actual methods for adding data or tweaking
the output layout and formatting. For now, though, this is fine.
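A hedged usage sketch of the static constructors described above (the exact percentage text depends on format_utils.NumToPercent):

hist = Histogram.FromKeyList(['yes', 'yes', 'no', 'yes'], scale=20)
print(hist)
# yes |###############     | 3 (75%)
# no  |#####               | 1 (25%)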
62599065097d151d1a2c27bb
class TCPServer(object):
    is_workflow = True

    def __init__(self, network_endpoint):
        self.network_endpoint = network_endpoint
        self.established_connections = set()

    def wait_for_syns(self):
        while True:
            source, message = self.poll_interface()
            # String comparison must use ==, not identity (`is`).
            if message is not None and message == "SYN":
                self.send_syn_ack(source)
                self.wait_for_ack()

    def wait_for_ack(self):
        while True:
            source_address, message = self.poll_interface()
            if message == "ACK":
                self.established_connections.add(source_address)
                break

    @default_cost(1)
    def poll_interface(self):
        return self.network_endpoint.read_message()

    @default_cost(1)
    def send_syn_ack(self, source):
        self.network_endpoint.send_message(source, "SYN_ACK")
Models the tasks and state of a TCP server.
625990654f88993c371f10c6
class EMM_STATUS(Layer3NASEMM):
    constructorList = [ie for ie in EMMHeader(Type=96)] + [
        Int('EMMCause', Pt=0, Type='uint8', Dict=EMMCause_dict)]
Net <-> UE Local
62599065462c4b4f79dbd155
class PointerType(ScalarType):
    def __init__(self, datatype, label_get):
        super().__init__('pointer', constant=None)
        self.datatype = datatype
        self.label_get = label_get

    def from_data(self, rom, offset, project, context, parents):
        data_offset = super().from_data(rom, offset, project, context, parents)
        if data_offset is None:
            return None
        datatype = project.model[self.datatype]
        return datatype.from_data(rom, data_offset, project, context, parents)

    def to_assembly(self, data, project, context, parents, label=None,
                    alignment=None, global_label=None):
        if data is None:
            return super().to_assembly(data, project, context, parents,
                                       label=label, alignment=alignment,
                                       global_label=global_label)
        data_label, data_alignment, data_global_label = self.label_get(
            project, context, parents)
        assembly, additional_blocks = super().to_assembly(
            data_label, project, context, parents, label=label,
            alignment=alignment, global_label=global_label)
        datatype = project.model[self.datatype]
        data_assembly, data_additional_blocks = datatype.to_assembly(
            data, project, context, parents, label=data_label,
            alignment=data_alignment, global_label=data_global_label)
        additional_blocks.append(data_assembly)
        additional_blocks += data_additional_blocks
        return assembly, additional_blocks

    def __call__(self, project, context, parents):
        datatype = project.model[self.datatype]
        return datatype(project, context, parents)

    def get_constants(self, data, project, context, parents):
        datatype = project.model[self.datatype]
        if data is None:
            return set()
        return datatype.get_constants(data, project, context, parents)
Class to model pointers.
62599065f548e778e596ccd9
class LocationEvent(BaseEvent):
    event = "location"
    latitude = FloatField("Latitude", 0.0)
    longitude = FloatField("Longitude", 0.0)
    precision = FloatField("Precision", 0.0)
Location reporting event. For details, see https://developers.weixin.qq.com/doc/offiaccount/Message_Management/Receiving_event_pushes.html
62599065baa26c4b54d509f4
import torch.nn as nn
import torch.nn.functional as F


class DistillKL(nn.Module):
    def __init__(self, T):
        super(DistillKL, self).__init__()
        self.T = T

    def forward(self, y_s, y_t):
        p_s = F.log_softmax(y_s / self.T, dim=1)
        p_t = F.softmax(y_t / self.T, dim=1)
        # Scale by T**2 so gradient magnitudes stay comparable across temperatures.
        loss = F.kl_div(p_s, p_t, size_average=False) * (self.T ** 2) / y_s.shape[0]
        return loss
KL divergence for distillation
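A hedged usage sketch of the loss above (assumes PyTorch; size_average is deprecated in newer releases but still accepted):

import torch

criterion = DistillKL(T=4.0)
student_logits = torch.randn(8, 10)
teacher_logits = torch.randn(8, 10)
loss = criterion(student_logits, teacher_logits)  # scaled by T**2, averaged over the batch
print(loss.item())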
62599065d486a94d0ba2d718
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(not HAS_PYVMOMI, 'The \'pyvmomi\' library is missing')
class GetPropertiesOfManagedObjectTestCase(TestCase):
    def setUp(self):
        patches = (
            ('salt.utils.vmware.get_service_instance_from_managed_object',
             MagicMock()),
            ('salt.utils.vmware.get_mors_with_properties',
             MagicMock(return_value=[MagicMock()]))
        )
        for mod, mock in patches:
            patcher = patch(mod, mock)
            patcher.start()
            self.addCleanup(patcher.stop)
        self.mock_si = MagicMock()
        self.fake_mo_ref = vim.ManagedEntity('Fake')
        self.mock_props = MagicMock()
        self.mock_item_name = {'name': 'fake_name'}
        self.mock_item = MagicMock()

    def test_get_service_instance_from_managed_object_call(self):
        mock_get_instance_from_managed_object = MagicMock()
        with patch(
                'salt.utils.vmware.get_service_instance_from_managed_object',
                mock_get_instance_from_managed_object):
            salt.utils.vmware.get_properties_of_managed_object(
                self.fake_mo_ref, self.mock_props)
        mock_get_instance_from_managed_object.assert_called_once_with(
            self.fake_mo_ref)

    def test_get_mors_with_properties_calls(self):
        mock_get_mors_with_properties = MagicMock(return_value=[MagicMock()])
        with patch(
                'salt.utils.vmware.get_service_instance_from_managed_object',
                MagicMock(return_value=self.mock_si)):
            with patch('salt.utils.vmware.get_mors_with_properties',
                       mock_get_mors_with_properties):
                salt.utils.vmware.get_properties_of_managed_object(
                    self.fake_mo_ref, self.mock_props)
        mock_get_mors_with_properties.assert_has_calls(
            [call(self.mock_si, vim.ManagedEntity,
                  container_ref=self.fake_mo_ref, property_list=['name'],
                  local_properties=True),
             call(self.mock_si, vim.ManagedEntity,
                  container_ref=self.fake_mo_ref,
                  property_list=self.mock_props, local_properties=True)])

    def test_managed_object_no_name_property(self):
        with patch('salt.utils.vmware.get_mors_with_properties',
                   MagicMock(side_effect=[vmodl.query.InvalidProperty(), []])):
            with self.assertRaises(VMwareApiError) as excinfo:
                salt.utils.vmware.get_properties_of_managed_object(
                    self.fake_mo_ref, self.mock_props)
        self.assertEqual('Properties of managed object \'<unnamed>\' weren\'t '
                         'retrieved', excinfo.exception.strerror)

    def test_no_items_named_object(self):
        with patch('salt.utils.vmware.get_mors_with_properties',
                   MagicMock(side_effect=[[self.mock_item_name], []])):
            with self.assertRaises(VMwareApiError) as excinfo:
                salt.utils.vmware.get_properties_of_managed_object(
                    self.fake_mo_ref, self.mock_props)
        self.assertEqual('Properties of managed object \'fake_name\' weren\'t '
                         'retrieved', excinfo.exception.strerror)
Tests for salt.utils.vmware.get_properties_of_managed_object
6259906545492302aabfdc2c
class AlphabeticalIndex(Report):
    def __init__(self, database, options, user):
        Report.__init__(self, database, options, user)
        self._user = user
        menu = options.menu

    def write_report(self):
        self.doc.insert_index()
This report class generates an alphabetical index for a book.
6259906571ff763f4b5e8ef7
class _Col(object):
    def __init__(self, data=None):
        if data is None:
            self.__data = []
        else:
            self.__data = data

    def __getitem__(self, key):
        return self.__data[key]

    def __setitem__(self, key, val):
        self.__data[key] = val
Please don't use this.
62599065379a373c97d9a76e
class FieldOrientedDrive(DriveInterface):
    def __init__(self, interface, ahrs, offset=0.0):
        self.interface = interface
        self.ahrs = ahrs
        self.origin = 0.0
        self.offset = offset

    def zero(self):
        self.origin = self._getYawRadians()

    def drive(self, magnitude, direction, turn):
        direction -= self.getRobotOffset()
        return self.interface.drive(magnitude, direction, turn)

    def _getYawRadians(self):
        return -math.radians(self.ahrs.getAngle())

    def getRobotOffset(self):
        return self._getYawRadians() - self.origin - self.offset
Wraps another drive interface, and provides field orientation.
625990657c178a314d78e794
class BlameRepoState(Persistable):
    TEMPLATE = {'sha_date_author': lambda: {}}
Repository-level persisted data structures. Currently this is just sha_date_author.
625990654428ac0f6e659c82
class Garlicoin(Bitcoin):
    name = 'garlicoin'
    symbols = ('GRLC', )
    seeds = ('dnsseed.brennanmcdonald.io', 'dnsseed.rshaw.space', )
    port = 42069
    message_start = b'\xd2\xc6\xb6\xdb'
    base58_prefixes = {
        'PUBKEY_ADDR': 38,
        'SCRIPT_ADDR': 5,
        'SECRET_KEY': 176
    }
Class with all the necessary Garlicoin (GRLC) network information based on https://github.com/GarlicoinOrg/Garlicoin/blob/master/src/chainparams.cpp (date of access: 02/16/2018)
6259906555399d3f05627c71
class TestTumDataloader(object):
    path = "/home/akashsharma/Documents/datasets/tum/rgbd_dataset_freiburg1_xyz/"
    sequence = "1"

    def test_init(self):
        with pytest.raises(AssertionError):
            tum = loader.TumDataloader(
                "/home/akashsharma/Documents/datasets/tum/rgbd_dataset_freiburg1_xyz/",
                "0")
        with pytest.raises(AssertionError):
            tum = loader.TumDataloader("SomeRandomPath/path", "1")
        tum = loader.TumDataloader(TestTumDataloader.path,
                                   TestTumDataloader.sequence)
        assert tum.sequence == 1
        assert len(tum._matches) <= len(tum._readFileList(tum.path / "rgb.txt"))

    def test_len(self):
        tum = loader.TumDataloader(TestTumDataloader.path,
                                   TestTumDataloader.sequence)
        assert len(tum) == len(tum._matches)

    def test_get_item(self):
        tum = loader.TumDataloader(TestTumDataloader.path,
                                   TestTumDataloader.sequence)
        for i in [0, len(tum) - 1]:
            data_dict = tum[i]
            rgb = data_dict["rgb"]
            depth = data_dict["depth"]
            cam_params = data_dict["cam_params"]
            assert rgb.dtype == np.uint8
            assert rgb.shape == (480, 640, 3)
            assert depth.dtype == np.uint8
            assert depth.shape == (480, 640, 3)
            cv2.imshow("test_get_item", rgb)
            cv2.waitKey(100)
            cv2.imshow("test_get_item", depth)
            cv2.waitKey(100)
            assert cam_params["intrinsic_matrix"].shape == (3, 3)
Tests for the TumDataloader class (TUM RGB-D dataset loader).
625990650c0af96317c57907
class GroupManager(models.Manager):
    def get_queryset(self):
        return GroupQuerySet(self.model, using=self._db)

    def active(self, *args, **kwargs):
        return self.get_queryset().active(*args, **kwargs)

    def inactive(self, *args, **kwargs):
        return self.get_queryset().inactive(*args, **kwargs)
Group model manager.
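A hedged wiring sketch: attaching the manager to a hypothetical Group model (GroupQuerySet with active()/inactive() is assumed to exist, as the manager requires):

class Group(models.Model):
    is_active = models.BooleanField(default=True)
    objects = GroupManager()

# Group.objects.active() and Group.objects.inactive() then delegate to the queryset.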
625990656e29344779b01da0
class TwoLayerNet:
    def __init__(self, n_input, n_output, hidden_layer_size, reg):
        self.fcl1 = FullyConnectedLayer(n_input, hidden_layer_size)
        self.fcl2 = FullyConnectedLayer(hidden_layer_size, n_output)
        self.relu = ReLULayer()
        self.reg = reg
        self.w1 = self.fcl1.params()['W']
        self.w2 = self.fcl2.params()['W']
        self.b1 = self.fcl1.params()['B']
        self.b2 = self.fcl2.params()['B']

    def compute_loss_and_gradients(self, X, y):
        # Reset accumulated gradients before the new forward/backward pass.
        for param in self.params().values():
            param.grad.fill(0)
        hidden_res_forward = self.fcl1.forward(X)
        hidden_res_forward = self.relu.forward(hidden_res_forward)
        output = self.fcl2.forward(hidden_res_forward)
        loss, dprediction = softmax_with_cross_entropy(output, y)
        hidden_res_backward = self.fcl2.backward(dprediction)
        hidden_res_backward = self.relu.backward(hidden_res_backward)
        self.fcl1.backward(hidden_res_backward)
        for param in self.params().values():
            reg_loss, reg_grad = l2_regularization(param.value, self.reg)
            loss += reg_loss
            param.grad += reg_grad
        return loss

    def predict(self, X):
        pred = np.argmax(
            softmax(self.fcl2.forward(self.relu.forward(self.fcl1.forward(X)))), 1)
        return pred

    def params(self):
        result = {"W1": self.w1, "W2": self.w2, "B1": self.b1, "B2": self.b2}
        return result
Neural network with two fully connected layers
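A hedged usage sketch, assuming the course framework's FullyConnectedLayer, ReLULayer, softmax, softmax_with_cross_entropy and l2_regularization helpers are importable alongside the class above:

import numpy as np

net = TwoLayerNet(n_input=32 * 32 * 3, n_output=10, hidden_layer_size=100, reg=1e-3)
X = np.random.randn(16, 32 * 32 * 3)
y = np.random.randint(0, 10, size=16)
loss = net.compute_loss_and_gradients(X, y)  # fills param.grad as a side effect
preds = net.predict(X)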
62599065442bda511e95d902
class RGetopt(RPackage):
    homepage = "https://github.com/trevorld/getopt"
    url = "https://cloud.r-project.org/src/contrib/getopt_1.20.1.tar.gz"
    list_url = "https://cloud.r-project.org/src/contrib/Archive/getopt"

    version('1.20.3', sha256='531f5fdfdcd6b96a73df2b39928418de342160ac1b0043861e9ea844f9fbf57f')
    version('1.20.2', sha256='3d6c12d32d6cd4b2909be626e570e158b3ed960e4739510e3a251e7f172de38e')
    version('1.20.1', sha256='1522c35b13e8546979725a68b75e3bc9d156fb06569067472405f6b8591d8654')
Package designed to be used with Rscript to write "#!" shebang scripts that accept short and long flags/options. Many users will prefer using instead the packages optparse or argparse which add extra features like automatically generated help option and usage, support for default values, positional argument support, etc.
6259906545492302aabfdc2d
class ReadResult(object):
    def __init__(self, value=None, vector_clock=None,):
        self.value = value
        self.vector_clock = vector_clock

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.value = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.MAP:
                    self.vector_clock = {}
                    (_ktype1, _vtype2, _size0) = iprot.readMapBegin()
                    for _i4 in range(_size0):
                        _key5 = iprot.readI32()
                        _val6 = iprot.readI32()
                        self.vector_clock[_key5] = _val6
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ReadResult')
        if self.value is not None:
            oprot.writeFieldBegin('value', TType.STRING, 1)
            oprot.writeString(self.value.encode('utf-8') if sys.version_info[0] == 2 else self.value)
            oprot.writeFieldEnd()
        if self.vector_clock is not None:
            oprot.writeFieldBegin('vector_clock', TType.MAP, 2)
            oprot.writeMapBegin(TType.I32, TType.I32, len(self.vector_clock))
            for kiter7, viter8 in self.vector_clock.items():
                oprot.writeI32(kiter7)
                oprot.writeI32(viter8)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
Attributes:
 - value
 - vector_clock
62599065d268445f2663a705
class NyRemoveGroupRoleEvent(object):
    implements(INyRemoveGroupRoleEvent)

    def __init__(self, context, group, roles):
        super(NyRemoveGroupRoleEvent, self).__init__()
        self.context, self.group, self.roles = context, group, roles
Group roles will be removed
6259906566673b3332c31b4e
class Tag(Base):
    name = Column(String(64), unique=False, nullable=False)
    opportunities = relationship('Opportunity', secondary=tags,
                                 back_populates='tags')
    users_following = relationship('User', secondary=tags_following,
                                   back_populates='tags_following')

    def __repr__(self):
        return '<Tag ID: %d>' % self.id
Tag model to store a tag for an opportunity
62599065d7e4931a7ef3d72e
class Match(object):
    def __init__(self, match_results):
        if match_results.shape.ndims != 1:
            raise ValueError('match_results should have rank 1')
        if match_results.dtype != tf.int32:
            raise ValueError('match_results should be an int32 or int64 scalar '
                             'tensor')
        self._match_results = match_results

    @property
    def match_results(self):
        return self._match_results

    def matched_column_indices(self):
        return self._reshape_and_cast(tf.where(tf.greater(self._match_results, -1)))

    def matched_column_indicator(self):
        return tf.greater_equal(self._match_results, 0)

    def num_matched_columns(self):
        return tf.size(input=self.matched_column_indices())

    def unmatched_column_indices(self):
        return self._reshape_and_cast(tf.where(tf.equal(self._match_results, -1)))

    def unmatched_column_indicator(self):
        return tf.equal(self._match_results, -1)

    def num_unmatched_columns(self):
        return tf.size(input=self.unmatched_column_indices())

    def ignored_column_indices(self):
        return self._reshape_and_cast(tf.where(self.ignored_column_indicator()))

    def ignored_column_indicator(self):
        return tf.equal(self._match_results, -2)

    def num_ignored_columns(self):
        return tf.size(input=self.ignored_column_indices())

    def unmatched_or_ignored_column_indices(self):
        return self._reshape_and_cast(tf.where(tf.greater(0, self._match_results)))

    def matched_row_indices(self):
        return self._reshape_and_cast(
            tf.gather(self._match_results, self.matched_column_indices()))

    def _reshape_and_cast(self, t):
        return tf.cast(tf.reshape(t, [-1]), tf.int32)

    def gather_based_on_match(self, input_tensor, unmatched_value, ignored_value):
        input_tensor = tf.concat(
            [tf.stack([ignored_value, unmatched_value]), input_tensor], axis=0)
        gather_indices = tf.maximum(self.match_results + 2, 0)
        gathered_tensor = tf.gather(input_tensor, gather_indices)
        return gathered_tensor
Class to store results from the matcher. This class is used to store the results from the matcher. It provides convenient methods to query the matching results.
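A hedged usage sketch (TensorFlow 2.x eager mode): match_results uses -1 for unmatched columns, -2 for ignored columns, and values >= 0 as matched row indices.

import tensorflow as tf

match = Match(tf.constant([1, -1, 0, -2], dtype=tf.int32))
print(match.matched_column_indices().numpy())  # [0 2]
print(match.matched_row_indices().numpy())     # [1 0]
rewards = match.gather_based_on_match(
    tf.constant([0.5, 0.8]), unmatched_value=0.0, ignored_value=0.0)
print(rewards.numpy())                         # [0.8 0.  0.5 0. ]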
62599065cb5e8a47e493cd2d
class CsvContainer(object):
    def __init__(self):
        self._name = ''
        self._telephone = None
        # Python 2 only: force the default string encoding to UTF-8.
        reload(sys)
        sys.setdefaultencoding("utf-8")

    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, val):
        self._name = str(val)

    @property
    def telephone(self):
        return self._telephone

    @telephone.setter
    def telephone(self, val):
        self._telephone = val
Class that holds data in a fixed CSV format.
625990653539df3088ecd9f0
class PkgStatus(BASE):
    __tablename__ = 'pkg_status'
    status = sa.Column(sa.String(50), primary_key=True)

    def __init__(self, status):
        self.status = status

    @classmethod
    def all_txt(cls, session):
        return [
            item.status
            for item in session.query(cls).order_by(cls.status).all()]
Table storing the statuses a package can have.
6259906556b00c62f0fb401f
class GitPathTool(object):
    _cwd = None
    _root = None

    @classmethod
    def set_cwd(cls, cwd):
        if isinstance(cwd, six.binary_type):
            cwd = cwd.decode(sys.getdefaultencoding())
        cls._cwd = cwd
        cls._root = cls._git_root()

    @classmethod
    def relative_path(cls, git_diff_path):
        root_rel_path = os.path.relpath(cls._cwd, cls._root)
        rel_path = os.path.relpath(git_diff_path, root_rel_path)
        return rel_path

    @classmethod
    def absolute_path(cls, src_path):
        return os.path.join(cls._root, src_path)

    @classmethod
    def _git_root(cls):
        command = ['git', 'rev-parse', '--show-toplevel', '--encoding=utf-8']
        git_root = execute(command)[0]
        return git_root.split('\n')[0] if git_root else u''
Converts `git diff` paths to absolute paths or relative paths to cwd. This class should be used throughout the project to change paths from the paths yielded by `git diff` to correct project paths.
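A hedged usage sketch; this must run inside a git checkout, since _git_root() shells out to `git rev-parse --show-toplevel`:

import os

GitPathTool.set_cwd(os.getcwd())
print(GitPathTool.relative_path('src/module.py'))  # path relative to cwd
print(GitPathTool.absolute_path('src/module.py'))  # path under the repo root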
625990655166f23b2e244b22
from abc import ABC, abstractmethod


class _LSE(ABC):
    @abstractmethod
    def solve(self, coeficients, results):
        ...
Interface for Least Squares Estimation methods
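A hedged concrete implementation of the interface using numpy's ordinary least squares (the NumpyLSE name is illustrative):

import numpy as np

class NumpyLSE(_LSE):
    def solve(self, coeficients, results):
        # Solves coeficients @ x ~= results in the least-squares sense.
        solution, *_ = np.linalg.lstsq(
            np.asarray(coeficients), np.asarray(results), rcond=None)
        return solution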
625990652ae34c7f260ac839
class VolumeAttachment(BASE, CinderBase):
    __tablename__ = 'volume_attachment'
    id = sa.Column(sa.String(36), primary_key=True)
    volume_id = sa.Column(
        sa.String(36),
        sa.ForeignKey('volumes.id'),
        nullable=False,
        index=True
    )
    volume = relationship(
        Volume,
        backref="volume_attachment",
        foreign_keys=volume_id,
        primaryjoin='and_('
                    'VolumeAttachment.volume_id == Volume.id,'
                    'VolumeAttachment.deleted == False)',
    )
    instance_uuid = sa.Column(sa.String(36))
    attached_host = sa.Column(sa.String(255))
    mountpoint = sa.Column(sa.String(255))
    attach_time = sa.Column(sa.DateTime)
    detach_time = sa.Column(sa.DateTime)
    attach_status = sa.Column(sa.String(255))
    attach_mode = sa.Column(sa.String(36))
    connection_info = sa.Column(sa.Text)
    connector = sa.Column(sa.Text)

    @staticmethod
    def delete_values():
        now = timeutils.utcnow()
        return {'deleted': True,
                'deleted_at': now,
                'attach_status': 'detached',
                'detach_time': now}
Represents a volume attachment for a vm.
625990652ae34c7f260ac83a
class CountingAI(AIMixin):
    ...
Representation
==============
Count the number of pieces of all players in each megatile. Use the
representation of the megatile the AI should play in, linked to the number
of pieces in the other megatiles, as a key to learn.

E.g. (this is probably an illegal board):

      | |  |  | |  |  | |
     -+-+- | -+-+- | -+-+-
     X| |X |  |O|  |  |O|
     -+-+- | -+-+- | -+-+-
      |X|  |  |X|  |  |O|
    ------+-------+------
      | |X | O|O|X |  | |
     -+-+- | -+-+- | -+-+-
      | |X |  |X|O | O| |
     -+-+- | -+-+- | -+-+-
      | |  | O| |  |  | |
    ------+-------+------
           |+-----+|
      | |  || | | ||  | |
     -+-+- ||-+-+-|| -+-+-
      | |  ||  |O| ||  | |
     -+-+- ||-+-+-|| -+-+-
      | |X ||X| |  ||  |O|
            +-----+
              ^
              ^
              ^ O's last move
              ^ X's next move

Will be represented something like:

         |     |
     3 0 | 1 1 | 0 2
    -----+-----+-----
         |  O  |
     2 0 | 2 4 | 0 1
    -----+-----+-----
         |  X  |
     1 0 | 1 1 | 0 1

This will result in (10!) ** 9 * 3 ** 9 (about 10 ** 63) possibilities,
which renders it quite impossible to use.

Learning
========
During the game: if a board hasn't been encountered before, each of the
nine cells will get a certain initial value. A weighted random choice is
made between the cells and is saved to the history of this game (i.e. a
board with a choice will be saved).

If the game ends in a victory: all choices will be "rewarded" by increasing
the value assigned to that choice.
If the game ends in a loss: all choices will be "punished" by decreasing
the value assigned to that choice.
If the game is a draw: I have no idea what works best yet.
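A hedged sketch of the counting representation described above; the board is a hypothetical 9x9 nested list holding 'X', 'O' or None, and the function returns (x_count, o_count) per megatile in row-major order:

def megatile_counts(board):
    counts = []
    for mega_row in range(3):
        for mega_col in range(3):
            x = o = 0
            for r in range(3 * mega_row, 3 * mega_row + 3):
                for c in range(3 * mega_col, 3 * mega_col + 3):
                    if board[r][c] == 'X':
                        x += 1
                    elif board[r][c] == 'O':
                        o += 1
            counts.append((x, o))
    return counts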
62599065aad79263cf42ff0f
class StorageVolumeManager(BaseManager):
    def __init__(self, storage_group):
        query_props = [
            'name',
            'fulfillment-state',
            'maximum-size',
            'minimum-size',
            'usage',
        ]
        super(StorageVolumeManager, self).__init__(
            resource_class=StorageVolume,
            class_name=RC_STORAGE_VOLUME,
            session=storage_group.manager.session,
            parent=storage_group,
            base_uri='{}/storage-volumes'.format(storage_group.uri),
            oid_prop='element-id',
            uri_prop='element-uri',
            name_prop='name',
            query_props=query_props)

    @property
    def storage_group(self):
        return self._parent

    @logged_api_call
    def list(self, full_properties=False, filter_args=None):
        resource_obj_list = []
        resource_obj = self._try_optimized_lookup(filter_args)
        if resource_obj:
            resource_obj_list.append(resource_obj)
        else:
            query_parms, client_filters = divide_filter_args(
                self._query_props, filter_args)
            resources_name = 'storage-volumes'
            uri = '{}/{}{}'.format(self.storage_group.uri, resources_name,
                                   query_parms)
            result = self.session.get(uri)
            if result:
                props_list = result[resources_name]
                for props in props_list:
                    resource_obj = self.resource_class(
                        manager=self,
                        uri=props[self._uri_prop],
                        name=props.get(self._name_prop, None),
                        properties=props)
                    if matches_filters(resource_obj, client_filters):
                        resource_obj_list.append(resource_obj)
                        if full_properties:
                            resource_obj.pull_full_properties()
        self._name_uri_cache.update_from(resource_obj_list)
        return resource_obj_list

    @logged_api_call
    def create(self, properties, email_to_addresses=None,
               email_cc_addresses=None, email_insert=None):
        volreq_obj = copy.deepcopy(properties)
        volreq_obj['operation'] = 'create'
        body = {
            'storage-volumes': [volreq_obj],
        }
        if email_to_addresses:
            body['email-to-addresses'] = email_to_addresses
            if email_cc_addresses:
                body['email-cc-addresses'] = email_cc_addresses
            if email_insert:
                body['email-insert'] = email_insert
        else:
            if email_cc_addresses:
                raise ValueError("email_cc_addresses must not be specified if "
                                 "there is no email_to_addresses: %r" %
                                 email_cc_addresses)
            if email_insert:
                raise ValueError("email_insert must not be specified if "
                                 "there is no email_to_addresses: %r" %
                                 email_insert)
        result = self.session.post(
            self.storage_group.uri + '/operations/modify', body=body)
        uri = result['element-uris'][0]
        storage_volume = self.resource_object(uri, properties)
        return storage_volume
Manager providing access to the :term:`storage volumes <storage volume>` in
a particular :term:`storage group`.

Derived from :class:`~zhmcclient.BaseManager`; see there for common methods
and attributes.

Objects of this class are not directly created by the user; they are
accessible via the following instance variable of a
:class:`~zhmcclient.StorageGroup` object:

* :attr:`~zhmcclient.StorageGroup.storage_volumes`
625990657047854f46340b06
@six.add_metaclass(ABCMeta)
class Protocol(object):
    @abstractmethod
    def data_received(self, data):
        pass

    @abstractmethod
    def connection_made(self):
        pass

    @abstractmethod
    def connection_lost(self, reason):
        pass
Interface for various protocols. Protocol usually encloses a transport/connection/socket to peer/client/server and encodes and decodes communication/messages. Protocol can also maintain any related state machine, protocol message encoding or decoding utilities. This interface identifies minimum methods to support to facilitate or provide hooks to sub-classes to override behavior as appropriate.
62599065f548e778e596ccdc
class MetricDefinition(msrest.serialization.Model):
    _attribute_map = {
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
        'name': {'key': 'name', 'type': 'LocalizableString'},
        'category': {'key': 'category', 'type': 'str'},
        'unit': {'key': 'unit', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'primary_aggregation_type': {'key': 'primaryAggregationType', 'type': 'str'},
        'supported_aggregation_types': {'key': 'supportedAggregationTypes', 'type': '[str]'},
        'metric_availabilities': {'key': 'metricAvailabilities', 'type': '[MetricAvailability]'},
        'id': {'key': 'id', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        resource_id: Optional[str] = None,
        resource_uri: Optional[str] = None,
        name: Optional["LocalizableString"] = None,
        category: Optional[str] = None,
        unit: Optional[Union[str, "Unit"]] = None,
        start_time: Optional[datetime.datetime] = None,
        end_time: Optional[datetime.datetime] = None,
        primary_aggregation_type: Optional[Union[str, "AggregationType"]] = None,
        supported_aggregation_types: Optional[List[Union[str, "AggregationType"]]] = None,
        metric_availabilities: Optional[List["MetricAvailability"]] = None,
        id: Optional[str] = None,
        **kwargs
    ):
        super(MetricDefinition, self).__init__(**kwargs)
        self.resource_id = resource_id
        self.resource_uri = resource_uri
        self.name = name
        self.category = category
        self.unit = unit
        self.start_time = start_time
        self.end_time = end_time
        self.primary_aggregation_type = primary_aggregation_type
        self.supported_aggregation_types = supported_aggregation_types
        self.metric_availabilities = metric_availabilities
        self.id = id
Metric definition class specifies the metadata for a metric.

:param resource_id: The resource identifier of the resource that emitted the metric.
:type resource_id: str
:param resource_uri: The resource identifier of the resource that emitted the metric.
:type resource_uri: str
:param name: the name and the display name of the metric, i.e. it is a localizable string.
:type name: ~$(python-base-namespace).v2015_07_01.models.LocalizableString
:param category: The category of this metric.
:type category: str
:param unit: the unit of the metric. Possible values include: "Count", "Bytes", "Seconds",
    "CountPerSecond", "BytesPerSecond", "Percent", "MilliSeconds".
:type unit: str or ~$(python-base-namespace).v2015_07_01.models.Unit
:param start_time: Start time of the metadata request timespan.
:type start_time: ~datetime.datetime
:param end_time: End time of the metadata request timespan.
:type end_time: ~datetime.datetime
:param primary_aggregation_type: the primary aggregation type value defining how to use the
    values for display. Possible values include: "None", "Average", "Count", "Minimum",
    "Maximum", "Total".
:type primary_aggregation_type: str or ~$(python-base-namespace).v2015_07_01.models.AggregationType
:param supported_aggregation_types: List of all aggregations that are applicable for this metric.
:type supported_aggregation_types: list[str or ~$(python-base-namespace).v2015_07_01.models.AggregationType]
:param metric_availabilities: the collection of what aggregation intervals are available to be
    queried.
:type metric_availabilities: list[~$(python-base-namespace).v2015_07_01.models.MetricAvailability]
:param id: the resource identifier of the metric definition.
:type id: str
625990657c178a314d78e795
class Guild:
    @classmethod
    async def convert(cls, ctx, arg):
        guild = None
        if arg.isdigit():
            guild = ctx.bot.get_guild(int(arg))
        if not guild:
            guild = ctx.bot.find_guild(arg)
        return guild
Convert a Guild object by ID or name.

Returns
--------
:class:`discord.Guild`
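A hedged usage sketch with discord.py's commands extension (find_guild is a custom bot helper, as used in convert above):

from discord.ext import commands

@commands.command()
async def guildinfo(ctx, guild: Guild):
    await ctx.send(guild.name if guild else 'Guild not found.')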
62599065fff4ab517ebcef6e
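A hedged usage sketch: as a converter the class plugs into a discord.py-style command signature. `bot` is an assumed commands.Bot-like instance, and `ctx.bot.find_guild` is a custom helper on this particular bot rather than stock discord.py API.

    @bot.command()
    async def guildinfo(ctx, guild: Guild):
        # "123456789" resolves by ID; any other string falls back to a name lookup.
        if guild:
            await ctx.send(guild.name)
        else:
            await ctx.send('No matching guild found.')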
class QueryInputSet(InputSet): <NEW_LINE> <INDENT> def set_Components(self, value): <NEW_LINE> <INDENT> super(QueryInputSet, self)._set_input('Components', value) <NEW_LINE> <DEDENT> def set_SetID(self, value): <NEW_LINE> <INDENT> super(QueryInputSet, self)._set_input('SetID', value)
An InputSet with methods appropriate for specifying the inputs to the Query Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
6259906532920d7e50bc7799
class C51ClassicControlPreset(Preset): <NEW_LINE> <INDENT> def __init__(self, env, name, device, **hyperparameters): <NEW_LINE> <INDENT> super().__init__(name, device, hyperparameters) <NEW_LINE> self.model = hyperparameters['model_constructor'](env, atoms=hyperparameters['atoms']).to(device) <NEW_LINE> self.n_actions = env.action_space.n <NEW_LINE> <DEDENT> def agent(self, writer=DummyWriter(), train_steps=float('inf')): <NEW_LINE> <INDENT> optimizer = Adam(self.model.parameters(), lr=self.hyperparameters['lr']) <NEW_LINE> q = QDist( self.model, optimizer, self.n_actions, self.hyperparameters['atoms'], v_min=self.hyperparameters['v_min'], v_max=self.hyperparameters['v_max'], target=FixedTarget(self.hyperparameters['target_update_frequency']), writer=writer, ) <NEW_LINE> replay_buffer = ExperienceReplayBuffer( self.hyperparameters['replay_buffer_size'], device=self.device ) <NEW_LINE> return C51( q, replay_buffer, exploration=LinearScheduler( self.hyperparameters['initial_exploration'], self.hyperparameters['final_exploration'], 0, self.hyperparameters["final_exploration_step"] - self.hyperparameters["replay_start_size"], name="epsilon", writer=writer, ), discount_factor=self.hyperparameters["discount_factor"], minibatch_size=self.hyperparameters["minibatch_size"], replay_start_size=self.hyperparameters["replay_start_size"], update_frequency=self.hyperparameters["update_frequency"], writer=writer ) <NEW_LINE> <DEDENT> def test_agent(self): <NEW_LINE> <INDENT> q_dist = QDist( copy.deepcopy(self.model), None, self.n_actions, self.hyperparameters['atoms'], v_min=self.hyperparameters['v_min'], v_max=self.hyperparameters['v_max'], ) <NEW_LINE> return C51TestAgent(q_dist, self.n_actions, self.hyperparameters["test_exploration"])
Categorical DQN (C51) classic control preset. Args: env (all.environments.GymEnvironment): The environment for which to construct the agent. name (str): A human-readable name for the preset. device (torch.device): The device on which to load the agent. Keyword Args: discount_factor (float): Discount factor for future rewards. lr (float): Learning rate for the Adam optimizer. minibatch_size (int): Number of experiences to sample in each training update. update_frequency (int): Number of timesteps per training update. target_update_frequency (int): Number of timesteps between updates of the target network. replay_start_size (int): Number of experiences in replay buffer when training begins. replay_buffer_size (int): Maximum number of experiences to store in the replay buffer. initial_exploration (float): Initial probability of choosing a random action, decayed over the course of training. final_exploration (float): Final probability of choosing a random action. final_exploration_step (int): The step at which exploration decay is finished. test_exploration (float): The exploration rate of the test Agent. atoms (int): The number of atoms in the categorical distribution used to represent the distributional value function. v_min (int): The expected return corresponding to the smallest atom. v_max (int): The expected return corresponding to the largest atom. model_constructor (function): The function used to construct the neural model.
62599065009cb60464d02c8b
class WHavenSpider: <NEW_LINE> <INDENT> def __init__(self,topic_url): <NEW_LINE> <INDENT> self.topic_url = topic_url <NEW_LINE> <DEDENT> def getHtml(self,url): <NEW_LINE> <INDENT> req = requests.get(url) <NEW_LINE> html = req.content <NEW_LINE> return html <NEW_LINE> <DEDENT> def selectPartlist(self,html): <NEW_LINE> <INDENT> soup = BeautifulSoup(html,'lxml') <NEW_LINE> part_list = soup.find('section',class_='thumb-listing-page').find("ul").find_all('a',class_='preview') <NEW_LINE> return part_list <NEW_LINE> <DEDENT> def selectImg(self,img_html): <NEW_LINE> <INDENT> img_soup = BeautifulSoup(img_html,'lxml') <NEW_LINE> img = img_soup.find('main').find('section',id='showcase').find("img")['src'] <NEW_LINE> img_url = 'https:' + img <NEW_LINE> return img_url <NEW_LINE> <DEDENT> def download(self,name,pic_html): <NEW_LINE> <INDENT> f = open("/home/lmile/Documents/DotaImg/"+name,'wb') <NEW_LINE> f.write(pic_html) <NEW_LINE> f.close() <NEW_LINE> <DEDENT> def work(self): <NEW_LINE> <INDENT> topic_html = self.getHtml(self.topic_url) <NEW_LINE> for url in self.getTopic(topic_html): <NEW_LINE> <INDENT> html = self.getHtml(url) <NEW_LINE> part_list = self.selectPartlist(html) <NEW_LINE> for part in part_list: <NEW_LINE> <INDENT> page_url = part['href'] <NEW_LINE> page_html = self.getHtml(page_url) <NEW_LINE> img_url = self.selectImg(page_html) <NEW_LINE> name = img_url[-10:] <NEW_LINE> img_html = self.getHtml(img_url) <NEW_LINE> self.download(name,img_html) <NEW_LINE> print("==========%s图片下载完毕=========="%name) <NEW_LINE> <DEDENT> print("==========第%s页下载完毕=========="%url.rpartition('=')[-1]) <NEW_LINE> <DEDENT> <DEDENT> def getTopic(self,topic_html): <NEW_LINE> <INDENT> topic_soup = BeautifulSoup(topic_html,'lxml') <NEW_LINE> num_generator = topic_soup.h2.strings <NEW_LINE> num_list = list(num_generator) <NEW_LINE> num = num_list[-1].split(' ')[-1] <NEW_LINE> for i in range(1,int(num)): <NEW_LINE> <INDENT> topic_url = self.topic_url + 'page=' + str(i) <NEW_LINE> yield topic_url
Used to scrape HD wallpapers from WallHaven.
625990650a50d4780f706969
class LyricMode(InputMode): <NEW_LINE> <INDENT> pass
A \lyricmode, \lyrics or \addlyrics expression.
62599065097d151d1a2c27bf
class AuthenticationError(Exception): <NEW_LINE> <INDENT> pass
Error authenticating
625990658da39b475be0493c
class AttributesEqual(Validator): <NEW_LINE> <INDENT> message = u"{a} and {b} must be equal." <NEW_LINE> def __init__(self, a, b): <NEW_LINE> <INDENT> self.a = a <NEW_LINE> self.b = b <NEW_LINE> <DEDENT> def validate(self, element, context): <NEW_LINE> <INDENT> if (not self.is_unusable(element) and getattr(element, self.a[1]).value == getattr(element, self.b[1]).value ): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> self.note_error( element, self.message, substitutions={"a": self.a[0], "b": self.b[0]} ) <NEW_LINE> return False
Validator that fails with :attr:`message` if two attributes of the value are unequal. Similar to :class:`ItemsEqual` the attributes are defined with the tuples `a` and `b`, each of which consists of two elements in the form ``(label, attribute_name)``. `attribute_name` is used to determine the attributes to compare and the `label` is used for substitution in the message.
625990654e4d562566373b5a
class Context(object): <NEW_LINE> <INDENT> def __init__(self, handle_error): <NEW_LINE> <INDENT> print('__init__({})'.format(handle_error)) <NEW_LINE> self.handle_error = handle_error <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> print('__enter__()') <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> print('__exit__({}, {}, {})'.format(exc_type, exc_val, exc_tb)) <NEW_LINE> if exc_type == ZeroDivisionError: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
from Doug Hellmann, PyMOTW http://pymotw.com/2/contextlib/#module-contextlib
62599065a219f33f346c7f5a
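A short demo of the suppression behaviour: `__exit__` returns True only for ZeroDivisionError, so that exception is swallowed while anything else propagates. Note that the `handle_error` flag is stored and printed but never consulted by `__exit__` in the snippet as written.

    with Context(handle_error=True):
        1 / 0                    # suppressed: __exit__ returns True for ZeroDivisionError
    print('execution continues')

    try:
        with Context(handle_error=True):
            raise ValueError('boom')   # not suppressed: __exit__ returns False
    except ValueError:
        print('ValueError propagated as expected')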
class InputReader(TokenReader): <NEW_LINE> <INDENT> def next_token(self): <NEW_LINE> <INDENT> return input()
Token reader that reads tokens from standard input.
6259906501c39578d7f142de
class InvalidMerchant(ValueError): <NEW_LINE> <INDENT> pass
Provided order belongs to a different merchant!
625990651f5feb6acb16433f
class ExtraUploadForm(UploadForm): <NEW_LINE> <INDENT> author_name = forms.CharField( label=_('Author name'), required=False, help_text=_('Leave empty for using currently logged in user.') ) <NEW_LINE> author_email = forms.EmailField( label=_('Author email'), required=False, help_text=_('Leave empty for using currently logged in user.') )
Advanced upload form for users who can override authorship.
62599065f548e778e596ccdd
class pg_functions(functions): <NEW_LINE> <INDENT> class svg(BaseFunction): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class kml(BaseFunction): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class gml(BaseFunction): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class geojson(BaseFunction): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class expand(BaseFunction): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class simplify(BaseFunction): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class estimated_extent(BaseFunction): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _within_distance(compiler, geom1, geom2, distance, *args): <NEW_LINE> <INDENT> return and_(func.ST_Expand(geom2, distance).op('&&')(geom1), func.ST_Expand(geom1, distance).op('&&')(geom2), func.ST_Distance(geom1, geom2) <= distance)
Functions only supported by PostGIS
625990652c8b7c6e89bd4f42
@command_lib.CommandRegexParser(r's ' r'/?(?:"(.+?)"|([\S]+))' r'/(?:"(.*?)"|([\S]*))' r'(?:/(.*?))?' r' ([\s\S]+?)') <NEW_LINE> class SubCommand(command_lib.BaseCommand): <NEW_LINE> <INDENT> @command_lib.MainChannelOnly <NEW_LINE> @command_lib.LimitPublicLines() <NEW_LINE> def _Handle(self, channel: Channel, user: str, multi_word_search: str, single_word_search: str, multi_word_replace: str, single_word_replace: str, options: str, message: str) -> hypecore.MessageType: <NEW_LINE> <INDENT> search_str = multi_word_search or single_word_search <NEW_LINE> replace_str = multi_word_replace or single_word_replace or '' <NEW_LINE> haystack = message.split('\n') <NEW_LINE> replies = [] <NEW_LINE> flags = 0 <NEW_LINE> if options and 'i' in options.lower(): <NEW_LINE> <INDENT> flags = re.IGNORECASE <NEW_LINE> <DEDENT> for stalk in haystack: <NEW_LINE> <INDENT> replies.append(re.sub(search_str, replace_str, stalk, flags=flags)) <NEW_LINE> <DEDENT> return replies
Substitute lines according to a pattern.
625990654a966d76dd5f0649
class ReceiveSignal(QtCore.QObject): <NEW_LINE> <INDENT> sig_mqtt_message = QtCore.pyqtSignal(mqtt.MQTTMessage)
Why a whole new class? See here: https://stackoverflow.com/a/25930966/2441026
62599065baa26c4b54d509f8
class ActionList(Receiver): <NEW_LINE> <INDENT> def __init__(self, actions=None): <NEW_LINE> <INDENT> if isinstance(actions, (str, dict)): <NEW_LINE> <INDENT> actions = [actions] <NEW_LINE> <DEDENT> self.actions = tuple(Action.make(a) for a in actions or ()) <NEW_LINE> <DEDENT> def set_project(self, project): <NEW_LINE> <INDENT> for a in self.actions: <NEW_LINE> <INDENT> a.set_project(project) <NEW_LINE> <DEDENT> <DEDENT> def receive(self, msg): <NEW_LINE> <INDENT> values = tuple(msg.values()) <NEW_LINE> for action in self.actions: <NEW_LINE> <INDENT> action.receive(values) <NEW_LINE> <DEDENT> <DEDENT> def __bool__(self): <NEW_LINE> <INDENT> return bool(self.actions) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ' + '.join(str(a) for a in self.actions)
A list of Actions.
6259906576e4537e8c3f0cd6
class TvShow(Video): <NEW_LINE> <INDENT> VALID_RATINGS = ["TV-Y", "TV-Y7", "TV-G", "TV-PG", "TV-14", "TV-MA"] <NEW_LINE> def __init__(self, details): <NEW_LINE> <INDENT> Video.__init__(self, details) <NEW_LINE> self.seasons = details['seasons'] <NEW_LINE> self.network_url = details['network_url'] <NEW_LINE> self.rating = Video.get_rating(self, TvShow.VALID_RATINGS, details['rating'])
Holds and provides information on a television show.
62599065d7e4931a7ef3d72f
class InitFromCheckpointHook(tf.estimator.SessionRunHook): <NEW_LINE> <INDENT> def __init__(self, model_dir, ckpt_to_init_from, vars_to_restore_fn=None): <NEW_LINE> <INDENT> self._ckpt = None if tf.train.latest_checkpoint( model_dir) else ckpt_to_init_from <NEW_LINE> self._vars_to_restore_fn = vars_to_restore_fn <NEW_LINE> <DEDENT> def begin(self): <NEW_LINE> <INDENT> if not self._ckpt: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> logging.info('%s will be used for initialization.', self._ckpt) <NEW_LINE> self._reset_step = None <NEW_LINE> if tf.train.get_global_step() is not None: <NEW_LINE> <INDENT> self._reset_step = tf.train.get_global_step().assign(0) <NEW_LINE> <DEDENT> if not self._vars_to_restore_fn: <NEW_LINE> <INDENT> logging.info( 'All variables will be initialized from the checkpoint.') <NEW_LINE> self._saver = tf.get_collection(tf.GraphKeys.SAVERS)[0] <NEW_LINE> return <NEW_LINE> <DEDENT> vars_to_restore = self._vars_to_restore_fn() <NEW_LINE> restored_vars_string = ( 'The following variables are to be initialized from the checkpoint:\n') <NEW_LINE> for ckpt_name in sorted(vars_to_restore): <NEW_LINE> <INDENT> restored_vars_string += '%s --> %s\n' % ( ckpt_name, vars_to_restore[ckpt_name].op.name) <NEW_LINE> <DEDENT> logging.info(restored_vars_string) <NEW_LINE> self._saver = tf.train.Saver(vars_to_restore) <NEW_LINE> <DEDENT> def after_create_session(self, session, coord): <NEW_LINE> <INDENT> del coord <NEW_LINE> if not self._ckpt: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._saver.restore(session, self._ckpt) <NEW_LINE> if self._reset_step is not None: <NEW_LINE> <INDENT> session.run(self._reset_step)
A hook for initializing training from a checkpoint. Although the Estimator framework supports initialization from a checkpoint via https://www.tensorflow.org/api_docs/python/tf/estimator/WarmStartSettings, the only way to build a mapping between the variables and the checkpoint names is via providing a regex. This class provides the same functionality, but the mapping can be built by a callback, which provides more flexibility and readability.
625990657b25080760ed888b
class AvrStaticInfoMessage(AvrMessage): <NEW_LINE> <INDENT> def __init__(self, state): <NEW_LINE> <INDENT> super().__init__('static_info', {}, state)
An AvrMessage containing the static AVR information.
62599065627d3e7fe0e085de
class SpatialAnalysisOperationFocus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): <NEW_LINE> <INDENT> CENTER = "center" <NEW_LINE> BOTTOM_CENTER = "bottomCenter" <NEW_LINE> FOOTPRINT = "footprint"
The operation focus type.
625990657c178a314d78e796
class UserCustomAction(BaseEntity): <NEW_LINE> <INDENT> @property <NEW_LINE> def client_side_component_id(self): <NEW_LINE> <INDENT> return self.properties.get("ClientSideComponentId", None) <NEW_LINE> <DEDENT> @property <NEW_LINE> def script_block(self): <NEW_LINE> <INDENT> return self.properties.get("ScriptBlock", None) <NEW_LINE> <DEDENT> @script_block.setter <NEW_LINE> def script_block(self, value): <NEW_LINE> <INDENT> self.set_property("ScriptBlock", value) <NEW_LINE> <DEDENT> @property <NEW_LINE> def script_src(self): <NEW_LINE> <INDENT> return self.properties.get("ScriptSrc", None) <NEW_LINE> <DEDENT> @script_src.setter <NEW_LINE> def script_src(self, value): <NEW_LINE> <INDENT> self.set_property("ScriptSrc", value) <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> return self.properties.get("Url", None)
Represents a custom action associated with a SharePoint list, Web site, or subsite.
62599065fff4ab517ebcef70
class CustomerFollowUp(models.Model): <NEW_LINE> <INDENT> customer = models.ForeignKey("Customer", on_delete=models.CASCADE) <NEW_LINE> content = models.TextField(verbose_name="跟进内容") <NEW_LINE> consultant = models.ForeignKey("UserProfile", on_delete=models.CASCADE) <NEW_LINE> intention_choices = ((0, '2周内报名'), (1, '1个月内报名'), (2, '近期无报名计划'), (3, '已在其它机构报名'), (4, '已报名'), (5, '已拉黑'),) <NEW_LINE> intention = models.SmallIntegerField(choices=intention_choices) <NEW_LINE> date = models.DateTimeField(auto_now_add=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "%s" %self.customer.qq <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name_plural="客户跟进表"
Customer follow-up table.
625990656e29344779b01da4
class BoreholeForm(ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Borehole <NEW_LINE> fields = [ "borehole_reference", "borehole_northing", "borehole_easting", "ground_level", "drilling_equipment", "borehole_diameter" ]
Form for creating and updating borehole details.
625990658e71fb1e983bd21a
class Colorstr(str): <NEW_LINE> <INDENT> def __init__(self, msg): <NEW_LINE> <INDENT> self._msg = str(msg) <NEW_LINE> super().__init__() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_built_in_type(o: object): <NEW_LINE> <INDENT> return Colorstr(print(o, raw=True)) <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> return Colorstr(str(self) + str(other)) <NEW_LINE> <DEDENT> def __radd__(self, other): <NEW_LINE> <INDENT> return Colorstr(str(other) + str(self)) <NEW_LINE> <DEDENT> def __mul__(self, other): <NEW_LINE> <INDENT> return Colorstr(super().__mul__(other)) <NEW_LINE> <DEDENT> def __rmul__(self, other): <NEW_LINE> <INDENT> return self.__mul__(other)
A string class with color support.
62599065442bda511e95d904
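A quick sketch of the operator overloads: concatenation and repetition both return Colorstr rather than plain str, so the colored-string type survives ordinary string arithmetic. The ANSI escape codes are illustrative.

    s = Colorstr('\033[31mred\033[0m')   # a red string built from raw ANSI escapes
    banner = '[' + s + ']'               # __radd__/__add__ keep the Colorstr type
    line = s * 3                         # __mul__ keeps the Colorstr type too
    print(type(banner).__name__, type(line).__name__)  # Colorstr Colorstr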
class SkipTestException(TestException): <NEW_LINE> <INDENT> pass
Exception class for reporting skipped test cases.
62599065009cb60464d02c8d
class UpdateCommentV1(UpdateMessage): <NEW_LINE> <INDENT> body_schema = { 'id': f'{SCHEMA_URL}/v1/bodhi.update.comment#', '$schema': 'http://json-schema.org/draft-04/schema#', 'description': 'Schema for message sent when a comment is added to an update', 'type': 'object', 'properties': { 'comment': { 'type': 'object', 'description': 'The comment added to an update', 'properties': { 'karma': { 'type': 'integer', 'description': 'The karma associated with the comment', }, 'text': { 'type': 'string', 'description': 'The text of the comment', }, 'timestamp': { 'type': 'string', 'description': 'The timestamp that the comment was left on.' }, 'update': UpdateV1.schema(), 'user': UserV1.schema(), }, 'required': ['karma', 'text', 'timestamp', 'update', 'user'], }, }, 'required': ['comment'], 'definitions': { 'build': BuildV1.schema(), } } <NEW_LINE> topic = "bodhi.update.comment" <NEW_LINE> def __str__(self) -> str: <NEW_LINE> <INDENT> return self.body['comment']['text'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def karma(self) -> int: <NEW_LINE> <INDENT> return self.body['comment']['karma'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def summary(self) -> str: <NEW_LINE> <INDENT> return (f"{self.user.name} commented on bodhi update {self.update.alias} " f"(karma: {self.karma})") <NEW_LINE> <DEDENT> @property <NEW_LINE> def agent(self) -> str: <NEW_LINE> <INDENT> return self.user.name <NEW_LINE> <DEDENT> @property <NEW_LINE> def user(self) -> UserV1: <NEW_LINE> <INDENT> return UserV1(self.body['comment']['user']['name']) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _update(self) -> dict: <NEW_LINE> <INDENT> return self.body['comment']['update']
Sent when a comment is made on an update.
62599065462c4b4f79dbd15b
class State(Base): <NEW_LINE> <INDENT> __tablename__ = "states" <NEW_LINE> id = Column(Integer, nullable=False, primary_key=True) <NEW_LINE> name = Column(String(128), nullable=False)
State Class containing id and name definitions
625990651f037a2d8b9e5415
class ListarEmpresasView(generics.ListAPIView): <NEW_LINE> <INDENT> serializer_class = ClienteSerializer <NEW_LINE> queryset = Cliente.objects.all()
Allows listing companies.
625990654f88993c371f10c9
class SubmissionTracker(object): <NEW_LINE> <INDENT> def __init__(self, analysis_client, task_completion=None): <NEW_LINE> <INDENT> self.__analysis_client = analysis_client <NEW_LINE> if not task_completion: <NEW_LINE> <INDENT> task_completion = TaskCompletion(analysis_client) <NEW_LINE> <DEDENT> self.__task_completion = task_completion <NEW_LINE> self.__tracked_uuids = set() <NEW_LINE> self.__min_timestamp = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def min_timestamp(self): <NEW_LINE> <INDENT> return self.__min_timestamp <NEW_LINE> <DEDENT> @property <NEW_LINE> def num_tracked_uuids(self): <NEW_LINE> <INDENT> return len(self.__tracked_uuids) <NEW_LINE> <DEDENT> def get_tracked_uuids(self): <NEW_LINE> <INDENT> return set(self.__tracked_uuids) <NEW_LINE> <DEDENT> def track_submission(self, task_uuid, submission_utc_timestamp): <NEW_LINE> <INDENT> self.__tracked_uuids.add(task_uuid) <NEW_LINE> if self.__min_timestamp: <NEW_LINE> <INDENT> self.__min_timestamp = min( self.__min_timestamp, submission_utc_timestamp) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__min_timestamp = submission_utc_timestamp <NEW_LINE> <DEDENT> <DEDENT> def get_completed(self): <NEW_LINE> <INDENT> if not self.__tracked_uuids: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> assert self.__min_timestamp is not None, "SubmissionTracker has no min_timestamp!" <NEW_LINE> after = self.__min_timestamp <NEW_LINE> before = self.__analysis_client.get_api_utc_timestamp() <NEW_LINE> for completed_task in self.__task_completion.get_completed(after, before): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.__tracked_uuids.remove(completed_task.task_uuid) <NEW_LINE> yield completed_task <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> self.__min_timestamp = before
Helper class to track the state of submissions until they're completed :param analysis_client: analysis_apiclient.AnalysisClientBase :param task_completion: analysis_apiclient.TaskCompletion or None If not provided, will create one from the analysis_client. Providing this parameter explicitly is mainly for testing. - `track_submission()` is used to add the submission to the list of tasks that we are keeping track of. - `get_completed()` is used to get the results of tracked submissions that have completed so far Invocations of the two methods can be interleaved to add new tasks to keep track of while others are still waiting to be completed.
62599065a219f33f346c7f5b
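A polling sketch under stated assumptions: `analysis_client` is an analysis_apiclient.AnalysisClientBase instance obtained elsewhere, and the task UUIDs and timestamps are hypothetical. track_submission and get_completed are interleaved exactly as the docstring describes.

    import time

    tracker = SubmissionTracker(analysis_client)
    tracker.track_submission('task-uuid-1', submission_utc_timestamp=1650000000)
    tracker.track_submission('task-uuid-2', submission_utc_timestamp=1650000060)

    while tracker.num_tracked_uuids:
        for completed in tracker.get_completed():
            print('completed:', completed.task_uuid)
        time.sleep(30)  # poll the API at a modest interval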
class PFDataset(Dataset): <NEW_LINE> <INDENT> def __init__(self, csv_file, training_image_path,output_size=(240,240),transform=None): <NEW_LINE> <INDENT> self.out_h, self.out_w = output_size <NEW_LINE> self.train_data = pd.read_csv(csv_file) <NEW_LINE> self.img_A_names = self.train_data.iloc[:,0] <NEW_LINE> self.img_B_names = self.train_data.iloc[:,1] <NEW_LINE> self.point_A_coords = self.train_data.iloc[:, 2:22].as_matrix().astype('float') <NEW_LINE> self.point_B_coords = self.train_data.iloc[:, 22:].as_matrix().astype('float') <NEW_LINE> self.training_image_path = training_image_path <NEW_LINE> self.transform = transform <NEW_LINE> self.affineTnf = GeometricTnf(out_h=self.out_h, out_w=self.out_w, use_cuda = False) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.train_data) <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> image_A,im_size_A = self.get_image(self.img_A_names,idx) <NEW_LINE> image_B,im_size_B = self.get_image(self.img_B_names,idx) <NEW_LINE> point_A_coords = self.get_points(self.point_A_coords,idx) <NEW_LINE> point_B_coords = self.get_points(self.point_B_coords,idx) <NEW_LINE> L_pck = torch.FloatTensor([torch.max(point_A_coords.max(1)[0]-point_A_coords.min(1)[0])]) <NEW_LINE> sample = {'source_image': image_A, 'target_image': image_B, 'source_im_size': im_size_A, 'target_im_size': im_size_B, 'source_points': point_A_coords, 'target_points': point_B_coords, 'L_pck': L_pck} <NEW_LINE> if self.transform: <NEW_LINE> <INDENT> sample = self.transform(sample) <NEW_LINE> <DEDENT> return sample <NEW_LINE> <DEDENT> def get_image(self,img_name_list,idx): <NEW_LINE> <INDENT> img_name = os.path.join(self.training_image_path, img_name_list[idx]) <NEW_LINE> image = io.imread(img_name) <NEW_LINE> im_size = np.asarray(image.shape) <NEW_LINE> image = np.expand_dims(image.transpose((2,0,1)),0) <NEW_LINE> image = torch.Tensor(image.astype(np.float32)) <NEW_LINE> image_var = Variable(image,requires_grad=False) <NEW_LINE> image = self.affineTnf(image_var).data.squeeze(0) <NEW_LINE> im_size = torch.Tensor(im_size.astype(np.float32)) <NEW_LINE> return (image, im_size) <NEW_LINE> <DEDENT> def get_points(self,point_coords_list,idx): <NEW_LINE> <INDENT> point_coords = point_coords_list[idx, :].reshape(2,10) <NEW_LINE> point_coords = torch.Tensor(point_coords.astype(np.float32)) <NEW_LINE> return point_coords
Proposal Flow image pair dataset Args: csv_file (string): Path to the csv file with image names and transformations. training_image_path (string): Directory with the images. output_size (2-tuple): Desired output size. transform (callable): Transformation for post-processing the training pair (e.g. image normalization).
62599065435de62698e9d55e
class TCPServer(socketserver.ForkingTCPServer): <NEW_LINE> <INDENT> def server_bind(self): <NEW_LINE> <INDENT> self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) <NEW_LINE> self.socket.bind(self.server_address)
Our server with custom server_bind()
62599065ac7a0e7691f73c3a
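A usage sketch with a hypothetical echo handler; since the base class is ForkingTCPServer, this assumes a platform with os.fork (i.e. not Windows). The overridden server_bind runs automatically during construction, so SO_REUSEADDR is already set by the time serve_forever starts.

    import socketserver

    class EchoHandler(socketserver.BaseRequestHandler):
        def handle(self):
            # Echo a single read back to the client.
            data = self.request.recv(1024)
            self.request.sendall(data)

    server = TCPServer(('127.0.0.1', 9999), EchoHandler)
    server.serve_forever()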
class InlineResponse20012(object): <NEW_LINE> <INDENT> swagger_types = { 'params': 'InlineResponse20012Params' } <NEW_LINE> attribute_map = { 'params': 'params' } <NEW_LINE> def __init__(self, params=None): <NEW_LINE> <INDENT> self._params = None <NEW_LINE> self.discriminator = None <NEW_LINE> if params is not None: <NEW_LINE> <INDENT> self.params = params <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def params(self): <NEW_LINE> <INDENT> return self._params <NEW_LINE> <DEDENT> @params.setter <NEW_LINE> def params(self, params): <NEW_LINE> <INDENT> self._params = params <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, InlineResponse20012): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
625990654e4d562566373b5d
class DataSourceCSVSignal(DataSource): <NEW_LINE> <INDENT> def __init__(self, data, **kwargs): <NEW_LINE> <INDENT> assert isinstance(data, csv.DictReader) <NEW_LINE> self.data = data <NEW_LINE> self.source_id = kwargs.get("source_id", None) <NEW_LINE> self.start = kwargs.get('start') <NEW_LINE> self.end = kwargs.get('end') <NEW_LINE> self.sids = kwargs.get('sids', None) <NEW_LINE> self.sid_filter = kwargs.get('sid_filter', None) <NEW_LINE> self.arg_string = hash_args(data, **kwargs) <NEW_LINE> self._raw_data = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def instance_hash(self): <NEW_LINE> <INDENT> return self.arg_string <NEW_LINE> <DEDENT> def raw_data_gen(self): <NEW_LINE> <INDENT> previous_ts = None <NEW_LINE> for row in self.data: <NEW_LINE> <INDENT> dt64 = pd.Timestamp(np.datetime64(row["dt"])) <NEW_LINE> ts = pd.Timestamp(dt64).tz_localize(self.tz_in).tz_convert('utc') <NEW_LINE> if ts < self.start or ts > self.end: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if previous_ts is None or ts.date() != previous_ts.date(): <NEW_LINE> <INDENT> start_ts = gen_ts(ts.date(), self.start_time) <NEW_LINE> end_ts = gen_ts(ts.date(), self.end_time) <NEW_LINE> <DEDENT> volumes = {} <NEW_LINE> price_volumes = {} <NEW_LINE> sid = row["sid"] <NEW_LINE> if self.sid_filter and sid in self.sid_filter: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> elif self.sids is None or sid in self.sids: <NEW_LINE> <INDENT> if sid not in volumes: <NEW_LINE> <INDENT> volumes[sid] = 0 <NEW_LINE> price_volumes[sid] = 0 <NEW_LINE> <DEDENT> if ts < start_ts or ts > end_ts: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> event = {"sid": sid, "type": "CUSTOM", "dt": ts, "signal": row["signal"]} <NEW_LINE> yield event <NEW_LINE> <DEDENT> previous_ts = ts <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def raw_data(self): <NEW_LINE> <INDENT> if not self._raw_data: <NEW_LINE> <INDENT> self._raw_data = self.raw_data_gen() <NEW_LINE> <DEDENT> return self._raw_data <NEW_LINE> <DEDENT> @property <NEW_LINE> def mapping(self): <NEW_LINE> <INDENT> return { 'sid': (lambda x: x, 'symbol'), 'dt': (lambda x: x, 'dt'), 'signal': (lambda x: x, 'signal'), }
Expects a csv.DictReader for a CSV file with header dt, sid, signal; dt is expected in ISO format.
62599065dd821e528d6da52c
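An input sketch; the file name and timespan are hypothetical. Note the snippet also references tz_in, start_time and end_time without defining them, so those are assumed to come from the DataSource base class or elsewhere, and iterating raw_data only works once they exist.

    import csv
    import pandas as pd

    # Hypothetical signals.csv with header: dt,sid,signal (dt in ISO format)
    with open('signals.csv') as fh:
        source = DataSourceCSVSignal(
            csv.DictReader(fh),
            start=pd.Timestamp('2014-01-02', tz='utc'),
            end=pd.Timestamp('2014-01-03', tz='utc'),
            sids=['AAPL'],
        )
        events = list(source.raw_data)  # requires tz_in/start_time/end_time to be defined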
class ClusterProbabilityPrediction(ClusterPrediction): <NEW_LINE> <INDENT> def predict(self, test: pd.DataFrame) -> pd.DataFrame: <NEW_LINE> <INDENT> indx_clusters = [] <NEW_LINE> for i in range(self.n_clusters): <NEW_LINE> <INDENT> cluster = test.loc[self.clusters_test == i, :] <NEW_LINE> prediction = self.models[i].predict_proba(cluster.values) <NEW_LINE> indx_clusters.append((cluster.index, prediction)) <NEW_LINE> <DEDENT> nclasses = indx_clusters[0][1].shape[1] <NEW_LINE> dfs = [pd.DataFrame(p, index=indx_cluster, columns=[f'p_{i}' for i in range(nclasses)]) for indx_cluster, p in indx_clusters] <NEW_LINE> predictions = pd.concat(dfs).sort_index().values <NEW_LINE> return predictions
Predict the probability for each class using clustering
6259906538b623060ffaa3fc
class MissingConfigValue(Exception): <NEW_LINE> <INDENT> pass
General catch-all exception for missing config values.
625990658a43f66fc4bf38e6
class EventList(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.events = [] <NEW_LINE> <DEDENT> def add(self, event): <NEW_LINE> <INDENT> self.events.append(event) <NEW_LINE> <DEDENT> def unprocessed_events(self): <NEW_LINE> <INDENT> return [e for e in self.events if not e.is_processed] <NEW_LINE> <DEDENT> def process(self): <NEW_LINE> <INDENT> for event in self.unprocessed_events(): <NEW_LINE> <INDENT> event.process()
see http://martinfowler.com/eaaDev/AgreementDispatcher.html
625990655166f23b2e244b26
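A minimal sketch with a hypothetical Event type exposing the interface the list relies on: an is_processed flag and a process() method that flips it.

    class PaymentEvent:
        def __init__(self):
            self.is_processed = False

        def process(self):
            self.is_processed = True
            print('payment recorded')

    events = EventList()
    events.add(PaymentEvent())
    events.add(PaymentEvent())
    events.process()                         # handles both pending events
    assert events.unprocessed_events() == []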
class InvalidRequest(Exception): <NEW_LINE> <INDENT> pass
invalid request argument
625990652ae34c7f260ac83d
class Darker(Density): <NEW_LINE> <INDENT> _name = 'darker' <NEW_LINE> def __init__(self, override_name=None, bit_depth=8, max_output_value=None, eps=1e-5, data_mean=None): <NEW_LINE> <INDENT> Density.__init__(self, override_name=override_name, bit_depth=bit_depth, max_output_value=max_output_value, dmin=0, mmult=40, eps=eps, data_mean=data_mean)
The density remap using parameters for darker results.
625990652ae34c7f260ac83e
class EntityFieldsMetaClass(type): <NEW_LINE> <INDENT> def __new__(mcs, class_name, bases, class_attrs): <NEW_LINE> <INDENT> schema = class_attrs.get("_schema") <NEW_LINE> if schema: <NEW_LINE> <INDENT> for f_name, f in schema.fields.items(): <NEW_LINE> <INDENT> class_attrs[f_name] = EntityFieldDescriptor(f_name) <NEW_LINE> <DEDENT> <DEDENT> entity_methods = [] <NEW_LINE> for base in bases: <NEW_LINE> <INDENT> entity_methods.extend(getattr(base, "_entity_methods", [])) <NEW_LINE> <DEDENT> entity_methods.extend(class_attrs.get("_entity_methods", [])) <NEW_LINE> for m_name in entity_methods: <NEW_LINE> <INDENT> class_attrs[m_name] = ReadOnlyFieldDescriptor(m_name) <NEW_LINE> <DEDENT> return type.__new__(mcs, class_name, bases, class_attrs)
Magic: we take fields info from class._schema and attach EntityFieldDescriptor to Resource classes
62599065f548e778e596cce0
class LocalizedString(fields.String): <NEW_LINE> <INDENT> def format(self, value): <NEW_LINE> <INDENT> return fields.String.format(self, _(value))
Custom LocalizedString to return localized strings
625990657c178a314d78e797
class GroupPipelineSearch(): <NEW_LINE> <INDENT> __groupRank__ = {'CBC' :1, 'Burst':0, } <NEW_LINE> __pipelineRank__ = {'gstlal' :0, 'MBTAOnline' :0, 'pycbc' :0, 'gstlal-spiir':0, 'CWB' :0, 'LIB' :0, } <NEW_LINE> __searchRank__ = {'LowMass' :1, 'HighMass':1, 'AllSky' :1, '' :0, None :0, } <NEW_LINE> def __init__(self, group, pipeline, search=None): <NEW_LINE> <INDENT> self.group = group <NEW_LINE> if self.__groupRank__.has_key(group): <NEW_LINE> <INDENT> self.groupRank = self.__groupRank__[group] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.groupRank = -1 <NEW_LINE> <DEDENT> self.pipeline = pipeline <NEW_LINE> if self.__pipelineRank__.has_key(pipeline): <NEW_LINE> <INDENT> self.pipelineRank = self.__pipelineRank__[pipeline] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.pipelineRank = -1 <NEW_LINE> <DEDENT> self.search = search <NEW_LINE> if self.__searchRank__.has_key(search): <NEW_LINE> <INDENT> self.searchRank = self.__searchRank__[search] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.searchRank = -1 <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s, %s, %s : %d, %d, %d"%(self.group, self.pipeline, self.search, self.groupRank, self.pipelineRank, self.searchRank) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return (self.groupRank==other.groupRank) and (self.pipelineRank==other.pipelineRank) and (self.searchRank==other.searchRank) <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return (not self==other) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> if self.groupRank == other.groupRank: <NEW_LINE> <INDENT> if self.pipelineRank == other.pipelineRank: <NEW_LINE> <INDENT> return self.searchRank < other.searchRank <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.pipelineRank < other.pipelineRank <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return self.groupRank < other.groupRank <NEW_LINE> <DEDENT> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> if self.groupRank == other.groupRank: <NEW_LINE> <INDENT> if self.pipelineRank == other.pipelineRank: <NEW_LINE> <INDENT> return self.searchRank > other.searchRank <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.pipelineRank > other.pipelineRank <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return self.groupRank > other.groupRank <NEW_LINE> <DEDENT> <DEDENT> def __ge__(self, other): <NEW_LINE> <INDENT> return (not self < other) <NEW_LINE> <DEDENT> def __le__(self, other): <NEW_LINE> <INDENT> return (not self > other)
a simple wrapper for the group_pipeline_search combinations that knows how to compare them and find a preference this is done by mapping group, pipeline, search combinations into integers and then comparing the integers NOTE: bigger things are more preferred and the relative ranking is hard coded into immutable attributes of this class comparison is done first by group. If that is inconclusive, we then compare pipelines. If that is inconclusive, we then check search. we prefer: CBC over Burst; no pipeline is preferred; events with 'search' specified are preferred over events without 'search' specified WARNING: if we do not know about a particular group, pipeline, or search, we assign a rank of -infty because we don't know about this type of event
62599065fff4ab517ebcef72
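A comparison sketch (the snippet's dict.has_key calls mark it as Python 2-era code): CBC outranks Burst at the group level, so the pipeline and search ranks never get consulted here.

    cbc = GroupPipelineSearch('CBC', 'gstlal', 'LowMass')
    burst = GroupPipelineSearch('Burst', 'CWB', 'AllSky')
    print(cbc > burst)   # True: group rank 1 beats 0
    print(cbc)           # CBC, gstlal, LowMass : 1, 0, 1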
class ToTensor(object): <NEW_LINE> <INDENT> def __call__(self, sample): <NEW_LINE> <INDENT> image, category = sample['image'], sample['category'] <NEW_LINE> if len(image.shape) == 2: <NEW_LINE> <INDENT> image = image.reshape(image.shape[0], image.shape[1], 1) <NEW_LINE> <DEDENT> image = image.transpose((2, 0, 1)) <NEW_LINE> return {'image': torch.from_numpy(image), 'category': category}
Convert ndarrays to Tensors
62599065009cb60464d02c8f
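A shape-check sketch: a 2-D grayscale image gains a channel axis and is then transposed to PyTorch's channel-first layout.

    import numpy as np

    sample = {'image': np.zeros((28, 28), dtype=np.float32), 'category': 3}
    out = ToTensor()(sample)
    print(out['image'].shape)  # torch.Size([1, 28, 28])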
class Events(models.Model): <NEW_LINE> <INDENT> title = models.CharField(max_length=50) <NEW_LINE> date = models.DateField(default=timezone.now) <NEW_LINE> image_url = models.CharField(max_length=1000) <NEW_LINE> description = models.CharField(max_length=1000) <NEW_LINE> link = models.URLField() <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.title
The Events and Happenings model :fields: * Title : The title of the event * date : The date of the event * image_url : The URL of the image inside the event * description : The description of the event * link : The link of the event, i.e. what the event redirects to :Functions: * __unicode__ : Returns the Event title when the Python object is rendered as a string.
62599065442bda511e95d905
class AccountDetailSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> user = UserDetailSerializer() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Account <NEW_LINE> fields = [ 'number', 'user', 'balance', 'is_active', 'created_at' ]
Retrieve serializer class for Account model
62599065f548e778e596cce1
class Microplay: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def scraper_dealer(url): <NEW_LINE> <INDENT> url_pages = [] <NEW_LINE> http = tools.get_html(url) <NEW_LINE> html = http[2] <NEW_LINE> http_code = http[1] <NEW_LINE> if html is not None: <NEW_LINE> <INDENT> pages = html.cssselect('#contenido > div.ficha > div.barra-filtros > div.paginar > div > span > a') <NEW_LINE> if pages: <NEW_LINE> <INDENT> max = len(pages) <NEW_LINE> for i in range(1,max): <NEW_LINE> <INDENT> url_pages.append('{0}/page:{1}'.format(url,i)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return url_pages, http_code <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def scraper_links(url): <NEW_LINE> <INDENT> urls = [] <NEW_LINE> http = tools.get_html(url) <NEW_LINE> html = http[2] <NEW_LINE> http_code = http[1] <NEW_LINE> if html is not None: <NEW_LINE> <INDENT> links = html.cssselect('#contenido > div.ficha > div.juegos-similares.lista > ul > li > h2 > a') <NEW_LINE> if links: <NEW_LINE> <INDENT> for l in links: <NEW_LINE> <INDENT> urls.append(l.get('href')) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return urls, http_code <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def scraper_info(url): <NEW_LINE> <INDENT> http = tools.get_html(url) <NEW_LINE> html = http[2] <NEW_LINE> http_code = http[1] <NEW_LINE> product = MicorplayInfo() <NEW_LINE> name = html.cssselect('#contenido > div.ficha > h1') <NEW_LINE> if name: <NEW_LINE> <INDENT> name = name[0].text_content().strip() <NEW_LINE> product.name = tools.clear_name(name) <NEW_LINE> <DEDENT> platform = html.cssselect('#contenido > div.titulo-seccion.prod > h2') <NEW_LINE> if platform: <NEW_LINE> <INDENT> platform = platform[0].text_content() <NEW_LINE> product.platform = tools.clear_platform(platform).upper() <NEW_LINE> <DEDENT> price = html.cssselect('#contenido > div.ficha > div.informacion > div > div.precios > strong') <NEW_LINE> if price: <NEW_LINE> <INDENT> product.price = price[0].text_content().split('$')[1].replace('.', '').strip() <NEW_LINE> <DEDENT> product.stock = '' <NEW_LINE> product.url = url <NEW_LINE> product.image_url = Microplay.get_image(url, html) <NEW_LINE> return product, http_code <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_image(url, html): <NEW_LINE> <INDENT> if html is None: <NEW_LINE> <INDENT> http = tools.get_html(url) <NEW_LINE> html = http[2] <NEW_LINE> <DEDENT> image_url = html.cssselect('#contenido > div.ficha > div.img-portada > img') <NEW_LINE> if image_url: <NEW_LINE> <INDENT> image_url = image_url[0].get('src') <NEW_LINE> <DEDENT> if image_url == []: <NEW_LINE> <INDENT> image_url = '' <NEW_LINE> <DEDENT> return image_url
Scraper for www.microplay.cl
625990654e4d562566373b5f
class KaggleHubmapTestConfig(tfds.core.BuilderConfig): <NEW_LINE> <INDENT> def __init__(self, size, kaggle_data_version, **kwargs): <NEW_LINE> <INDENT> super(KaggleHubmapTestConfig, self).__init__(**kwargs) <NEW_LINE> self.size = size <NEW_LINE> self.kaggle_data_version = kaggle_data_version
BuilderConfig for KaggleHubmapTest.
62599066009cb60464d02c90
class VirtualMachine(Node, object): <NEW_LINE> <INDENT> def __init__(self, freq=1, name=None, vm_size=1): <NEW_LINE> <INDENT> super(VirtualMachine, self).__init__(name) <NEW_LINE> self.phys_parent = None <NEW_LINE> self.size = vm_size <NEW_LINE> self.freq = freq <NEW_LINE> self.pair = None <NEW_LINE> <DEDENT> def replicate(self): <NEW_LINE> <INDENT> vm = VirtualMachine(freq=self.freq, name=self.name, vm_size=self.size) <NEW_LINE> vm.assign_pair(self.pair) <NEW_LINE> return vm <NEW_LINE> <DEDENT> def assign_pair(self, vm_pair): <NEW_LINE> <INDENT> if type(vm_pair) is VirtualMachine: <NEW_LINE> <INDENT> self.pair = vm_pair <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Pair of this Virtual Machine should be another Virtual Machine") <NEW_LINE> <DEDENT> <DEDENT> def assign_parent(self, parent): <NEW_LINE> <INDENT> self.phys_parent = parent <NEW_LINE> <DEDENT> def get_parent(self): <NEW_LINE> <INDENT> return self.phys_parent <NEW_LINE> <DEDENT> def get_size(self): <NEW_LINE> <INDENT> return self.size
Represents a virtual machine in a topology. Args: self.phys_parent (PhysicalHost): the host that stores this VM self.size (int): the number of memory units needed to store this VM self.freq (int): the frequency this VM communicates with its pair self.pair (VirtualMachine): the other VM paired up with this VM
625990668a43f66fc4bf38e8
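A pairing sketch: assign_pair must be called before replicate(), since replicate re-assigns the original's pair and assign_pair rejects anything that is not a VirtualMachine (including None).

    vm_a = VirtualMachine(freq=5, name='vm-a', vm_size=2)
    vm_b = VirtualMachine(freq=5, name='vm-b', vm_size=2)
    vm_a.assign_pair(vm_b)
    vm_b.assign_pair(vm_a)

    clone = vm_a.replicate()   # same name, size, frequency and pair
    print(clone.get_size())    # 2
    print(clone.pair is vm_b)  # True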
class ClothesConfig(Config): <NEW_LINE> <INDENT> NAME = "clothes" <NEW_LINE> GPU_COUNT = 1 <NEW_LINE> IMAGES_PER_GPU = 2 <NEW_LINE> NUM_CLASSES = 1+5 <NEW_LINE> NUM_KPS = 24 <NEW_LINE> IMAGE_MIN_DIM = 512 <NEW_LINE> IMAGE_MAX_DIM = 512 <NEW_LINE> MINI_MASK_SHAPE = (56, 56) <NEW_LINE> KP_MASK_POOL_SIZE = 28 <NEW_LINE> KP_MASK_SHAPE = [56, 56] <NEW_LINE> MAX_GT_INSTANCES = 32 <NEW_LINE> RPN_ANCHOR_SCALES = (8, 16, 32, 64, 128) <NEW_LINE> TRAIN_ROIS_PER_IMAGE = 12 <NEW_LINE> STEPS_PER_EPOCH = 2000 <NEW_LINE> VALIDATION_STEPS = 200
Configuration for training on the clothes dataset. Derives from the base Config class and overrides values specific to the clothes dataset.
62599066a17c0f6771d5d752
class BlinkSensor(SensorEntity): <NEW_LINE> <INDENT> def __init__(self, data, camera, description: SensorEntityDescription): <NEW_LINE> <INDENT> self.entity_description = description <NEW_LINE> self._attr_name = f"{DOMAIN} {camera} {description.name}" <NEW_LINE> self.data = data <NEW_LINE> self._camera = data.cameras[camera] <NEW_LINE> self._attr_unique_id = f"{self._camera.serial}-{description.key}" <NEW_LINE> self._sensor_key = ( "temperature_calibrated" if description.key == "temperature" else description.key ) <NEW_LINE> self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, self._camera.serial)}, name=camera, manufacturer=DEFAULT_BRAND, model=self._camera.camera_type, ) <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.data.refresh() <NEW_LINE> try: <NEW_LINE> <INDENT> self._attr_native_value = self._camera.attributes[self._sensor_key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self._attr_native_value = None <NEW_LINE> _LOGGER.error( "%s not a valid camera attribute. Did the API change?", self._sensor_key )
A Blink camera sensor.
625990667047854f46340b0c
class OmegaNocTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> app.config.from_pyfile(os.path.join(omeganoc_tests.__path__[0], 'test-config.cfg')) <NEW_LINE> app.config['TESTING'] = True <NEW_LINE> self.app = app.test_client() <NEW_LINE> self.create_db() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> db.session.remove() <NEW_LINE> db.drop_all() <NEW_LINE> pass <NEW_LINE> <DEDENT> def create_db(self): <NEW_LINE> <INDENT> db.drop_all() <NEW_LINE> db.create_all() <NEW_LINE> user = User('admin', 'admin', True) <NEW_LINE> db.session.add(user) <NEW_LINE> db.session.commit()
Base class for all unit tests
62599066fff4ab517ebcef73
class TesneniTyp4(Tesneni): <NEW_LINE> <INDENT> r_2 = 0 <NEW_LINE> fi_G = 0 <NEW_LINE> def calcb_Gifirst(self): <NEW_LINE> <INDENT> self.b_Gi = (12 * self.r_2 * cos(self.fi_G) * self.b_Gt * self.Q_smax / self.E[0])**(1/2) <NEW_LINE> <DEDENT> def calcb_Gi(self,obj1,obj2,F_G0): <NEW_LINE> <INDENT> self.b_Gi = ((12 * self.r_2 * cos(self.fi_G) * F_G0)/(pi * self.d_Ge * self.E_G0) + (F_G0 / (pi * self.d_Ge * self.Q_smax))**2)**(1/2) <NEW_LINE> <DEDENT> def calcd_Ge(self): <NEW_LINE> <INDENT> self.d_Ge = self.d_Gt <NEW_LINE> <DEDENT> def calcE_Gm(self,F_G0): <NEW_LINE> <INDENT> pass
Type 4 gasket (Tesneni); computes the effective gasket width b_Gi and the effective gasket diameter d_Ge.
6259906655399d3f05627c79
class Credentials: <NEW_LINE> <INDENT> def __init__(self, name=None, usage=None): <NEW_LINE> <INDENT> self.host = name.host if name else '' <NEW_LINE> self.server = usage == 'accept' <NEW_LINE> <DEDENT> @property <NEW_LINE> def mechs(self): <NEW_LINE> <INDENT> if self.server: <NEW_LINE> <INDENT> return [0] if 'unknown_mech' in self.host else [1, 2] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [2]
Stub class for GSS credentials
6259906621bff66bcd7243be
class IPilgrimage(model.Schema): <NEW_LINE> <INDENT> title = schema.TextLine( title=_(u"Title"), required=False, ) <NEW_LINE> description = schema.Text( title=_(u"Description"), required=False, ) <NEW_LINE> body = RichTextField( title=_(u"Body"), required=False, )
Pilgrimage Type
625990660c0af96317c5790b
class AutomaticSpinner(object): <NEW_LINE> <INDENT> def __init__(self, label, show_time=True): <NEW_LINE> <INDENT> self.label = label <NEW_LINE> self.show_time = show_time <NEW_LINE> self.shutdown_event = multiprocessing.Event() <NEW_LINE> self.subprocess = multiprocessing.Process(target=self._target) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.subprocess.start() <NEW_LINE> <DEDENT> def __exit__(self, exc_type=None, exc_value=None, traceback=None): <NEW_LINE> <INDENT> self.shutdown_event.set() <NEW_LINE> self.subprocess.join() <NEW_LINE> <DEDENT> def _target(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> timer = Timer() if self.show_time else None <NEW_LINE> with Spinner(label=self.label, timer=timer) as spinner: <NEW_LINE> <INDENT> while not self.shutdown_event.is_set(): <NEW_LINE> <INDENT> spinner.step() <NEW_LINE> spinner.sleep() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> pass
Show a spinner on the terminal that automatically starts animating. This class shows a spinner on the terminal (just like :class:`Spinner` does) that automatically starts animating. This class should be used as a context manager using the :keyword:`with` statement. The animation continues for as long as the context is active. :class:`AutomaticSpinner` provides an alternative to :class:`Spinner` for situations where it is not practical for the caller to periodically call :func:`~Spinner.step()` to advance the animation, e.g. because you're performing a blocking call and don't fancy implementing threading or subprocess handling just to provide some user feedback. This works using the :mod:`multiprocessing` module by spawning a subprocess to render the spinner while the main process is busy doing something more useful. By using the :keyword:`with` statement you're guaranteed that the subprocess is properly terminated at the appropriate time.
62599066b7558d5895464adb
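A usage sketch straight from the docstring's contract: the spinner animates in a subprocess for as long as the with-block is active; time.sleep stands in for real blocking work.

    import time

    with AutomaticSpinner(label='Crunching numbers'):
        time.sleep(5)  # the spinner keeps animating while this blocks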
class Snakemake(PythonPackage): <NEW_LINE> <INDENT> homepage = "https://snakemake.readthedocs.io/en/stable/" <NEW_LINE> url = "https://pypi.io/packages/source/s/snakemake/snakemake-3.11.2.tar.gz" <NEW_LINE> version('3.11.2', '6bf834526078522b38d271fdf73e6b22') <NEW_LINE> depends_on('[email protected]:') <NEW_LINE> depends_on('py-requests', type=('build', 'run')) <NEW_LINE> depends_on('py-setuptools', type=('build', 'run')) <NEW_LINE> depends_on('py-wrapt', type=('build', 'run'))
Snakemake is an MIT-licensed workflow management system.
6259906699fddb7c1ca6397b
class JSONEncoder(json.JSONEncoder): <NEW_LINE> <INDENT> def default(self, o): <NEW_LINE> <INDENT> if isinstance(o, datetime): <NEW_LINE> <INDENT> return str(round(datetime.timestamp(o))) <NEW_LINE> <DEDENT> elif isinstance(o, set): <NEW_LINE> <INDENT> return list(o) <NEW_LINE> <DEDENT> elif hasattr(o, 'as_dict'): <NEW_LINE> <INDENT> return o.as_dict() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return json.JSONEncoder.default(self, o) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return [self.default(child_obj) for child_obj in o] <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> return json.JSONEncoder.default(self, o)
JSONEncoder that supports Home Assistant objects.
62599066442bda511e95d906
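A serialization sketch: datetimes are rendered as unix-timestamp strings and sets as lists via default(); everything JSON already understands passes through untouched.

    import json
    from datetime import datetime

    payload = {'when': datetime(2021, 1, 1), 'tags': {'alpha', 'beta'}}
    print(json.dumps(payload, cls=JSONEncoder))
    # e.g. {"when": "1609455600", "tags": ["alpha", "beta"]}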
class HADES_UNAUTH_DHCP_LEASE_TIME(Option): <NEW_LINE> <INDENT> default = timedelta(minutes=2) <NEW_LINE> type = timedelta <NEW_LINE> static_check = check.greater_than(timedelta(0))
DHCP lease time for unauth users. This lease time should be set rather short, so that unauthenticated users will quickly obtain a new address once they become authenticated.
62599066097d151d1a2c27c5
class Form( BoxLayout ) : <NEW_LINE> <INDENT> shared_navigation_controller = ObjectProperty( None ) <NEW_LINE> title = StringProperty( '' ) <NEW_LINE> background_color = ListProperty( None ) <NEW_LINE> def __init__( self, **kargs ) : <NEW_LINE> <INDENT> if 'shared_navigation_controller' not in kargs.keys() : <NEW_LINE> <INDENT> raise ValueError( 'You MUST provide a valid controller for shared_navigation_controller' ) <NEW_LINE> <DEDENT> if 'background_color' not in kargs.keys() : <NEW_LINE> <INDENT> kargs['background_color'] = kargs['shared_navigation_controller'].background_color <NEW_LINE> <DEDENT> super( Form, self ).__init__( **kargs ) <NEW_LINE> <DEDENT> def push( self ) : <NEW_LINE> <INDENT> self.shared_navigation_controller.push( self, title=self.title ) <NEW_LINE> <DEDENT> def pop( self ) : <NEW_LINE> <INDENT> self.shared_navigation_controller.pop() <NEW_LINE> <DEDENT> def on_push( self, controller ) : <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def on_pop( self, controller ) : <NEW_LINE> <INDENT> pass
Very simple class to manage your app's views. Use it as if it were an Android Activity or an iOS View Controller.
6259906667a9b606de54764f
class ProductPatchRequest(BaseModel): <NEW_LINE> <INDENT> doc_repo: Optional[HttpUrl] = None <NEW_LINE> title: Optional[str] = None <NEW_LINE> class Config: <NEW_LINE> <INDENT> extra = "forbid"
Model for a PATCH /products/<slug> request body.
62599066d268445f2663a709
class MessageRedirector: <NEW_LINE> <INDENT> def __init__(self, info=None, warn='stdout', errr='stdout'): <NEW_LINE> <INDENT> if info is None: <NEW_LINE> <INDENT> info = '' <NEW_LINE> <DEDENT> if not isinstance(info, str): <NEW_LINE> <INDENT> raise error('wrong info argument for MessageRedirector constructor') <NEW_LINE> <DEDENT> elif info in {'stdout', 'stderr', 'cout', 'cerr'}: <NEW_LINE> <INDENT> self.info = pysirfreg.newTextPrinter(info) <NEW_LINE> self.info_case = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.info = pysirfreg.newTextWriter(info) <NEW_LINE> self.info_case = 1 <NEW_LINE> <DEDENT> pysirfreg.openChannel(0, self.info) <NEW_LINE> if warn is None: <NEW_LINE> <INDENT> warn = '' <NEW_LINE> <DEDENT> if not isinstance(warn, str): <NEW_LINE> <INDENT> raise error('wrong warn argument for MessageRedirector constructor') <NEW_LINE> <DEDENT> elif warn in {'stdout', 'stderr', 'cout', 'cerr'}: <NEW_LINE> <INDENT> self.warn = pysirfreg.newTextPrinter(warn) <NEW_LINE> self.warn_case = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.warn = pysirfreg.newTextWriter(warn) <NEW_LINE> self.warn_case = 1 <NEW_LINE> <DEDENT> pysirfreg.openChannel(1, self.warn) <NEW_LINE> if errr is None: <NEW_LINE> <INDENT> errr = '' <NEW_LINE> <DEDENT> if not isinstance(errr, str): <NEW_LINE> <INDENT> raise error('wrong errr argument for MessageRedirector constructor') <NEW_LINE> <DEDENT> elif errr in {'stdout', 'stderr', 'cout', 'cerr'}: <NEW_LINE> <INDENT> self.errr = pysirfreg.newTextPrinter(errr) <NEW_LINE> self.errr_case = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.errr = pysirfreg.newTextWriter(errr) <NEW_LINE> self.errr_case = 1 <NEW_LINE> <DEDENT> pysirfreg.openChannel(2, self.errr) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> if self.info_case == 0: <NEW_LINE> <INDENT> try_calling(pysirfreg.deleteTextPrinter(self.info)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try_calling(pysirfreg.deleteTextWriter(self.info)) <NEW_LINE> <DEDENT> pysirfreg.closeChannel(0, self.info) <NEW_LINE> if self.warn_case == 0: <NEW_LINE> <INDENT> try_calling(pysirfreg.deleteTextPrinter(self.warn)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try_calling(pysirfreg.deleteTextWriter(self.warn)) <NEW_LINE> <DEDENT> pysirfreg.closeChannel(1, self.warn) <NEW_LINE> if self.errr_case == 0: <NEW_LINE> <INDENT> try_calling(pysirfreg.deleteTextPrinter(self.errr)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try_calling(pysirfreg.deleteTextWriter(self.errr)) <NEW_LINE> <DEDENT> pysirfreg.closeChannel(2, self.errr)
Class for SIRFReg printing redirection to files/stdout/stderr.
62599066f548e778e596cce3
class HQFormData(FormDataBase): <NEW_LINE> <INDENT> domain = models.CharField(max_length=200) <NEW_LINE> username = models.CharField(max_length=200, blank=True) <NEW_LINE> @property <NEW_LINE> def app_id(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return XFormInstance.get(self.instanceID).app_id <NEW_LINE> <DEDENT> except (ResourceNotFound, AttributeError, KeyError): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def _get_username(self): <NEW_LINE> <INDENT> if self.userID: <NEW_LINE> <INDENT> return user_id_to_username(self.userID) or "" <NEW_LINE> <DEDENT> return "" <NEW_LINE> <DEDENT> def update(self, instance): <NEW_LINE> <INDENT> super(HQFormData, self).update(instance) <NEW_LINE> if not hasattr(instance, "domain") or not instance.domain: <NEW_LINE> <INDENT> raise InvalidFormUpdateException("No domain found in instance %s!" % (instance.get_id)) <NEW_LINE> <DEDENT> self.domain = instance.domain <NEW_LINE> self.username = self._get_username() <NEW_LINE> <DEDENT> def matches_exact(self, instance): <NEW_LINE> <INDENT> return super(HQFormData, self).matches_exact(instance) and self.domain == instance.domain
HQ's implementation of FormData. In addition to the standard attributes we save additional HQ-specific things like the domain of the form, and some additional user data.
62599066e5267d203ee6cf6b