code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class PytestTester: <NEW_LINE> <INDENT> def __init__(self, module_name): <NEW_LINE> <INDENT> self.module_name = module_name <NEW_LINE> <DEDENT> def __call__(self, label='fast', verbose=1, extra_argv=None, doctests=False, coverage=False, durations=-1, tests=None): <NEW_LINE> <INDENT> import pytest <NEW_LINE> import warnings <NEW_LINE> module = sys.modules[self.module_name] <NEW_LINE> module_path = os.path.abspath(module.__path__[0]) <NEW_LINE> pytest_args = ["-l"] <NEW_LINE> pytest_args += ["-q"] <NEW_LINE> with warnings.catch_warnings(): <NEW_LINE> <INDENT> warnings.simplefilter("always") <NEW_LINE> from numpy.distutils import cpuinfo <NEW_LINE> <DEDENT> pytest_args += [ "-W ignore:Not importing directory", "-W ignore:numpy.dtype size changed", "-W ignore:numpy.ufunc size changed", "-W ignore::UserWarning:cpuinfo", ] <NEW_LINE> pytest_args += [ "-W ignore:the matrix subclass is not", "-W ignore:Importing from numpy.matlib is", ] <NEW_LINE> if doctests: <NEW_LINE> <INDENT> raise ValueError("Doctests not supported") <NEW_LINE> <DEDENT> if extra_argv: <NEW_LINE> <INDENT> pytest_args += list(extra_argv) <NEW_LINE> <DEDENT> if verbose > 1: <NEW_LINE> <INDENT> pytest_args += ["-" + "v"*(verbose - 1)] <NEW_LINE> <DEDENT> if coverage: <NEW_LINE> <INDENT> pytest_args += ["--cov=" + module_path] <NEW_LINE> <DEDENT> if label == "fast": <NEW_LINE> <INDENT> from numpy.testing import IS_PYPY <NEW_LINE> if IS_PYPY: <NEW_LINE> <INDENT> pytest_args += ["-m", "not slow and not slow_pypy"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pytest_args += ["-m", "not slow"] <NEW_LINE> <DEDENT> <DEDENT> elif label != "full": <NEW_LINE> <INDENT> pytest_args += ["-m", label] <NEW_LINE> <DEDENT> if durations >= 0: <NEW_LINE> <INDENT> pytest_args += ["--durations=%s" % durations] <NEW_LINE> <DEDENT> if tests is None: <NEW_LINE> <INDENT> tests = [self.module_name] <NEW_LINE> <DEDENT> pytest_args += ["--pyargs"] + list(tests) <NEW_LINE> _show_numpy_info() <NEW_LINE> try: <NEW_LINE> <INDENT> code 
= pytest.main(pytest_args) <NEW_LINE> <DEDENT> except SystemExit as exc: <NEW_LINE> <INDENT> code = exc.code <NEW_LINE> <DEDENT> return code == 0
Pytest test runner. A test function is typically added to a package's __init__.py like so:: from numpy._pytesttester import PytestTester test = PytestTester(__name__).test del PytestTester Calling this test function finds and runs all tests associated with the module and all its sub-modules. Attributes ---------- module_name : str Full path to the package to test. Parameters ---------- module_name : module name The name of the module to test. Notes ----- Unlike the previous ``nose``-based implementation, this class is not publicly exposed as it performs some ``numpy``-specific warning suppression.
6259903a8da39b475be043cd
class TestMain(unittest.TestCase): <NEW_LINE> <INDENT> @mock.patch('pulp.server.maintenance.monthly.RepoProfileApplicabilityManager.remove_orphans') <NEW_LINE> def test_main_calls_remove_orphans(self, remove_orphans): <NEW_LINE> <INDENT> monthly.main() <NEW_LINE> remove_orphans.assert_called_once_with()
Test the main() function.
6259903ab57a9660fecd2c5a
class NewDataEmail(EmailTemplate): <NEW_LINE> <INDENT> def __init__(self, send_from, processor_name, last_data_time, attachments=()): <NEW_LINE> <INDENT> subject = f'No New Data for Summit {processor_name[0].upper() + processor_name[1:]}' <NEW_LINE> body = (f'There has been no new data for the {processor_name} processor in {datetime.now() - last_data_time}.\n' + 'This is the ONLY email that will be recieved for this error untill it is resolved.') <NEW_LINE> send_to_list = processor_email_list <NEW_LINE> self.send_from = send_from <NEW_LINE> self.processor = processor_name <NEW_LINE> self.last_data_time = last_data_time <NEW_LINE> super().__init__(send_from, send_to_list, body, subject=subject, attachments=attachments) <NEW_LINE> <DEDENT> def send_resolution(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.resolution_body = (f'New data for {self.processor} ' + f'was found after {datetime.now() - self.last_data_time}.') <NEW_LINE> auth = json.loads(auth_file.read_text()) <NEW_LINE> user, passw = (auth.get('gmail_username'), auth.get('gmail_password')) <NEW_LINE> send_email(self.send_from, self.send_to, f'Resolved - {self.subject}', self.resolution_body, user, passw, attach=self.attachments) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e.args) <NEW_LINE> print('Handle the new exception!')
NewDataEmails subclass EmailTemplates and are specifically for being passed to Errors that are triggered when no new data is found.
6259903bd6c5a102081e3305
class NonRefCatalogContent(BaseContentMixin): <NEW_LINE> <INDENT> isReferenceable = None <NEW_LINE> def _register(self, *args, **kwargs): pass <NEW_LINE> def _unregister(self, *args, **kwargs): pass <NEW_LINE> def _updateCatalog(self, *args, **kwargs): pass <NEW_LINE> def _referenceApply(self, *args, **kwargs): pass <NEW_LINE> def _uncatalogUID(self, *args, **kwargs): pass <NEW_LINE> def _uncatalogRefs(self, *args, **kwargs): pass <NEW_LINE> def indexObject(self, *args, **kwargs): pass <NEW_LINE> def unindexObject(self, *args, **kwargs): pass <NEW_LINE> def reindexObject(self, *args, **kwargs): pass
Base class for content that is neither referenceable nor in the catalog
6259903b71ff763f4b5e897a
class TestScoreOnlyResponse(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testScoreOnlyResponse(self): <NEW_LINE> <INDENT> pass
ScoreOnlyResponse unit test stubs
6259903b30dc7b76659a0a11
class RunCmdPlugin(base.BaseCloudConfigPlugin): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def _unify_scripts(commands, env_header): <NEW_LINE> <INDENT> script_content = env_header + os.linesep <NEW_LINE> entries = 0 <NEW_LINE> for command in commands: <NEW_LINE> <INDENT> if isinstance(command, six.string_types): <NEW_LINE> <INDENT> script_content += command <NEW_LINE> <DEDENT> elif isinstance(command, (list, tuple)): <NEW_LINE> <INDENT> subcommand_content = [] <NEW_LINE> for subcommand in command: <NEW_LINE> <INDENT> subcommand_content.append("%s" % subcommand) <NEW_LINE> <DEDENT> script_content += ' '.join(subcommand_content) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise exception.CloudbaseInitException( "Unrecognized type '%r' in cmd content" % type(command)) <NEW_LINE> <DEDENT> script_content += os.linesep <NEW_LINE> entries += 1 <NEW_LINE> <DEDENT> LOG.info("Found %d cloud-config runcmd entries." % entries) <NEW_LINE> return script_content <NEW_LINE> <DEDENT> def process(self, data): <NEW_LINE> <INDENT> if not data: <NEW_LINE> <INDENT> LOG.info('No cloud-config runcmd entries found.') <NEW_LINE> return <NEW_LINE> <DEDENT> LOG.info("Running cloud-config runcmd entries.") <NEW_LINE> osutils = factory.get_os_utils() <NEW_LINE> env_header = osutils.get_default_script_exec_header() <NEW_LINE> try: <NEW_LINE> <INDENT> ret_val = userdatautils.execute_user_data_script( self._unify_scripts(data, env_header).encode()) <NEW_LINE> _, reboot = execcmd.get_plugin_return_value(ret_val) <NEW_LINE> return reboot <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> LOG.warning("An error occurred during runcmd execution: '%s'" % ex) <NEW_LINE> <DEDENT> return False
Aggregate and execute cloud-config runcmd entries in a shell. The runcmd entries can be a string or an array of strings. The prefered shell is given by the OS platform. Example for Windows, where cmd.exe is the prefered shell: #cloud-config runcmd: - ['dir', 'C:'] - 'dir C:'
6259903bb57a9660fecd2c5b
class Unit: <NEW_LINE> <INDENT> def __init__(self, spec=None, genre=HIDDEN, log_names=('net', 'I_net', 'v_m', 'act', 'v_m_eq', 'adapt')): <NEW_LINE> <INDENT> self.genre = genre <NEW_LINE> self.spec = spec <NEW_LINE> if self.spec is None: <NEW_LINE> <INDENT> self.spec = UnitSpec() <NEW_LINE> <DEDENT> self.log_names = log_names <NEW_LINE> self.logs = {name: [] for name in self.log_names} <NEW_LINE> self.reset() <NEW_LINE> self.avg_ss = self.spec.avg_init <NEW_LINE> self.avg_s = self.spec.avg_init <NEW_LINE> self.avg_m = self.spec.avg_init <NEW_LINE> self.avg_l = self.spec.avg_l_init <NEW_LINE> self.avg_s_eff = 0.0 <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.ex_inputs = [] <NEW_LINE> self.g_e = 0 <NEW_LINE> self.I_net = 0 <NEW_LINE> self.I_net_r = self.I_net <NEW_LINE> self.v_m = self.spec.v_m_init <NEW_LINE> self.v_m_eq = self.v_m <NEW_LINE> self.act_ext = None <NEW_LINE> self.act = 0 <NEW_LINE> self.act_nd = self.act <NEW_LINE> self.act_m = self.act <NEW_LINE> self.adapt = 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def act_eq(self): <NEW_LINE> <INDENT> return self.act <NEW_LINE> <DEDENT> @property <NEW_LINE> def avg_l_lrn(self): <NEW_LINE> <INDENT> return self.spec.avg_l_lrn(self) <NEW_LINE> <DEDENT> def cycle(self, phase, g_i=0.0, dt_integ=1): <NEW_LINE> <INDENT> return self.spec.cycle(self, phase, g_i=g_i, dt_integ=dt_integ) <NEW_LINE> <DEDENT> def calculate_net_in(self): <NEW_LINE> <INDENT> return self.spec.calculate_net_in(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def net(self): <NEW_LINE> <INDENT> return self.spec.g_bar_e * self.g_e <NEW_LINE> <DEDENT> def force_activity(self, act_ext): <NEW_LINE> <INDENT> assert len(self.ex_inputs) == 0 <NEW_LINE> self.act_ext = act_ext <NEW_LINE> self.spec.force_activity(self) <NEW_LINE> <DEDENT> def add_excitatory(self, inp_act): <NEW_LINE> <INDENT> self.ex_inputs.append(inp_act) <NEW_LINE> <DEDENT> def update_avg_l(self): <NEW_LINE> <INDENT> return self.spec.update_avg_l(self) <NEW_LINE> <DEDENT> def 
update_logs(self): <NEW_LINE> <INDENT> for name in self.logs.keys(): <NEW_LINE> <INDENT> self.logs[name].append(getattr(self, name)) <NEW_LINE> <DEDENT> <DEDENT> def show_config(self): <NEW_LINE> <INDENT> print('Parameters:') <NEW_LINE> for name in ['dt_v_m', 'dt_net', 'g_l', 'g_bar_e', 'g_bar_l', 'g_bar_i', 'e_rev_e', 'e_rev_l', 'e_rev_i', 'act_thr', 'act_gain']: <NEW_LINE> <INDENT> print(' {}: {:.2f}'.format(name, getattr(self.spec, name))) <NEW_LINE> <DEDENT> print('State:') <NEW_LINE> for name in ['g_e', 'I_net', 'v_m', 'act', 'v_m_eq']: <NEW_LINE> <INDENT> print(' {}: {:.2f}'.format(name, getattr(self, name)))
Leabra Unit (as implemented in emergent 8.0)
6259903b50485f2cf55dc161
class ReadWriteMultipleRegistersRequest(): <NEW_LINE> <INDENT> function_code = 23 <NEW_LINE> _rtu_byte_count_pos = 10 <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.read_address = kwargs.get('read_address', 0x00) <NEW_LINE> self.read_count = kwargs.get('read_count', 0) <NEW_LINE> self.write_address = kwargs.get('write_address', 0x00) <NEW_LINE> self.write_registers = kwargs.get('write_registers', None) <NEW_LINE> if not hasattr(self.write_registers, '__iter__'): <NEW_LINE> <INDENT> self.write_registers = [self.write_registers] <NEW_LINE> <DEDENT> self.write_count = len(self.write_registers) <NEW_LINE> self.write_byte_count = self.write_count * 2 <NEW_LINE> <DEDENT> def encode(self): <NEW_LINE> <INDENT> result = struct.pack('>HHHHB', self.read_address, self.read_count, self.write_address, self.write_count, self.write_byte_count) <NEW_LINE> for register in self.write_registers: <NEW_LINE> <INDENT> result += struct.pack('>H', register) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def decode(self, data): <NEW_LINE> <INDENT> self.read_address, self.read_count, self.write_address, self.write_count, self.write_byte_count = struct.unpack('>HHHHB', data[:9]) <NEW_LINE> self.write_registers = [] <NEW_LINE> for i in range(9, self.write_byte_count + 9, 2): <NEW_LINE> <INDENT> register = struct.unpack('>H', data[i:i + 2])[0] <NEW_LINE> self.write_registers.append(register) <NEW_LINE> <DEDENT> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> if not (1 <= self.read_count <= 0x07d): <NEW_LINE> <INDENT> return self.doException(merror.IllegalValue) <NEW_LINE> <DEDENT> if not (1 <= self.write_count <= 0x079): <NEW_LINE> <INDENT> return self.doException(merror.IllegalValue) <NEW_LINE> <DEDENT> if (self.write_byte_count != self.write_count * 2): <NEW_LINE> <INDENT> return self.doException(merror.IllegalValue) <NEW_LINE> <DEDENT> if not context.validate(self.function_code, self.write_address, self.write_count): <NEW_LINE> <INDENT> return 
self.doException(merror.IllegalAddress) <NEW_LINE> <DEDENT> if not context.validate(self.function_code, self.read_address, self.read_count): <NEW_LINE> <INDENT> return self.doException(merror.IllegalAddress) <NEW_LINE> <DEDENT> context.setValues(self.function_code, self.write_address, self.write_registers) <NEW_LINE> registers = context.getValues(self.function_code, self.read_address, self.read_count) <NEW_LINE> return ReadWriteMultipleRegistersResponse(registers) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> params = (self.read_address, self.read_count, self.write_address, self.write_count) <NEW_LINE> return "ReadWriteNRegisterRequest R(%d,%d) W(%d,%d)" % params
This function code performs a combination of one read operation and one write operation in a single MODBUS transaction. The write operation is performed before the read. Holding registers are addressed starting at zero. Therefore holding registers 1-16 are addressed in the PDU as 0-15. The request specifies the starting address and number of holding registers to be read as well as the starting address, number of holding registers, and the data to be written. The byte count specifies the number of bytes to follow in the write data field."
6259903bdc8b845886d54795
class RenderXHMTLTests(TestCase): <NEW_LINE> <INDENT> def mkdtemp(self): <NEW_LINE> <INDENT> tempDir = FilePath(self.mktemp()) <NEW_LINE> if not tempDir.exists(): <NEW_LINE> <INDENT> tempDir.makedirs() <NEW_LINE> <DEDENT> return tempDir <NEW_LINE> <DEDENT> def test_renderXHTML(self): <NEW_LINE> <INDENT> def mockCSS2XSLFO(xhtmlPath, xslfoPath): <NEW_LINE> <INDENT> return succeed(None) <NEW_LINE> <DEDENT> def mockFop(xslfoPath, pdfPath, configFile=None): <NEW_LINE> <INDENT> return succeed('pdf') <NEW_LINE> <DEDENT> def cb(pdfData): <NEW_LINE> <INDENT> self.assertIdentical(str, type(pdfData)) <NEW_LINE> self.assertEquals('pdf', pdfData) <NEW_LINE> self.assertFalse(tempDir.exists()) <NEW_LINE> <DEDENT> tempDir = self.mkdtemp() <NEW_LINE> d = renderXHTML( 'markup', tempDir=tempDir, css2xslfo=mockCSS2XSLFO, fop=mockFop) <NEW_LINE> d.addCallback(cb) <NEW_LINE> return d <NEW_LINE> <DEDENT> def test_renderXHTMLCSS2XSLFOFails(self): <NEW_LINE> <INDENT> def mockCSS2XSLFO(xslfoPath, xhtmlPath): <NEW_LINE> <INDENT> return fail(RuntimeError(1)) <NEW_LINE> <DEDENT> def mockFop(xslfoPath, pdfPath, configFile=None): <NEW_LINE> <INDENT> self.fail('Never get here') <NEW_LINE> <DEDENT> def cb(e): <NEW_LINE> <INDENT> self.assertEquals('1', str(e)) <NEW_LINE> self.assertTrue(tempDir.exists()) <NEW_LINE> <DEDENT> tempDir = self.mkdtemp() <NEW_LINE> d = renderXHTML( 'markup', tempDir=tempDir, css2xslfo=mockCSS2XSLFO, fop=mockFop) <NEW_LINE> d = self.assertFailure(d, RuntimeError) <NEW_LINE> d.addCallback(cb) <NEW_LINE> return d <NEW_LINE> <DEDENT> def test_renderXHTMLFopFails(self): <NEW_LINE> <INDENT> def mockCSS2XSLFO(xhtmlPath, xslfoPath): <NEW_LINE> <INDENT> return succeed(None) <NEW_LINE> <DEDENT> def mockFop(xslfoPath, pdfPath, configFile=None): <NEW_LINE> <INDENT> return fail(RuntimeError(2)) <NEW_LINE> <DEDENT> def cb(e): <NEW_LINE> <INDENT> self.assertEquals('2', str(e)) <NEW_LINE> self.assertTrue(tempDir.exists()) <NEW_LINE> <DEDENT> tempDir = self.mkdtemp() <NEW_LINE> d = 
renderXHTML( 'markup', tempDir=tempDir, css2xslfo=mockCSS2XSLFO, fop=mockFop) <NEW_LINE> self.assertFailure(d, RuntimeError) <NEW_LINE> d.addCallback(cb) <NEW_LINE> return d
Tests for L{documint.util.renderXHTML}.
6259903b30c21e258be999ee
class ExExit(sublime_plugin.TextCommand): <NEW_LINE> <INDENT> def run(self, edit, line_range=None): <NEW_LINE> <INDENT> w = self.view.window() <NEW_LINE> if w.active_view().is_dirty(): <NEW_LINE> <INDENT> w.run_command('save') <NEW_LINE> <DEDENT> w.run_command('close') <NEW_LINE> if len(w.views()) == 0: <NEW_LINE> <INDENT> w.run_command('close')
Ex command(s): :x[it], :exi[t] Like :wq, but write only when changes have been made. TODO: Support ranges, like :w.
6259903b8e05c05ec3f6f74b
class ExactInference(InferenceModule): <NEW_LINE> <INDENT> def initializeUniformly(self, gameState): <NEW_LINE> <INDENT> self.beliefs = DiscreteDistribution() <NEW_LINE> for p in self.legalPositions: <NEW_LINE> <INDENT> self.beliefs[p] = 1.0 <NEW_LINE> <DEDENT> self.beliefs.normalize() <NEW_LINE> <DEDENT> def observeUpdate(self, observation, gameState): <NEW_LINE> <INDENT> pacmanPos = gameState.getPacmanPosition() <NEW_LINE> jailPos = self.getJailPosition() <NEW_LINE> for key in self.beliefs.keys(): <NEW_LINE> <INDENT> self.beliefs[key] *= self.getObservationProb(observation, pacmanPos, key, jailPos) <NEW_LINE> <DEDENT> self.beliefs.normalize() <NEW_LINE> <DEDENT> def elapseTime(self, gameState): <NEW_LINE> <INDENT> helper = dict() <NEW_LINE> for k in self.allPositions: <NEW_LINE> <INDENT> helper[k] = 0 <NEW_LINE> <DEDENT> for pos in self.allPositions: <NEW_LINE> <INDENT> positionDistribution = self.getPositionDistribution(gameState, pos) <NEW_LINE> for k in self.allPositions: <NEW_LINE> <INDENT> dst = positionDistribution[k] * self.beliefs[pos] <NEW_LINE> helper[k] += dst <NEW_LINE> <DEDENT> <DEDENT> for k in self.allPositions: <NEW_LINE> <INDENT> self.beliefs[k] = helper[k] <NEW_LINE> <DEDENT> <DEDENT> def getBeliefDistribution(self): <NEW_LINE> <INDENT> return self.beliefs
The exact dynamic inference module should use forward algorithm updates to compute the exact belief function at each time step.
6259903bd164cc6175822155
class TransferTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> create_database() <NEW_LINE> <DEDENT> def test_NC_005816(self): <NEW_LINE> <INDENT> self.trans("GenBank/NC_005816.gb", "gb") <NEW_LINE> <DEDENT> def test_NC_000932(self): <NEW_LINE> <INDENT> self.trans("GenBank/NC_000932.gb", "gb") <NEW_LINE> <DEDENT> def test_NT_019265(self): <NEW_LINE> <INDENT> self.trans("GenBank/NT_019265.gb", "gb") <NEW_LINE> <DEDENT> def test_protein_refseq2(self): <NEW_LINE> <INDENT> self.trans("GenBank/protein_refseq2.gb", "gb") <NEW_LINE> <DEDENT> def test_no_ref(self): <NEW_LINE> <INDENT> self.trans("GenBank/noref.gb", "gb") <NEW_LINE> <DEDENT> def test_one_of(self): <NEW_LINE> <INDENT> self.trans("GenBank/one_of.gb", "gb") <NEW_LINE> <DEDENT> def test_cor6_6(self): <NEW_LINE> <INDENT> self.trans("GenBank/cor6_6.gb", "gb") <NEW_LINE> <DEDENT> def test_arab1(self): <NEW_LINE> <INDENT> self.trans("GenBank/arab1.gb", "gb") <NEW_LINE> <DEDENT> def trans(self, filename, format): <NEW_LINE> <INDENT> original_records = list(SeqIO.parse(open(filename, "rU"), format)) <NEW_LINE> server = BioSeqDatabase.open_database(driver = DBDRIVER, user = DBUSER, passwd = DBPASSWD, host = DBHOST, db = TESTDB) <NEW_LINE> db_name = "test_trans1_%s" % filename <NEW_LINE> db = server.new_database(db_name) <NEW_LINE> count = db.load(original_records) <NEW_LINE> self.assertEqual(count, len(original_records)) <NEW_LINE> server.commit() <NEW_LINE> biosql_records = [db.lookup(name=rec.name) for rec in original_records] <NEW_LINE> self.assertTrue(compare_records(original_records, biosql_records)) <NEW_LINE> db_name = "test_trans2_%s" % filename <NEW_LINE> db = server.new_database(db_name) <NEW_LINE> count = db.load(biosql_records) <NEW_LINE> self.assertEqual(count, len(original_records)) <NEW_LINE> biosql_records2 = [db.lookup(name=rec.name) for rec in original_records] <NEW_LINE> self.assertTrue(compare_records(original_records, biosql_records2)) <NEW_LINE> server.close() 
<NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> destroy_database()
Test file -> BioSQL, BioSQL -> BioSQL.
6259903b8a349b6b43687424
class AaaPolicy(A10BaseClass): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.ERROR_MSG = "" <NEW_LINE> self.required = [ "name"] <NEW_LINE> self.b_key = "aaa-policy" <NEW_LINE> self.a10_url="/axapi/v3/aam/aaa-policy/{name}/stats" <NEW_LINE> self.DeviceProxy = "" <NEW_LINE> self.aaa_rule_list = [] <NEW_LINE> self.stats = {} <NEW_LINE> self.name = "" <NEW_LINE> for keys, value in kwargs.items(): <NEW_LINE> <INDENT> setattr(self,keys, value)
Class Description:: Statistics for the object aaa-policy. Class aaa-policy supports CRUD Operations and inherits from `common/A10BaseClass`. This class is the `"PARENT"` class for this module.` :param aaa_rule_list: {"minItems": 1, "items": {"type": "aaa-rule"}, "uniqueItems": true, "array": [{"required": ["index"], "properties": {}}], "type": "array", "$ref": "/axapi/v3/aam/aaa-policy/{name}/aaa-rule/{index}"} :param name: {"description": "Specify AAA policy name", "format": "string", "minLength": 1, "oid": "1001", "optional": false, "maxLength": 63, "type": "string"} :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py` URL for this object:: `https://<Hostname|Ip address>//axapi/v3/aam/aaa-policy/{name}/stats`.
6259903b73bcbd0ca4bcb46a
class Designation(models.Model): <NEW_LINE> <INDENT> department = models.OneToOneField(Department, on_delete=models.CASCADE) <NEW_LINE> name = models.CharField(max_length=120, null=True, blank=True)
Designation information
6259903b711fe17d825e158c
class SimplifiedStaticFeedbackMetaMixin(object): <NEW_LINE> <INDENT> model = models.StaticFeedback <NEW_LINE> resultfields = FieldSpec('id', 'grade', 'is_passing_grade', 'saved_by', 'save_timestamp', 'delivery', 'rendered_view', candidates=['delivery__deadline__assignment_group__candidates__identifier'], period=['delivery__deadline__assignment_group__parentnode__parentnode__id', 'delivery__deadline__assignment_group__parentnode__parentnode__short_name', 'delivery__deadline__assignment_group__parentnode__parentnode__long_name'], subject=['delivery__deadline__assignment_group__parentnode__parentnode__parentnode__id', 'delivery__deadline__assignment_group__parentnode__parentnode__parentnode__short_name', 'delivery__deadline__assignment_group__parentnode__parentnode__parentnode__long_name'], assignment=['delivery__deadline__assignment_group__parentnode__id', 'delivery__deadline__assignment_group__parentnode__short_name', 'delivery__deadline__assignment_group__parentnode__long_name'], assignment_group=['delivery__deadline__assignment_group', 'delivery__deadline__assignment_group__name'], delivery=['delivery__time_of_delivery', 'delivery__number', 'delivery__delivered_by']) <NEW_LINE> searchfields = FieldSpec('delivery__deadline__assignment_group__parentnode__parentnode__parentnode__short_name', 'delivery__deadline__assignment_group__parentnode__parentnode__parentnode__long_name', 'delivery__deadline__assignment_group__parentnode__parentnode__short_name', 'delivery__deadline__assignment_group__parentnode__parentnode__long_name', 'delivery__deadline__assignment_group__parentnode__short_name', 'delivery__deadline__assignment_group__parentnode__long_name', 'delivery__number') <NEW_LINE> filters = FilterSpecs(FilterSpec('id'), FilterSpec('delivery'), ForeignFilterSpec('delivery__deadline__assignment_group__parentnode__parentnode', FilterSpec('start_time'), FilterSpec('end_time')), )
Defines the django model to be used, resultfields returned by search and which fields can be used to search for a StaticFeedback object using the Simplified API
6259903b15baa72349463179
class AwsSupportRDSSecurityGroupsYellow(AwsSupportRDSSecurityGroups): <NEW_LINE> <INDENT> active = True <NEW_LINE> def __init__(self, app): <NEW_LINE> <INDENT> self.title = "RDS Security Groups: Ingress is restricted for flagged ports" <NEW_LINE> self.description = ( "Checks that there are no Security Groups associated with RDS Instances allowing inbound access from any " "address (IP/0) to a list of restricted ports including common database ports and FTP. Open access to the " "following ports is flagged: 20, 21, 22, 1433, 1434, 3306, 3389, 4333, 5432, 5500." ) <NEW_LINE> self.why_is_it_important = ( "If these ports are globally accessible, " "then it may be possible that an unauthorized person can gain access to the database, " "and the potentially sensitive data contained inside." ) <NEW_LINE> super(AwsSupportRDSSecurityGroupsYellow, self).__init__(app) <NEW_LINE> <DEDENT> def evaluate(self, event, item, whitelist=[]): <NEW_LINE> <INDENT> compliance_type = "COMPLIANT" <NEW_LINE> self.annotation = "" <NEW_LINE> if item["status"] == "warning": <NEW_LINE> <INDENT> compliance_type = "NON_COMPLIANT" <NEW_LINE> self.annotation = ( f'The RDS security group "{item["metadata"][1]}" in region "{item["metadata"][0]}" ' "has very permissive access to IP ranges and/or ports." ) <NEW_LINE> <DEDENT> return self.build_evaluation( item["resourceId"], compliance_type, event, self.resource_type, self.annotation, )
Subclass checking for port mismatch between the ELB and VPC.
6259903bd4950a0f3b111730
class SlimServerMessage(SlimMessage): <NEW_LINE> <INDENT> header_size = 6 <NEW_LINE> @classmethod <NEW_LINE> def name_from_data(cls, data): <NEW_LINE> <INDENT> return struct.unpack('! 4s', data[2:6])[0].decode('ascii').lower() <NEW_LINE> <DEDENT> def add_header(self, binarydata): <NEW_LINE> <INDENT> header = struct.pack('! H 4s', len(binarydata) + 4, bytearray(self.name.lower(), 'ascii')) <NEW_LINE> result = header + binarydata <NEW_LINE> return result
Messages sent from a server to the client. They hav a header of six bytes (H 4s) that contains the message-length of the body (including command name, excluding the length Word) and the command name command names are lowercase
6259903b23849d37ff85229a
class StagingConfig(Config): <NEW_LINE> <INDENT> DEBUG = True <NEW_LINE> DATABASE_URL = os.getenv('DATABASE_TEST_URL')
Configurations for Staging.
6259903b66673b3332c315d8
class Usuario(models.Model): <NEW_LINE> <INDENT> TIPOS = ( ('A', 'Administrador'), ('U', 'Usuario' ), ('C', 'Cliente' ), ) <NEW_LINE> cedula = models.IntegerField(max_length=8, primary_key = True) <NEW_LINE> carnet = models.CharField(max_length=8, unique = True) <NEW_LINE> nombre = models.CharField(max_length=32) <NEW_LINE> fecha = models.DateTimeField('fecha de inscripcion') <NEW_LINE> tipo = models.CharField(max_length=1, choices=TIPOS) <NEW_LINE> saldo = models.DecimalField(max_digits=9, decimal_places=2) <NEW_LINE> def saldo_str(self): return str(self.saldo).rstrip('0').rstrip('.') <NEW_LINE> saldo_str.short_description = 'Saldo' <NEW_LINE> def aporte(self): <NEW_LINE> <INDENT> movs = MovimientoVentas.objects.filter(factura__usuario = self).exclude(tipo='D').exclude(tipo='R').aggregate(r=Sum('cantidad')); <NEW_LINE> return movs['r'] <NEW_LINE> <DEDENT> def aporte_str(self): return str(self.aporte()).rstrip('0').rstrip('.') <NEW_LINE> aporte_str.short_description = 'Aporte' <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.nombre <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ['fecha']
Usuario que usa el sistema
6259903b94891a1f408b9fe8
class DevelopmentError(Exception): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> assert isinstance(message, str), 'Invalid string message %s' % message <NEW_LINE> self.message = message <NEW_LINE> Exception.__init__(self, message)
Wraps exceptions that are related to wrong development usage.
6259903b63f4b57ef0086665
class TestConcentration(unittest.TestCase): <NEW_LINE> <INDENT> def test_perm3(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> quantity.Concentration(1.0, "m^-3") <NEW_LINE> self.fail('Allowed invalid unit type "m^-3".') <NEW_LINE> <DEDENT> except quantity.QuantityError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def test_molperm3(self): <NEW_LINE> <INDENT> q = quantity.Concentration(1.0, "mol/m^3") <NEW_LINE> self.assertAlmostEqual(q.value, 1.0, 6) <NEW_LINE> self.assertAlmostEqual(q.value_si, 1.0, delta=1e-6) <NEW_LINE> self.assertEqual(q.units, "mol/m^3") <NEW_LINE> <DEDENT> def test_moleculesperm3(self): <NEW_LINE> <INDENT> q = quantity.Concentration(1.0, "molecules/m^3") <NEW_LINE> self.assertAlmostEqual(q.value, 1.0, 6) <NEW_LINE> self.assertAlmostEqual(q.value_si * constants.Na, 1.0, delta=1e-6) <NEW_LINE> self.assertEqual(q.units, "molecules/m^3")
Contains unit tests of the Concentration unit type object.
6259903b3c8af77a43b6882d
class HttpError(ImmediateHttpResponse): <NEW_LINE> <INDENT> def __init__(self, status: Status, code_index: int=0, message: str=None, developer_message: str=None, meta: Any=None, headers: StringMap=None): <NEW_LINE> <INDENT> super().__init__( Error.from_status(status, code_index, message, developer_message, meta), status, headers )
An error response that should be returned immediately.
6259903b15baa7234946317a
class Genotype(tuple, Paranoid): <NEW_LINE> <INDENT> def __init__(self, *args): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> assert self.run_paranoia_checks() <NEW_LINE> <DEDENT> def squeeze(self): <NEW_LINE> <INDENT> return str.join('', [str(_) for _ in tuple(self)]) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"[{str.join(', ', [repr(_) for _ in tuple(self)])}]" <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return object.__repr__(self) <NEW_LINE> <DEDENT> def run_paranoia_checks(self) -> bool: <NEW_LINE> <INDENT> return super().run_paranoia_checks()
A tuple containing the organism’s actual genes (their values).
6259903bbe383301e02549fa
class MyPrettyPrinter(leoBeautify.PythonTokenBeautifier): <NEW_LINE> <INDENT> def __init__ (self,c): <NEW_LINE> <INDENT> leoBeautify.PythonTokenBeautifier.__init__(self,c) <NEW_LINE> self.tracing = False
An example subclass of Leo's PrettyPrinter class. Not all the base class methods are shown here: just the ones you are likely to want to override.
6259903b507cdc57c63a5f7e
class UpdateFlowRequest(BaseRequest): <NEW_LINE> <INDENT> def __init__(self, ts_connection, new_project_id=None, new_owner_id=None, is_certified_flag=None, certification_note=None): <NEW_LINE> <INDENT> super().__init__(ts_connection) <NEW_LINE> self._new_project_id = new_project_id <NEW_LINE> self._new_owner_id = new_owner_id <NEW_LINE> self._is_certified_flag = is_certified_flag <NEW_LINE> self._certification_note = certification_note <NEW_LINE> self.base_update_flow_request <NEW_LINE> <DEDENT> @property <NEW_LINE> def optional_project_param_keys(self): <NEW_LINE> <INDENT> return ['id'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def optional_owner_param_keys(self): <NEW_LINE> <INDENT> return ['id'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def optional_project_param_values(self): <NEW_LINE> <INDENT> return [self._new_project_id] <NEW_LINE> <DEDENT> @property <NEW_LINE> def optional_owner_param_values(self): <NEW_LINE> <INDENT> return [self._new_owner_id] <NEW_LINE> <DEDENT> @property <NEW_LINE> def base_update_flow_request(self): <NEW_LINE> <INDENT> self._request_body.update({'flow': {}}) <NEW_LINE> return self._request_body <NEW_LINE> <DEDENT> @property <NEW_LINE> def modified_update_flow_request(self): <NEW_LINE> <INDENT> if any(self.optional_project_param_keys): <NEW_LINE> <INDENT> self._request_body['flow'].update({'project': {}}) <NEW_LINE> self._request_body['flow']['project'].update( self._get_parameters_dict(self.optional_project_param_keys, self.optional_project_param_values)) <NEW_LINE> <DEDENT> if any(self.optional_owner_param_keys): <NEW_LINE> <INDENT> self._request_body['flow'].update({'owner': {}}) <NEW_LINE> self._request_body['flow']['owner'].update( self._get_parameters_dict(self.optional_owner_param_keys, self.optional_owner_param_values)) <NEW_LINE> <DEDENT> return self._request_body <NEW_LINE> <DEDENT> def get_request(self): <NEW_LINE> <INDENT> return self.modified_update_flow_request
Update flow request for generating API request URLs to Tableau Server. :param ts_connection: The Tableau Server connection object. :type ts_connection: class :param new_project_id: (Optional) The ID of the project to add the data source to. :type new_project_id: string :param new_owner_id: (Optional) The ID of the user who will own the flow. :type new_owner_id: string
6259903b30c21e258be999f1
class MemcacheKeygen(BaseKeyGenerator): <NEW_LINE> <INDENT> def __init__(self, memcache_client=None, counter_key=None, **kwargs): <NEW_LINE> <INDENT> super(MemcacheKeygen, self).__init__(**kwargs) <NEW_LINE> if counter_key is None: <NEW_LINE> <INDENT> raise ValueError('a counter key is required') <NEW_LINE> <DEDENT> self.counter_key = counter_key <NEW_LINE> self._mc = memcache_client <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> ckey = self.counter_key <NEW_LINE> start = self.start <NEW_LINE> mc = self._mc <NEW_LINE> lock = Lock() <NEW_LINE> lock.acquire() <NEW_LINE> current = mc.get(ckey) <NEW_LINE> if current is None: <NEW_LINE> <INDENT> mc.set(ckey, 0) <NEW_LINE> <DEDENT> lock.release() <NEW_LINE> while True: <NEW_LINE> <INDENT> i = int(mc.incr(ckey)) + start - 1 <NEW_LINE> yield self.encode(i)
Creates keys in-memory. Keys are always generated in increasing order.
6259903b287bf620b6272dce
class SortBigList: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.input_file = None <NEW_LINE> self.output_file = None <NEW_LINE> <DEDENT> def set_input_data(self, file_path_name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if not isinstance(file_path_name, str): <NEW_LINE> <INDENT> raise InvalidInput <NEW_LINE> <DEDENT> elif not os.path.isfile(file_path_name): <NEW_LINE> <INDENT> raise InvalidValue <NEW_LINE> <DEDENT> elif '.csv' not in file_path_name: <NEW_LINE> <INDENT> raise InvalidExtension <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.input_file = file_path_name <NEW_LINE> return 'OK' <NEW_LINE> <DEDENT> <DEDENT> except InvalidValue: <NEW_LINE> <INDENT> return 'File does not exist in path' <NEW_LINE> <DEDENT> except InvalidInput: <NEW_LINE> <INDENT> return 'File name must be a string' <NEW_LINE> <DEDENT> except InvalidExtension: <NEW_LINE> <INDENT> return 'File name must end with csv' <NEW_LINE> <DEDENT> <DEDENT> def set_output_data(self, file_path_name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if not isinstance(file_path_name, str): <NEW_LINE> <INDENT> raise InvalidInput <NEW_LINE> <DEDENT> elif '.csv' not in file_path_name: <NEW_LINE> <INDENT> raise InvalidExtension <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.output_file = file_path_name <NEW_LINE> return 'OK' <NEW_LINE> <DEDENT> <DEDENT> except InvalidInput: <NEW_LINE> <INDENT> return 'File name must be a string' <NEW_LINE> <DEDENT> except InvalidExtension: <NEW_LINE> <INDENT> return 'File name must end with csv' <NEW_LINE> <DEDENT> <DEDENT> def execute_merge_sort(self, clean_list): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if not isinstance(clean_list, list): <NEW_LINE> <INDENT> big_list = open(self.input_file, "r") <NEW_LINE> all_text = big_list.read() <NEW_LINE> clean_list = list(ast.literal_eval(all_text)) <NEW_LINE> <DEDENT> if len(clean_list) > 1: <NEW_LINE> <INDENT> half = len(clean_list)//2 <NEW_LINE> left = clean_list[:half] <NEW_LINE> right = 
clean_list[half:] <NEW_LINE> self.execute_merge_sort(left) <NEW_LINE> self.execute_merge_sort(right) <NEW_LINE> i = 0 <NEW_LINE> j = 0 <NEW_LINE> k = 0 <NEW_LINE> while i < len(left) and j < len(right): <NEW_LINE> <INDENT> if left[i] < right[j]: <NEW_LINE> <INDENT> clean_list[k] = left[i] <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> clean_list[k] = right[j] <NEW_LINE> j += 1 <NEW_LINE> <DEDENT> k += 1 <NEW_LINE> <DEDENT> while i < len(left): <NEW_LINE> <INDENT> clean_list[k] = left[i] <NEW_LINE> i += 1 <NEW_LINE> k += 1 <NEW_LINE> <DEDENT> while j < len(right): <NEW_LINE> <INDENT> clean_list[k] = right[j] <NEW_LINE> j += 1 <NEW_LINE> k += 1 <NEW_LINE> <DEDENT> <DEDENT> final_file = open(self.output_file, 'w') <NEW_LINE> final_file.write(str(clean_list)) <NEW_LINE> final_file.close() <NEW_LINE> return 'Output file created' <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return 'File must contain numbers separated by commas only'
This class should be used to sort large amounts of data from CSV files. The class has two properties, input_file and output_file, which are strings holding the paths used to read from a file and write to a file. It also has a merge sort to sort the values read from a file, as long as the file is properly formatted.
6259903b6e29344779b01836
class Registry(object): <NEW_LINE> <INDENT> __metaclass__ = DeclarativeMetaclass <NEW_LINE> def __init__(self, model): <NEW_LINE> <INDENT> self.model = model <NEW_LINE> self.has_admin = self._has_admin() <NEW_LINE> self.fields['model'] = self.model <NEW_LINE> self.fields['verbose_name'] = unicode(self.model._meta.verbose_name) <NEW_LINE> self.fields['verbose_name_plural'] = unicode(self.model._meta.verbose_name_plural) <NEW_LINE> if 'url' not in self.fields: <NEW_LINE> <INDENT> self.fields['url'] = self._get_url() <NEW_LINE> <DEDENT> if 'icon' not in self.fields: <NEW_LINE> <INDENT> self.fields['icon'] = self._get_icon() <NEW_LINE> <DEDENT> if 'packages' in self.fields: <NEW_LINE> <INDENT> if not isinstance(self.fields['packages'], tuple): <NEW_LINE> <INDENT> raise FieldError(_('Registry field packages must be of type tuple')) <NEW_LINE> <DEDENT> self._test_packages(self.fields['packages']) <NEW_LINE> <DEDENT> <DEDENT> def _test_packages(self, packages): <NEW_LINE> <INDENT> for package in packages: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> __import__(package) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> exception = 'Plugin dependency package `%(package)s` could not be imported. ' 'Try `pip install %(package)s`?' 
<NEW_LINE> raise PackageImportError(_(exception) % { 'package': package, }) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _get_icon(self): <NEW_LINE> <INDENT> default_plugin_url = 'plugin-media' <NEW_LINE> return '/%s/%s/images/icon.png' % ( default_plugin_url, unicode(self.model._meta.verbose_name_plural.lower()) ) <NEW_LINE> <DEDENT> def _get_url(self): <NEW_LINE> <INDENT> url = { 'add': '', 'search': '', 'list': '', } <NEW_LINE> admin_add = 'admin:%s_%s_add' % ( self.model._meta.app_label, self.model._meta.module_name, ) <NEW_LINE> admin_index = 'admin:%s_%s_changelist' % ( self.model._meta.app_label, self.model._meta.module_name, ) <NEW_LINE> if self.has_admin: <NEW_LINE> <INDENT> url.update({ 'add': self._reverse(admin_add), 'search': self._reverse(admin_index), 'list': self._reverse(admin_index) }) <NEW_LINE> <DEDENT> return url <NEW_LINE> <DEDENT> def _has_admin(self): <NEW_LINE> <INDENT> for model in admin_site._registry.keys(): <NEW_LINE> <INDENT> if self.model is model: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def _reverse(self, namespace): <NEW_LINE> <INDENT> return lazy_reverse(namespace)
Base registry class for core and plugin applications Example Registry: from tendenci.core.registry import site from tendenci.core.registry.base import CoreRegistry, lazy_reverse from tendenci.addons.models import ExampleModel class ExampleRegistry(CoreRegistry): version = '1.0' author = 'Glen Zangirolami' author_email = '[email protected]' description = 'Create staff pages easily with photo, position, bio and more...' url = { 'add': lazy_reverse('page.add'), 'search': lazy_reverse('page.search'), } site.register(ExampleModel, ExampleRegistry)
6259903b30dc7b76659a0a15
class template_element(object): <NEW_LINE> <INDENT> def __init__(self,title="__Nonefined__",image_url="__Nonefined__",subtitle="__Nonefined__",action="__Nonefined__",buttons=[]): <NEW_LINE> <INDENT> if title != "_Nonefined__": <NEW_LINE> <INDENT> self.title = title <NEW_LINE> <DEDENT> if image_url != "_Nonefined__": <NEW_LINE> <INDENT> self.image_url = image_url <NEW_LINE> <DEDENT> if subtitle != "_Nonefined__": <NEW_LINE> <INDENT> self.subtitle = subtitle <NEW_LINE> <DEDENT> if action != "_Nonefined__": <NEW_LINE> <INDENT> self.action = action <NEW_LINE> <DEDENT> self.buttons = buttons
Used to create elements inside templates
6259903bb57a9660fecd2c5f
class BezierCurve(object): <NEW_LINE> <INDENT> __slots__ = ('_control_points',) <NEW_LINE> def __init__(self, *control_points): <NEW_LINE> <INDENT> self._control_points = [] <NEW_LINE> for point in control_points: <NEW_LINE> <INDENT> self._control_points.append(ControlPoint(point)) <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, x): <NEW_LINE> <INDENT> from ControlPoint import ControlPoint <NEW_LINE> x = float(x) <NEW_LINE> x0 = self._control_points[0][0] <NEW_LINE> x1 = self._control_points[-1][0] <NEW_LINE> t = (x - x0) / (x1 - x0) <NEW_LINE> point = self._evaluate(self._control_points, t) <NEW_LINE> return point[0][1] <NEW_LINE> <DEDENT> def __contains__(self, x): <NEW_LINE> <INDENT> if self._control_points[0][0] <= x <= self._control_points[-1][0]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self._control_points[-1][0] - self._control_points[0][0] <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> P = [] <NEW_LINE> for p in self._control_points: <NEW_LINE> <INDENT> p = ', '.join(str(x) for x in p) <NEW_LINE> p = '({})'.format(p) <NEW_LINE> P.append(p) <NEW_LINE> <DEDENT> P = ', '.join(P) <NEW_LINE> return 'BezierCurve(' + P + ')' <NEW_LINE> <DEDENT> def _evaluate(self, control_points, t): <NEW_LINE> <INDENT> if len(control_points) > 1: <NEW_LINE> <INDENT> new_points = [] <NEW_LINE> for i in range(len(control_points)-1): <NEW_LINE> <INDENT> x0 = control_points[i][0] <NEW_LINE> x1 = control_points[i+1][0] <NEW_LINE> y0 = control_points[i][1] <NEW_LINE> y1 = control_points[i+1][1] <NEW_LINE> xt = x0 + t * (x1 - x0) <NEW_LINE> yt = y0 + t * (y1 - y0) <NEW_LINE> new_points.append([xt, yt]) <NEW_LINE> <DEDENT> return self._evaluate(new_points, t) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return control_points <NEW_LINE> <DEDENT> <DEDENT> def degree(self): <NEW_LINE> <INDENT> return len(self._control_points) - 1
An arbitrary degree two-dimensional Bezier curve. :: >>> b = BezierCurve((0, 0), (50, 100), (100, 0)) >>> b BezierCurve((0, 0), (50, 100), (100, 0)) >>> len(b) 100 >>> x = 25 >>> x in b True >>> b(x) 37.5
6259903b66673b3332c315da
class AUCCallback(LoaderMetricCallback): <NEW_LINE> <INDENT> def __init__( self, input_key: str, target_key: str, compute_per_class_metrics: bool = SETTINGS.compute_per_class_metrics, prefix: str = None, suffix: str = None, ): <NEW_LINE> <INDENT> super().__init__( metric=AUCMetric( compute_per_class_metrics=compute_per_class_metrics, prefix=prefix, suffix=suffix, ), input_key=input_key, target_key=target_key, ) <NEW_LINE> <DEDENT> def on_experiment_start(self, runner: "IRunner") -> None: <NEW_LINE> <INDENT> assert ( not runner.engine.use_fp16 ), "AUCCallback could not work within amp training" <NEW_LINE> return super().on_experiment_start(runner)
ROC-AUC metric callback. Args: input_key: input key to use for auc calculation, specifies our ``y_true``. target_key: output key to use for auc calculation, specifies our ``y_pred``. compute_per_class_metrics: boolean flag to compute per-class metrics (default: SETTINGS.compute_per_class_metrics or False). prefix: metric prefix suffix: metric suffix Examples: .. code-block:: python import torch from torch.utils.data import DataLoader, TensorDataset from catalyst import dl # sample data num_samples, num_features, num_classes = int(1e4), int(1e1), 4 X = torch.rand(num_samples, num_features) y = (torch.rand(num_samples,) * num_classes).to(torch.int64) # pytorch loaders dataset = TensorDataset(X, y) loader = DataLoader(dataset, batch_size=32, num_workers=1) loaders = {"train": loader, "valid": loader} # model, criterion, optimizer, scheduler model = torch.nn.Linear(num_features, num_classes) criterion = torch.nn.CrossEntropyLoss() optimizer = torch.optim.Adam(model.parameters()) scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, [2]) # model training runner = dl.SupervisedRunner( input_key="features", output_key="logits", target_key="targets", loss_key="loss" ) runner.train( model=model, criterion=criterion, optimizer=optimizer, scheduler=scheduler, loaders=loaders, logdir="./logdir", num_epochs=3, valid_loader="valid", valid_metric="accuracy03", minimize_valid_metric=False, verbose=True, callbacks=[ dl.AccuracyCallback( input_key="logits", target_key="targets", num_classes=num_classes ), dl.PrecisionRecallF1SupportCallback( input_key="logits", target_key="targets", num_classes=num_classes ), dl.AUCCallback(input_key="logits", target_key="targets"), ], ) .. note:: Please follow the `minimal examples`_ sections for more use cases. .. _`minimal examples`: https://github.com/catalyst-team/catalyst#minimal-examples # noqa: E501, W505
6259903b76d4e153a661db65
class BioDataFilter(DataFilter): <NEW_LINE> <INDENT> sample = filters.ModelChoiceFilter(field_name='entity', queryset=Sample.objects.all())
Filter the data endpoint. Enable filtering data by the sample. .. IMPORTANT:: :class:`DataViewSet` must be patched before using it in urls to enable this feature: .. code:: python DataViewSet.filter_class = BioDataFilter
6259903b004d5f362081f8d6
class PostThread(threading.Thread): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(PostThread, self).__init__() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> if tieba_queue.empty(): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("tieba_queue 队列大小:", end="") <NEW_LINE> print(str(tieba_queue.qsize())) <NEW_LINE> ever_page_url = tieba_queue.get() <NEW_LINE> post_url_list = crawler.post_url(ever_page_url) <NEW_LINE> for url in post_url_list: <NEW_LINE> <INDENT> post_queue.put(url) <NEW_LINE> <DEDENT> <DEDENT> time.sleep(random.uniform(0.8, 1.5))
从 tieba_queue 队列取贴吧每一页的 url, 调用 crawler 的 post_url 方法,将返回的该页下的所有帖子的 url 放进 post_queue
6259903b26068e7796d4db2b
class CameraCoords(AstrometryBase): <NEW_LINE> <INDENT> camera = None <NEW_LINE> allow_multiple_chips = False <NEW_LINE> @cached <NEW_LINE> def get_chipName(self): <NEW_LINE> <INDENT> xPupil, yPupil = (self.column_by_name('x_pupil'), self.column_by_name('y_pupil')) <NEW_LINE> return chipNameFromPupilCoords(xPupil, yPupil, camera=self.camera, allow_multiple_chips=self.allow_multiple_chips) <NEW_LINE> <DEDENT> @compound('xPix', 'yPix') <NEW_LINE> def get_pixelCoordinates(self): <NEW_LINE> <INDENT> if not self.camera: <NEW_LINE> <INDENT> raise RuntimeError("No camera defined. Cannot calculate pixel coordinates") <NEW_LINE> <DEDENT> chipNameList = self.column_by_name('chipName') <NEW_LINE> xPupil, yPupil = (self.column_by_name('x_pupil'), self.column_by_name('y_pupil')) <NEW_LINE> return pixelCoordsFromPupilCoords(xPupil, yPupil, chipName=chipNameList, camera=self.camera) <NEW_LINE> <DEDENT> @compound('xFocalPlane', 'yFocalPlane') <NEW_LINE> def get_focalPlaneCoordinates(self): <NEW_LINE> <INDENT> xPupil, yPupil = (self.column_by_name('x_pupil'), self.column_by_name('y_pupil')) <NEW_LINE> return focalPlaneCoordsFromPupilCoords(xPupil, yPupil, camera=self.camera)
Methods for getting coordinates from the camera object
6259903b26238365f5fadd3a
class RbacRuntimeError(RuntimeError): <NEW_LINE> <INDENT> pass
An error has occurred at runtime.
6259903b8da39b475be043d3
class Preference(models.Model): <NEW_LINE> <INDENT> id = models.AutoField(primary_key=True) <NEW_LINE> user = models.ForeignKey(User, on_delete=models.CASCADE) <NEW_LINE> title = models.CharField(max_length=80) <NEW_LINE> description = models.CharField(max_length=150) <NEW_LINE> active = models.BooleanField(default=True) <NEW_LINE> @classmethod <NEW_LINE> def create(cls, user, title, description, active): <NEW_LINE> <INDENT> preference = cls(user=user, title=title, description=description, active=active).save() <NEW_LINE> return preference
This class models a preference with 5 fields. id -- The ID to track the preference object. user -- A foreign key field that references a user. title -- The title of the preference. description -- The description of the preference. active -- Whether the preference is active or not.
6259903bb57a9660fecd2c60
class Canvas(BaseCanvas): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> BrushSets.inherit(type(self)) <NEW_LINE> self._extended_context = None <NEW_LINE> <DEDENT> def _on_draw(self, ctx): <NEW_LINE> <INDENT> if self._extended_context is None: <NEW_LINE> <INDENT> self._extended_context = ExtendedContext(ctx, self) <NEW_LINE> <DEDENT> return self.on_draw(self._extended_context) <NEW_LINE> <DEDENT> def on_draw(self, ctx): <NEW_LINE> <INDENT> raise NotImplementedError("on_draw method not implemented in subclass.") <NEW_LINE> <DEDENT> def draw_grid(ctx, x = 50, y = 50): <NEW_LINE> <INDENT> draw_grid(ctx, x, y) <NEW_LINE> <DEDENT> @brush <NEW_LINE> def write_text(cr, x, y, text, align = TextAlign.TOP_LEFT): <NEW_LINE> <INDENT> return write_text(cr, x, y, text, align)
X11 Canvas object. This class is meant to be used as a superclass and should not be instantiated directly. Subclasses should implement the :func:`on_draw` callback, which is invoked every time the canvas needs to be redrawn. Redraws happen at regular intervals in time, as specified by the ``interval`` attribute (also passed as an argument via the constructor).
6259903b596a897236128e84
@py2_iterable <NEW_LINE> class Streamer(object): <NEW_LINE> <INDENT> def __init__(self, stream): <NEW_LINE> <INDENT> if isinstance(stream, str) or isinstance(stream, basestring): <NEW_LINE> <INDENT> stream = io.StringIO(u'{}'.format(stream)) <NEW_LINE> <DEDENT> self.stream = stream <NEW_LINE> self._peek = self.stream.read(1) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> char = self._peek <NEW_LINE> self._peek = self.stream.read(1) <NEW_LINE> if self.stream.closed: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> if char == '': <NEW_LINE> <INDENT> self.stream.close() <NEW_LINE> raise StopIteration <NEW_LINE> <DEDENT> return char <NEW_LINE> <DEDENT> def peek(self): <NEW_LINE> <INDENT> return self._peek <NEW_LINE> <DEDENT> def isclosed(self): <NEW_LINE> <INDENT> return self.stream.closed
Wraps an io.StringIO (or an already-open text stream) and iterates a character at a time, instead of a line at a time
6259903b507cdc57c63a5f80
class JSONWebTokenSerializerCustomer(Serializer): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(JSONWebTokenSerializerCustomer, self).__init__(*args, **kwargs) <NEW_LINE> self.fields[self.username_field] = serializers.CharField() <NEW_LINE> self.fields['password'] = PasswordField(write_only=True) <NEW_LINE> <DEDENT> @property <NEW_LINE> def username_field(self): <NEW_LINE> <INDENT> return get_username_field() <NEW_LINE> <DEDENT> def validate(self, attrs): <NEW_LINE> <INDENT> credentials = { self.username_field: attrs.get(self.username_field), 'password': attrs.get('password') } <NEW_LINE> if all(credentials.values()): <NEW_LINE> <INDENT> user = authenticate(**credentials) <NEW_LINE> if user: <NEW_LINE> <INDENT> groups = user.groups.all() <NEW_LINE> custGroup = [] <NEW_LINE> if groups: <NEW_LINE> <INDENT> custGroup = [x for x in groups if x.name == "Customer"] <NEW_LINE> <DEDENT> if len(custGroup) == 0: <NEW_LINE> <INDENT> msg = _('Unable to login if user is not customer.') <NEW_LINE> raise serializers.ValidationError(msg) <NEW_LINE> <DEDENT> <DEDENT> if user: <NEW_LINE> <INDENT> if not user.is_active: <NEW_LINE> <INDENT> msg = _('User account is disabled.') <NEW_LINE> raise serializers.ValidationError(msg) <NEW_LINE> <DEDENT> payload = jwt_payload_handler(user) <NEW_LINE> return { 'token': jwt_encode_handler(payload), 'user': user } <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg = _('Unable to login with provided credentials.') <NEW_LINE> raise serializers.ValidationError(msg) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> msg = _('Must include "{username_field}" and "password".') <NEW_LINE> msg = msg.format(username_field=self.username_field) <NEW_LINE> raise serializers.ValidationError(msg)
Serializer class used to validate a username and password. 'username' is identified by the custom UserModel.USERNAME_FIELD. Returns a JSON Web Token that can be used to authenticate later calls.
6259903bac7a0e7691f736ce
class Checker(object): <NEW_LINE> <INDENT> executables = [] <NEW_LINE> extensions = [] <NEW_LINE> mimetypes = []
A base class for checkers
6259903b379a373c97d9a20e
class NameGender(): <NEW_LINE> <INDENT> def __init__(self, male_file_name, female_file_name): <NEW_LINE> <INDENT> self.males = self._load_dict(male_file_name) <NEW_LINE> self.females = self._load_dict(female_file_name) <NEW_LINE> <DEDENT> def _load_dict(self, file_name): <NEW_LINE> <INDENT> data = dict() <NEW_LINE> for line in open(file_name): <NEW_LINE> <INDENT> t = line.strip().split("\t") <NEW_LINE> if len(t) == 2: <NEW_LINE> <INDENT> name = t[0].lower() <NEW_LINE> score = float(t[1]) <NEW_LINE> data[name] = score <NEW_LINE> <DEDENT> <DEDENT> return data <NEW_LINE> <DEDENT> def _get_raw_male_score(self, name): <NEW_LINE> <INDENT> return self.males.get(name, -1.0) <NEW_LINE> <DEDENT> def _get_raw_female_score(self, name): <NEW_LINE> <INDENT> return self.females.get(name, -1.0) <NEW_LINE> <DEDENT> def get_gender_scores(self, name): <NEW_LINE> <INDENT> m = self._get_raw_male_score(name.lower()) <NEW_LINE> f = self._get_raw_female_score(name.lower()) <NEW_LINE> if m > 0 and f < 0: <NEW_LINE> <INDENT> return (1,0) <NEW_LINE> <DEDENT> elif m < 0 and f > 0: <NEW_LINE> <INDENT> return (0,1) <NEW_LINE> <DEDENT> elif m > 0 and f > 0: <NEW_LINE> <INDENT> tot = m + f <NEW_LINE> return (m/tot,f/tot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return (-1,-1)
This class handles two dictionaries containing the gender-association scores of names, loaded from the male and female files given to the constructor. Call get_gender_scores to get normalized scores for a name for both genders.
6259903ba4f1c619b294f77a
class ConcreteA: <NEW_LINE> <INDENT> value = 'A'
A concrete prototype
6259903bcad5886f8bdc596f
@keras_export('keras.metrics.Poisson') <NEW_LINE> class Poisson(MeanMetricWrapper): <NEW_LINE> <INDENT> def __init__(self, name='poisson', dtype=None): <NEW_LINE> <INDENT> super(Poisson, self).__init__(poisson, name, dtype=dtype)
Computes the Poisson metric between `y_true` and `y_pred`. `metric = y_pred - y_true * log(y_pred)` Args: name: (Optional) string name of the metric instance. dtype: (Optional) data type of the metric result. Standalone usage: >>> m = tf.keras.metrics.Poisson() >>> m.update_state([[0, 1], [0, 0]], [[1, 1], [0, 0]]) >>> m.result().numpy() 0.49999997 >>> m.reset_states() >>> m.update_state([[0, 1], [0, 0]], [[1, 1], [0, 0]], ... sample_weight=[1, 0]) >>> m.result().numpy() 0.99999994 Usage with `compile()` API: ```python model.compile(optimizer='sgd', loss='mse', metrics=[tf.keras.metrics.Poisson()]) ```
6259903bd164cc617582215b
class Connection: <NEW_LINE> <INDENT> def __init__(self, address, listener): <NEW_LINE> <INDENT> if not callable(listener): <NEW_LINE> <INDENT> raise TypeError("listener is not a callable. {} passed".format(type(listener))) <NEW_LINE> <DEDENT> self._address = address <NEW_LINE> self._socket = socket.socket(type=socket.SOCK_DGRAM) <NEW_LINE> self._socket.connect(address) <NEW_LINE> self._socket.setblocking(False) <NEW_LINE> self._selector = selectors.DefaultSelector() <NEW_LINE> self._selector.register(self._socket, selectors.EVENT_READ, listener) <NEW_LINE> self._selector_select = self._selector.select <NEW_LINE> self._closed = False <NEW_LINE> self.send = self._socket.send <NEW_LINE> <DEDENT> @property <NEW_LINE> def socket(self): <NEW_LINE> <INDENT> return self._socket <NEW_LINE> <DEDENT> def set_listener(self, listener): <NEW_LINE> <INDENT> if not callable(listener): <NEW_LINE> <INDENT> raise TypeError("listener is not a callable. {} passed".format(type(listener))) <NEW_LINE> <DEDENT> self._selector.modify(self._socket, selectors.EVENT_READ, listener) <NEW_LINE> <DEDENT> def tick(self): <NEW_LINE> <INDENT> if self._closed: return <NEW_LINE> events = self._selector_select(0) <NEW_LINE> for key, mask in events: <NEW_LINE> <INDENT> socket = key.fileobj <NEW_LINE> data = socket.recv(1024) <NEW_LINE> key.data(data, socket) <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> if self._closed: return <NEW_LINE> self._selector.unregister(self._socket) <NEW_LINE> self._socket.close() <NEW_LINE> self._closed = True <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.close() <NEW_LINE> self._selector.close()
Async UDP connection to a server. If you want to receive data from server say first a "hello" to the server. Once you have done with the server call :meth:`close`. Parameters: address ((host, port)): listener (callable): It uses the next signature listener(data, socket). Returns: An instance of this class. Raises: TypeError: If listener is not a callable.
6259903b91af0d3eaad3b01a
class ToyModel(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def sample_real(n=1, mean_top=[0, 2], std_top=0.2, weights_top=[0.5, 0.5], std_middle=0.2, std_bottom=0.2, weights_bottom=[0.5, 0.5]): <NEW_LINE> <INDENT> assert(sum(weights_top) == 1) <NEW_LINE> assert(sum(weights_bottom) == 1) <NEW_LINE> mean_x1 = ones(n) * mean_top[0] <NEW_LINE> mean_x1[rand(n) < weights_top[0]] = mean_top[1] <NEW_LINE> x1 = mean_x1 + randn(n) * std_top <NEW_LINE> x2 = x1 + randn(n) * std_middle <NEW_LINE> x3 = x1 + randn(n) * std_middle <NEW_LINE> mean_x4 = (x2 + x3) * 0.5 <NEW_LINE> mean_x4[rand(n) < weights_bottom[0]] = 0 <NEW_LINE> x4 = mean_x4 + randn(n) * std_bottom <NEW_LINE> mean_x5 = deepcopy(x3) <NEW_LINE> mean_x5[rand(n) < weights_bottom[0]] = 0 <NEW_LINE> x5 = mean_x5 + randn(n) * std_bottom <NEW_LINE> return {1:x1, 2:x2, 3:x3, 4:x4, 5:x5} <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_moralised_graph(): <NEW_LINE> <INDENT> graph = {} <NEW_LINE> graph[1] = [2, 3] <NEW_LINE> graph[2] = [1, 3, 4] <NEW_LINE> graph[3] = [1, 2, 4, 5] <NEW_LINE> graph[4] = [2, 3] <NEW_LINE> graph[5] = [3] <NEW_LINE> return graph <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def extract_edges(observations): <NEW_LINE> <INDENT> edges = Set() <NEW_LINE> graph = ToyModel.get_moralised_graph() <NEW_LINE> for node in graph.keys(): <NEW_LINE> <INDENT> for neighbour in graph[node]: <NEW_LINE> <INDENT> if node not in observations: <NEW_LINE> <INDENT> edges.add((node, neighbour)) <NEW_LINE> <DEDENT> if neighbour not in observations: <NEW_LINE> <INDENT> edges.add((neighbour, node)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return list(edges)
Defines a graphical model p(x1)p(x2|x1)p(x3|x1)p(x4|x2,x3)p(x5|x3) where the individual distributions are defined via various sample methods
6259903b73bcbd0ca4bcb46f
class LaMetricNotificationService(BaseNotificationService): <NEW_LINE> <INDENT> def __init__(self, hasslametricmanager, icon, display_time): <NEW_LINE> <INDENT> self.hasslametricmanager = hasslametricmanager <NEW_LINE> self._icon = icon <NEW_LINE> self._display_time = display_time <NEW_LINE> <DEDENT> def send_message(self, message="", **kwargs): <NEW_LINE> <INDENT> from lmnotify import SimpleFrame, Sound, Model <NEW_LINE> targets = kwargs.get(ATTR_TARGET) <NEW_LINE> data = kwargs.get(ATTR_DATA) <NEW_LINE> _LOGGER.debug("Targets/Data: %s/%s", targets, data) <NEW_LINE> icon = self._icon <NEW_LINE> sound = None <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> if "icon" in data: <NEW_LINE> <INDENT> icon = data["icon"] <NEW_LINE> <DEDENT> if "sound" in data: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> sound = Sound(category="notifications", sound_id=data["sound"]) <NEW_LINE> _LOGGER.debug("Adding notification sound %s", data["sound"]) <NEW_LINE> <DEDENT> except AssertionError: <NEW_LINE> <INDENT> _LOGGER.error("Sound ID %s unknown, ignoring", data["sound"]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> text_frame = SimpleFrame(icon, message) <NEW_LINE> _LOGGER.debug("Icon/Message/Duration: %s, %s, %d", icon, message, self._display_time) <NEW_LINE> frames = [text_frame] <NEW_LINE> if sound is not None: <NEW_LINE> <INDENT> frames.append(sound) <NEW_LINE> <DEDENT> _LOGGER.debug(frames) <NEW_LINE> model = Model(frames=frames) <NEW_LINE> lmn = self.hasslametricmanager.manager() <NEW_LINE> devices = lmn.get_devices() <NEW_LINE> for dev in devices: <NEW_LINE> <INDENT> if (targets is None) or (dev["name"] in targets): <NEW_LINE> <INDENT> lmn.set_device(dev) <NEW_LINE> lmn.send_notification(model, lifetime=self._display_time) <NEW_LINE> _LOGGER.debug("Sent notification to LaMetric %s", dev["name"])
Implement the notification service for LaMetric.
6259903b07d97122c4217e84
class DihedralSliceLayer(nn.layers.Layer): <NEW_LINE> <INDENT> def __init__(self, input_layer): <NEW_LINE> <INDENT> super(DihedralSliceLayer, self).__init__(input_layer) <NEW_LINE> <DEDENT> def get_output_shape_for(self, input_shape): <NEW_LINE> <INDENT> return (8 * input_shape[0],) + input_shape[1:] <NEW_LINE> <DEDENT> def get_output_for(self, input, *args, **kwargs): <NEW_LINE> <INDENT> return nn.utils.concatenate([ array_tf_0(input), array_tf_90(input), array_tf_180(input), array_tf_270(input), array_tf_0f(input), array_tf_90f(input), array_tf_180f(input), array_tf_270f(input), ], axis=0)
This layer stacks rotations of 0, 90, 180, and 270 degrees of the input, as well as their horizontal flips, along the batch dimension. If the input has shape (batch_size, num_channels, r, c), then the output will have shape (8 * batch_size, num_channels, r, c). Note that the stacking happens on axis 0, so a reshape to (8, batch_size, num_channels, r, c) will separate the slice axis.
6259903bd4950a0f3b111733
class CmdInventory(MuxCommand): <NEW_LINE> <INDENT> key = "inventory" <NEW_LINE> aliases = ["inv", "i"] <NEW_LINE> locks = "cmd:all()" <NEW_LINE> arg_regex = r"$" <NEW_LINE> def func(self): <NEW_LINE> <INDENT> if not self.caller.contents: <NEW_LINE> <INDENT> self.caller.msg("You are not carrying or wearing anything.") <NEW_LINE> return <NEW_LINE> <DEDENT> items = self.caller.contents <NEW_LINE> carry_table = evtable.EvTable(border="header") <NEW_LINE> wear_table = evtable.EvTable(border="header") <NEW_LINE> for item in items: <NEW_LINE> <INDENT> if not item.db.worn: <NEW_LINE> <INDENT> carry_table.add_row("|C%s|n" % item.name, item.db.desc or "") <NEW_LINE> <DEDENT> <DEDENT> if carry_table.nrows == 0: <NEW_LINE> <INDENT> carry_table.add_row("|CNothing.|n", "") <NEW_LINE> <DEDENT> string = "|wYou are carrying:\n%s" % carry_table <NEW_LINE> for item in items: <NEW_LINE> <INDENT> if item.db.worn: <NEW_LINE> <INDENT> wear_table.add_row("|C%s|n" % item.name, item.db.desc or "") <NEW_LINE> <DEDENT> <DEDENT> if wear_table.nrows == 0: <NEW_LINE> <INDENT> wear_table.add_row("|CNothing.|n", "") <NEW_LINE> <DEDENT> string += "|/|wYou are wearing:\n%s" % wear_table <NEW_LINE> self.caller.msg(string)
view inventory Usage: inventory inv Shows your inventory.
6259903b10dbd63aa1c71dbc
class FluxProfile(object): <NEW_LINE> <INDENT> def __init__(self, x_edges, x, counts, background, exposure, mask=None): <NEW_LINE> <INDENT> import pandas as pd <NEW_LINE> x_edges = np.asanyarray(x_edges) <NEW_LINE> x = np.asanyarray(x) <NEW_LINE> counts = np.asanyarray(counts) <NEW_LINE> background = np.asanyarray(background) <NEW_LINE> exposure = np.asanyarray(exposure) <NEW_LINE> mask = np.asanyarray(mask) <NEW_LINE> self.shape = x.shape <NEW_LINE> d = pd.DataFrame(index=np.arange(x.size)) <NEW_LINE> d['x'] = x.flat <NEW_LINE> d['label'] = np.digitize(x.flat, x_edges) - 1 <NEW_LINE> d['counts'] = counts.flat <NEW_LINE> d['background'] = background.flat <NEW_LINE> d['exposure'] = exposure.flat <NEW_LINE> if mask: <NEW_LINE> <INDENT> d['mask'] = mask.flat <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> d['mask'] = np.ones_like(d['x']) <NEW_LINE> <DEDENT> self.data = d <NEW_LINE> self.bins = np.arange(len(x_edges) + 1) <NEW_LINE> p = pd.DataFrame(index=np.arange(x_edges.size - 1)) <NEW_LINE> p['x_lo'] = x_edges[:-1] <NEW_LINE> p['x_hi'] = x_edges[1:] <NEW_LINE> p['x_center'] = 0.5 * (p['x_hi'] + p['x_lo']) <NEW_LINE> p['x_width'] = p['x_hi'] - p['x_lo'] <NEW_LINE> self.profile = p <NEW_LINE> self.x_edges = x_edges <NEW_LINE> <DEDENT> def compute(self, method='sum_first'): <NEW_LINE> <INDENT> d = self.data <NEW_LINE> g = d.groupby('label') <NEW_LINE> p = self.profile <NEW_LINE> p['n_entries'] = g['x'].aggregate(len) <NEW_LINE> for name in ['counts', 'background', 'exposure']: <NEW_LINE> <INDENT> p['{0}'.format(name)] = g[name].sum() <NEW_LINE> <DEDENT> p['excess'] = p['counts'] - p['background'] <NEW_LINE> p['flux'] = p['excess'] / p['exposure'] <NEW_LINE> <DEDENT> def plot(self, which='n_entries', xlabel='Distance (deg)', ylabel=None): <NEW_LINE> <INDENT> import matplotlib.pyplot as plt <NEW_LINE> if ylabel == None: <NEW_LINE> <INDENT> ylabel = which <NEW_LINE> <DEDENT> p = self.profile <NEW_LINE> x = p['x_center'] <NEW_LINE> xerr = 0.5 * p['x_width'] <NEW_LINE> y = 
p[which] <NEW_LINE> plt.errorbar(x, y, xerr=xerr, fmt='o'); <NEW_LINE> plt.xlabel(xlabel) <NEW_LINE> plt.ylabel(ylabel) <NEW_LINE> plt.grid() <NEW_LINE> <DEDENT> def save(self, filename): <NEW_LINE> <INDENT> pass
Compute flux profiles
6259903b8c3a8732951f773e
class ClassAttributeModificationWarning(type): <NEW_LINE> <INDENT> def __setattr__(cls, attr, value): <NEW_LINE> <INDENT> logger.warning('You are modifying class attribute of \'{}\' class. You better know what you are doing!' .format(cls.__name__)) <NEW_LINE> logger.debug(pformat(format_stack())) <NEW_LINE> super().__setattr__(attr, value)
Meta class that logs warnings when class's attributes are overridden.
6259903b63b5f9789fe86354
class Row(TypeEngine): <NEW_LINE> <INDENT> @property <NEW_LINE> def python_type(self) -> Optional[Type[Any]]: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _compiler_dispatch(cls, _visitor: Visitable, **_kw: Any) -> str: <NEW_LINE> <INDENT> return "ROW"
A type for rows.
6259903b379a373c97d9a210
class DependencyMFVI(nn.Module): <NEW_LINE> <INDENT> def __init__(self, max_iter=3): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.max_iter = max_iter <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"{self.__class__.__name__}(max_iter={self.max_iter})" <NEW_LINE> <DEDENT> @torch.enable_grad() <NEW_LINE> def forward(self, scores, mask, target=None): <NEW_LINE> <INDENT> logits = self.mfvi(*scores, mask) <NEW_LINE> marginals = logits.softmax(-1) <NEW_LINE> if target is None: <NEW_LINE> <INDENT> return marginals <NEW_LINE> <DEDENT> loss = F.cross_entropy(logits[mask], target[mask]) <NEW_LINE> return loss, marginals <NEW_LINE> <DEDENT> def mfvi(self, s_arc, s_sib, mask): <NEW_LINE> <INDENT> batch_size, seq_len = mask.shape <NEW_LINE> ls, rs = torch.stack(torch.where(mask.new_ones(seq_len, seq_len))).view(-1, seq_len, seq_len).sort(0)[0] <NEW_LINE> mask = mask.index_fill(1, ls.new_tensor(0), 1) <NEW_LINE> mask = (mask.unsqueeze(-1) & mask.unsqueeze(-2)).permute(2, 1, 0) <NEW_LINE> mask2o = mask.unsqueeze(1) & mask.unsqueeze(2) <NEW_LINE> mask2o = mask2o & ls.unsqueeze(-1).ne(ls.new_tensor(range(seq_len))).unsqueeze(-1) <NEW_LINE> mask2o = mask2o & rs.unsqueeze(-1).ne(rs.new_tensor(range(seq_len))).unsqueeze(-1) <NEW_LINE> s_arc = s_arc.permute(2, 1, 0) <NEW_LINE> s_sib = s_sib.permute(2, 1, 3, 0) * mask2o <NEW_LINE> q = s_arc <NEW_LINE> for _ in range(self.max_iter): <NEW_LINE> <INDENT> q = q.softmax(0) <NEW_LINE> q = s_arc + (q.unsqueeze(1) * s_sib).sum(2) <NEW_LINE> <DEDENT> return q.permute(2, 1, 0)
Mean Field Variational Inference for approximately calculating marginals of dependency trees :cite:`wang-tu-2020-second`.
6259903b07d97122c4217e85
class DescribeLoadBalancersRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.EcmRegion = None <NEW_LINE> self.LoadBalancerIds = None <NEW_LINE> self.LoadBalancerName = None <NEW_LINE> self.LoadBalancerVips = None <NEW_LINE> self.BackendPrivateIps = None <NEW_LINE> self.Offset = None <NEW_LINE> self.Limit = None <NEW_LINE> self.WithBackend = None <NEW_LINE> self.VpcId = None <NEW_LINE> self.Filters = None <NEW_LINE> self.SecurityGroup = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.EcmRegion = params.get("EcmRegion") <NEW_LINE> self.LoadBalancerIds = params.get("LoadBalancerIds") <NEW_LINE> self.LoadBalancerName = params.get("LoadBalancerName") <NEW_LINE> self.LoadBalancerVips = params.get("LoadBalancerVips") <NEW_LINE> self.BackendPrivateIps = params.get("BackendPrivateIps") <NEW_LINE> self.Offset = params.get("Offset") <NEW_LINE> self.Limit = params.get("Limit") <NEW_LINE> self.WithBackend = params.get("WithBackend") <NEW_LINE> self.VpcId = params.get("VpcId") <NEW_LINE> if params.get("Filters") is not None: <NEW_LINE> <INDENT> self.Filters = [] <NEW_LINE> for item in params.get("Filters"): <NEW_LINE> <INDENT> obj = Filter() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.Filters.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.SecurityGroup = params.get("SecurityGroup") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
DescribeLoadBalancers请求参数结构体
6259903b6fece00bbacccb95
@attrs.define(kw_only=True) <NEW_LINE> class Record: <NEW_LINE> <INDENT> scores: typing.Optional[RecordScores] <NEW_LINE> categories_node_hash: undefined.UndefinedOr[int] <NEW_LINE> seals_node_hash: undefined.UndefinedOr[int] <NEW_LINE> state: typedefs.IntAnd[RecordState] <NEW_LINE> objectives: typing.Optional[list[Objective]] <NEW_LINE> interval_objectives: typing.Optional[list[Objective]] <NEW_LINE> redeemed_count: int <NEW_LINE> completion_times: typing.Optional[int] <NEW_LINE> reward_visibility: typing.Optional[list[bool]]
Represents a Bungie profile records/triumphs component.
6259903bd53ae8145f91964e
class Automi: <NEW_LINE> <INDENT> def __init__(self,ns="/automi/"): <NEW_LINE> <INDENT> self.ns=ns <NEW_LINE> self.joints=None <NEW_LINE> self.angles=None <NEW_LINE> self._sub_joints=rospy.Subscriber(ns+"joint_states",JointState,self._cb_joints,queue_size=1) <NEW_LINE> rospy.loginfo("Waiting for joints to be populated...") <NEW_LINE> while not rospy.is_shutdown(): <NEW_LINE> <INDENT> if self.joints is not None: break <NEW_LINE> rospy.sleep(0.1) <NEW_LINE> rospy.loginfo("Waiting for joints to be populated...") <NEW_LINE> <DEDENT> rospy.loginfo("Joints populated") <NEW_LINE> rospy.loginfo("Creating joint command publishers") <NEW_LINE> self._pub_joints={} <NEW_LINE> for j in self.joints: <NEW_LINE> <INDENT> p=rospy.Publisher(self.ns+j+"_position_controller/command",Float64) <NEW_LINE> self._pub_joints[j]=p <NEW_LINE> <DEDENT> rospy.sleep(1) <NEW_LINE> self._pub_cmd_vel=rospy.Publisher(ns+"cmd_vel",Twist) <NEW_LINE> <DEDENT> def set_walk_velocity(self,x,y,t): <NEW_LINE> <INDENT> msg=Twist() <NEW_LINE> msg.linear.x=x <NEW_LINE> msg.linear.y=y <NEW_LINE> msg.angular.z=t <NEW_LINE> self._pub_cmd_vel.publish(msg) <NEW_LINE> <DEDENT> def _cb_joints(self,msg): <NEW_LINE> <INDENT> if self.joints is None: <NEW_LINE> <INDENT> self.joints=msg.name <NEW_LINE> <DEDENT> self.angles=msg.position <NEW_LINE> <DEDENT> def get_angles(self): <NEW_LINE> <INDENT> if self.joints is None: return None <NEW_LINE> if self.angles is None: return None <NEW_LINE> return dict(zip(self.joints,self.angles)) <NEW_LINE> <DEDENT> def set_angles(self,angles): <NEW_LINE> <INDENT> for j,v in angles.items(): <NEW_LINE> <INDENT> if j not in self.joints: <NEW_LINE> <INDENT> rospy.logerror("Invalid joint name "+j) <NEW_LINE> continue <NEW_LINE> <DEDENT> self._pub_joints[j].publish(v) <NEW_LINE> <DEDENT> <DEDENT> def set_angles_slow(self,stop_angles,delay=2): <NEW_LINE> <INDENT> start_angles=self.get_angles() <NEW_LINE> start=time.time() <NEW_LINE> stop=start+delay <NEW_LINE> r=rospy.Rate(100) <NEW_LINE> while 
not rospy.is_shutdown(): <NEW_LINE> <INDENT> t=time.time() <NEW_LINE> if t>stop: break <NEW_LINE> ratio=(t-start)/delay <NEW_LINE> angles=interpolate(stop_angles,start_angles,ratio) <NEW_LINE> self.set_angles(angles) <NEW_LINE> r.sleep()
Client ROS class for manipulating Automi OP in Gazebo
6259903b26068e7796d4db2f
class CheckListBox(ListBox, WithCharEvents): <NEW_LINE> <INDENT> bind_mouse_leaving = bind_lclick_double = True <NEW_LINE> _wx_widget_type = _wx.CheckListBox <NEW_LINE> _native_widget: _wx.CheckListBox <NEW_LINE> def __init__(self, parent, choices=None, isSingle=False, isSort=False, isHScroll=False, isExtended=False, onSelect=None, ampersand=False): <NEW_LINE> <INDENT> if ampersand: choices = [x.replace(u'&', u'&&') for x in choices] <NEW_LINE> super(CheckListBox, self).__init__(parent, choices, isSingle, isSort, isHScroll, isExtended, onSelect) <NEW_LINE> self.on_box_checked = self._evt_handler(_wx.EVT_CHECKLISTBOX, lambda event: [event.GetSelection()]) <NEW_LINE> self.on_context = self._evt_handler(_wx.EVT_CONTEXT_MENU, lambda event: [self]) <NEW_LINE> <DEDENT> def lb_check_at_index(self, lb_selection_dex, do_check): <NEW_LINE> <INDENT> self._native_widget.Check(lb_selection_dex, do_check) <NEW_LINE> <DEDENT> def lb_is_checked_at_index(self, lb_selection_dex): <NEW_LINE> <INDENT> return self._native_widget.IsChecked(lb_selection_dex) <NEW_LINE> <DEDENT> def toggle_checked_at_index(self, lb_selection_dex): <NEW_LINE> <INDENT> do_check = not self.lb_is_checked_at_index(lb_selection_dex) <NEW_LINE> self.lb_check_at_index(lb_selection_dex, do_check) <NEW_LINE> <DEDENT> def set_all_checkmarks(self, checked): <NEW_LINE> <INDENT> with self.pause_drawing(): <NEW_LINE> <INDENT> for i in range(self.lb_get_items_count()): <NEW_LINE> <INDENT> self.lb_check_at_index(i, checked) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def set_all_items(self, keys_values): <NEW_LINE> <INDENT> with self.pause_drawing(): <NEW_LINE> <INDENT> self.lb_clear() <NEW_LINE> for i, (k, v) in enumerate(keys_values.items()): <NEW_LINE> <INDENT> self.lb_append(k) <NEW_LINE> self.lb_check_at_index(i, v) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def set_all_items_keep_pos(self, keys_values): <NEW_LINE> <INDENT> if not keys_values: <NEW_LINE> <INDENT> self.lb_clear() <NEW_LINE> return <NEW_LINE> <DEDENT> with 
self.pause_drawing(): <NEW_LINE> <INDENT> for index, (lab, ch) in enumerate(keys_values.items()): <NEW_LINE> <INDENT> lab = lab.replace('&', '&&') <NEW_LINE> if index >= self.lb_get_items_count(): <NEW_LINE> <INDENT> self.lb_append(lab) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.lb_set_label_at_index(index, lab) <NEW_LINE> <DEDENT> self.lb_check_at_index(index, ch) <NEW_LINE> <DEDENT> for index in range(self.lb_get_items_count(), len(keys_values), -1): <NEW_LINE> <INDENT> self.lb_delete_at_index(index - 1)
A list of checkboxes, of which one or more can be selected. Events: - on_box_checked(index: int): Posted when user checks an item from the list. The default arg processor extracts the index of the event. - on_context(lb_instance: CheckListBox): Posted when this CheckListBox is right-clicked. - Mouse events - see gui.base_components.WithMouseEvents. - Key events - see gui.base_components.WithCharEvents.
6259903b21bff66bcd723e51
class AbstractAsyncTaskResult(): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def done(self): <NEW_LINE> <INDENT> raise NotImplementedError()
Definition of the AsyncTaskResult interface This represents a Future-like object. Custom implementations of WorkerPool interface need to return objects with this interface in `map` method.
6259903b8a43f66fc4bf3376
class MiddleItem(models.Model): <NEW_LINE> <INDENT> name = models.CharField(verbose_name='中項目', max_length=255) <NEW_LINE> parent = models.ForeignKey(LargeItem, verbose_name='大項目', on_delete=models.PROTECT) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
中項目
6259903b8e05c05ec3f6f74f
class TestCertificatesV1beta1Api(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = kubernetes.client.apis.certificates_v1beta1_api.CertificatesV1beta1Api() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_create_certificate_signing_request(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_delete_certificate_signing_request(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_delete_collection_certificate_signing_request(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_api_resources(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_list_certificate_signing_request(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_patch_certificate_signing_request(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_read_certificate_signing_request(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_replace_certificate_signing_request(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_replace_certificate_signing_request_approval(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_replace_certificate_signing_request_status(self): <NEW_LINE> <INDENT> pass
CertificatesV1beta1Api unit test stubs
6259903b71ff763f4b5e8984
class SearchResult(object): <NEW_LINE> <INDENT> def __init__(self, filename, link, date, time, event, info): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.link = link <NEW_LINE> self.date = date <NEW_LINE> self.time = time <NEW_LINE> self.event = event <NEW_LINE> self.info = info
Search result -- a single utterance.
6259903b507cdc57c63a5f84
class Visual(object): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> self._transform = np.eye(4) <NEW_LINE> self._children = [] <NEW_LINE> self._parent = None <NEW_LINE> self.parent = parent <NEW_LINE> self.program = None <NEW_LINE> self._shaderTransforms = {} <NEW_LINE> <DEDENT> @property <NEW_LINE> def transform(self): <NEW_LINE> <INDENT> return self._transform <NEW_LINE> <DEDENT> @property <NEW_LINE> def children(self): <NEW_LINE> <INDENT> return [c for c in self._children] <NEW_LINE> <DEDENT> @property <NEW_LINE> def parent(self): <NEW_LINE> <INDENT> return self._parent <NEW_LINE> <DEDENT> @parent.setter <NEW_LINE> def parent(self, value): <NEW_LINE> <INDENT> oldparent = self.parent <NEW_LINE> oldlist = None <NEW_LINE> if oldparent is None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif isinstance(oldparent, (Visual, World)): <NEW_LINE> <INDENT> oldlist = oldparent._children <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('This should not happen: old parent was not a Visual or World') <NEW_LINE> <DEDENT> newlist = None <NEW_LINE> if value is None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif value is self: <NEW_LINE> <INDENT> raise ValueError('A visual cannot have itself as parent.') <NEW_LINE> <DEDENT> elif isinstance(value, (Visual, World)): <NEW_LINE> <INDENT> newlist = value._children <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Visual.parent must be a visual or world.') <NEW_LINE> <DEDENT> if oldlist is not None: <NEW_LINE> <INDENT> while self in oldlist: <NEW_LINE> <INDENT> oldlist.remove(self) <NEW_LINE> <DEDENT> <DEDENT> if newlist is not None: <NEW_LINE> <INDENT> if newlist is not oldlist: <NEW_LINE> <INDENT> while self in newlist: <NEW_LINE> <INDENT> newlist.remove(self) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self._parent = value <NEW_LINE> if newlist is not None: <NEW_LINE> <INDENT> newlist.append(self) <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return 
self._children.__iter__() <NEW_LINE> <DEDENT> def draw(self): <NEW_LINE> <INDENT> if self.program is not None: <NEW_LINE> <INDENT> self.program.set_vars(self._shaderTransforms)
Base class to represent a citizen of a World object. Typically a visual is used to visualize something, although this is not strictly necessary. It may for instance also be used as a container to apply a certain transformation to a group of objects, or a camera object.
6259903b30dc7b76659a0a1b
class HashableShape_Deep(object): <NEW_LINE> <INDENT> def __init__(self, shape): <NEW_LINE> <INDENT> self.Shape = shape <NEW_LINE> self.hash = 0 <NEW_LINE> for el in shape.childShapes(): <NEW_LINE> <INDENT> self.hash = self.hash ^ el.hashCode() <NEW_LINE> <DEDENT> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if self.hash == other.hash: <NEW_LINE> <INDENT> if len(self.Shape.childShapes()) == len(other.Shape.childShapes()): <NEW_LINE> <INDENT> if self.Shape.ShapeType == other.Shape.ShapeType: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return self.hash
Similar to HashableShape, except that the things the shape is composed of are compared. Example: >>> wire2 = Part.Wire(wire1.childShapes()) >>> wire2.isSame(wire1) False # <--- the wire2 is a new wire, although made of edges of wire1 >>> HashableShape_Deep(wire2) == HashableShape_Deep(wire1) True # <--- made of same set of elements
6259903b379a373c97d9a212
class AlphaBetaPlayer(IsolationPlayer): <NEW_LINE> <INDENT> def get_move(self, game, time_left): <NEW_LINE> <INDENT> self.time_left = time_left <NEW_LINE> best_move = (-1, -1) <NEW_LINE> try: <NEW_LINE> <INDENT> depth = 1 <NEW_LINE> while True: <NEW_LINE> <INDENT> best_move = self.alphabeta(game, depth) <NEW_LINE> depth += 1 <NEW_LINE> <DEDENT> <DEDENT> except SearchTimeout: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return best_move <NEW_LINE> <DEDENT> def alphabeta(self, game, depth, alpha=float("-inf"), beta=float("inf")): <NEW_LINE> <INDENT> if self.time_left() < self.TIMER_THRESHOLD: <NEW_LINE> <INDENT> raise SearchTimeout() <NEW_LINE> <DEDENT> legal_moves = game.get_legal_moves() <NEW_LINE> if not legal_moves: <NEW_LINE> <INDENT> return (-1, -1) <NEW_LINE> <DEDENT> best_move = legal_moves[0] <NEW_LINE> score = float('-inf') <NEW_LINE> for move in legal_moves: <NEW_LINE> <INDENT> next_game = game.forecast_move(move) <NEW_LINE> v = self.min_value(next_game, depth - 1, alpha, beta) <NEW_LINE> if score < v: <NEW_LINE> <INDENT> score = v <NEW_LINE> best_move = move <NEW_LINE> <DEDENT> alpha = max(v, alpha) <NEW_LINE> <DEDENT> return best_move <NEW_LINE> <DEDENT> def max_value(self, game, depth, alpha, beta): <NEW_LINE> <INDENT> if self.time_left() < self.TIMER_THRESHOLD: <NEW_LINE> <INDENT> raise SearchTimeout() <NEW_LINE> <DEDENT> if depth == 0 or len(game.get_legal_moves()) == 0: <NEW_LINE> <INDENT> return self.score(game, self) <NEW_LINE> <DEDENT> v = float('-inf') <NEW_LINE> legal_moves = game.get_legal_moves() <NEW_LINE> for move in legal_moves: <NEW_LINE> <INDENT> next_game = game.forecast_move(move) <NEW_LINE> v = max(v, self.min_value(next_game, depth - 1, alpha, beta)) <NEW_LINE> if v >= beta: <NEW_LINE> <INDENT> return v <NEW_LINE> <DEDENT> alpha = max(v, alpha) <NEW_LINE> <DEDENT> return v <NEW_LINE> <DEDENT> def min_value(self, game, depth, alpha, beta): <NEW_LINE> <INDENT> if self.time_left() < self.TIMER_THRESHOLD: <NEW_LINE> <INDENT> raise 
SearchTimeout() <NEW_LINE> <DEDENT> if depth == 0 or len(game.get_legal_moves()) == 0: <NEW_LINE> <INDENT> return self.score(game, self) <NEW_LINE> <DEDENT> v = float('inf') <NEW_LINE> legal_moves = game.get_legal_moves() <NEW_LINE> for move in legal_moves: <NEW_LINE> <INDENT> next_game = game.forecast_move(move) <NEW_LINE> v = min(v, self.max_value(next_game, depth - 1, alpha, beta)) <NEW_LINE> if v <= alpha: <NEW_LINE> <INDENT> return v <NEW_LINE> <DEDENT> beta = min(v, beta) <NEW_LINE> <DEDENT> return v
Game-playing agent that chooses a move using iterative deepening minimax search with alpha-beta pruning. You must finish and test this player to make sure it returns a good move before the search time limit expires.
6259903b07d97122c4217e87
class Resizer(object): <NEW_LINE> <INDENT> def __call__(self, sample, min_side=512, max_side=512): <NEW_LINE> <INDENT> image, annots = sample['img'], sample['annot'] <NEW_LINE> rows, cols, cns = image.shape <NEW_LINE> smallest_side = min(rows, cols) <NEW_LINE> scale = min_side / smallest_side <NEW_LINE> scale1 = 512 / rows <NEW_LINE> scale2 = 512 / cols <NEW_LINE> largest_side = max(rows, cols) <NEW_LINE> if largest_side * scale > max_side: <NEW_LINE> <INDENT> scale = max_side / largest_side <NEW_LINE> <DEDENT> image = skimage.transform.resize(image, (512, 512)) <NEW_LINE> rows, cols, cns = image.shape <NEW_LINE> pad_w = 32 - rows%32 <NEW_LINE> pad_h = 32 - cols%32 <NEW_LINE> new_image = np.zeros((rows + pad_w, cols + pad_h, cns)).astype(np.float32) <NEW_LINE> new_image[:rows, :cols, :] = image.astype(np.float32) <NEW_LINE> image = image.astype(np.float32) <NEW_LINE> annots[:, 0] *= scale2 <NEW_LINE> annots[:, 2] *= scale2 <NEW_LINE> annots[:, 1] *= scale1 <NEW_LINE> annots[:, 3] *= scale1 <NEW_LINE> return {'img': torch.from_numpy(image), 'annot': torch.from_numpy(annots), 'scale1': scale1, 'scale2': scale2}
Convert ndarrays in sample to Tensors.
6259903b6fece00bbacccb97
class TestRoutes(unittest.TestCase): <NEW_LINE> <INDENT> def test_get_authors(self): <NEW_LINE> <INDENT> author = TestRoutesMock.get_authors_mock() <NEW_LINE> self.assertTrue(str(author["author"]["first"])) <NEW_LINE> self.assertTrue(str(author["author"]["formatted_name"])) <NEW_LINE> self.assertTrue(str(author["author"]["id"])) <NEW_LINE> self.assertTrue(str(author["author"]["last"]))
Unitest for Routes methods
6259903bd99f1b3c44d06890
class Job(): <NEW_LINE> <INDENT> def __init__(self, name="", start_time=0, end_time=0, status=0): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.start_time = start_time <NEW_LINE> self.end_time = end_time <NEW_LINE> self.status = status <NEW_LINE> <DEDENT> def get_start_time_obj(self): <NEW_LINE> <INDENT> return datetime.datetime.strptime(self.start_time, '%Y-%m-%d.%H:%M:%S') <NEW_LINE> <DEDENT> def get_end_time_obj(self): <NEW_LINE> <INDENT> return datetime.datetime.strptime(self.end_time, '%Y-%m-%d.%H:%M:%S')
A class definition to hold all the information about a job. name : string Name of the job. start_time : string When the job started end_time : string When the job finished status: number Status of the job. If it is 0 means success, all othe values means failure
6259903b8a43f66fc4bf3378
class LineError(ChatError): <NEW_LINE> <INDENT> @property <NEW_LINE> def message(self): <NEW_LINE> <INDENT> return "{error}: {line}".format( error=self.args[0], line=self.args[1].decode('ascii').strip())
Error with the line. Usually means a parse or unknown command error.
6259903b94891a1f408b9fec
class Candidate(): <NEW_LINE> <INDENT> def __init__(self, locator='', score=0, match_addr='', x=None, y=None, wkid=4326, entity='', confidence='', **kwargs): <NEW_LINE> <INDENT> for k in locals().keys(): <NEW_LINE> <INDENT> if k not in ['self', 'kwargs']: setattr(self, k, locals()[k]) <NEW_LINE> <DEDENT> for k in kwargs: <NEW_LINE> <INDENT> setattr(self, k, kwargs[k])
Class representing a candidate address returned from geocoders. Accepts arguments defined below, plus informal keyword arguments. Arguments: ========== locator -- Locator used for geocoding (default '') score -- Standardized score (default 0) match_addr -- Address returned by geocoder (default '') x -- X-coordinate (longitude for lat-lon SRS) (default None) y -- Y-coordinate (latitude for lat-lon SRS) (default None) wkid -- Well-known ID for spatial reference system (default 4326) entity -- Used by Bing (default '') confidence -- Used by Bing (default '') geoservice -- GeocodeService used for geocoding (default '') Usage Example: ============== c = Candidate('US_RoofTop', 91.5, '340 N 12th St, Philadelphia, PA, 19107', '-75.16', '39.95', some_extra_data='yellow')
6259903b8da39b475be043d9
class BulkIndexError(EdenError): <NEW_LINE> <INDENT> def __init__(self, resource=None, errors=None): <NEW_LINE> <INDENT> super(BulkIndexError, self).__init__( 'Failed to bulk index resource {} errors: {}'.format( resource, errors), payload={})
Exception raised when bulk index operation fails..
6259903b73bcbd0ca4bcb474
class Flush(Statement): <NEW_LINE> <INDENT> match = re.compile(r'flush\b', re.I).match <NEW_LINE> def process_item(self): <NEW_LINE> <INDENT> line = self.item.get_line()[5:].lstrip() <NEW_LINE> if not line: <NEW_LINE> <INDENT> self.isvalid = False <NEW_LINE> return <NEW_LINE> <DEDENT> if line.startswith('('): <NEW_LINE> <INDENT> assert line[-1] == ')', repr(line) <NEW_LINE> self.specs = specs_split_comma(line[1:-1], self.item) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.specs = specs_split_comma(line, self.item) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def tofortran(self, isfix=None): <NEW_LINE> <INDENT> tab = self.get_indent_tab(isfix=isfix) <NEW_LINE> return tab + 'flush (%s)' % (', '.join(self.specs)) <NEW_LINE> <DEDENT> def analyze(self): return
FLUSH <file-unit-number> FLUSH ( <flush-spec-list> ) <flush-spec> = [ UNIT = ] <file-unit-number> | IOSTAT = <scalar-int-variable> | IOMSG = <iomsg-variable> | ERR = <label>
6259903b596a897236128e8a
class TRFBD(Omni): <NEW_LINE> <INDENT> name = "TRFBD" <NEW_LINE> def __init__(self, **args): <NEW_LINE> <INDENT> Omni.__init__(self, **args) <NEW_LINE> if 'SpinCycle' in args: self.spincycle = self.spincycleMax = args['SpinCycle'] <NEW_LINE> else: self.spincycle = self.spincycleMax = 0 <NEW_LINE> <DEDENT> def Tick(self): <NEW_LINE> <INDENT> bReturn = Omni.Tick(self) <NEW_LINE> if self.weapons: <NEW_LINE> <INDENT> enemy, range = self.GetNearestEnemy() <NEW_LINE> if enemy is not None and range < self.spin_range: <NEW_LINE> <INDENT> if self.spincycleMax > 0: <NEW_LINE> <INDENT> self.spincycle -= 1 <NEW_LINE> if self.spincycle > self.spincycleMax * 0.5: <NEW_LINE> <INDENT> self.Input("Spin", 0, 100) <NEW_LINE> <DEDENT> if self.spincycleMax * 0.5 >= self.spincycle > 0: <NEW_LINE> <INDENT> self.Input("Spin", 0, -100) <NEW_LINE> <DEDENT> if self.spincycle <= 0: <NEW_LINE> <INDENT> self.spincycle = self.spincycleMax <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.Input("Spin", 0, 100) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.Input("Spin", 0, 0) <NEW_LINE> <DEDENT> <DEDENT> return bReturn <NEW_LINE> <DEDENT> def Throttle(self, throttle): <NEW_LINE> <INDENT> if self.bCarSteering and self.last_turn_throttle != 0: <NEW_LINE> <INDENT> speed = self.GetSpeed() <NEW_LINE> if speed > 0 and speed < self.top_speed / 3: throttle = self.last_throttle + 10 <NEW_LINE> elif speed < 0 and speed > -self.top_speed / 3: throttle = self.last_throttle - 10 <NEW_LINE> <DEDENT> throttle = min(max(throttle, -100), 100) <NEW_LINE> self.set_throttle = throttle <NEW_LINE> self.Input('Forward', 0, throttle) <NEW_LINE> self.DebugString(0, "Throttle = " + str(int(throttle)))
AI for torque reaction full body drums.
6259903b3c8af77a43b68831
class TestRemoveNumbersBadInput(TestCase): <NEW_LINE> <INDENT> def test_non_string_input(self): <NEW_LINE> <INDENT> self.assertRaises(ptext.InputError, ptext.remove_numbers, [])
tests for bad input to remove_numbers
6259903bbe383301e0254a02
class ReadThread(Thread): <NEW_LINE> <INDENT> def __init__(self, socket, name=None): <NEW_LINE> <INDENT> self.socket = socket <NEW_LINE> super(ReadThread, self).__init__() <NEW_LINE> self.endflag = False <NEW_LINE> if name is not None: <NEW_LINE> <INDENT> self.setName(name) <NEW_LINE> <DEDENT> self.resync_count = 0 <NEW_LINE> self.read_sequence = [] <NEW_LINE> self.decodeerror = [] <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> mode = 0 <NEW_LINE> remaining = 0 <NEW_LINE> body = "" <NEW_LINE> while not self.endflag: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if mode == 0: <NEW_LINE> <INDENT> data = self.socket.recv(12) <NEW_LINE> if len(data) == 0: <NEW_LINE> <INDENT> print("Connection closed by peer") <NEW_LINE> self.endflag = True <NEW_LINE> break <NEW_LINE> <DEDENT> cmd, magic, size = struct.unpack("<I4sI", data) <NEW_LINE> if magic != "FOSC": <NEW_LINE> <INDENT> print("**************** resync *************") <NEW_LINE> FoscDecoder.printhex(data) <NEW_LINE> mode = 2 <NEW_LINE> self.resync_count += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.read_sequence.append(cmd) <NEW_LINE> body = "" <NEW_LINE> remaining = size <NEW_LINE> mode = 1 <NEW_LINE> <DEDENT> <DEDENT> elif mode == 1: <NEW_LINE> <INDENT> incoming = self.socket.recv(remaining) <NEW_LINE> body += incoming <NEW_LINE> remaining -= len(incoming) <NEW_LINE> print("remaining {}".format( remaining)) <NEW_LINE> if remaining == 0: <NEW_LINE> <INDENT> mode = 0 <NEW_LINE> self.proc(cmd, size, body) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> data = self.socket.recv(2000) <NEW_LINE> if len(data) == 0: <NEW_LINE> <INDENT> mode = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> FoscDecoder.printhex(data) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except socket.timeout: <NEW_LINE> <INDENT> mode = 0 <NEW_LINE> print(self.name) <NEW_LINE> <DEDENT> except struct.error: <NEW_LINE> <INDENT> print("unpack error") <NEW_LINE> FoscDecoder.printhex(data) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def 
stopit(self): <NEW_LINE> <INDENT> self.endflag = True <NEW_LINE> <DEDENT> def proc(self, cmd, size, body): <NEW_LINE> <INDENT> print("Incoming cmd: %s, size %s" % (cmd, size)) <NEW_LINE> try: <NEW_LINE> <INDENT> decoder = FoscDecoder.decoder_call.get(cmd) <NEW_LINE> if decoder is None: <NEW_LINE> <INDENT> FoscDecoder.printhex(body) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> decoder(struct.pack("<I4sI", cmd, "FOSC", size) + body) <NEW_LINE> <DEDENT> <DEDENT> except BaseException as e: <NEW_LINE> <INDENT> msg = "cmd %s: %s" % (cmd, e.message) <NEW_LINE> self.decodeerror.append(msg) <NEW_LINE> print("** DECODE ERROR: {}".format(msg)) <NEW_LINE> <DEDENT> <DEDENT> def stats(self): <NEW_LINE> <INDENT> print("Sequence of incoming packets: {}".format(self.read_sequence)) <NEW_LINE> if self.resync_count > 0: <NEW_LINE> <INDENT> print("Fallen out of sync %s time(s)" % self.resync_count) <NEW_LINE> <DEDENT> if len(self.decodeerror) > 0: <NEW_LINE> <INDENT> print("%s error(s) during decoding:" % len(self.decodeerror)) <NEW_LINE> for msg in self.decodeerror: <NEW_LINE> <INDENT> print(msg)
We use a persistent tcp connection and blocking read from a socket with a timeout of 1 sec. The packets have the following structure: int32 command char4 "FOSC" int32 size data block with "size" bytes The integers are little endian.
6259903b5e10d32532ce41f8
class PSDConstraint(LeqConstraint):
    """The positive semidefinite constraint lh_exp >> rh_exp.

    Requires z.T (lh_exp - rh_exp) z >= 0 for all z, i.e. the symmetric
    part of (lh_exp - rh_exp) has no negative eigenvalues.
    """
    OP_NAME = ">>"

    def __init__(self, lh_exp, rh_exp):
        # Both sides must be square matrices for the constraint to make sense.
        if (lh_exp.size[0] != lh_exp.size[1]) or (rh_exp.size[0] != rh_exp.size[1]):
            raise ValueError(
                "Non-square matrix in positive definite constraint."
            )
        super(PSDConstraint, self).__init__(lh_exp, rh_exp)

    def is_dcp(self):
        """The constraint is DCP iff the constrained expression is affine."""
        return self._expr.is_affine()

    def _min_eig(self):
        """Return the minimum eigenvalue of the symmetric part of the
        expression's value, or None if the value is unknown.
        """
        if self._expr.value is None:
            return None
        mat = self._expr.value
        # Fix: use eigvalsh, which assumes a symmetric input and returns
        # real eigenvalues. np.linalg.eig can return a complex array with
        # spurious imaginary parts for a numerically symmetric matrix,
        # which breaks the min/comparison logic below.
        w = np.linalg.eigvalsh((mat + mat.T) / 2.0)
        return w.min()

    @property
    def value(self):
        """Whether the constraint holds (within TOLERANCE); None if unknown."""
        min_eig = self._min_eig()
        if min_eig is None:
            return None
        return min_eig >= -self.TOLERANCE

    @property
    def violation(self):
        """Magnitude of the most negative eigenvalue (0 if satisfied); None if unknown."""
        min_eig = self._min_eig()
        if min_eig is None:
            return None
        return -min(min_eig, 0)

    def canonicalize(self):
        """Canonicalize by symmetrizing the expression and emitting an SDP constraint."""
        obj, constraints = self._expr.canonical_form
        # symm = 0.5 * (obj + obj.T)
        half = lu.create_const(0.5, (1, 1))
        symm = lu.mul_expr(half, lu.sum_expr([obj, lu.transpose(obj)]), obj.size)
        dual_holder = SDP(symm, enforce_sym=False, constr_id=self.id)
        return (None, constraints + [dual_holder])
Constraint X >> Y, requiring that z.T (X - Y) z >= 0 for all z.
6259903b507cdc57c63a5f85
class CBCT1(CatPhan504Mixin, TestCase):
    """Regression test for the CBCT_1 CatPhan 504 dataset.

    The class attributes are the expected analysis results for this dataset;
    the mixin runs the analysis and compares against them.
    """
    file_path = ['CBCT_1.zip']
    expected_roll = -0.53   # expected phantom roll (presumably degrees — per mixin convention)
    origin_slice = 32       # slice index of the reference (HU) module
    # Expected HU value per material ROI.
    hu_values = {'Poly': -35, 'Acrylic': 130, 'Delrin': 347, 'Air': -996, 'Teflon': 1004, 'PMP': -186, 'LDPE': -94}
    # Expected uniformity ROI values.
    unif_values = {'Center': 13, 'Left': 17, 'Right': 5, 'Top': 10, 'Bottom': 9}
    # Expected MTF at the given relative-response percentages.
    mtf_values = {30: 1.3, 50: 0.96, 80: 0.64}
    avg_line_length = 49.9  # expected geometry line length
    lowcon_visible = 1      # expected number of visible low-contrast ROIs
A Varian CBCT dataset
6259903b63b5f9789fe86358
@six.add_metaclass(ABCMeta)
class BaseTowerContext(object):
    """A context under which a tower (one replica of the model) is built.

    Manages the TF name scope / variable scope for the tower and guards
    TF collections so per-tower additions can be inspected afterwards.
    """

    def __init__(self, ns_name, vs_name=''):
        # ns_name: the tower's name scope; vs_name: optional variable scope name.
        self._name = ns_name
        self._vs_name = vs_name
        if len(vs_name):
            assert len(ns_name), "TowerContext(vs_name) cannot be used with an empty name!"

    @abstractproperty
    def is_main_training_tower(self):
        # Whether this is the main training tower.
        pass

    @abstractproperty
    def has_own_variables(self):
        # Whether this tower is allowed to create its own variables.
        pass

    @property
    def name(self):
        return self._name

    @property
    def vs_name(self):
        # Variable-scope name used by this tower.
        return self._vs_name

    @property
    def ns_name(self):
        # Name scope of this tower (same as `name`).
        return self._name

    def get_collection_in_tower(self, key):
        """Return the items of collection `key` that were added inside this tower."""
        return self._collection_guard.get_collection_in_tower(key)

    @call_only_once
    def _get_scopes(self):
        # Build the list of scope context managers to enter for this tower.
        if not len(self._name):
            # Empty tower name: just re-open the current variable scope.
            return [tf.variable_scope(tf.get_variable_scope())]
        ret = []
        if len(self._vs_name):
            ret.append(tf.variable_scope(self._vs_name))
        else:
            # Open the current variable scope again (keeps reuse flags).
            ret.append(tf.variable_scope(tf.get_variable_scope()))
        if len(self._name) and self._name != self._vs_name:
            # Trailing '/' opens an absolute name scope rather than a nested one.
            ret.append(tf.name_scope(self._name + '/'))
        return ret

    @abstractmethod
    def _keys_to_freeze(self):
        # Collection keys that must not be modified inside this tower.
        pass

    def __enter__(self):
        global _CurrentTowerContext
        assert _CurrentTowerContext is None, "Cannot nest TowerContext!"
        _CurrentTowerContext = self
        # Guard collections; non-main towers are checked for unexpected diffs.
        self._collection_guard = CollectionGuard(
            self._name,
            check_diff=not self.is_main_training_tower,
            freeze_keys=self._keys_to_freeze())
        self._ctxs = self._get_scopes()
        self._ctxs.append(self._collection_guard)
        for c in self._ctxs:
            c.__enter__()
        # Sanity check that the resulting name scope matches the tower name.
        ns = tf.get_default_graph().get_name_scope()
        assert ns == self._name, \
            "Name conflict: name_scope inside tower '{}' becomes '{}'!".format(self._name, ns) \
            + " You may need a different name for the tower!"
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        global _CurrentTowerContext
        _CurrentTowerContext = None
        if not self.has_own_variables:
            # Towers without their own variables must not have created any.
            diff_trainable_vars = self._collection_guard.get_collection_in_tower(tf.GraphKeys.TRAINABLE_VARIABLES)
            assert len(diff_trainable_vars) == 0, \
                "New TRAINABLE_VARIABLES shouldn't be created in {}: ".format(
                    self._name) + ', '.join([k.name for k in diff_trainable_vars])
        # Exit innermost scope first.
        for c in self._ctxs[::-1]:
            c.__exit__(exc_type, exc_val, exc_tb)
        return False

    def __str__(self):
        return "TowerContext(name={}, is_training={})".format(
            self._name, self._is_training)
A context in which the current model is being built. Since TF 1.8, TensorFlow has started to introduce the same concept.
6259903bcad5886f8bdc5972
class ListrefsCommand(BaseCommand):
    """In-game command that lists unit references, with optional filters."""

    command_name = "ul_listrefs"

    @inlineCallbacks
    def run(self, protocol, parsed_line, invoker_dbref):
        # Parse the raw command string into argparse-style args.
        cmd_line = parsed_line.kwargs['cmd'].split()
        class_choices = ['light', 'medium', 'heavy', 'assault']
        type_choices = ['mech', 'tank', 'vtol', 'battlesuit']
        pool_choices = ['human', 'ai', 'both']
        parser = BTMuxArgumentParser(protocol, invoker_dbref,
            prog="listrefs", description='Lists unit references.')
        parser.add_argument(
            "--class", type=str, choices=class_choices, dest='filter_class',
            help="Mech weight class to filter by")
        parser.add_argument(
            "--type", type=str, choices=type_choices, dest='filter_type',
            help="Unit type to filter by")
        parser.add_argument(
            "--pool", type=str, choices=pool_choices, dest='filter_pool',
            default='human', help="Unit pool to filter by")
        args = parser.parse_args(args=cmd_line)
        try:
            yield self.handle(protocol, invoker_dbref, args)
        except AssertionError as exc:
            # Convert assertion failures into user-visible command errors.
            # NOTE(review): exc.message is Python-2-only.
            raise CommandError(exc.message)

    @inlineCallbacks
    def handle(self, protocol, invoker_dbref, args):
        """Fetch the filtered library summary and pemit a formatted listing."""
        lib_summary = yield get_library_summary_list(
            pool=args.filter_pool,
            filter_class=args.filter_class,
            filter_type=args.filter_type)
        pval = self._get_header_str('Unit Reference Listing : %d results' % len(lib_summary['refs']))
        pval += '%r%b%b'
        for counter, udict in enumerate(lib_summary['refs'], start=1):
            weight = udict['weight']
            # Color each reference by its weight class (tonnage).
            class_color = get_weight_class_color_for_tonnage(weight)
            pval += "[ljust({class_color}{reference}{ansi_normal}, 18)]".format(
                class_color=class_color,
                reference=udict['reference'],
                ansi_normal=ANSI_NORMAL,
            )
            # Wrap to a new row every four entries.
            if counter % 4 == 0:
                pval += "%r%b%b"
        pval += self._get_footer_str(pad_char='-')
        # Legend of weight-class colors plus usage hint.
        pval += '%r[space(5)]'
        pval += '[ljust(%ch%cgLight,20)]'
        pval += '[ljust(%ch%cyMedium, 20)]'
        pval += '[ljust(%ch%crHeavy, 20)]'
        pval += '%ch%cmAssault'
        pval += '%r[space(22)]%cnFor more info, type %ch%cglistrefs -h'
        pval += self._get_footer_str()
        mux_commands.pemit(protocol, [invoker_dbref], pval)
Lists unit refs.
6259903bd53ae8145f919652
class APIError(Exception):
    """Generic error raised by the API layer."""
Summary
6259903bbaa26c4b54d50494
class imerge(object):
    """Implementation for the add-in's imerge button."""

    def __init__(self):
        # Buttons start out enabled and unchecked.
        self.enabled, self.checked = True, False

    def onClick(self):
        """Click handler; intentionally a no-op."""
        pass
Implementation for WEBDHMV2_addin.imerge (Button)
6259903b07d97122c4217e8a
@python_2_unicode_compatible
class ZDProcess(models.Model):
    """Zhongdeng processing log: one row per identity-verification
    request/response round trip.
    """
    create_time = models.DateTimeField('创建时间', auto_now=True)            # request creation time
    file_name = models.CharField('请求子文件名', max_length=240, blank=True)  # request sub-file name
    userid = models.IntegerField('用户的id', null=True, blank=True)          # user id
    idno = models.CharField('身份证号', max_length=32, db_index=True)        # national ID number
    real_name = models.CharField('真实姓名', max_length=32, blank=True)      # real name
    rep_time = models.DateTimeField('响应创建时间', blank=True, null=True)   # response creation time
    rep_result = models.CharField('响应结果代码', max_length=6, blank=True)  # response result code
    ispass_byidno = models.NullBooleanField('是否通过真实身份认证', blank=True, null=True)  # passed real-identity check?

    class Meta:
        db_table = 'data_zdprocess'
        verbose_name = '中登处理流水'
        verbose_name_plural = '中登处理流水'

    def __str__(self):
        return '{}'.format(self.create_time)
中登处理流水
6259903b73bcbd0ca4bcb476
class TagsDistributionPerCourse(
        TagsDistributionDownstreamMixin,
        EventLogSelectionMixin,
        MapReduceJobTask):
    """Map-reduce task that calculates the distribution of content tags per course.

    Maps server-side `problem_check` events to (course, org, problem) keys,
    then reduces them into per-tag submission counts using the tags attached
    to the most recent submission.
    """

    def output(self):
        return get_target_from_url(self.output_root)

    def mapper(self, line):
        """Yield ((course_id, org_id, problem_id), (timestamp, saved_tags, is_correct))
        for each valid server-side problem_check event; skip anything malformed.
        """
        value = self.get_event_and_date_string(line)
        if value is None:
            return
        event, _ = value
        # Only server-emitted problem_check events carry grading results.
        if event.get('event_type') != 'problem_check' or event.get('event_source') != 'server':
            return
        timestamp = eventlog.get_event_time_string(event)
        if timestamp is None:
            return
        course_id = eventlog.get_course_id(event)
        if not course_id:
            return
        org_id = opaque_key_util.get_org_id_for_course(course_id)
        event_data = eventlog.get_event_data(event)
        if event_data is None:
            return
        problem_id = event_data.get('problem_id')
        if not problem_id:
            return
        is_correct = event_data.get('success') == 'correct'
        # Tags saved by the tagging aside, if any.
        saved_tags = event.get('context').get('asides', {}).get('tagging_aside', {}).get('saved_tags', {})
        yield (course_id, org_id, problem_id), (timestamp, saved_tags, is_correct)

    def reducer(self, key, values):
        """Count total/correct submissions per problem and emit one record per
        (tag name, tag value) pair, using the tags of the latest event.
        """
        course_id, org_id, problem_id = key
        num_correct = 0
        num_total = 0
        latest_timestamp = None
        latest_tags = None
        for timestamp, saved_tags, is_correct in values:
            # Track the tags attached to the most recent submission
            # (string comparison works for these timestamp strings).
            if latest_timestamp is None or timestamp > latest_timestamp:
                latest_timestamp = timestamp
                latest_tags = saved_tags.copy() if saved_tags else None
            if is_correct:
                num_correct += 1
            num_total += 1
        if not latest_tags:
            # Untagged problems produce no output rows.
            return
        else:
            for tag_key, tag_val in latest_tags.iteritems():
                # A tag value may be a single string or a list of strings.
                tag_val_lst = [tag_val] if isinstance(tag_val, basestring) else tag_val
                for val in tag_val_lst:
                    yield TagsDistributionRecord(
                        course_id=course_id, org_id=org_id, module_id=problem_id,
                        tag_name=tag_key, tag_value=val,
                        total_submissions=num_total,
                        correct_submissions=num_correct).to_string_tuple()
Calculates tags distribution.
6259903b1d351010ab8f4d08
class ChooseAddressView(chooser.ChooseView):
    """Wagtail chooser view for selecting an Address."""
    model = Address
    form_class = AddressForm
    # Templates and JS driving the chooser modal.
    results_template = 'wagtailaddresses/address-chooser/results.html'
    chooser_template = 'wagtailaddresses/address-chooser/chooser.html'
    chooser_javascript = 'wagtailaddresses/address-chooser/chooser.js'
Address choose view.
6259903b287bf620b6272dd7
class IntentReflectorHandler(AbstractRequestHandler):
    """Catch-all handler that speaks back the name of the triggered intent.

    Useful for interaction-model testing and debugging; register it after
    the specific intent handlers so it only catches unhandled intents.
    """

    def can_handle(self, handler_input):
        # Matches any IntentRequest.
        return ask_utils.is_request_type("IntentRequest")(handler_input)

    def handle(self, handler_input):
        intent_name = ask_utils.get_intent_name(handler_input)
        speak_output = "You just triggered " + intent_name + "."
        return (
            handler_input.response_builder
                .speak(speak_output)
                .response
        )
The intent reflector is used for interaction model testing and debugging. It will simply repeat the intent the user said. You can create custom handlers for your intents by defining them above, then also adding them to the request handler chain below.
6259903b6e29344779b01840
class AddActivity(Form):
    """Form with a single required text input for adding an activity."""
    # 'Activity' text field; validation fails when it is left empty.
    activity = StringField('Activity', [validators.DataRequired()])
creates form input field for adding activity
6259903b0fa83653e46f60c8
class NamespaceListResult(msrest.serialization.Model):
    """The response of the List Namespace operation.

    :param value: Result of the List Namespace operation.
    :type value: list[~azure.mgmt.eventhub.v2015_08_01.models.NamespaceResource]
    :param next_link: Link to the next set of results. Not empty if `value`
     contains an incomplete list of namespaces.
    :type next_link: str
    """

    # Maps Python attribute names to wire-format keys for (de)serialization.
    _attribute_map = {
        'value': {'key': 'value', 'type': '[NamespaceResource]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["NamespaceResource"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        super(NamespaceListResult, self).__init__(**kwargs)
        self.value = value
        self.next_link = next_link
The response of the List Namespace operation. :param value: Result of the List Namespace operation. :type value: list[~azure.mgmt.eventhub.v2015_08_01.models.NamespaceResource] :param next_link: Link to the next set of results. Not empty if Value contains incomplete list of namespaces. :type next_link: str
6259903b23e79379d538d6ee
class Week(BaseModel):
    """Peewee model for one calendar week's planned work time."""
    year = IntegerField()
    week_number = IntegerField()                # week number within `year`
    minutes_to_work = IntegerField(default=0)   # planned work time, in minutes

    class Meta:
        # One row per (year, week_number): unique composite index.
        indexes = (
            (('year', 'week_number'), True),
        )
        order_by = ('year', 'week_number')

    def __str__(self):
        # NOTE(review): prints minutes_to_work with an 'h' suffix although the
        # field stores minutes — confirm whether a conversion was intended.
        return 'Week: {}/{}:{}h'.format(self.year, self.week_number, self.minutes_to_work)
Week model.
6259903bcad5886f8bdc5973
class MaxReadedDatetimeMixIn(object):
    """Mixin providing the cutoff datetime for already-read messages."""

    @property
    def max_readed_datetime(self):
        """Newest timestamp still considered archived: now minus the archive window."""
        archive_window = timedelta(hours=MESSAGE_ARCHIVE_HOURS)
        return now() - archive_window
Mixin helper class for Contact and Archive manager
6259903b96565a6dacd2d882
class KaHrPayrollEmployeeHolidays(models.Model):
    """Extends `hr.holidays` with a link to its tunjangan (allowance) record
    and an action to open the related official-travel (dinas) cost form.
    """
    _inherit = 'hr.holidays'

    # Reference to the allowance record generated for this holiday, if any.
    tunjangan_holiday_id = fields.Many2one('ka_hr_payroll.tunjangan.holidays',
        string="Ref. Tunjangan", readonly=True)

    @api.multi
    def get_dinas_cost(self):
        """Return a window-action dict opening the employee-dinas form for
        this holiday, or None for non-dinas holidays.
        """
        if self.holiday_status_help != 'dinas':
            return
        # Look up an existing dinas-cost record linked to this holiday.
        dinas_cost = self.env['ka_hr_payroll.employee.dinas'].search([
            ('holiday_id', '=', self.id),
        ], limit=1)
        view_id = self.env.ref('ka_hr_payroll.view_employee_dinas_form').id
        action = self.env.ref('ka_hr_payroll.action_employee_dinas')
        result = action.read()[0]
        result['views'] = [(view_id, 'form')]
        result['view_id'] = view_id
        result['domain'] = [('holiday_id', '=', self.id)]
        result['context'] = {
            'default_holiday_id': self.id,
        }
        if dinas_cost:
            # Open the existing record instead of a blank form.
            result['res_id'] = dinas_cost.id
        return result
Holiday (leave) data for employees. Inherits `hr.holidays`.
6259903b21bff66bcd723e57
class EncNet(Chain):
    """Encoder component: linear -> batch norm -> (optional noise) -> scale/bias -> activation."""

    def __init__(self, dim, act=F.relu, device=None, ):
        # dim: (input_dim, output_dim) pair.
        d_inp, d_out = dim
        super(EncNet, self).__init__(
            linear=L.Linear(d_inp, d_out),
            # Gamma/beta are disabled here; the separate Scale link below
            # applies the learnable scale and bias after the optional noise.
            bn=L.BatchNormalization(d_out, decay=0.9, use_gamma=False, use_beta=False),
            sb=L.Scale(W_shape=d_out, bias_term=True)
        )
        # Bugfix: `device` was accepted but never stored, so
        # generate_norm_noise() crashed with AttributeError on the GPU path.
        self.device = device
        self.sigma = 0.3  # stddev of the additive Gaussian noise
        self.act = act

    def __call__(self, h, noise=False, test=False):
        """Forward pass; adds N(0, sigma^2) noise after BN when `noise` is True."""
        h = self.linear(h)
        h = self.bn(h, test)
        if noise:
            h = h + self.generate_norm_noise(h)
        h = self.sb(h)
        h = self.act(h)
        return h

    def generate_norm_noise(self, h):
        """Return Gaussian noise shaped like `h`, on GPU when a device is set."""
        bs, d = h.shape
        if self.device:
            r = Variable(
                cuda.to_gpu(cp.random.randn(bs, d).astype(cp.float32), self.device))
            return r * self.sigma
        else:
            return Variable(np.random.randn(bs, d).astype(np.float32)) * self.sigma
Encoder Component
6259903b23e79379d538d6ef
@implementer(ICheckinCheckoutReference)
@adapter(IIterateAware)
class CheckinCheckoutReferenceAdapter(object):
    """Default adapter for copying references during checkout/checkin.

    On checkout, forward references on the baseline are copied to the
    working copy; backward references are ignored. Checkin (and both
    back-reference hooks) are no-ops: the working copy's references are
    kept as-is.
    """

    storage_key = "coci.references"

    def __init__(self, context):
        self.context = context

    def checkout(self, baseline, wc, refs, storage):
        """Copy each forward reference in `refs` onto the working copy `wc`."""
        for ref in refs:
            try:
                wc.addReference(
                    ref.targetUID, ref.relationship, referenceClass=ref.__class__
                )
            except ReferenceException:
                # Best effort: a broken/duplicate relation is logged and skipped.
                logger.warn(
                    "Reference exception when adding relation %r "
                    "from new working copy %s to uid %s. Ignoring relation.",
                    ref.relationship,
                    "/".join(wc.getPhysicalPath()),
                    ref.targetUID,
                )

    def checkin(self, *args):
        # Intentionally a no-op; see class docstring.
        pass

    # Back-reference handling is also a no-op for this default adapter.
    checkoutBackReferences = checkinBackReferences = checkin
default adapter for references. on checkout forward refs on baseline are copied to wc backward refs on baseline are ignored on wc on checkin forward refs on wc are kept backwards refs on wc are kept forward refs on baseline get removed backward refs on baseline are kept by virtue of UID transferance
6259903b63b5f9789fe8635c
class ChoiceQuestion(Question):
    """Base class for choice questions with visit/accuracy counters."""

    accuracy = models.FloatField(default=0)  # stored overall accuracy
    correct_count_daily = models.IntegerField(default=0)
    correct_count_weekly = models.IntegerField(default=0)
    # Choice model used by toJson(); subclasses may override.
    CHOICE_CLASS = Choice

    def countInc(self, correct=False, *args, **kwargs):
        """Increment visit counters (and correct counters when `correct`).

        Uses F() expressions so the increments happen atomically at the DB
        level; the caller is expected to save() afterwards.
        """
        self.visit_count = F('visit_count') + 1
        self.visit_count_daily = F('visit_count_daily') + 1
        self.visit_count_weekly = F('visit_count_weekly') + 1
        if correct:
            self.correct_count_daily = F('correct_count_daily') + 1
            self.correct_count_weekly = F('correct_count_weekly') + 1

    def countResetDaily(self):
        """Zero the daily counters (caller saves)."""
        self.visit_count_daily = 0
        self.correct_count_daily = 0

    def countResetWeekly(self):
        """Zero both the daily and weekly counters (caller saves)."""
        self.visit_count_daily = 0
        self.visit_count_weekly = 0
        self.correct_count_daily = 0
        self.correct_count_weekly = 0

    @property
    def accuracy_daily(self):
        # Correct/visits for today; 0 when there were no visits.
        if self.visit_count_daily:
            return self.correct_count_daily / self.visit_count_daily
        else:
            return 0

    @property
    def accuracy_weekly(self):
        # Correct/visits for this week; 0 when there were no visits.
        if self.visit_count_weekly:
            return self.correct_count_weekly / self.visit_count_weekly
        else:
            return 0

    def toSimpleJson(self):
        """Serialize summary fields (no choices/answer) to a plain dict."""
        return {
            'id': self.id,
            'question_text': self.question_text,
            'category': self.category,
            'topic': self.topic,
            'visit_count': self.visit_count,
            'visit_count_daily': self.visit_count_daily,
            'visit_count_weekly': self.visit_count_weekly,
            'accuracy': self.accuracy,
            'accuracy_daily': self.accuracy_daily,
            'accuracy_weekly': self.accuracy_weekly,
            'source': self.source,
            'entry_date': self.entry_date.timestamp() * 1e3  # epoch millis
        }

    def toJson(self):
        """Serialize the full question, including its choices and answer."""
        choices = self.CHOICE_CLASS.objects.filter(question=self)
        choicesJson = []
        for choice in choices:
            choicesJson.append(choice.toJson())
        return {
            'id': self.id,
            'question_text': self.question_text,
            'choices': choicesJson,
            'answer': self.answer,
            'resolution': self.resolution,
            'category': self.category,
            'topic': self.topic,
            'visit_count': self.visit_count,
            'visit_count_daily': self.visit_count_daily,
            'visit_count_weekly': self.visit_count_weekly,
            'accuracy': self.accuracy,
            'accuracy_daily': self.accuracy_daily,
            'accuracy_weekly': self.accuracy_weekly,
            'source': self.source,
            'entry_date': self.entry_date.timestamp() * 1e3  # epoch millis
        }
Base class for choice question
6259903b0fa83653e46f60ca
class ProductPageJsonSerializer:
    """Serializer turning a raw product-page row (dict) into a flat, JSON-ready dict.

    Workaround class for consuming the JSON "dumps" left by the previous
    developer (original note was in Russian).
    """

    def __init__(self, row, *args, **kwargs):
        self._data = self.get_data(row)

    @staticmethod
    def _rounded_or_zero(value):
        # Prices may be absent (None); normalize to a rounded int, 0 if absent.
        return 0 if value is None else round(value)

    @staticmethod
    def _text_or_empty(value):
        # Missing meta strings (None) normalize to the empty string.
        return '' if value is None else value

    def get_data(self, row):
        """Extract and normalize the serialized fields from `row`.

        Raises KeyError if `row` lacks a 'slug'.
        """
        # Robustness fix: tolerate a missing/None 'fields' sub-dict instead of
        # crashing with AttributeError. (Also dropped the unused 'fields_bak'.)
        fields = row.get('fields') or {}
        name = row.get('title')
        return {
            'name': name,
            '_title': name,
            'slug': row['slug'],
            '_meta_title': self._text_or_empty(row.get('meta_title')),
            '_meta_keywords': self._text_or_empty(row.get('meta_keywords')),
            '_meta_description': self._text_or_empty(row.get('meta_description')),
            '_price': self._rounded_or_zero(fields.get('price')),
            'old_price': self._rounded_or_zero(fields.get('old_price')),
            'is_in_stock': fields.get('available', False),
            'scoring': fields.get('score', 0),
        }

    @property
    def data(self):
        """The normalized dict computed at construction time."""
        return self._data
Костыльный класс для работы с json-"дампами" от прошлого программиста
6259903b07d97122c4217e8d
@Thenable.register
@python_2_unicode_compatible
class GroupResult(ResultSet):
    """Like :class:`ResultSet`, but with an associated id.

    Returned by :class:`~celery.group`; enables inspection of the group's
    task states and return values as a single entity.
    """

    #: The UUID of the group.
    id = None

    #: The results in the group.
    results = None

    def __init__(self, id=None, results=None, **kwargs):
        self.id = id
        ResultSet.__init__(self, results, **kwargs)

    def save(self, backend=None):
        """Save this group result for later retrieval via :meth:`restore`."""
        return (backend or self.app.backend).save_group(self.id, self)

    def delete(self, backend=None):
        """Remove this result if it was previously saved."""
        (backend or self.app.backend).delete_group(self.id)

    def __reduce__(self):
        # Pickle support: rebuild from (id, results).
        return self.__class__, self.__reduce_args__()

    def __reduce_args__(self):
        return self.id, self.results

    def __bool__(self):
        # Truthy when it has an id or any results.
        return bool(self.id or self.results)
    __nonzero__ = __bool__  # Python 2 compatibility

    def __eq__(self, other):
        if isinstance(other, GroupResult):
            return other.id == self.id and other.results == self.results
        return NotImplemented

    def __ne__(self, other):
        # Delegate to __eq__, handling NotImplemented correctly.
        res = self.__eq__(other)
        return True if res is NotImplemented else not res

    def __repr__(self):
        return '<{0}: {1} [{2}]>'.format(type(self).__name__, self.id,
                                         ', '.join(r.id for r in self.results))

    def as_tuple(self):
        # Serializable (id, [child tuples]) form.
        return self.id, [r.as_tuple() for r in self.results]

    @property
    def children(self):
        return self.results

    @classmethod
    def restore(self, id, backend=None):
        """Restore a previously-saved group result by id."""
        return (
            backend or (self.app.backend if self.app else current_app.backend)
        ).restore_group(id)
Like :class:`ResultSet`, but with an associated id. This type is returned by :class:`~celery.group`. It enables inspection of the tasks state and return values as a single entity. :param id: The id of the group. :param results: List of result instances.
6259903b76d4e153a661db6b
class PyMappy(PythonPackage):
    """Spack package for mappy, a convenient interface to minimap2."""

    homepage = "https://pypi.python.org/pypi/mappy"
    url = "https://pypi.io/packages/source/m/mappy/mappy-2.2.tar.gz"

    # version(name, md5 checksum of the source tarball)
    version('2.2', 'dfc2aefe98376124beb81ce7dcefeccb')

    depends_on('zlib')
Mappy provides a convenient interface to minimap2.
6259903b50485f2cf55dc171
class InstructorInline(admin.TabularInline):
    """Inline admin descriptor for a course's instructors."""
    model = Instructor
    extra = 1  # show one empty extra row
    verbose_name_plural = 'instructors'
Inline administration descriptor for course instructors
6259903b91af0d3eaad3b024
class TestGenres(TestCase):
    """Tests for the Genres model."""

    def setUp(self):
        # Unsaved Genres instance used by each test.
        self.obj = Genres(
            type='Comedia',
            slug='comedia',
            description='A comédia é o uso de humor nas artes cênicas.'
        )

    def test_create(self):
        # Saving should assign the first primary key.
        self.obj.save()
        self.assertEqual(1, self.obj.id)
Testa o Model Genres
6259903b94891a1f408b9fef
class MiddlewareUsingCoro(ManagerTestCase):
    """Middlewares using asyncio coroutines should work."""

    def test_asyncdef(self):
        # A coroutine process_request returning a Response should short-circuit
        # the download: the returned response is used and download_func is skipped.
        resp = Response('http://example.com/index.html')

        class CoroMiddleware:
            async def process_request(self, request, spider):
                await defer.succeed(42)
                return resp

        self.mwman._add_middleware(CoroMiddleware())
        req = Request('http://example.com/index.html')
        download_func = mock.MagicMock()
        dfd = self.mwman.download(download_func, req, self.spider)
        results = []
        dfd.addBoth(results.append)
        self._wait(dfd)
        self.assertIs(results[0], resp)
        self.assertFalse(download_func.called)

    @mark.only_asyncio()
    def test_asyncdef_asyncio(self):
        # Same as above, but awaiting asyncio-native awaitables inside the
        # coroutine (only runs under the asyncio reactor).
        resp = Response('http://example.com/index.html')

        class CoroMiddleware:
            async def process_request(self, request, spider):
                await asyncio.sleep(0.1)
                result = await get_from_asyncio_queue(resp)
                return result

        self.mwman._add_middleware(CoroMiddleware())
        req = Request('http://example.com/index.html')
        download_func = mock.MagicMock()
        dfd = self.mwman.download(download_func, req, self.spider)
        results = []
        dfd.addBoth(results.append)
        self._wait(dfd)
        self.assertIs(results[0], resp)
        self.assertFalse(download_func.called)
Middlewares using asyncio coroutines should work
6259903bd4950a0f3b111738
class BuyAndHoldFrameworkAlgorithm(QCAlgorithmFramework):
    """Buy-and-hold algorithm built on the QC Algorithm Framework.

    Buys and holds the configured tickers from the start date, allocating
    through an optimization-based portfolio construction model.
    """

    def Initialize(self):
        self.SetStartDate(2019, 1, 1)
        self.SetCash(100000)

        # --- user parameters ---
        tickers = ['FB', 'AMZN', 'NFLX', 'GOOG']
        objectiveFunction = 'std'   # optimization objective passed to the construction model
        rebalancingParam = 365      # rebalancing period (see construction model)

        self.SetSecurityInitializer(self.CustomSecurityInitializer)
        self.SetBrokerageModel(AlphaStreamsBrokerageModel())
        self.UniverseSettings.Resolution = Resolution.Daily
        self.UniverseSettings.FillForward = False

        # Chart with one line series per ticker for the optimal allocation.
        allocationPlot = Chart('Optimal Allocation')
        symbols = []
        for i in range(len(tickers)):
            symbols.append(Symbol.Create(tickers[i], SecurityType.Equity, Market.USA))
            allocationPlot.AddSeries(Series(tickers[i], SeriesType.Line, ''))
        self.AddChart(allocationPlot)

        # Framework modules: manual universe, buy-and-hold alpha, optimizing
        # portfolio construction, immediate execution, no risk management.
        self.SetUniverseSelection(ManualUniverseSelectionModel(symbols))
        self.SetAlpha(BuyAndHoldAlphaCreationModel())
        self.SetPortfolioConstruction(CustomOptimizationPortfolioConstructionModel(
            objectiveFunction=objectiveFunction, rebalancingParam=rebalancingParam))
        self.SetExecution(ImmediateExecutionModel())
        self.SetRiskManagement(NullRiskManagementModel())

    def CustomSecurityInitializer(self, security):
        # Use adjusted price data for every security.
        security.SetDataNormalizationMode(DataNormalizationMode.Adjusted)
Trading Logic: This algorithm buys and holds the provided tickers from the start date until the end date Modules: Universe: Manual input of tickers Alpha: Constant creation of Up Insights every trading bar Portfolio: A choice between Equal Weighting, Maximize Portfolio Return, Minimize Portfolio Standard Deviation or Maximize Portfolio Sharpe Ratio - If some of the tickers did not exist at the start date, it will buy them when they first appeared in the market, in which case it will sell part of the existing securities in order to buy the new ones keeping an equally weighted portfolio - To rebalance the portfolio periodically to ensure optimal allocation, change the rebalancingParam below Execution: Immediate Execution with Market Orders Risk: Null
6259903bbe383301e0254a08
class FabricCodeVersionInfo(Model):
    """Information about a Service Fabric code version.

    :param code_version: The product version of Service Fabric.
    :type code_version: str
    """

    # Maps attribute names to wire-format keys for (de)serialization.
    _attribute_map = {
        'code_version': {'key': 'CodeVersion', 'type': 'str'},
    }

    def __init__(self, code_version=None):
        self.code_version = code_version
Information about a Service Fabric code version. :param code_version: The product version of Service Fabric. :type code_version: str
6259903b0a366e3fb87ddbd6
class DirectMainTest(cros_test_lib.MockTempDirTestCase):
    """Tests for direct_main()."""

    def setUp(self):
        # Stub the hook runner so each test only checks argument plumbing.
        self.hooks_mock = self.PatchObject(
            pre_upload, '_run_project_hooks', return_value=None)

    def testNoArgs(self):
        """No arguments: hooks run against the current directory."""
        self.assertEqual(pre_upload.direct_main([]), 0)
        self.hooks_mock.assert_called_once_with(
            mock.ANY, proj_dir=os.getcwd(), commit_list=[],
            presubmit=mock.ANY)

    def testExplicitDir(self):
        """--dir points the hooks at the given checkout."""
        self.assertEqual(
            pre_upload.direct_main(['--dir', constants.CHROMITE_DIR]), 0)
        self.hooks_mock.assert_called_once_with(
            mock.ANY, proj_dir=constants.CHROMITE_DIR, commit_list=[],
            presubmit=mock.ANY)

    def testBogusProject(self):
        """An unknown --project name is forwarded untouched."""
        argv = ['--dir', constants.CHROMITE_DIR, '--project', 'foooooooooo']
        self.assertEqual(pre_upload.direct_main(argv), 0)
        self.hooks_mock.assert_called_once_with(
            'foooooooooo', proj_dir=constants.CHROMITE_DIR, commit_list=[],
            presubmit=mock.ANY)

    def testBogustProjectNoDir(self):
        """--project without --dir falls back to the current directory."""
        self.assertEqual(
            pre_upload.direct_main(['--project', 'foooooooooo']), 0)
        self.hooks_mock.assert_called_once_with(
            'foooooooooo', proj_dir=os.getcwd(), commit_list=[],
            presubmit=mock.ANY)

    def testNoGitDir(self):
        """A directory that is not a git checkout is rejected."""
        self.assertRaises(pre_upload.BadInvocation,
                          pre_upload.direct_main,
                          ['--dir', self.tempdir])

    def testNoDir(self):
        """A nonexistent directory is rejected."""
        missing = os.path.join(self.tempdir, 'foooooooo')
        self.assertRaises(pre_upload.BadInvocation,
                          pre_upload.direct_main,
                          ['--dir', missing])

    def testCommitList(self):
        """Positional arguments become the commit list."""
        commits = ['sha1', 'sha2', 'shaaaaaaaaaaaan']
        self.assertEqual(pre_upload.direct_main(commits), 0)
        self.hooks_mock.assert_called_once_with(
            mock.ANY, proj_dir=mock.ANY, commit_list=commits,
            presubmit=mock.ANY)
Tests for direct_main()
6259903b507cdc57c63a5f8b