code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
---|---|---|
class population: <NEW_LINE> <INDENT> def __init__(self, size): <NEW_LINE> <INDENT> self.pop = [candidate() for i in xrange(size)] <NEW_LINE> self.children = [] | Defines the population object.
:Attributes:
-pop: A list of randomly initialized candidates.
-children: The list of children produced by pop after crossover
and mutation. | 6259905b45492302aabfdaca |
class Data: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass | creates new instance of the data class | 6259905b4e4d5625663739f9 |
class Server: <NEW_LINE> <INDENT> def __init__( self, app, args, work_dir, name="node", port=VAST_PORT, config_file=None, **kwargs, ): <NEW_LINE> <INDENT> self.app = app <NEW_LINE> if config_file: <NEW_LINE> <INDENT> self.config_arg = f"--config={SET_DIR/config_file}" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.config_arg = None <NEW_LINE> <DEDENT> self.name = name <NEW_LINE> self.cwd = work_dir / self.name <NEW_LINE> self.port = port <NEW_LINE> command = [self.app, "--bare-mode"] <NEW_LINE> if self.config_arg: <NEW_LINE> <INDENT> command.append(self.config_arg) <NEW_LINE> <DEDENT> command = command + args <NEW_LINE> LOGGER.debug(f"starting server fixture: {command}") <NEW_LINE> LOGGER.debug(f"waiting for port {self.port} to be available") <NEW_LINE> if not wait.tcp.closed(self.port, timeout=5): <NEW_LINE> <INDENT> raise RuntimeError("Port is blocked by another process.\nAborting tests...") <NEW_LINE> <DEDENT> self.cwd.mkdir(parents=True) <NEW_LINE> out = open(self.cwd / "out", "w") <NEW_LINE> err = open(self.cwd / "err", "w") <NEW_LINE> self.process = spawn( command, cwd=self.cwd, stdout=out, stderr=err, **kwargs, ) <NEW_LINE> LOGGER.debug(f"waiting for server to listen on port {self.port}") <NEW_LINE> if not wait.tcp.open(self.port, timeout=10): <NEW_LINE> <INDENT> raise RuntimeError("Server could not aquire port.\nAborting tests") <NEW_LINE> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> command = [self.app, "--bare-mode"] <NEW_LINE> if self.config_arg: <NEW_LINE> <INDENT> command.append(self.config_arg) <NEW_LINE> <DEDENT> command = command + ["-e", f":{self.port}", "stop"] <NEW_LINE> LOGGER.debug(f"stopping server fixture: {command}") <NEW_LINE> stop_out = open(self.cwd / "stop.out", "w") <NEW_LINE> stop_err = open(self.cwd / "stop.err", "w") <NEW_LINE> stop = 0 <NEW_LINE> try: <NEW_LINE> <INDENT> stop = spawn( command, cwd=self.cwd, stdout=stop_out, stderr=stop_err, ).wait(STEP_TIMEOUT) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> stop.kill() 
<NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.process.wait(STEP_TIMEOUT) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.process.kill() | Server fixture implementation details | 6259905bd99f1b3c44d06c93 |
class ManagedClusterWindowsProfile(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'admin_username': {'required': True}, } <NEW_LINE> _attribute_map = { 'admin_username': {'key': 'adminUsername', 'type': 'str'}, 'admin_password': {'key': 'adminPassword', 'type': 'str'}, 'license_type': {'key': 'licenseType', 'type': 'str'}, 'enable_csi_proxy': {'key': 'enableCSIProxy', 'type': 'bool'}, } <NEW_LINE> def __init__( self, *, admin_username: str, admin_password: Optional[str] = None, license_type: Optional[Union[str, "LicenseType"]] = None, enable_csi_proxy: Optional[bool] = None, **kwargs ): <NEW_LINE> <INDENT> super(ManagedClusterWindowsProfile, self).__init__(**kwargs) <NEW_LINE> self.admin_username = admin_username <NEW_LINE> self.admin_password = admin_password <NEW_LINE> self.license_type = license_type <NEW_LINE> self.enable_csi_proxy = enable_csi_proxy | Profile for Windows VMs in the managed cluster.
All required parameters must be populated in order to send to Azure.
:ivar admin_username: Required. Specifies the name of the administrator account.
:code:`<br>`:code:`<br>` **Restriction:** Cannot end in "." :code:`<br>`:code:`<br>`
**Disallowed values:** "administrator", "admin", "user", "user1", "test", "user2", "test1",
"user3", "admin1", "1", "123", "a", "actuser", "adm", "admin2", "aspnet", "backup", "console",
"david", "guest", "john", "owner", "root", "server", "sql", "support", "support_388945a0",
"sys", "test2", "test3", "user4", "user5". :code:`<br>`:code:`<br>` **Minimum-length:** 1
character :code:`<br>`:code:`<br>` **Max-length:** 20 characters.
:vartype admin_username: str
:ivar admin_password: Specifies the password of the administrator account.
:code:`<br>`:code:`<br>` **Minimum-length:** 8 characters :code:`<br>`:code:`<br>`
**Max-length:** 123 characters :code:`<br>`:code:`<br>` **Complexity requirements:** 3 out of 4
conditions below need to be fulfilled :code:`<br>` Has lower characters :code:`<br>`Has upper
characters :code:`<br>` Has a digit :code:`<br>` Has a special character (Regex match [\W_])
:code:`<br>`:code:`<br>` **Disallowed values:** "abc@123", "P@$$w0rd", "P@ssw0rd",
"P@ssword123", "Pa$$word", "pass@word1", "Password!", "Password1", "Password22", "iloveyou!".
:vartype admin_password: str
:ivar license_type: The license type to use for Windows VMs. See `Azure Hybrid User Benefits
<https://azure.microsoft.com/pricing/hybrid-benefit/faq/>`_ for more details. Possible values
include: "None", "Windows_Server".
:vartype license_type: str or ~azure.mgmt.containerservice.v2021_07_01.models.LicenseType
:ivar enable_csi_proxy: For more details on CSI proxy, see the `CSI proxy GitHub repo
<https://github.com/kubernetes-csi/csi-proxy>`_.
:vartype enable_csi_proxy: bool | 6259905b91af0d3eaad3b41b |
class CPF_service( cpppo.dfa ): <NEW_LINE> <INDENT> def __init__( self, name=None, **kwds ): <NEW_LINE> <INDENT> name = name or kwds.setdefault( 'context', self.__class__.__name__ ) <NEW_LINE> svcs = CPF( terminal=True ) <NEW_LINE> super( CPF_service, self ).__init__( name=name, initial=svcs, **kwds ) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def produce( data ): <NEW_LINE> <INDENT> result = b'' <NEW_LINE> if data and 'CPF' in data: <NEW_LINE> <INDENT> result += CPF.produce( data.CPF ) <NEW_LINE> <DEDENT> return result | Handle Service request/reply that are encoded as a CPF list. We must deduce whether we are
parsing a request or a reply. The request will have a 0 length; the reply (which must contain a
CPF with at least an item count) will have a non-zero length.
Even if the request is empty, we want to produce 'CIP.<service_name>.CPF'. | 6259905b0fa83653e46f64d9 |
class JointLiabilityGuaranteeCalcWeight(CalcWeight): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def calc_weight(cls, guarantee_sum: int, duration: int, state: loan_state): <NEW_LINE> <INDENT> return guarantee_sum | 连带责任保证 | 6259905b8a43f66fc4bf3780 |
class AboutMeHdlr(MetriqueHdlr): <NEW_LINE> <INDENT> @authenticated <NEW_LINE> def get(self, owner): <NEW_LINE> <INDENT> result = self.aboutme(owner=owner) <NEW_LINE> self.write(result) <NEW_LINE> <DEDENT> def aboutme(self, owner): <NEW_LINE> <INDENT> self.user_exists(owner) <NEW_LINE> mask = ['passhash'] <NEW_LINE> if self.is_self(owner): <NEW_LINE> <INDENT> return self.get_user_profile(owner, mask=mask) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mask += [] | RequestHandler for seeing your user profile
action can be addToSet, pull
role can be read, write, admin | 6259905b56b00c62f0fb3ebe |
class NumericQueryField(NumericQueryMixin, QueryField): <NEW_LINE> <INDENT> def _expr(self, prefix): <NEW_LINE> <INDENT> return NumericQueryExpression(prefix + [self._field.name]) | Class for expression-based numeric fields | 6259905b097d151d1a2c2660 |
class CendariAuthPlugin(plugins.SingletonPlugin): <NEW_LINE> <INDENT> plugins.implements(plugins.IConfigurer) <NEW_LINE> plugins.implements(plugins.IAuthenticator) <NEW_LINE> def update_config(self, config): <NEW_LINE> <INDENT> toolkit.add_template_directory(config, 'templates') <NEW_LINE> <DEDENT> def get_auth_functions(self): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> def login(self): <NEW_LINE> <INDENT> userdict = get_shib_data(self) <NEW_LINE> if userdict: <NEW_LINE> <INDENT> apiurl = 'http://localhost:42042/v1/session' <NEW_LINE> apijson = {'eppn': userdict['eppn'], 'mail': userdict['mail'], 'cn': userdict['cn']} <NEW_LINE> headers = {'content-type': 'application/json'} <NEW_LINE> try: <NEW_LINE> <INDENT> apiresponse = requests.post(apiurl, data=json.dumps(apijson), headers=headers, timeout=1) <NEW_LINE> log.info('Data API response status_code: '+ str(apiresponse.status_code)) <NEW_LINE> if apiresponse.status_code == 200: <NEW_LINE> <INDENT> response_json = json.loads(apiresponse.content) <NEW_LINE> api_username = str(response_json['username']) <NEW_LINE> log.info('API returned username: ' + api_username + '.') <NEW_LINE> verify_sysadmin_status(self,api_username) <NEW_LINE> log.info('Logging in ' + api_username + '.') <NEW_LINE> pylons.session['cendari-auth-user'] = api_username <NEW_LINE> pylons.session.save() <NEW_LINE> toolkit.redirect_to(controller='user', action='dashboard') <NEW_LINE> <DEDENT> <DEDENT> except (requests.exceptions.Timeout, requests.exceptions.ConnectionError): <NEW_LINE> <INDENT> log.warning('Data API did not respond!') <NEW_LINE> user = ckan.model.User.by_email(userdict['mail']) <NEW_LINE> if user: <NEW_LINE> <INDENT> ckan_user_dict = toolkit.get_action('user_show')(data_dict={'id': user.id}) <NEW_LINE> log.info('logging in existing user ' + ckan_user_dict['name'] + ' based on mail: '+ userdict['mail']) <NEW_LINE> pylons.session['cendari-auth-user'] = ckan_user_dict['name'] <NEW_LINE> pylons.session.save() <NEW_LINE> 
toolkit.redirect_to(controller='user', action='dashboard') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def identify(self): <NEW_LINE> <INDENT> pylons_user_name = pylons.session.get('cendari-auth-user') <NEW_LINE> if pylons_user_name: <NEW_LINE> <INDENT> toolkit.c.user = pylons_user_name <NEW_LINE> <DEDENT> <DEDENT> def logout(self): <NEW_LINE> <INDENT> if 'cendari-auth-user' in pylons.session: <NEW_LINE> <INDENT> del pylons.session['cendari-auth-user'] <NEW_LINE> pylons.session.save() <NEW_LINE> <DEDENT> toolkit.redirect_to(controller='util',action='redirect',url='/Shibboleth.sso/Logout') <NEW_LINE> <DEDENT> def abort(self, status_code, detail, headers, comment): <NEW_LINE> <INDENT> return status_code, detail, headers, comment | Main plugin class implemeting ``IConfigurer`` and ``IAuthenticator``. | 6259905b2c8b7c6e89bd4de1 |
class Solution: <NEW_LINE> <INDENT> @timeit <NEW_LINE> def isBoomerang(self, points: List[List[int]]) -> bool: <NEW_LINE> <INDENT> a, b, c = points <NEW_LINE> if a == b or a == c or b == c: return False <NEW_LINE> x1, x2 = a[0] - b[0], a[0] - c[0] <NEW_LINE> if x1 == 0 or x2 == 0: return x1 == x2 <NEW_LINE> y1, y2 = a[1] - b[1], a[1] - c[1] <NEW_LINE> k1, k2 = y1 / x1, y2 / x2 <NEW_LINE> return k1 != k2 | [1037. 有效的回旋镖](https://leetcode-cn.com/problems/valid-boomerang/) | 6259905b1b99ca4002290031 |
class GroupLeftCallback(NotificationCallback): <NEW_LINE> <INDENT> __slots__ = NotificationCallback.__slots__ <NEW_LINE> def test(self, node): <NEW_LINE> <INDENT> if not node.has_child("remove"): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return Super(GroupChangedCallback, self).test(node) | Callback for group left notifications. | 6259905b0a50d4780f7068b8 |
class SRWLOptMirPl(SRWLOptMir): <NEW_LINE> <INDENT> def __init__(self, _size_tang=1, _size_sag=1, _ap_shape='r', _sim_meth=2, _treat_in_out=1, _ext_in=0, _ext_out=0, _nvx=0, _nvy=0, _nvz=-1, _tvx=1, _tvy=0, _x=0, _y=0, _refl=1, _n_ph_en=1, _n_ang=1, _n_comp=1, _ph_en_start=1000., _ph_en_fin=1000., _ph_en_scale_type='lin', _ang_start=0, _ang_fin=0, _ang_scale_type='lin'): <NEW_LINE> <INDENT> self.set_dim_sim_meth(_size_tang, _size_sag, _ap_shape, _sim_meth, 100, 100, _treat_in_out, _ext_in, _ext_out) <NEW_LINE> self.set_orient(_nvx, _nvy, _nvz, _tvx, _tvy, _x, _y) <NEW_LINE> self.set_reflect(_refl, _n_ph_en, _n_ang, _n_comp, _ph_en_start, _ph_en_fin, _ph_en_scale_type, _ang_start, _ang_fin, _ang_scale_type) | Optical Element: Mirror: Plane | 6259905b2ae34c7f260ac6db |
class ActiveMeasurement(base_model.BaseModel): <NEW_LINE> <INDENT> __tablename__ = "active_measurements" <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> network_interface_id = db.Column(db.Integer, db.ForeignKey("network_interfaces.id")) <NEW_LINE> date = db.Column(db.DateTime) <NEW_LINE> dispatched = db.Column(db.Boolean) <NEW_LINE> sim_serial_number = db.Column(db.String(50), db.ForeignKey("sims.serial_number")) <NEW_LINE> device_id = db.Column(db.String(50), db.ForeignKey("devices.device_id")) <NEW_LINE> app_version_code = db.Column(db.String(10)) <NEW_LINE> type = db.Column(db.String(50)) <NEW_LINE> __mapper_args__ = {'polymorphic_on': type} | Active measurement model class | 6259905b29b78933be26abbe |
class ModelViewTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = Client() <NEW_LINE> <DEDENT> def test_list_model(self): <NEW_LINE> <INDENT> url = reverse('catalog_model_list') <NEW_LINE> response = self.client.get(url) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> <DEDENT> def test_create_model(self): <NEW_LINE> <INDENT> url = reverse('catalog_model_create') <NEW_LINE> data = { "name": uuid.uuid4(), } <NEW_LINE> response = self.client.post(url, data=data) <NEW_LINE> self.assertEqual(response.status_code, 302) <NEW_LINE> <DEDENT> def test_detail_model(self): <NEW_LINE> <INDENT> model = create_model() <NEW_LINE> url = reverse('catalog_model_detail', args=[model.slug, ]) <NEW_LINE> response = self.client.get(url) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> <DEDENT> def test_update_model(self): <NEW_LINE> <INDENT> model = create_model() <NEW_LINE> data = { "name": uuid.uuid4(), } <NEW_LINE> url = reverse('catalog_model_update', args=[model.slug, ]) <NEW_LINE> response = self.client.post(url, data) <NEW_LINE> self.assertEqual(response.status_code, 302) | Tests for Model | 6259905b99cbb53fe68324d4 |
class TestRadioVisServiceList(object): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> self._test_radiovis_services = [] <NEW_LINE> tree = xml.etree.ElementTree.parse(filename) <NEW_LINE> for test_radiovis_service in tree.findall("test_radiovis_service"): <NEW_LINE> <INDENT> name = test_radiovis_service.findtext("name") <NEW_LINE> hostname = test_radiovis_service.findtext("hostname") <NEW_LINE> port = test_radiovis_service.findtext("port") <NEW_LINE> text_topic = test_radiovis_service.findtext("text_topic") <NEW_LINE> image_topic = test_radiovis_service.findtext("image_topic") <NEW_LINE> if text_topic is not None and len(text_topic) == 0: <NEW_LINE> <INDENT> text_topic = None <NEW_LINE> <DEDENT> if image_topic is not None and len(image_topic) == 0: <NEW_LINE> <INDENT> image_topic = None <NEW_LINE> <DEDENT> service = TestRadioVisService(name, hostname, port, text_topic, image_topic) <NEW_LINE> self._test_radiovis_services.append(service) <NEW_LINE> <DEDENT> self._index = 0 <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if self._index >= len(self._test_radiovis_services): <NEW_LINE> <INDENT> self._index = 0 <NEW_LINE> raise StopIteration <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> index = self._index <NEW_LINE> self._index = index + 1 <NEW_LINE> return self._test_radiovis_services[index] <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> return self._test_radiovis_services[index] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._test_radiovis_services) | A list of test RadioVIS services (L{TestRadioVisService} objects). | 6259905bd53ae8145f919a56 |
class PMPackageSet(ABCObject, BoolCompat): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def __iter__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def filter(self, *args, **kwargs): <NEW_LINE> <INDENT> return PMFilteredPackageSet(self, args, kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def best(self): <NEW_LINE> <INDENT> best = None <NEW_LINE> for p in self.sorted: <NEW_LINE> <INDENT> if best is not None and p.key != best.key: <NEW_LINE> <INDENT> raise AmbiguousPackageSetError( ".best called on a set of differently-named packages" ) <NEW_LINE> <DEDENT> best = p <NEW_LINE> <DEDENT> if best is None: <NEW_LINE> <INDENT> raise EmptyPackageSetError(".best called on an empty set") <NEW_LINE> <DEDENT> return best <NEW_LINE> <DEDENT> def select(self, *args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.filter(*args, **kwargs).best <NEW_LINE> <DEDENT> except EmptyPackageSetError: <NEW_LINE> <INDENT> raise EmptyPackageSetError("No packages match the filters.") <NEW_LINE> <DEDENT> except AmbiguousPackageSetError: <NEW_LINE> <INDENT> raise AmbiguousPackageSetError( "Ambiguous filter (matches more than a single package name)." 
) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def sorted(self): <NEW_LINE> <INDENT> return PMSortedPackageSet(self) <NEW_LINE> <DEDENT> def group_by(self, *criteria): <NEW_LINE> <INDENT> return PMPackageGroupDict(self, criteria) <NEW_LINE> <DEDENT> def __getitem__(self, filt): <NEW_LINE> <INDENT> it = iter(self.filter(filt)) <NEW_LINE> try: <NEW_LINE> <INDENT> ret = next(it) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> raise EmptyPackageSetError("No packages match the filter.") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> next(it) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AmbiguousPackageSetError("Filter matches more than one package.") <NEW_LINE> <DEDENT> return ret <NEW_LINE> <DEDENT> def __contains__(self, arg): <NEW_LINE> <INDENT> i = iter(self.filter(arg)) <NEW_LINE> try: <NEW_LINE> <INDENT> next(i) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def __bool__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> next(iter(self)) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True | A set of packages. | 6259905b91f36d47f2231989 |
class RemoveTest(test_case.TestCase): <NEW_LINE> <INDENT> def test_not_found(self): <NEW_LINE> <INDENT> def delete_machine(*args, **kwargs): <NEW_LINE> <INDENT> self.fail('delete_machine called') <NEW_LINE> <DEDENT> self.mock(catalog.machine_provider, 'delete_machine', delete_machine) <NEW_LINE> catalog.remove(ndb.Key(models.Instance, 'fake-instance')) <NEW_LINE> self.failIf(models.Instance.query().get()) <NEW_LINE> <DEDENT> def test_not_cataloged(self): <NEW_LINE> <INDENT> def delete_machine(*args, **kwargs): <NEW_LINE> <INDENT> return {'error': 'ENTRY_NOT_FOUND'} <NEW_LINE> <DEDENT> def send_machine_event(*args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.mock(catalog.machine_provider, 'delete_machine', delete_machine) <NEW_LINE> self.mock(catalog.metrics, 'send_machine_event', send_machine_event) <NEW_LINE> key = instances.get_instance_key( 'base-name', 'revision', 'zone', 'instance-name', ) <NEW_LINE> key = models.Instance( key=key, cataloged=False, instance_group_manager=instances.get_instance_group_manager_key(key), ).put() <NEW_LINE> catalog.remove(key) <NEW_LINE> self.failIf(key.get().cataloged) <NEW_LINE> self.failUnless(key.get().pending_deletion) <NEW_LINE> <DEDENT> def test_removed(self): <NEW_LINE> <INDENT> def delete_machine(*args, **kwargs): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> def send_machine_event(*args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.mock(catalog.machine_provider, 'delete_machine', delete_machine) <NEW_LINE> self.mock(catalog.metrics, 'send_machine_event', send_machine_event) <NEW_LINE> key = instances.get_instance_key( 'base-name', 'revision', 'zone', 'instance-name', ) <NEW_LINE> key = models.Instance( key=key, cataloged=True, instance_group_manager=instances.get_instance_group_manager_key(key), ).put() <NEW_LINE> catalog.remove(key) <NEW_LINE> self.failUnless(key.get().cataloged) <NEW_LINE> self.failUnless(key.get().pending_deletion) <NEW_LINE> <DEDENT> def test_removal_error(self): 
<NEW_LINE> <INDENT> def delete_machine(*args, **kwargs): <NEW_LINE> <INDENT> return {'error': 'error'} <NEW_LINE> <DEDENT> self.mock(catalog.machine_provider, 'delete_machine', delete_machine) <NEW_LINE> key = instances.get_instance_key( 'base-name', 'revision', 'zone', 'instance-name', ) <NEW_LINE> key = models.Instance( key=key, cataloged=True, instance_group_manager=instances.get_instance_group_manager_key(key), ).put() <NEW_LINE> catalog.remove(key) <NEW_LINE> self.failUnless(key.get().cataloged) <NEW_LINE> self.failIf(key.get().pending_deletion) | Tests for catalog.remove. | 6259905b32920d7e50bc763a |
class Checker(): <NEW_LINE> <INDENT> def __init__(self, URL = URL_APPROVED, HEADER = HEADER): <NEW_LINE> <INDENT> self.URL = URL <NEW_LINE> self.HTML = '' <NEW_LINE> self.HEADER = HEADER <NEW_LINE> <DEDENT> def get_html(self): <NEW_LINE> <INDENT> self.HTML = requests.get(self.URL, headers = self.HEADER).text <NEW_LINE> <DEDENT> def soupify(self): <NEW_LINE> <INDENT> return BeautifulSoup(self.HTML, 'lxml') <NEW_LINE> <DEDENT> def get_tds(self): <NEW_LINE> <INDENT> return self.soupify().findAll('td') <NEW_LINE> <DEDENT> def get_fresh_tds(self): <NEW_LINE> <INDENT> self.get_html() <NEW_LINE> return self.get_tds() <NEW_LINE> <DEDENT> def get_groups(self): <NEW_LINE> <INDENT> return list(zip(*(iter(self.get_fresh_tds()),)*5)) <NEW_LINE> <DEDENT> def cleanify_one(self, rawData): <NEW_LINE> <INDENT> No = int(rawData[0].text.replace('.','')) <NEW_LINE> DrugName = rawData[1].text <NEW_LINE> ActiveIngredient = rawData[2].text <NEW_LINE> Date = rawData[3].text <NEW_LINE> Use = str(rawData[4]).split('<br')[0].replace('<td>', '') <NEW_LINE> a_list = rawData[4].findAll('a') <NEW_LINE> Links = {item.text: BASE_URL + item.get('href') for item in a_list} <NEW_LINE> return {'No':No, 'DrugName': DrugName, 'ActiveIngredient': ActiveIngredient, 'Date': Date, 'Usage': Use, 'URLs': Links} <NEW_LINE> <DEDENT> def cleanify_all(self): <NEW_LINE> <INDENT> return [self.cleanify_one(item) for item in self.get_groups()] <NEW_LINE> <DEDENT> def insert_into_database(self): <NEW_LINE> <INDENT> data = self.cleanify_all() <NEW_LINE> for item in data: <NEW_LINE> <INDENT> if not list(approvals.find({'No':item.get('No')})): <NEW_LINE> <INDENT> approvals.insert(item) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_company_from_trial_snapshot(self,url): <NEW_LINE> <INDENT> r = requests.get(url, headers = self.HEADER) <NEW_LINE> company = str([item for item in BeautifulSoup(r.text, 'lxml').findAll('p') if '2018' in item.text][0]).split('<br/>')[-2].replace('\r','').replace('\n','') <NEW_LINE> return company 
<NEW_LINE> <DEDENT> def update_info(self, collection = 'approvals'): <NEW_LINE> <INDENT> collection = eval(collection) <NEW_LINE> data = collection.find({'URLs.Drug Trials Snapshot': {'$exists': 1}}, {'URLs.Drug Trials Snapshot':1}) <NEW_LINE> for item in data: <NEW_LINE> <INDENT> company = self.get_company_from_trial_snapshot(item['URLs']['Drug Trials Snapshot']) <NEW_LINE> collection.update({'_id':item['_id']},{'$set':{'Company':company}}) <NEW_LINE> <DEDENT> <DEDENT> def insert_historical(self): <NEW_LINE> <INDENT> for item in HISTORICAL_URLS: <NEW_LINE> <INDENT> data = self.cleanify_all() <NEW_LINE> for item in data: <NEW_LINE> <INDENT> if not list(approvals.find({'No':item.get('No')})): <NEW_LINE> <INDENT> HistAppro.insert(item) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.update_info('HistAppro') | Class for checking storing and retrieving data from FDA susing URL_APPROVED
but for that url data are updated in few days
but this data can be used for analysis because it is much easier to extract name of Company and drug name from URL_APPROVED | 6259905b7cff6e4e811b7038 |
class DistributionSysTypeSetIterator(APIObject,IDisposable,IEnumerator): <NEW_LINE> <INDENT> def Dispose(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def MoveNext(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def next(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ReleaseManagedResources(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ReleaseUnmanagedResources(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Reset(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __enter__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __exit__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __iter__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> Current=property(lambda self: object(),lambda self,v: None,lambda self: None) | An iterator to a DistributionSys type set.
DistributionSysTypeSetIterator() | 6259905b627d3e7fe0e08480 |
class Binomial(Distribution): <NEW_LINE> <INDENT> def __init__(self, prob=.5, size=20): <NEW_LINE> <INDENT> self.p = prob <NEW_LINE> self.n= size <NEW_LINE> mu = self.calculate_mean() <NEW_LINE> sigma = self.calculate_stdev() <NEW_LINE> Distribution.__init__(self, mu, sigma) <NEW_LINE> <DEDENT> def calculate_mean(self): <NEW_LINE> <INDENT> self.mean = self.p * self.n <NEW_LINE> return self.mean <NEW_LINE> <DEDENT> def calculate_stdev(self): <NEW_LINE> <INDENT> sigma = math.sqrt(self.n * self.p * (1 - self.p)) <NEW_LINE> self.stdev = sigma <NEW_LINE> return self.stdev <NEW_LINE> <DEDENT> def replace_stats_with_data(self): <NEW_LINE> <INDENT> self.read_data_file("numbers_binomial.txt") <NEW_LINE> self.n = len(self.data) <NEW_LINE> self.p = sum(self.data)/self.n <NEW_LINE> self.calculate_mean() <NEW_LINE> self.calculate_stdev() <NEW_LINE> return self.p, self.n <NEW_LINE> <DEDENT> def plot_bar(self): <NEW_LINE> <INDENT> plt.xlabel("Faces") <NEW_LINE> plt.ylabel("Count") <NEW_LINE> plt.title("Binomial Distribution plot") <NEW_LINE> plt.hist(self.data) <NEW_LINE> <DEDENT> def pdf(self, k): <NEW_LINE> <INDENT> density = math.factorial(self.n)/(math.factorial(k)* math.factorial(self.n - k)) * self.p**k * (1 - self.p)**(self.n - k) <NEW_LINE> return density <NEW_LINE> <DEDENT> def plot_bar_pdf(self): <NEW_LINE> <INDENT> x, y = [], [] <NEW_LINE> for k in range(0, n + 1): <NEW_LINE> <INDENT> x.append(k) <NEW_LINE> density = self.pdf(k) <NEW_LINE> y.append(density) <NEW_LINE> <DEDENT> plt.plot(x, y) <NEW_LINE> return x, y <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> assert self.p == other.p, 'p values are not equal' <NEW_LINE> <DEDENT> except AssertionError as error: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> binomial = Binomial(self.p, self.n + other.n) <NEW_LINE> return binomial <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "mean {}, standard deviation {}, p {}, n {}".format(self.mean, self.stdev, self.p, 
self.n) | Binomial distribution class for calculating and
visualizing a Binomial distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
data_list (list of floats) a list of floats to be extracted from the data file
p (float) representing the probability of an event occurring
n (int) the total number of trials
TODO: Fill out all TODOs in the functions below
| 6259905b009cb60464d02b2a |
class TONE(CheckerMixin): <NEW_LINE> <INDENT> MIXED = 'mixed' <NEW_LINE> NEGATIVE = 'negative' <NEW_LINE> NEUTRAL = 'neutral' <NEW_LINE> POSITIVE = 'positive' | Tone of a publication given by our algorithms | 6259905bbe8e80087fbc0678 |
class Transaction(models.Model): <NEW_LINE> <INDENT> type = models.IntegerField(null=True, blank=True) <NEW_LINE> reward = models.IntegerField(null=True, blank=True) <NEW_LINE> user = models.ForeignKey(SiteUser, related_name='trans_user' ,null=True, blank=True) <NEW_LINE> current_balance = models.IntegerField(null=True, blank=True) <NEW_LINE> involved_user = models.ForeignKey(SiteUser, related_name='trans_involved', null=True, blank=True) <NEW_LINE> involved_topic = models.ForeignKey(Topic, related_name='trans_topic', null=True, blank=True) <NEW_LINE> involved_reply = models.ForeignKey(Reply, related_name='trans_reply', null=True, blank=True) <NEW_LINE> occurrence_time = models.DateTimeField(null=True, blank=True) | 交易 | 6259905b8e7ae83300eea682 |
class DictionaryManager(): <NEW_LINE> <INDENT> def __init__(self, dictionarySource): <NEW_LINE> <INDENT> from ntpath import basename, splitext <NEW_LINE> self.source = dictionarySource <NEW_LINE> self.dataStore = '{}.data'.format(splitext(basename(dictionarySource))[0]) <NEW_LINE> <DEDENT> def reprocess(self): <NEW_LINE> <INDENT> self._saveDictionary(self._substringProcess(self._wordList())) <NEW_LINE> <DEDENT> def _wordList(self): <NEW_LINE> <INDENT> with open(self.source, 'r') as f: <NEW_LINE> <INDENT> content = {} <NEW_LINE> for line in f: <NEW_LINE> <INDENT> word = line.strip('\n') <NEW_LINE> if word and len(word) in range(2, 16): <NEW_LINE> <INDENT> content[word] = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return content <NEW_LINE> <DEDENT> def _substringProcess(self, dict): <NEW_LINE> <INDENT> subs = {} <NEW_LINE> for word in dict: <NEW_LINE> <INDENT> n = len(word) <NEW_LINE> for i in range(n): <NEW_LINE> <INDENT> for j in range(i + 1, n + 1): <NEW_LINE> <INDENT> if not word[i:j] in subs: <NEW_LINE> <INDENT> if word[i:j] in dict: <NEW_LINE> <INDENT> subs[word[i:j]] = (True, [], []) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> subs[word[i:j]] = (False, [], []) <NEW_LINE> <DEDENT> <DEDENT> ref = subs[word[i:j]] <NEW_LINE> if 0 < i and not word[i - 1] in ref[1]: <NEW_LINE> <INDENT> ref[1].append( word[i - 1] ) <NEW_LINE> <DEDENT> if j < n and not word[j] in ref[2]: <NEW_LINE> <INDENT> ref[2].append( word[j] ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return subs <NEW_LINE> <DEDENT> def loadDictionary(self): <NEW_LINE> <INDENT> from pickle import load <NEW_LINE> from os import path <NEW_LINE> if not path.isfile(self.dataStore): <NEW_LINE> <INDENT> self.reprocess() <NEW_LINE> <DEDENT> with open(self.dataStore, 'rb') as data: <NEW_LINE> <INDENT> subs = load(data) <NEW_LINE> <DEDENT> return subs <NEW_LINE> <DEDENT> def _saveDictionary(self, subDict): <NEW_LINE> <INDENT> from pickle import dump <NEW_LINE> with open(self.dataStore, 'wb') as data: <NEW_LINE> 
<INDENT> dump(subDict, data) | used to retrieve and write pre-processed dictionaries to disk | 6259905b097d151d1a2c2662 |
class Compile:
    """Compiles a set of functions f_i : 2^T -> {0,1}^F_i into a single
    function 2^T -> {0,1}^F where F <= \sum_i F_i, i.e. filtering and/or
    merging can happen at this point.
    """

    def __init__(self, op_list):
        self.op_list = op_list

    def _iterops(self):
        """Yield every operator, flattening one level of list nesting."""
        for entry in self.op_list:
            if type(entry) == list:
                for member in entry:
                    yield member
            elif hasattr(entry, '__iter__'):
                raise ValueError("Iterables of operators in Compile must be list type.")
            else:
                yield entry

    def apply(self, root, cids, cid_attrib='word_idx', dict_sub={}, stopwords=None):
        """Apply every operator to *root*, yielding all generated features."""
        if type(root) == str:
            root = et.fromstring(root)
        for operator in self._iterops():
            for feature in operator.apply(root, cids, cid_attrib,
                                          dict_sub=dict_sub, stopwords=stopwords):
                yield feature

    def result_set(self, root, cids, cid_attrib='word_idx', dict_sub={}, stopwords=None):
        """Return the union of every operator's result set."""
        if type(root) == str:
            root = et.fromstring(root)
        features = set()
        for operator in self._iterops():
            features.update(operator.result_set(root, cids, cid_attrib,
                                                dict_sub=dict_sub, stopwords=stopwords))
        return features

    def apply_mention(self, root, mention_idxs, dict_sub={}, stopwords=None):
        """Apply the compiled operators to a single mention."""
        return self.apply(root, [mention_idxs], dict_sub=dict_sub, stopwords=stopwords)

    def apply_relation(self, root, mention1_idxs, mention2_idxs, dict_sub={}, stopwords=None):
        """Apply the compiled operators to a binary relation."""
        return self.apply(root, [mention1_idxs, mention2_idxs],
                          dict_sub=dict_sub, stopwords=stopwords)

    def apply_multary_relation(self, root, mentions, dict_sub={}, stopwords=None):
        """Apply the compiled operators to an n-ary relation."""
        return self.apply(root, mentions, dict_sub=dict_sub, stopwords=stopwords)

    def __repr__(self):
        return '\n'.join(str(op) for op in self._iterops())
class ProtocolFile(BaseModel): <NEW_LINE> <INDENT> name: str = Field(..., description="The file's basename, including extension") <NEW_LINE> role: ProtocolFileRole = Field(..., description="The file's role in the protocol.") | A file in a protocol. | 6259905b1b99ca4002290032 |
class TestSyncServiceTargetApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = isi_sdk_9_0_0.api.sync_service_target_api.SyncServiceTargetApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_create_policies_policy_cancel_item(self): <NEW_LINE> <INDENT> pass | SyncServiceTargetApi unit test stubs | 6259905b379a373c97d9a61a |
class SBTypeSummaryOptions(_object): <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> __setattr__ = lambda self, name, value: _swig_setattr(self, SBTypeSummaryOptions, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> __getattr__ = lambda self, name: _swig_getattr(self, SBTypeSummaryOptions, name) <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> this = _lldb.new_SBTypeSummaryOptions(*args) <NEW_LINE> try: <NEW_LINE> <INDENT> self.this.append(this) <NEW_LINE> <DEDENT> except __builtin__.Exception: <NEW_LINE> <INDENT> self.this = this <NEW_LINE> <DEDENT> <DEDENT> __swig_destroy__ = _lldb.delete_SBTypeSummaryOptions <NEW_LINE> __del__ = lambda self: None <NEW_LINE> def __nonzero__(self): return self.IsValid() <NEW_LINE> def IsValid(self): <NEW_LINE> <INDENT> return _lldb.SBTypeSummaryOptions_IsValid(self) <NEW_LINE> <DEDENT> def GetLanguage(self): <NEW_LINE> <INDENT> return _lldb.SBTypeSummaryOptions_GetLanguage(self) <NEW_LINE> <DEDENT> def GetCapping(self): <NEW_LINE> <INDENT> return _lldb.SBTypeSummaryOptions_GetCapping(self) <NEW_LINE> <DEDENT> def SetLanguage(self, arg2): <NEW_LINE> <INDENT> return _lldb.SBTypeSummaryOptions_SetLanguage(self, arg2) <NEW_LINE> <DEDENT> def SetCapping(self, arg2): <NEW_LINE> <INDENT> return _lldb.SBTypeSummaryOptions_SetCapping(self, arg2) | Proxy of C++ lldb::SBTypeSummaryOptions class. | 6259905ba8370b77170f19c4 |
class ContextMixin(object):
    """Defines a ``GET`` method that invokes :meth:`get_rendering_context()`
    and returns its result to the client.
    """

    def get_rendering_context(self, *args, **kwargs):
        """Hook for subclasses; must return the rendering context."""
        message = "Subclasses must override this method."
        raise NotImplementedError(message)
class Resource(object):
    """Abstract resource class.

    Works as a base class for all other resources, keeping the generic and
    re-usable functionality.  Provides subclasses with a verified-TLS
    connection pool and a method to POST requests to the AniList GraphQL
    endpoint.  All resources **must** be singletons.  Authentication
    requests are handled by ``AuthenticationProvider``, not here.
    """

    _URL = 'https://graphql.anilist.co'
    _METHOD = 'POST'
    _ENDPOINT = '/'
    _HEADERS = {
        'Content-Type': 'application/json',
        'Accept': 'application/json',
    }

    def __init__(self):
        # One pool bound to the API host, reused for every request.
        self._pool = urllib3.PoolManager(
            cert_reqs='CERT_REQUIRED',
            ca_certs=certifi.where()).connection_from_url(Resource._URL)

    def __new__(type):
        # Singleton: stash one instance per concrete class in its __dict__.
        # NOTE(review): the parameter shadows the builtin ``type``.
        if not '_instance' in type.__dict__:
            type._instance = object.__new__(type)
        return type._instance

    @timed
    def execute(self, query, variables):
        """POST a GraphQL *query* with *variables*; return the parsed response dict."""
        headers = Resource._HEADERS
        endpoint = Resource._ENDPOINT
        method = Resource._METHOD
        data = dic_to_json({'query': query, 'variables': variables})
        logger.debug('Resource request: %s %s' % (method, endpoint))
        logger.debug('Resource request body: %s' % str(data))
        logger.debug('Resource request headers: %s' % headers)
        response = self._pool.request(
            method, endpoint, body=data, headers=headers)
        response = response_to_dic(response)
        logger.debug('Resource response: \n' + pprint.pformat(response))
        return response
class ViewLiveButtonHelper(PageButtonHelper): <NEW_LINE> <INDENT> def get_buttons_for_obj(self, obj, exclude=None, classnames_add=None, classnames_exclude=None): <NEW_LINE> <INDENT> btns = super().get_buttons_for_obj( obj, exclude, classnames_add, classnames_exclude) <NEW_LINE> extra_btns = [ { 'url': obj.get_url(), 'label': 'View Live', 'classname': 'button button-small button-secondary', 'title': 'View Live' }, ] <NEW_LINE> return extra_btns + btns | Override to add 'View Live' button | 6259905bbaa26c4b54d5089b |
@register_event('templatesendjobfinish')
class TemplateSendJobFinishEvent(BaseEvent):
    """Template-message send-job finished event (docstring translated from Chinese).

    Details:
    http://mp.weixin.qq.com/wiki/17/304c1885ea66dbedf7dc170d84999a9d.html
    """
    event = 'templatesendjobfinish'
    # Send status reported in the event payload's ``Status`` field.
    status = StringField('Status')
class TestStream(FnStream): <NEW_LINE> <INDENT> def __init__(self, inp, domain, test, graph, **kwargs): <NEW_LINE> <INDENT> super(TestStream, self).__init__(inp, domain, lambda *args: tuple() if test(*args) else None, [], graph, **kwargs) | Function | 6259905b23849d37ff8526bc |
class UnexpectedResponseError(InteroperabilityError): <NEW_LINE> <INDENT> pass | Exception raised when the received message was not expected in the current step of the executed test. | 6259905b004d5f362081fae9 |
class WarehouseGraph(Graph): <NEW_LINE> <INDENT> def __init__(self, storage, products): <NEW_LINE> <INDENT> super(WarehouseGraph, self).__init__() <NEW_LINE> self.order = products <NEW_LINE> self.gathered_products = {key: 0 for key in products.iterkeys()} <NEW_LINE> self.storage = storage <NEW_LINE> self.insufficient = False <NEW_LINE> <DEDENT> def order_fulfilled(self): <NEW_LINE> <INDENT> for key, value in self.gathered_products.items(): <NEW_LINE> <INDENT> if self.order[key] > value: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def update_gathered_products(self, warehouse): <NEW_LINE> <INDENT> for product, value in self.order.items(): <NEW_LINE> <INDENT> self.gathered_products[product] = self.gathered_products[product] + min(self.order[product] - self.gathered_products[product], self.storage.get_product_quantity(warehouse, product)) <NEW_LINE> <DEDENT> <DEDENT> def return_data(self): <NEW_LINE> <INDENT> if self.insufficient: <NEW_LINE> <INDENT> return self.error('Delivery impossible for the current warehouse stocks.') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return {'delivery_time': max(self.visited_vertices.values())} <NEW_LINE> <DEDENT> <DEDENT> def process_vertex(self, vertex, distance=0): <NEW_LINE> <INDENT> if not self.order_fulfilled(): <NEW_LINE> <INDENT> self.update_gathered_products(vertex) <NEW_LINE> still_searching = super(WarehouseGraph, self).process_vertex(vertex, distance) <NEW_LINE> if still_searching == self.LIMIT_ACHIEVED: <NEW_LINE> <INDENT> self.insufficient = True <NEW_LINE> return self.return_data() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return self.return_data() | Class implementing the connections and stocks of warehouses directly | 6259905b01c39578d7f14232 |
class MockPostgresCursor(mock.Mock): <NEW_LINE> <INDENT> def __init__(self, existing_update_ids): <NEW_LINE> <INDENT> super(MockPostgresCursor, self).__init__() <NEW_LINE> self.existing = existing_update_ids <NEW_LINE> <DEDENT> def execute(self, query, params): <NEW_LINE> <INDENT> if query.startswith('SELECT 1 FROM table_updates'): <NEW_LINE> <INDENT> self.fetchone_result = (1, ) if params[0] in self.existing else None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fetchone_result = None <NEW_LINE> <DEDENT> <DEDENT> def fetchone(self): <NEW_LINE> <INDENT> return self.fetchone_result | Keeps state to simulate executing SELECT queries and fetching results. | 6259905b32920d7e50bc763d |
class YAKAPISettings(APISettings): <NEW_LINE> <INDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> if attr not in self.defaults.keys(): <NEW_LINE> <INDENT> raise AttributeError("Invalid API setting: '%s'" % attr) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> val = self.user_settings[attr] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> val = self.defaults[attr] <NEW_LINE> <DEDENT> if val and attr in self.import_strings: <NEW_LINE> <INDENT> val = perform_import(val, attr) <NEW_LINE> <DEDENT> setattr(self, attr, val) <NEW_LINE> return val | Adds the ability to import strings in dictionaries | 6259905b16aa5153ce401adb |
class TelnetOption(object): <NEW_LINE> <INDENT> def __init__(self, serialised=None): <NEW_LINE> <INDENT> self.local = UNKNOWN <NEW_LINE> self.remote = UNKNOWN <NEW_LINE> self.pending = False <NEW_LINE> if serialised: <NEW_LINE> <INDENT> self.deserialise(serialised) <NEW_LINE> <DEDENT> <DEDENT> def serialise(self): <NEW_LINE> <INDENT> return (self.local, self.remote, self.pending) <NEW_LINE> <DEDENT> def deserialise(self, data): <NEW_LINE> <INDENT> self.local, self.remote, self.pending = data | Simple class used to track the status of a Telnet option | 6259905b0a50d4780f7068ba |
class SimpleLSTM(nn.Module):
    """Contextualise the input sequence of embedding vectors using a
    unidirectional LSTM.

    Type: Tensor[N x Din] -> Tensor[N x Dout], where
      * `N` is the length of the input sequence
      * `Din` is the input embedding size
      * `Dout` is the output embedding size

    Example:
    >>> lstm = SimpleLSTM(3, 5)   # input size 3, output size 5
    >>> xs = torch.randn(10, 3)   # input sequence of length 10
    >>> ys = lstm(xs)
    >>> list(ys.shape)
    [10, 5]
    """

    def __init__(self, inp_size: int, out_size: int):
        super().__init__()
        self.lstm = nn.LSTM(input_size=inp_size, hidden_size=out_size)

    def forward(self, xs):
        # nn.LSTM wants (seq_len, batch, features); treat the whole sequence
        # as a single batch element.
        hidden, _state = self.lstm(xs.unsqueeze(1))
        # Drop the batch dimension again: (N, 1, Dout) -> (N, Dout).
        return hidden.squeeze(1)
class PioneerCoin(Bitcoin):
    """Class with all the necessary PioneerCoin (PCOIN) network information based on
    https://github.com/PCOIN/PIONEERCOIN/blob/master/src/net.cpp
    (date of access: 02/17/2018)
    """
    name = 'pioneercoin'
    symbols = ('PCOIN', )
    # DNS seeds used for peer discovery.
    seeds = ('seed5.cryptolife.net', 'seed2.cryptolife.net',
             'seed3.cryptolife.net', 'electrum3.cryptolife.net')
    # Default P2P network port.
    port = 35514
    # Magic bytes prefixed to every network message.
    message_start = b'\xfe\xc3\xb9\xde'
    # Version bytes for base58 address/key encoding.
    base58_prefixes = {
        'PUBKEY_ADDR': 55,
        'SCRIPT_ADDR': 5,
        'SECRET_KEY': 183
    }
class MessageFlowError(SignalingError):
    """Raised when an associated message is considered valid but it has
    been sent or received at a point in time where it's unexpected or
    other circumstances prevent it from being processed (such as a
    combined sequence number overflow).
    """
    pass
class ExpressionDictOperationPop2(ExpressionChildrenHavingBase): <NEW_LINE> <INDENT> kind = "EXPRESSION_DICT_OPERATION_POP2" <NEW_LINE> named_children = ("dict_arg", "key") <NEW_LINE> __slots__ = ("known_hashable_key",) <NEW_LINE> def __init__(self, dict_arg, key, source_ref): <NEW_LINE> <INDENT> assert dict_arg is not None <NEW_LINE> assert key is not None <NEW_LINE> ExpressionChildrenHavingBase.__init__( self, values={"dict_arg": dict_arg, "key": key}, source_ref=source_ref, ) <NEW_LINE> self.known_hashable_key = None <NEW_LINE> <DEDENT> def computeExpression(self, trace_collection): <NEW_LINE> <INDENT> dict_arg = self.subnode_dict_arg <NEW_LINE> key = self.subnode_key <NEW_LINE> if self.known_hashable_key is None: <NEW_LINE> <INDENT> self.known_hashable_key = key.isKnownToBeHashable() <NEW_LINE> if self.known_hashable_key is False: <NEW_LINE> <INDENT> trace_collection.onExceptionRaiseExit(BaseException) <NEW_LINE> return makeUnhashableExceptionReplacementExpression( node=self, key=key, operation="dict.pop", side_effects=(dict_arg, key), ) <NEW_LINE> <DEDENT> <DEDENT> trace_collection.removeKnowledge(dict_arg) <NEW_LINE> trace_collection.onExceptionRaiseExit(BaseException) <NEW_LINE> return self, None, None <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def mayRaiseException(exception_type): <NEW_LINE> <INDENT> return True | This operation represents d.pop(key), i.e. default None. | 6259905b8e7ae83300eea685 |
@ddt.ddt <NEW_LINE> class TestMicrosites(DatabaseMicrositeTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestMicrosites, self).setUp() <NEW_LINE> <DEDENT> @ddt.data(*MICROSITE_BACKENDS) <NEW_LINE> def test_get_value_for_org_when_microsite_has_no_org(self, site_backend): <NEW_LINE> <INDENT> with patch('microsite_configuration.microsite.BACKEND', get_backend(site_backend, BaseMicrositeBackend)): <NEW_LINE> <INDENT> value = get_value_for_org("BogusX", "university", "default_value") <NEW_LINE> self.assertEquals(value, "default_value") <NEW_LINE> <DEDENT> <DEDENT> @ddt.data(*MICROSITE_BACKENDS) <NEW_LINE> def test_get_value_for_org(self, site_backend): <NEW_LINE> <INDENT> with patch('microsite_configuration.microsite.BACKEND', get_backend(site_backend, BaseMicrositeBackend)): <NEW_LINE> <INDENT> value = get_value_for_org("TestSiteX", "university", "default_value") <NEW_LINE> self.assertEquals(value, "test_site") | Run through some Microsite logic | 6259905b55399d3f05627b16 |
class OverwritePlugin(utils.OperationWrapper, plugins.ToolsPlugin): <NEW_LINE> <INDENT> menu = ('Misura', 'Overwrite datasets') <NEW_LINE> name = 'Overwrite' <NEW_LINE> description_short = 'Overwrite one dataset with another one' <NEW_LINE> description_full = ('Overwrite dataset A with dataset B.' 'Optionally delete B dataset after operation.') <NEW_LINE> def __init__(self, dst='', src='', delete=True): <NEW_LINE> <INDENT> self.fields = [ plugins.FieldDataset( 'dst', 'Destination dataset (A)', default=dst), plugins.FieldDataset('src', 'Source dataset (B)', default=src), plugins.FieldBool('delete', 'Delete B dataset', default=delete), ] <NEW_LINE> <DEDENT> def apply(self, cmd, fields): <NEW_LINE> <INDENT> self.ops = [] <NEW_LINE> self.doc = cmd.document <NEW_LINE> if fields['dst'] == fields['src']: <NEW_LINE> <INDENT> raise plugins.ToolsPluginException('A and B datasets must differ.') <NEW_LINE> <DEDENT> delete = fields['delete'] <NEW_LINE> a = self.doc.data[fields['dst']] <NEW_LINE> a1 = copy(a) <NEW_LINE> b = self.doc.data[fields['src']] <NEW_LINE> pm = getattr(b, 'pluginmanager', False) <NEW_LINE> if delete: <NEW_LINE> <INDENT> self.ops.append(document.OperationDatasetDelete(fields['src'])) <NEW_LINE> <DEDENT> a1.data = b.data[:] <NEW_LINE> self.ops.append(document.OperationDatasetSet(fields['dst'], a1)) <NEW_LINE> self.apply_ops() | Overwrite two datasets. | 6259905b0c0af96317c5785b |
class FunctionCall:
    """A function-call node in the interpreted program.

    Calling a function creates a new ``Scope`` whose parent is the current
    scope; that child scope becomes current while the function body is
    evaluated.  (Docstring translated from Russian.)
    """

    def __init__(self, fun_expr, args):
        self.fun_expr = fun_expr  # expression that evaluates to the function
        self.args = args          # argument expressions

    def evaluate(self, scope):
        """Evaluate the call in *scope*; return the last body statement's result."""
        fun = self.fun_expr.evaluate(scope)
        new_scope = Scope(parent=scope)
        # BUG FIX: evaluate arguments in the *calling* scope and bind them
        # into the new child scope.  The original did the reverse — it bound
        # names into the caller's scope and evaluated arguments in the empty
        # callee scope.
        for name, arg_expr in zip(fun.args, self.args):
            new_scope[name] = arg_expr.evaluate(scope)
        res = None  # guard against an empty function body
        for operation in fun.body:
            res = operation.evaluate(new_scope)
        return res

    def accept(self, visitor):
        visitor.visit_function_call(self)
class ExecuteAddressGoal(BaseGoal): <NEW_LINE> <INDENT> def __init__(self, addr): <NEW_LINE> <INDENT> super(ExecuteAddressGoal, self).__init__('execute_address') <NEW_LINE> self.addr = addr <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<ExecuteAddressCondition targeting %#x>" % self.addr <NEW_LINE> <DEDENT> def check(self, cfg, state, peek_blocks): <NEW_LINE> <INDENT> node = self._get_cfg_node(cfg, state) <NEW_LINE> if node is None: <NEW_LINE> <INDENT> l.error('Failed to find CFGNode for state %s on the control flow graph.', state) <NEW_LINE> return False <NEW_LINE> <DEDENT> for src, dst in self._dfs_edges(cfg.graph, node, max_steps=peek_blocks): <NEW_LINE> <INDENT> if src.addr == self.addr or dst.addr == self.addr: <NEW_LINE> <INDENT> l.debug("State %s will reach %#x.", state, self.addr) <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> l.debug('SimState %s will not reach %#x.', state, self.addr) <NEW_LINE> return False <NEW_LINE> <DEDENT> def check_state(self, state): <NEW_LINE> <INDENT> return state.addr == self.addr | A goal that prioritizes states reaching (or are likely to reach) certain address in some specific steps. | 6259905b23e79379d538daf4 |
class SearchQuery(object):
    """Represents a query object that contains both a query and
    an optional label.
    """

    def __init__(self, query, label=None):
        self.query = stripAccents(query)
        self.declared_label = stripAccents(label)
        # Fall back to the query text itself when no label was declared.
        self.label = self.declared_label or self.query

    @classmethod
    def _get_label_delimiter(cls, query_string, label_delimiters):
        """Return the first delimiter found in *query_string*, else None."""
        for d in label_delimiters:
            if d in query_string:
                return d

    @classmethod
    def from_string(cls, query_string, label_delimiters=("#\t|")):
        """Parse ``label<delimiter>query`` text into a SearchQuery.

        NOTE(review): the default *label_delimiters* is a single string
        "#\\t|" — the parentheses do not make a tuple.  It still works
        because _get_label_delimiter iterates it character by character.
        """
        query = query_string.strip()
        label_delimiter = cls._get_label_delimiter(query_string, label_delimiters)
        if label_delimiter:
            label_delimiter = label_delimiter[0]
            # Escape '|' so it is not treated as regex alternation; '+'
            # collapses repeated delimiter characters.
            pattern = label_delimiter.replace("|", "\\|") + "+"
            lbl, q = re.split(pattern, query, 1)
            if len(lbl) == 0:
                raise ValidationError("Delimiter ({label_delimiter!r}) was used, but no label given!"
                                      "Query was: {query!r}".format(**locals()), code="invalid")
            if len(lbl) > 80:
                raise ValidationError("Label too long: {lbl!r}".format(**locals()), code="invalid")
            if not len(query):
                raise ValidationError("Invalid label (before the {label_delimiter}). Query was: {query!r}"
                                      .format(**locals()), code="invalid")
            return SearchQuery(q.strip(), label=lbl.strip())
        return SearchQuery(query)
class BotInfoCommand(BaseCommand): <NEW_LINE> <INDENT> command_name = "botinfo" <NEW_LINE> @inlineCallbacks <NEW_LINE> def run(self, protocol, parsed_line, invoker_dbref): <NEW_LINE> <INDENT> bot_dbref = yield mux_commands.think(protocol, "%#") <NEW_LINE> bot_name = yield mux_commands.think(protocol, "[name(%#)]") <NEW_LINE> pval = self._get_header_str("Battlesnake Botinfo", width=50) <NEW_LINE> pval += ( "\r" " Bot name: {bot_name}\r" " Bot DBref: {bot_dbref}\r" " Command Prefix: {cmd_prefix}\r" " Command Kwarg Delim: {cmd_kwarg_delimiter}\r" " Command Kwarg List Delim: {cmd_kwarg_list_delimiter}" ).format( bot_name=bot_name, bot_dbref=bot_dbref, cmd_prefix=protocol.cmd_prefix, cmd_kwarg_delimiter=protocol.cmd_kwarg_delimiter, cmd_kwarg_list_delimiter=protocol.cmd_kwarg_list_delimiter, ) <NEW_LINE> pval += self._get_footer_str(width=50) <NEW_LINE> mux_commands.pemit(protocol, invoker_dbref, pval) | Shows some assorted info about the bot. | 6259905b8e71fb1e983bd0c2 |
class QueuedProxyLogger(StructuredLogger):
    """Logger that logs via a queue.

    This is intended for multiprocessing use cases where there are
    some subprocesses which want to share a log handler with the main thread,
    without the overhead of having a multiprocessing lock for all logger
    access.
    """

    # Class-level registry: one consumer thread (and its queue) per logger
    # name, shared by every proxy instance for that name.
    threads = {}

    def __init__(self, logger):
        StructuredLogger.__init__(self, logger.name)

        # Lazily start a single LogQueueThread per logger name.
        # NOTE(review): this check-then-insert is not itself thread-safe;
        # presumably constructed from the main thread only — confirm.
        if logger.name not in self.threads:
            self.threads[logger.name] = LogQueueThread(Queue(), logger)
            self.threads[logger.name].start()
        self.queue = self.threads[logger.name].queue

    def _handle_log(self, data):
        # Hand the structured record off to the consumer thread.
        self.queue.put(data)
class PersonProductRegisteredBranchesView(PersonProductBaseBranchesView): <NEW_LINE> <INDENT> label_template = ( 'Bazaar Branches of %(product)s registered by %(person)s') <NEW_LINE> def _getCollection(self): <NEW_LINE> <INDENT> return getUtility(IAllBranches).registeredBy( self.context.person).inProduct(self.context.product) | Branch listing for a person's registered branches of a product. | 6259905b627d3e7fe0e08484 |
class Query(datastore.Query):
    """This class extends ``datastore.Query`` class to handle `key` filters.

    A query filtered by key equality is answered with a direct Get()
    instead of a full query run.
    """

    def __init__(self, kind, filters, orderings=None):
        super(Query, self).__init__(kind, filters)
        if orderings:
            self.Order(*orderings)

    def IsKeysOnly(self):
        return False

    def Run(self, **kwargs):
        try:
            try:
                # A 'key =' (or 'key ==') filter short-circuits to a Get().
                return iter([datastore.Get(self['key ='])])
            except KeyError:
                return iter([datastore.Get(self['key =='])])
        except datastore_errors.EntityNotFoundError:
            # Key filter present but no such entity: yield a single None.
            return iter([None])
        except KeyError:
            # No key filter at all: fall back to a normal query run.
            return super(Query, self).Run(**kwargs)
class YoutubeDialog(QDialog): <NEW_LINE> <INDENT> def __init__(self, videos, parent): <NEW_LINE> <INDENT> QDialog.__init__(self, parent) <NEW_LINE> self.videos = videos <NEW_LINE> self.setupUi(self) <NEW_LINE> <DEDENT> def setupUi(self, Dialog): <NEW_LINE> <INDENT> Dialog.resize(316, 238) <NEW_LINE> Dialog.setWindowTitle("Select Resolution") <NEW_LINE> self.verticalLayout = QVBoxLayout(Dialog) <NEW_LINE> self.frame = QFrame(Dialog) <NEW_LINE> self.frame.setFrameShape(QFrame.StyledPanel) <NEW_LINE> self.frame.setFrameShadow(QFrame.Raised) <NEW_LINE> self.verticalLayout_2 = QVBoxLayout(self.frame) <NEW_LINE> self.buttonGroup = QButtonGroup(self.frame) <NEW_LINE> for i, video in enumerate(self.videos): <NEW_LINE> <INDENT> radioButton = QRadioButton(self.frame) <NEW_LINE> radioButton.setText("%s (%s%s)"%(video.resolution, video.extension, video.info)) <NEW_LINE> self.buttonGroup.addButton(radioButton) <NEW_LINE> self.verticalLayout_2.addWidget(radioButton) <NEW_LINE> if i==1 : radioButton.setChecked(True) <NEW_LINE> <DEDENT> spacerItem = QSpacerItem(20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding) <NEW_LINE> self.verticalLayout_2.addItem(spacerItem) <NEW_LINE> self.verticalLayout.addWidget(self.frame) <NEW_LINE> self.buttonBox = QDialogButtonBox(Dialog) <NEW_LINE> self.buttonBox.setOrientation(QtCore.Qt.Horizontal) <NEW_LINE> self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok) <NEW_LINE> self.verticalLayout.addWidget(self.buttonBox) <NEW_LINE> self.buttonBox.accepted.connect(Dialog.accept) <NEW_LINE> self.buttonBox.rejected.connect(Dialog.reject) | This dialog is used to select which resolution to download | 6259905b7d847024c075d9d5 |
class YamadaK4Test(unittest.TestCase):
    """Test the minimum spanning trees returned from K4.

    K4 is the complete graph on four nodes with fixed weights, w(e_i) = 1.
    (The original docstring said "three nodes"; setUp builds
    instantiate_k_graph(4), i.e. K4.)
    """

    def setUp(self):
        k4 = instantiate_k_graph(4)
        k4_yamada = yamada.Yamada(k4)
        self.msts = k4_yamada.spanning_trees()

    def test_number_of_msts(self):
        # With equal weights, every spanning tree of K4 is minimal:
        # Cayley's formula gives 4^(4-2) = 16 of them.
        self.assertTrue(len(self.msts) == 16)

    def test_unique_msts(self):
        self.assertTrue(unique_trees(self.msts))
class FrameServer(Service):
    """A service that runs an MJPEG server fed by incoming messages."""

    def __init__(self, *args, **kwargs):
        # HTTP listen port; 0 lets the OS pick a free one.
        self.port = kwargs.pop("port", 0)
        super(FrameServer, self).__init__(*args, **kwargs)

    def main(self):
        # Reset tornado's IOLoop singletons so this process gets a fresh
        # event loop rather than one inherited from the parent process.
        tornado.ioloop.IOLoop.current().clear_instance()
        tornado.ioloop.IOLoop.clear_current()
        tornado.ioloop.IOLoop.instance()
        frame_handler = FrameGrabber(self.qinput)
        frame_handler.start()
        # Watchdog ties the frame grabber's lifetime to the service's exit.
        Watchdog(self.exit, frame_handler).start()
        app = tornado.web.Application([
            (r"/", FrameHandler, dict(frame_handler=frame_handler)),
        ])
        app.listen(self.port)
        # Blocks until the loop is stopped.
        tornado.ioloop.IOLoop.current().start()
class RazerBladeStealthLate2019(_RippleKeyboard): <NEW_LINE> <INDENT> EVENT_FILE_REGEX = re.compile(r'.*Razer_Blade_Stealth(-if01)?-event-kbd') <NEW_LINE> USB_VID = 0x1532 <NEW_LINE> USB_PID = 0x024A <NEW_LINE> METHODS = ['get_device_type_keyboard', 'set_static_effect', 'set_spectrum_effect', 'set_none_effect', 'set_breath_random_effect', 'set_breath_single_effect'] <NEW_LINE> DEVICE_IMAGE = "https://assets2.razerzone.com/images/blade-stealth-13/shop/stealth-l2p-1.jpg" <NEW_LINE> RAZER_URLS = { "top_img": "https://assets2.razerzone.com/images/blade-stealth-13/shop/stealth-l2p-2.jpg", "side_img": "https://assets2.razerzone.com/images/blade-stealth-13/shop/stealth-l2p-3.jpg", "perspective_img": "https://assets2.razerzone.com/images/blade-stealth-13/shop/stealth-l2p-1.jpg" } | Class for the Razer Blade Stealth (Late 2019) | 6259905b3cc13d1c6d466d39 |
class BuildProfileLocally(plugin.Command):
    """Download and builds a profile locally in one step.

    We store the profile in the first repository in the profile_path which must
    be writable. Usually this is a caching repository so the profile goes in the
    local cache.
    """

    name = "build_local_profile"

    @classmethod
    def args(cls, parser):
        super(BuildProfileLocally, cls).args(parser)
        parser.add_argument(
            "module_name",
            # Typo fix: "extensilon" -> "extension".
            help="The name of the module (without the .pdb extension).")
        parser.add_argument(
            "guid",
            help="The guid of the module.")

    def __init__(self, module_name=None, guid=None, **kwargs):
        super(BuildProfileLocally, self).__init__(**kwargs)
        self.module_name = module_name
        self.guid = guid

    def render(self, renderer):
        profile_name = "{0}/GUID/{1}".format(self.module_name, self.guid)
        renderer.format("Fetching Profile {0}", profile_name)

        dump_dir = "/tmp/"
        fetch_pdb = self.session.RunPlugin(
            "fetch_pdb",
            pdb_filename="%s.pdb" % self.module_name,
            guid=self.guid, dump_dir=dump_dir)
        if fetch_pdb.error_status:
            # BUG FIX: report the plugin result's error status (the original
            # formatted ``renderer.error_status``, which is not set here).
            raise RuntimeError(
                "Failed fetching the pdb file: %s" % fetch_pdb.error_status)

        out_file = os.path.join(dump_dir, "%s.json" % self.guid)
        parse_pdb = self.session.RunPlugin(
            "parse_pdb",
            pdb_filename=os.path.join(dump_dir, "%s.pdb" % self.module_name),
            output_filename="%s.json" % self.guid,
            dump_dir=dump_dir)
        if parse_pdb.error_status:
            raise RuntimeError(
                "Failed parsing pdb file: %s" % parse_pdb.error_status)

        # Store into the first (writable, usually caching) repository.
        repository = self.session.repository_managers.values()[0]
        data = json.load(open(out_file))
        repository.StoreData(profile_name, data)
class TestTakeCustomChartOptions: <NEW_LINE> <INDENT> def test_take_custom_chart_options(self): <NEW_LINE> <INDENT> assert take_custom_chart_options() is None | Function currently not implemented. | 6259905b462c4b4f79dbcfff |
class SubscriptionDiagnosticSettingsResource(SubscriptionProxyOnlyResource): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'storage_account_id': {'key': 'properties.storageAccountId', 'type': 'str'}, 'service_bus_rule_id': {'key': 'properties.serviceBusRuleId', 'type': 'str'}, 'event_hub_authorization_rule_id': {'key': 'properties.eventHubAuthorizationRuleId', 'type': 'str'}, 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, 'logs': {'key': 'properties.logs', 'type': '[SubscriptionLogSettings]'}, 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(SubscriptionDiagnosticSettingsResource, self).__init__(**kwargs) <NEW_LINE> self.storage_account_id = kwargs.get('storage_account_id', None) <NEW_LINE> self.service_bus_rule_id = kwargs.get('service_bus_rule_id', None) <NEW_LINE> self.event_hub_authorization_rule_id = kwargs.get('event_hub_authorization_rule_id', None) <NEW_LINE> self.event_hub_name = kwargs.get('event_hub_name', None) <NEW_LINE> self.logs = kwargs.get('logs', None) <NEW_LINE> self.workspace_id = kwargs.get('workspace_id', None) | The subscription diagnostic setting resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Azure resource Id.
:vartype id: str
:ivar name: Azure resource name.
:vartype name: str
:ivar type: Azure resource type.
:vartype type: str
:param location: Location of the resource.
:type location: str
:param storage_account_id: The resource ID of the storage account to which you would like to
send Diagnostic Logs.
:type storage_account_id: str
:param service_bus_rule_id: The service bus rule Id of the diagnostic setting. This is here to
maintain backwards compatibility.
:type service_bus_rule_id: str
:param event_hub_authorization_rule_id: The resource Id for the event hub authorization rule.
:type event_hub_authorization_rule_id: str
:param event_hub_name: The name of the event hub. If none is specified, the default event hub
will be selected.
:type event_hub_name: str
:param logs: The list of logs settings.
:type logs: list[~$(python-base-namespace).v2017_05_01_preview.models.SubscriptionLogSettings]
:param workspace_id: The full ARM resource ID of the Log Analytics workspace to which you would
like to send Diagnostic Logs. Example:
/subscriptions/4b9e8510-67ab-4e9a-95a9-e2f1e570ea9c/resourceGroups/insights-integration/providers/Microsoft.OperationalInsights/workspaces/viruela2.
:type workspace_id: str | 6259905b16aa5153ce401add |
class ListHypervisor(command.Lister): <NEW_LINE> <INDENT> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(ListHypervisor, self).get_parser(prog_name) <NEW_LINE> parser.add_argument( "--matching", metavar="<hostname>", help="Filter hypervisors using <hostname> substring", ) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> compute_client = self.app.client_manager.compute <NEW_LINE> columns = ( "ID", "Hypervisor Hostname" ) <NEW_LINE> if parsed_args.matching: <NEW_LINE> <INDENT> data = compute_client.hypervisors.search(parsed_args.matching) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = compute_client.hypervisors.list() <NEW_LINE> <DEDENT> return (columns, (utils.get_item_properties( s, columns, ) for s in data)) | List hypervisors | 6259905b63d6d428bbee3d84 |
class GetInfoInputSet(InputSet): <NEW_LINE> <INDENT> def set_APIKey(self, value): <NEW_LINE> <INDENT> super(GetInfoInputSet, self)._set_input('APIKey', value) <NEW_LINE> <DEDENT> def set_APISecret(self, value): <NEW_LINE> <INDENT> super(GetInfoInputSet, self)._set_input('APISecret', value) <NEW_LINE> <DEDENT> def set_AccessToken(self, value): <NEW_LINE> <INDENT> super(GetInfoInputSet, self)._set_input('AccessToken', value) <NEW_LINE> <DEDENT> def set_AccessTokenSecret(self, value): <NEW_LINE> <INDENT> super(GetInfoInputSet, self)._set_input('AccessTokenSecret', value) <NEW_LINE> <DEDENT> def set_PhotoID(self, value): <NEW_LINE> <INDENT> super(GetInfoInputSet, self)._set_input('PhotoID', value) <NEW_LINE> <DEDENT> def set_PhotoSecret(self, value): <NEW_LINE> <INDENT> super(GetInfoInputSet, self)._set_input('PhotoSecret', value) <NEW_LINE> <DEDENT> def set_ResponseFormat(self, value): <NEW_LINE> <INDENT> super(GetInfoInputSet, self)._set_input('ResponseFormat', value) | An InputSet with methods appropriate for specifying the inputs to the GetInfo
Choreo. The InputSet object is used to specify input parameters when executing this Choreo. | 6259905b55399d3f05627b18 |
class ExactInference(InferenceModule): <NEW_LINE> <INDENT> def initializeUniformly(self, gameState): <NEW_LINE> <INDENT> self.beliefs = DiscreteDistribution() <NEW_LINE> for p in self.legalPositions: <NEW_LINE> <INDENT> self.beliefs[p] = 1.0 <NEW_LINE> <DEDENT> self.beliefs.normalize() <NEW_LINE> <DEDENT> def observeUpdate(self, observation, gameState): <NEW_LINE> <INDENT> for pos in self.allPositions: <NEW_LINE> <INDENT> self.beliefs[pos] = self.beliefs[pos] * self.getObservationProb(observation, gameState.getPacmanPosition(), pos, self.getJailPosition()) <NEW_LINE> <DEDENT> self.beliefs.normalize() <NEW_LINE> <DEDENT> def elapseTime(self, gameState): <NEW_LINE> <INDENT> beliefsCopy = self.beliefs.copy() <NEW_LINE> for pos in self.allPositions: <NEW_LINE> <INDENT> if pos == self.getJailPosition(): <NEW_LINE> <INDENT> oldPositions = self.allPositions <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> actionVectors = [(1,0),(-1,0),(0,1),(0,-1),(0,0)] <NEW_LINE> nwse = [(pos[0] + a[0], pos[1] + a[1]) for a in actionVectors] <NEW_LINE> oldPositions = [p for p in nwse if p in self.allPositions] <NEW_LINE> <DEDENT> currentSum = 0 <NEW_LINE> for oldPos in oldPositions: <NEW_LINE> <INDENT> newPosDist = self.getPositionDistribution(gameState, oldPos) <NEW_LINE> if pos in newPosDist: <NEW_LINE> <INDENT> currentSum += beliefsCopy[oldPos] * newPosDist[pos] <NEW_LINE> <DEDENT> <DEDENT> self.beliefs[pos] = currentSum <NEW_LINE> <DEDENT> self.beliefs.normalize() <NEW_LINE> <DEDENT> def getBeliefDistribution(self): <NEW_LINE> <INDENT> return self.beliefs | The exact dynamic inference module should use forward algorithm updates to
compute the exact belief function at each time step. | 6259905bb5575c28eb7137c9 |
class TczHourSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = TczHour <NEW_LINE> fields = ('id', 'tcz_date', 'tcz_user', 'tcz_user_change', 'tcz_court', 'tcz_hour', 'tcz_free', ) <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> tcz_hour = TczHour(tcz_date=validated_data['tcz_date'], tcz_user=validated_data['tcz_user'], tcz_user_change=validated_data['tcz_user_change'], tcz_court=validated_data['tcz_court'], tcz_hour=validated_data['tcz_hour'], tcz_free=validated_data['tcz_free']) <NEW_LINE> if user_has_reservation(validated_data['tcz_user']): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> tcz_hour.save() <NEW_LINE> return tcz_hour | serializer for the reserved hour
| 6259905b45492302aabfdad2 |
class WikipediaAr(Platform): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.platformName = "Wikipedia (ar)" <NEW_LINE> self.parameterName = "wikipedia" <NEW_LINE> self.tags = ["education", "wiki"] <NEW_LINE> self.isValidMode = {} <NEW_LINE> self.isValidMode["phonefy"] = False <NEW_LINE> self.isValidMode["usufy"] = True <NEW_LINE> self.isValidMode["searchfy"] = False <NEW_LINE> self.url = {} <NEW_LINE> self.url["usufy"] = "http://ar.wikipedia.org/wiki/user:" + "<usufy>" <NEW_LINE> self.needsCredentials = {} <NEW_LINE> self.needsCredentials["usufy"] = False <NEW_LINE> self.validQuery = {} <NEW_LINE> self.validQuery["usufy"] = ".+" <NEW_LINE> self.notFoundText = {} <NEW_LINE> self.notFoundText["usufy"] = ['mw-userpage-userdoesnotexist error'] <NEW_LINE> self.fieldsRegExp = {} <NEW_LINE> self.fieldsRegExp["usufy"] = {} <NEW_LINE> self.foundFields = {} | A <Platform> object for WikipediaAr. | 6259905bbaa26c4b54d5089f |
class TDBException(TException): <NEW_LINE> <INDENT> def __init__(self, error_msg=None,): <NEW_LINE> <INDENT> self.error_msg = error_msg <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.error_msg = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('TDBException') <NEW_LINE> if self.error_msg is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('error_msg', TType.STRING, 1) <NEW_LINE> oprot.writeString(self.error_msg.encode('utf-8') if sys.version_info[0] == 2 else self.error_msg) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr(self) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % 
(self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- error_msg | 6259905b99cbb53fe68324da |
class NetworkTableConnection: <NEW_LINE> <INDENT> def __init__(self, stream, typeManager): <NEW_LINE> <INDENT> self.stream = stream <NEW_LINE> self.rstream = ReadStream(stream.getInputStream()) <NEW_LINE> self.wstream = stream.getOutputStream() <NEW_LINE> self.typeManager = typeManager <NEW_LINE> self.write_lock = _impl.create_rlock('write_lock') <NEW_LINE> self.isValid = True <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> if self.isValid: <NEW_LINE> <INDENT> self.isValid = False <NEW_LINE> self.stream.close() <NEW_LINE> <DEDENT> <DEDENT> def flush(self): <NEW_LINE> <INDENT> with self.write_lock: <NEW_LINE> <INDENT> self.wstream.flush() <NEW_LINE> <DEDENT> <DEDENT> def sendKeepAlive(self): <NEW_LINE> <INDENT> with self.write_lock: <NEW_LINE> <INDENT> self.wstream.write(KEEP_ALIVE.getBytes()) <NEW_LINE> self.wstream.flush() <NEW_LINE> <DEDENT> <DEDENT> def sendClientHello(self): <NEW_LINE> <INDENT> with self.write_lock: <NEW_LINE> <INDENT> self.wstream.write(CLIENT_HELLO.getBytes(PROTOCOL_REVISION)) <NEW_LINE> self.wstream.flush() <NEW_LINE> <DEDENT> <DEDENT> def sendServerHelloComplete(self): <NEW_LINE> <INDENT> with self.write_lock: <NEW_LINE> <INDENT> self.wstream.write(SERVER_HELLO_COMPLETE.getBytes()) <NEW_LINE> self.wstream.flush() <NEW_LINE> <DEDENT> <DEDENT> def sendProtocolVersionUnsupported(self): <NEW_LINE> <INDENT> with self.write_lock: <NEW_LINE> <INDENT> self.wstream.write(PROTOCOL_UNSUPPORTED.getBytes(PROTOCOL_REVISION)) <NEW_LINE> self.wstream.flush() <NEW_LINE> <DEDENT> <DEDENT> def sendEntry(self, entryBytes): <NEW_LINE> <INDENT> with self.write_lock: <NEW_LINE> <INDENT> self.wstream.write(entryBytes) <NEW_LINE> <DEDENT> <DEDENT> def read(self, adapter): <NEW_LINE> <INDENT> messageType = self.rstream.read(1) <NEW_LINE> if messageType == KEEP_ALIVE.HEADER: <NEW_LINE> <INDENT> adapter.keepAlive() <NEW_LINE> <DEDENT> elif messageType == CLIENT_HELLO.HEADER: <NEW_LINE> <INDENT> protocolRevision = CLIENT_HELLO.read(self.rstream)[0] <NEW_LINE> 
adapter.clientHello(protocolRevision) <NEW_LINE> <DEDENT> elif messageType == SERVER_HELLO_COMPLETE.HEADER: <NEW_LINE> <INDENT> adapter.serverHelloComplete() <NEW_LINE> <DEDENT> elif messageType == PROTOCOL_UNSUPPORTED.HEADER: <NEW_LINE> <INDENT> protocolRevision = PROTOCOL_UNSUPPORTED.read(self.rstream)[0] <NEW_LINE> adapter.protocolVersionUnsupported(protocolRevision) <NEW_LINE> <DEDENT> elif messageType == ENTRY_ASSIGNMENT.HEADER: <NEW_LINE> <INDENT> entryName, (typeId, entryId, entrySequenceNumber) = ENTRY_ASSIGNMENT.read(self.rstream) <NEW_LINE> entryType = self.typeManager.getType(typeId) <NEW_LINE> if entryType is None: <NEW_LINE> <INDENT> raise BadMessageError("Unknown data type: 0x%x" % typeId) <NEW_LINE> <DEDENT> value = entryType.readValue(self.rstream) <NEW_LINE> adapter.offerIncomingAssignment(NetworkTableEntry(entryName, entryType, value, id=entryId, sequenceNumber=entrySequenceNumber)) <NEW_LINE> <DEDENT> elif messageType == FIELD_UPDATE.HEADER: <NEW_LINE> <INDENT> entryId, entrySequenceNumber = FIELD_UPDATE.read(self.rstream) <NEW_LINE> entry = adapter.getEntry(entryId) <NEW_LINE> if entry is None: <NEW_LINE> <INDENT> raise BadMessageError("Received update for unknown entry id: %d " % entryId) <NEW_LINE> <DEDENT> value = entry.getType().readValue(self.rstream) <NEW_LINE> adapter.offerIncomingUpdate(entry, entrySequenceNumber, value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise BadMessageError("Unknown Network Table Message Type: %s" % (messageType)) | An abstraction for the NetworkTable protocol
| 6259905b009cb60464d02b2f |
class SchemaSet(object): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.schemata = {} <NEW_LINE> for section in Schema.SECTIONS: <NEW_LINE> <INDENT> for name, sconfig in data.get(section, {}).items(): <NEW_LINE> <INDENT> if name in self.schemata: <NEW_LINE> <INDENT> raise TypeError("Duplicate schema name: %r" % name) <NEW_LINE> <DEDENT> self.schemata[name] = Schema(self, section, name, sconfig) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get(self, name): <NEW_LINE> <INDENT> schema = self.schemata.get(name) <NEW_LINE> if schema is None: <NEW_LINE> <INDENT> raise TypeError("No such schema: %r" % name) <NEW_LINE> <DEDENT> return schema <NEW_LINE> <DEDENT> def merge_entity_schema(self, left, right): <NEW_LINE> <INDENT> if left == right: <NEW_LINE> <INDENT> return left <NEW_LINE> <DEDENT> lefts = self.get(left) <NEW_LINE> lefts = [s.name for s in lefts.schemata] <NEW_LINE> if right in lefts: <NEW_LINE> <INDENT> return left <NEW_LINE> <DEDENT> rights = self.get(right) <NEW_LINE> rights = [s.name for s in rights.schemata] <NEW_LINE> if left in rights: <NEW_LINE> <INDENT> return right <NEW_LINE> <DEDENT> for left in lefts: <NEW_LINE> <INDENT> for right in rights: <NEW_LINE> <INDENT> if left == right: <NEW_LINE> <INDENT> return left <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> data = {} <NEW_LINE> for name, schema in self.schemata.items(): <NEW_LINE> <INDENT> if not schema.hidden: <NEW_LINE> <INDENT> data[name] = schema <NEW_LINE> <DEDENT> <DEDENT> return data <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.schemata.values()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<SchemaSet(%r)>' % self.schemata | A collection of schemata. | 6259905ba8ecb03325872811 |
class DataWithOracleGivensLC(GivensLC): <NEW_LINE> <INDENT> def process(self, data, oracle, *args, target_names=None, prelabel_data=True, feature_groups=None, **kwargs): <NEW_LINE> <INDENT> if target_names is not None: <NEW_LINE> <INDENT> self.target_names = target_names <NEW_LINE> <DEDENT> if feature_groups is not None: <NEW_LINE> <INDENT> self.feature_groups = feature_groups <NEW_LINE> <DEDENT> self.data_matrix, self.feature_names, self.feature_spec = parse_data( data, feature_names=kwargs.get('feature_names'), feature_spec=kwargs.get('feature_spec') ) <NEW_LINE> if hasattr(oracle, 'predict') and hasattr(oracle, 'classes_'): <NEW_LINE> <INDENT> logger.info( 'Inferring that oracle is a Scikit-Learn-like classifier ' 'and using the "predict" method.') <NEW_LINE> self.oracle = lambda x: np.eye(len(oracle.classes_))[oracle.predict(x).astype('intp'),] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.info('Treating oracle as a function') <NEW_LINE> self.oracle = oracle <NEW_LINE> <DEDENT> if prelabel_data: <NEW_LINE> <INDENT> targets = self.oracle(self.data_matrix) <NEW_LINE> if self.target_names is None: <NEW_LINE> <INDENT> self.target_names = getattr(targets, 'columns', np.arange(targets.shape[1])) <NEW_LINE> <DEDENT> self.target_matrix = targets if isinstance(targets, np.ndarray) else targets.to_numpy() | Given LC for the usual explanation setting, where we have unlabeled training data and
a predictor oracle. | 6259905b32920d7e50bc7640 |
@pytest.mark.skipif(ARCH not in ("i686", "x86_64"), reason=f"Skipped for {ARCH}") <NEW_LINE> class SyscallArgsCommand(GefUnitTestGeneric): <NEW_LINE> <INDENT> @pytest.mark.online <NEW_LINE> def setUp(self) -> None: <NEW_LINE> <INDENT> self.tempdirfd = tempfile.TemporaryDirectory(prefix=GEF_DEFAULT_TEMPDIR) <NEW_LINE> self.tempdirpath = pathlib.Path(self.tempdirfd.name).absolute() <NEW_LINE> base = "https://raw.githubusercontent.com/hugsy/gef-extras/master/syscall-tables" <NEW_LINE> for arch in ("ARM", "ARM_OABI", "X86", "X86_64"): <NEW_LINE> <INDENT> url = f"{base}/{arch}.py" <NEW_LINE> data = download_file(url) <NEW_LINE> if not data: <NEW_LINE> <INDENT> raise Exception(f"Failed to download {arch}.py ({url})") <NEW_LINE> <DEDENT> fpath = self.tempdirpath / f"{arch}.py" <NEW_LINE> with fpath.open("wb") as fd: <NEW_LINE> <INDENT> fd.write(data) <NEW_LINE> <DEDENT> <DEDENT> return super().setUp() <NEW_LINE> <DEDENT> def tearDown(self) -> None: <NEW_LINE> <INDENT> self.tempdirfd.cleanup() <NEW_LINE> return <NEW_LINE> <DEDENT> def test_cmd_syscall_args(self): <NEW_LINE> <INDENT> self.assertFailIfInactiveSession(gdb_run_cmd("syscall-args")) <NEW_LINE> before = (f"gef config syscall-args.path {self.tempdirpath.absolute()}",) <NEW_LINE> after = ("continue", "syscall-args") <NEW_LINE> res = gdb_start_silent_cmd("catch syscall openat", before=before, after=after, target=_target("syscall-args"),) <NEW_LINE> self.assertNoException(res) <NEW_LINE> self.assertIn("Detected syscall open", res) | `syscall-args` command test module | 6259905b23e79379d538daf6 |
class TempObj(Managed): <NEW_LINE> <INDENT> def as_input(self): <NEW_LINE> <INDENT> return TempInputObj(self.name, *self.axes, **self.kwargs) <NEW_LINE> <DEDENT> def as_output(self): <NEW_LINE> <INDENT> return TempOutputObj(self.name, *self.axes, **self.kwargs) <NEW_LINE> <DEDENT> def create_arg(self, job): <NEW_LINE> <INDENT> raise NotImplementedError('create input or output using as_input or as_output') | Interface class used to represent a managed object
| 6259905b6e29344779b01c47 |
class Robot(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.x = 10 <NEW_LINE> self.y = 10 <NEW_LINE> self.fuel = 100 <NEW_LINE> <DEDENT> def moveLeft(self): <NEW_LINE> <INDENT> if self.fuel >= 5: <NEW_LINE> <INDENT> self.x -= 1 <NEW_LINE> self.fuel -= 5 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Insufficient fuel to perform action") <NEW_LINE> <DEDENT> <DEDENT> def moveRight(self): <NEW_LINE> <INDENT> if self.fuel >= 5: <NEW_LINE> <INDENT> self.x += 1 <NEW_LINE> self.fuel -= 5 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Insufficient fuel to perform action") <NEW_LINE> <DEDENT> <DEDENT> def moveUp(self): <NEW_LINE> <INDENT> if self.fuel >= 5: <NEW_LINE> <INDENT> self.y -= 1 <NEW_LINE> self.fuel -= 5 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Insufficient fuel to perform action") <NEW_LINE> <DEDENT> <DEDENT> def moveDown(self): <NEW_LINE> <INDENT> if self.fuel >= 5: <NEW_LINE> <INDENT> self.y += 1 <NEW_LINE> self.fuel -= 5 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Insufficient fuel to perform action") <NEW_LINE> <DEDENT> <DEDENT> def display(self): <NEW_LINE> <INDENT> print("({}, {}) - Fuel: {}" .format(self.x, self.y, self.fuel)) <NEW_LINE> <DEDENT> def fire(self): <NEW_LINE> <INDENT> if self.fuel >= 15: <NEW_LINE> <INDENT> print("Pew! Pew!") <NEW_LINE> self.fuel -= 15 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Insufficient fuel to perform action") | Robot class to create and move robot around | 6259905b435de62698e9d3ff |
class ParkingLotFormView(CustomUserMixin, CreateView): <NEW_LINE> <INDENT> model = ParkingLot <NEW_LINE> form_class = ParkingLotForm <NEW_LINE> template_name = 'buildings/administrative/parking_lots/parkinglot_form.html' <NEW_LINE> def test_func(self): <NEW_LINE> <INDENT> return BuildingPermissions.can_edit_unit( user=self.request.user, building=self.get_object().building, ) <NEW_LINE> <DEDENT> def get_object(self, queryset=None): <NEW_LINE> <INDENT> return get_object_or_404( Unit, building_id=self.kwargs['b_pk'], pk=self.kwargs['u_pk'], ) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super().get_context_data(**kwargs) <NEW_LINE> context['unit'] = self.get_object() <NEW_LINE> context['building'] = self.get_object().building <NEW_LINE> context['active_units'] = True <NEW_LINE> return context <NEW_LINE> <DEDENT> @transaction.atomic <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> unit = self.get_object() <NEW_LINE> parking_lot = form.save(commit=False) <NEW_LINE> parking_lot.unit = self.get_object() <NEW_LINE> parking_lot.save() <NEW_LINE> messages.success( self.request, _('Parqueadero creado exitosamente.') ) <NEW_LINE> return redirect(unit.get_absolute_url()) | Form view to create a new parking lot of a unit. | 6259905b0fa83653e46f64e0 |
class WalletUserListView(generics.ListAPIView): <NEW_LINE> <INDENT> serializer_class = WalletUserSerializer <NEW_LINE> queryset = WalletUser.objects.all()[:1] | This view returns the details of a test admin.
Click on the login button on the rest framework console.
Enter "walletadmin"as username.
Enter "mobilewallet2020" as password.
You can also use these credentials to log into the admin console:
https://walletcore.herokuapp.com/admin/
You can also navigate to /api/v1/docs/ to view the swagger documentation:
https://walletcore.herokuapp.com/api/v1/docs/
Which should show endpoints for the users endpoints.
If no swagger documentation is on display, click on the session login button to authorize | 6259905b3cc13d1c6d466d3b |
class DatabaseStatTracker(util.CursorDebugWrapper): <NEW_LINE> <INDENT> def execute(self, sql, params=()): <NEW_LINE> <INDENT> start = datetime.now() <NEW_LINE> try: <NEW_LINE> <INDENT> return self.cursor.execute(sql, params) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> stop = datetime.now() <NEW_LINE> duration = ms_from_timedelta(stop - start) <NEW_LINE> stacktrace = tidy_stacktrace(traceback.extract_stack()) <NEW_LINE> _params = '' <NEW_LINE> try: <NEW_LINE> <INDENT> _params = simplejson.dumps([force_unicode(x, strings_only=True) for x in params]) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> template_info = None <NEW_LINE> cur_frame = sys._getframe().f_back <NEW_LINE> try: <NEW_LINE> <INDENT> while cur_frame is not None: <NEW_LINE> <INDENT> if cur_frame.f_code.co_name == 'render': <NEW_LINE> <INDENT> node = cur_frame.f_locals['self'] <NEW_LINE> if isinstance(node, Node): <NEW_LINE> <INDENT> template_info = get_template_info(node.source) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> cur_frame = cur_frame.f_back <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> del cur_frame <NEW_LINE> self.db.queries.append({ 'sql': self.db.ops.last_executed_query(self.cursor, sql, params), 'duration': duration, 'raw_sql': sql, 'params': _params, 'hash': sha_constructor("%s%s%s" % (settings.SECRET_KEY, sql.encode('utf-8') if type(sql) == unicode else sql, _params)).hexdigest(), 'stacktrace': stacktrace, 'start_time': start, 'stop_time': stop, 'is_slow': (duration > SQL_WARNING_THRESHOLD), 'is_select': sql.lower().strip().startswith('select'), 'template_info': template_info, }) | Replacement for CursorDebugWrapper which stores additional information
in `connection.queries`. | 6259905badb09d7d5dc0bb65 |
class And( Validator ): <NEW_LINE> <INDENT> def __init__( self, *args ): <NEW_LINE> <INDENT> super( And, self ).__init__(u'') <NEW_LINE> self.validators = args <NEW_LINE> <DEDENT> def isValid( self, element ): <NEW_LINE> <INDENT> for validator in self.validators: <NEW_LINE> <INDENT> validator.validate( element ) <NEW_LINE> <DEDENT> return True | Validate the given element. If an error is raised by the children
it will delegate this exception to the parent element. | 6259905b3c8af77a43b68a3e |
class ManageUserView(generics.RetrieveUpdateAPIView): <NEW_LINE> <INDENT> serializer_class = UserSerializer <NEW_LINE> authentication_classes = (authentication.TokenAuthentication,) <NEW_LINE> permission_classes = (permissions.IsAuthenticated,) <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> return self.request.user | Manage the auth user | 6259905b63b5f9789fe8676d |
class CommandHistory(): <NEW_LINE> <INDENT> __history = [] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.__history.append("*** new history ***") <NEW_LINE> <DEDENT> def append(self, command:str): <NEW_LINE> <INDENT> self.__history.append(command) <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.__history = [] <NEW_LINE> <DEDENT> @property <NEW_LINE> def history(self): <NEW_LINE> <INDENT> return self.__history <NEW_LINE> <DEDENT> @property <NEW_LINE> def last_ten(self): <NEW_LINE> <INDENT> return self.__history[-10:] | Models the command history object
This class can be used to capture each of the commands the Robot has performed
into a list so that this can be displayed or used later. | 6259905b460517430c432b50 |
class Circuit_Logic_operateur: <NEW_LINE> <INDENT> def AND(self, a, b): <NEW_LINE> <INDENT> if a + b == 2: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def OR(self, a, b): <NEW_LINE> <INDENT> if a + b >= 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def XOR(self, a, b): <NEW_LINE> <INDENT> if a + b == 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def NOR(self, a, b): <NEW_LINE> <INDENT> return not (self.OR(a, b)) <NEW_LINE> <DEDENT> def NAND(self, a, b): <NEW_LINE> <INDENT> return not (self.AND(a, b)) <NEW_LINE> <DEDENT> def XNOR(self, a, b): <NEW_LINE> <INDENT> return not (self.XOR(a, b)) | class de logic de circuit | 6259905be76e3b2f99fd9ffa |
class DUIK_OT_swap_ikfk ( bpy.types.Operator ): <NEW_LINE> <INDENT> bl_idname = "armature.swap_ikfk" <NEW_LINE> bl_label = "Swap IK / FK" <NEW_LINE> bl_options = {'REGISTER','UNDO'} <NEW_LINE> mode: bpy.props.StringProperty( default = 'AUTO' ) <NEW_LINE> @classmethod <NEW_LINE> def poll( self, context ): <NEW_LINE> <INDENT> bone = context.active_pose_bone <NEW_LINE> if bone is None: return False <NEW_LINE> return bone.duik_ikfk.ikCtrl_name != '' <NEW_LINE> <DEDENT> def execute( self, context ): <NEW_LINE> <INDENT> active_bone = context.active_pose_bone <NEW_LINE> ikCtrl = active_bone.duik_ikfk.ikCtrl_name <NEW_LINE> pole = active_bone.duik_ikfk.pole_name <NEW_LINE> fk2 = active_bone.duik_ikfk.fk2_name <NEW_LINE> fk1 = active_bone.duik_ikfk.fk1_name <NEW_LINE> ik2 = active_bone.duik_ikfk.ik2_name <NEW_LINE> ik1 = active_bone.duik_ikfk.ik1_name <NEW_LINE> armature = context.active_object <NEW_LINE> ikCtrl = armature.pose.bones[ikCtrl] <NEW_LINE> pole = armature.pose.bones[pole] <NEW_LINE> fk2 = armature.pose.bones[fk2] <NEW_LINE> fk1 = armature.pose.bones[fk1] <NEW_LINE> ik2 = armature.pose.bones[ik2] <NEW_LINE> ik1 = armature.pose.bones[ik1] <NEW_LINE> if ikCtrl["FK / IK Blend"] > 0.5: <NEW_LINE> <INDENT> fk2ik( context, fk1, fk2, ik1, ik2, ikCtrl ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ik2fk( context, ikCtrl, ik2, pole, fk2 ) <NEW_LINE> <DEDENT> return {'FINISHED'} | Swaps the limb rigged in IK/FK between IK and FK | 6259905b55399d3f05627b1a |
class IThemeSpecific(IDefaultPloneLayer): <NEW_LINE> <INDENT> pass | Marker interface that defines a Zope 3 browser layer.
If you need to register a viewlet only for the
"uwosh_intranet_theme_clean" theme, this interface must be its layer
(in intranet_theme/viewlets/configure.zcml). | 6259905ba79ad1619776b5bb |
class SentMessage(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'sent_messages' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> phone = db.Column(db.String) <NEW_LINE> date = db.Column(db.DateTime) <NEW_LINE> message = db.Column(db.String) <NEW_LINE> status = db.Column(db.String) <NEW_LINE> sid = db.Column(db.String) <NEW_LINE> user_id = db.Column(db.Integer, db.ForeignKey('users.id')) <NEW_LINE> def __init__(self, phone, message, status, sid, date): <NEW_LINE> <INDENT> self.phone = phone <NEW_LINE> self.message = message <NEW_LINE> self.status = status <NEW_LINE> self.sid = sid <NEW_LINE> self.date = date | Record of a sent SMS message | 6259905bd53ae8145f919a5e |
class MediaType(Base): <NEW_LINE> <INDENT> __tablename__ = 'MediaType' <NEW_LINE> media_type_id = Column("MediaTypeId", Integer, primary_key=True) <NEW_LINE> name = Column("Name", Unicode(120)) | SQLAlchemy model for the MediaType table in our database. | 6259905b3539df3088ecd898 |
class ScaryThrower(ThrowerAnt): <NEW_LINE> <INDENT> name = 'Scary' <NEW_LINE> implemented = True <NEW_LINE> food_cost = 6 <NEW_LINE> def throw_at(self, target): <NEW_LINE> <INDENT> if target: <NEW_LINE> <INDENT> apply_effect(make_scare, target, 2) | ThrowerAnt that intimidates Bees, making them back away instead of advancing. | 6259905bf7d966606f7493b6 |
class PersonTag(AbstractLabel): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _('tag') | Model that represents a tag that may be used to qualify people | 6259905b23849d37ff8526c2 |
class AmcrestSensor(Entity): <NEW_LINE> <INDENT> def __init__(self, name, device, sensor_type): <NEW_LINE> <INDENT> self._name = '{} {}'.format(name, SENSORS[sensor_type][0]) <NEW_LINE> self._signal_name = name <NEW_LINE> self._api = device.api <NEW_LINE> self._sensor_type = sensor_type <NEW_LINE> self._state = None <NEW_LINE> self._attrs = {} <NEW_LINE> self._unit_of_measurement = SENSORS[sensor_type][1] <NEW_LINE> self._icon = SENSORS[sensor_type][2] <NEW_LINE> self._unsub_dispatcher = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_state_attributes(self): <NEW_LINE> <INDENT> return self._attrs <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> return self._icon <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return self._unit_of_measurement <NEW_LINE> <DEDENT> @property <NEW_LINE> def available(self): <NEW_LINE> <INDENT> return self._api.available <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> if not self.available: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> _LOGGER.debug("Updating %s sensor", self._name) <NEW_LINE> try: <NEW_LINE> <INDENT> if self._sensor_type == SENSOR_MOTION_DETECTOR: <NEW_LINE> <INDENT> self._state = self._api.is_motion_detected <NEW_LINE> self._attrs['Record Mode'] = self._api.record_mode <NEW_LINE> <DEDENT> elif self._sensor_type == SENSOR_PTZ_PRESET: <NEW_LINE> <INDENT> self._state = self._api.ptz_presets_count <NEW_LINE> <DEDENT> elif self._sensor_type == SENSOR_SDCARD: <NEW_LINE> <INDENT> storage = self._api.storage_all <NEW_LINE> try: <NEW_LINE> <INDENT> self._attrs['Total'] = '{:.2f} {}'.format( *storage['total']) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self._attrs['Total'] = '{} {}'.format(*storage['total']) <NEW_LINE> <DEDENT> try: 
<NEW_LINE> <INDENT> self._attrs['Used'] = '{:.2f} {}'.format(*storage['used']) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self._attrs['Used'] = '{} {}'.format(*storage['used']) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self._state = '{:.2f}'.format(storage['used_percent']) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self._state = storage['used_percent'] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except AmcrestError as error: <NEW_LINE> <INDENT> log_update_error(_LOGGER, 'update', self.name, 'sensor', error) <NEW_LINE> <DEDENT> <DEDENT> async def async_on_demand_update(self): <NEW_LINE> <INDENT> self.async_schedule_update_ha_state(True) <NEW_LINE> <DEDENT> async def async_added_to_hass(self): <NEW_LINE> <INDENT> self._unsub_dispatcher = async_dispatcher_connect( self.hass, service_signal(SERVICE_UPDATE, self._signal_name), self.async_on_demand_update) <NEW_LINE> <DEDENT> async def async_will_remove_from_hass(self): <NEW_LINE> <INDENT> self._unsub_dispatcher() | A sensor implementation for Amcrest IP camera. | 6259905bb7558d5895464a2a |
class MicrophoneDistanceValueValuesEnum(_messages.Enum): <NEW_LINE> <INDENT> MICROPHONE_DISTANCE_UNSPECIFIED = 0 <NEW_LINE> NEARFIELD = 1 <NEW_LINE> MIDFIELD = 2 <NEW_LINE> FARFIELD = 3 | The audio type that most closely describes the audio being recognized.
Values:
MICROPHONE_DISTANCE_UNSPECIFIED: Audio type is not known.
NEARFIELD: The audio was captured from a closely placed microphone. Eg.
phone, dictaphone, or handheld microphone. Generally if there speaker
is within 1 meter of the microphone.
MIDFIELD: The speaker if within 3 meters of the microphone.
FARFIELD: The speaker is more than 3 meters away from the microphone. | 6259905b3cc13d1c6d466d3d |
class Heap(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._heap = [0] <NEW_LINE> self.size = 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def heap(self): <NEW_LINE> <INDENT> return self._heap <NEW_LINE> <DEDENT> def percUp(self, i): <NEW_LINE> <INDENT> while (i // 2) > 0: <NEW_LINE> <INDENT> if self._heap[i] < self._heap[i // 2]: <NEW_LINE> <INDENT> self._heap[i], self._heap[i // 2] = self._heap[i // 2], self._heap[i] <NEW_LINE> <DEDENT> i = i // 2 <NEW_LINE> <DEDENT> <DEDENT> def insert(self, k): <NEW_LINE> <INDENT> self._heap.append(k) <NEW_LINE> self.size += 1 <NEW_LINE> self.percUp(self.size) <NEW_LINE> <DEDENT> def percDown(self, i): <NEW_LINE> <INDENT> while (i * 2) <= self.size: <NEW_LINE> <INDENT> mc = self.minChild(i) <NEW_LINE> if self._heap[i] > self._heap[mc]: <NEW_LINE> <INDENT> self._heap[i], self._heap[mc] = self._heap[mc], self._heap[i] <NEW_LINE> <DEDENT> i = mc <NEW_LINE> <DEDENT> <DEDENT> def minChild(self, i): <NEW_LINE> <INDENT> if (i * 2) + 1 > self.size: <NEW_LINE> <INDENT> return i * 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self._heap[(i*2)+1] < self._heap[i*2]: <NEW_LINE> <INDENT> return (i * 2) + 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return i * 2 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def delMin(self): <NEW_LINE> <INDENT> valueToDelete = self._heap[1] <NEW_LINE> self._heap[1] = self._heap[self.size] <NEW_LINE> self.size = self.size - 1 <NEW_LINE> self.list.pop() <NEW_LINE> self.percDown(1) <NEW_LINE> return valueToDelete <NEW_LINE> <DEDENT> def build_heap(self, array): <NEW_LINE> <INDENT> i = len(array) // 2 <NEW_LINE> self.size = len(array) <NEW_LINE> self._heap = [0] + array[:] <NEW_LINE> while i > 0: <NEW_LINE> <INDENT> self.percDown(i) <NEW_LINE> i = i - 1 | An implementation of the min heap in Python3 | 6259905b16aa5153ce401ae0 |
class CategoryViewSet(viewsets.ReadOnlyModelViewSet): <NEW_LINE> <INDENT> queryset = Category.objects.all() <NEW_LINE> serializer_class = CategorySerializer | API endpoint that allows categories to be viewed. | 6259905b07f4c71912bb0a38 |
class TestIsBuildslaveDir(misc.FileIOMixin, misc.LoggingMixin, unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.mocked_stdout = io.BytesIO() <NEW_LINE> self.patch(sys, "stdout", self.mocked_stdout) <NEW_LINE> self.setUpLogging() <NEW_LINE> self.tac_file_path = os.path.join("testdir", "buildbot.tac") <NEW_LINE> <DEDENT> def assertReadErrorMessage(self, strerror): <NEW_LINE> <INDENT> self.assertLogged( re.escape("error reading '%s': %s" % ( self.tac_file_path, strerror)), "invalid buildslave directory 'testdir'") <NEW_LINE> <DEDENT> def test_open_error(self): <NEW_LINE> <INDENT> self.setUpOpenError(1, "open-error", "dummy") <NEW_LINE> self.assertFalse(base.isBuildslaveDir("testdir")) <NEW_LINE> self.assertReadErrorMessage("open-error") <NEW_LINE> self.open.assert_called_once_with(self.tac_file_path) <NEW_LINE> <DEDENT> def test_read_error(self): <NEW_LINE> <INDENT> self.setUpReadError(1, "read-error", "dummy") <NEW_LINE> self.assertFalse(base.isBuildslaveDir("testdir")) <NEW_LINE> self.assertReadErrorMessage("read-error") <NEW_LINE> self.open.assert_called_once_with(self.tac_file_path) <NEW_LINE> <DEDENT> def test_unexpected_tac_contents(self): <NEW_LINE> <INDENT> self.setUpOpen("dummy-contents") <NEW_LINE> self.assertFalse(base.isBuildslaveDir("testdir")) <NEW_LINE> self.assertLogged( re.escape("unexpected content in '%s'" % self.tac_file_path), "invalid buildslave directory 'testdir'", "unexpected error message on stdout") <NEW_LINE> self.open.assert_called_once_with(self.tac_file_path) <NEW_LINE> <DEDENT> def test_slavedir_good(self): <NEW_LINE> <INDENT> self.setUpOpen("Application('buildslave')") <NEW_LINE> self.assertTrue(base.isBuildslaveDir("testdir")) <NEW_LINE> self.open.assert_called_once_with(self.tac_file_path) | Test buildslave.scripts.base.isBuildslaveDir() | 6259905b99cbb53fe68324dd |
class Config(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> return util.getJSONFile(CONFIG_PATH) <NEW_LINE> <DEDENT> def save(self, config_object): <NEW_LINE> <INDENT> return util.saveJSONFile(CONFIG_PATH, config_object) | Class to handle configuration file | 6259905b63b5f9789fe8676f |
class TestFunctions(unittest.TestCase): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def _RunTests(mock_runner, tests): <NEW_LINE> <INDENT> results = [] <NEW_LINE> tests = shard._TestCollection([shard._Test(t) for t in tests]) <NEW_LINE> shard._RunTestsFromQueue(mock_runner, tests, results, watchdog_timer.WatchdogTimer(None)) <NEW_LINE> run_results = base_test_result.TestRunResults() <NEW_LINE> for r in results: <NEW_LINE> <INDENT> run_results.AddTestRunResults(r) <NEW_LINE> <DEDENT> return run_results <NEW_LINE> <DEDENT> def testRunTestsFromQueue(self): <NEW_LINE> <INDENT> results = TestFunctions._RunTests(MockRunner(), ['a', 'b']) <NEW_LINE> self.assertEqual(len(results.GetPass()), 2) <NEW_LINE> self.assertEqual(len(results.GetNotPass()), 0) <NEW_LINE> <DEDENT> def testRunTestsFromQueueRetry(self): <NEW_LINE> <INDENT> results = TestFunctions._RunTests(MockRunnerFail(), ['a', 'b']) <NEW_LINE> self.assertEqual(len(results.GetPass()), 0) <NEW_LINE> self.assertEqual(len(results.GetFail()), 2) <NEW_LINE> <DEDENT> def testRunTestsFromQueueFailTwice(self): <NEW_LINE> <INDENT> results = TestFunctions._RunTests(MockRunnerFailTwice(), ['a', 'b']) <NEW_LINE> self.assertEqual(len(results.GetPass()), 2) <NEW_LINE> self.assertEqual(len(results.GetNotPass()), 0) <NEW_LINE> <DEDENT> def testSetUp(self): <NEW_LINE> <INDENT> runners = [] <NEW_LINE> counter = shard._ThreadSafeCounter() <NEW_LINE> shard._SetUp(MockRunner, '0', runners, counter) <NEW_LINE> self.assertEqual(len(runners), 1) <NEW_LINE> self.assertEqual(runners[0].setups, 1) <NEW_LINE> <DEDENT> def testThreadSafeCounter(self): <NEW_LINE> <INDENT> counter = shard._ThreadSafeCounter() <NEW_LINE> for i in xrange(5): <NEW_LINE> <INDENT> self.assertEqual(counter.GetAndIncrement(), i) | Tests for shard._RunTestsFromQueue. | 6259905b379a373c97d9a622 |
class Id(Field): <NEW_LINE> <INDENT> store = True <NEW_LINE> readonly = True <NEW_LINE> def __init__(self, string=None, **kwargs): <NEW_LINE> <INDENT> super(Id, self).__init__(type='integer', string=string, **kwargs) <NEW_LINE> <DEDENT> def to_column(self): <NEW_LINE> <INDENT> return fields.integer('ID') <NEW_LINE> <DEDENT> def __get__(self, record, owner): <NEW_LINE> <INDENT> if record is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> if not record: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return record.ensure_one()._ids[0] <NEW_LINE> <DEDENT> def __set__(self, record, value): <NEW_LINE> <INDENT> raise TypeError("field 'id' cannot be assigned") | Special case for field 'id'. | 6259905b7047854f463409be |
class SubItemPuppetClassIds(SubItem): <NEW_LINE> <INDENT> objName = 'puppetclass_ids' <NEW_LINE> payloadObj = 'puppetclass_id' <NEW_LINE> index = 'id' <NEW_LINE> def getPayloadStruct(self, attributes, objType): <NEW_LINE> <INDENT> payload = {"puppetclass_id": attributes, objType + "_class": {"puppetclass_id": attributes}} <NEW_LINE> return payload | ItemOverrideValues class
Represent the content of a foreman smart class parameter as a dict | 6259905b29b78933be26abc3 |
class GalicianAnalyzerLucene( SparklingJavaTransformer, HasInputCol, HasOutputCol, HasStopwords, HasStopwordCase): <NEW_LINE> <INDENT> package_name = "com.sparklingpandas.sparklingml.feature" <NEW_LINE> class_name = "GalicianAnalyzerLucene" <NEW_LINE> transformer_name = package_name + "." + class_name <NEW_LINE> @keyword_only <NEW_LINE> def __init__(self, inputCol=None, outputCol=None, stopwords=None, stopwordCase=False): <NEW_LINE> <INDENT> super(GalicianAnalyzerLucene, self).__init__() <NEW_LINE> self._setDefault(stopwordCase=False) <NEW_LINE> kwargs = self._input_kwargs <NEW_LINE> self.setParams(**kwargs) <NEW_LINE> <DEDENT> @keyword_only <NEW_LINE> def setParams(self, inputCol=None, outputCol=None, stopwords=None, stopwordCase=False): <NEW_LINE> <INDENT> kwargs = self._input_kwargs <NEW_LINE> return self._set(**kwargs) | >>> from pyspark.sql import SparkSession
>>> spark = SparkSession.builder.master("local[2]").getOrCreate()
>>> df = spark.createDataFrame([("hi boo",), ("bye boo",)], ["vals"])
>>> transformer = GalicianAnalyzerLucene()
>>> transformer.setParams(inputCol="vals", outputCol="out")
GalicianAnalyzerLucene_...
>>> result = transformer.transform(df)
>>> result.count()
2
>>> transformer.setStopwordCase(True)
GalicianAnalyzerLucene_...
>>> result = transformer.transform(df)
>>> result.count()
2 | 6259905b507cdc57c63a63a4 |
@dataclass <NEW_LINE> class ActiveLogEvents(betterproto.Message): <NEW_LINE> <INDENT> id: List[int] = betterproto.uint32_field(1) | Note: NextLogEvents has a custom equality operator in the firmware which
must be updated if you add/remove/modify the fields of the protobuf
definition! | 6259905bf548e778e596cb91 |
class MeanCovTracker(object): <NEW_LINE> <INDENT> def __init__(self, x): <NEW_LINE> <INDENT> self.N = x.shape[0] <NEW_LINE> self.sx = x.sum(axis=0) <NEW_LINE> self.sxxT = np.dot(x.T, x) <NEW_LINE> self.update_mean_cov() <NEW_LINE> <DEDENT> def update_mean_cov(self): <NEW_LINE> <INDENT> self.mean = self.sx / self.N <NEW_LINE> self.cov = (self.sxxT / self.N) - xxT(self.mean) <NEW_LINE> self.L = linalg.cholesky(self.cov, lower=True) <NEW_LINE> <DEDENT> def remove_point(self, x): <NEW_LINE> <INDENT> self.N -= 1 <NEW_LINE> self.sx -= x <NEW_LINE> self.sxxT -= xxT(x) <NEW_LINE> self.update_mean_cov() <NEW_LINE> <DEDENT> def add_point(self, x): <NEW_LINE> <INDENT> self.N += 1 <NEW_LINE> self.sx += x <NEW_LINE> self.sxxT += xxT(x) <NEW_LINE> self.update_mean_cov() | Tracks mean and cov of a set of points.
Note: points must be given as a (N,d) np.array | 6259905b4a966d76dd5f04f0 |
class Memory(object): <NEW_LINE> <INDENT> def __init__(self, models): <NEW_LINE> <INDENT> self.models = models <NEW_LINE> self.orig_positions = [m.pos for m in models] <NEW_LINE> <DEDENT> def restore_models(self): <NEW_LINE> <INDENT> for pos, model in zip(self.orig_positions, self.models): <NEW_LINE> <INDENT> model.pos = pos <NEW_LINE> <DEDENT> <DEDENT> def get_falling(self): <NEW_LINE> <INDENT> return self._get_moved(lambda dx, dy: dy > 0) <NEW_LINE> <DEDENT> def get_pushed(self): <NEW_LINE> <INDENT> return self._get_moved(lambda dx, dy: dx != 0) <NEW_LINE> <DEDENT> def _get_moved(self, condition): <NEW_LINE> <INDENT> moved = [] <NEW_LINE> for pos, model in zip(self.orig_positions, self.models): <NEW_LINE> <INDENT> dx, dy = v2.diff(model.pos, pos) <NEW_LINE> if condition(dx, dy): <NEW_LINE> <INDENT> moved.append(model) <NEW_LINE> <DEDENT> <DEDENT> return moved | It remebers falls and pushes
to know what the dangerous moves. | 6259905bac7a0e7691f73ae0 |
class VoteHandler(BaseHandler): <NEW_LINE> <INDENT> @web.authenticated <NEW_LINE> @gen.coroutine <NEW_LINE> def get(self): <NEW_LINE> <INDENT> _id = self.get_argument('_id', None) <NEW_LINE> content = self.get_argument('content', None) <NEW_LINE> vote = self.get_argument('vote', None) <NEW_LINE> _id = self.mongo_check_id(_id) <NEW_LINE> if content and vote: <NEW_LINE> <INDENT> if vote == 'up': <NEW_LINE> <INDENT> voteresult = True <NEW_LINE> <DEDENT> elif vote == 'down': <NEW_LINE> <INDENT> voteresult = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.write_result(None) <NEW_LINE> <DEDENT> domain, uid = self.current_user[:2] <NEW_LINE> user = dict(authdomain=domain, uid=uid) <NEW_LINE> r = dbUsers.users.update_one(user, {'$set':{'votes.'+str(_id): voteresult}}) <NEW_LINE> print(r.matched_count, r.modified_count) <NEW_LINE> self.write_result(r.modified_count, finish=False) <NEW_LINE> if r.modified_count: <NEW_LINE> <INDENT> inc = 1 if vote=='up' else -1 <NEW_LINE> if content == 'answer': <NEW_LINE> <INDENT> dbPosts.answers.update_one({'_id':_id}, {'$inc': {'voteCount': inc}}) <NEW_LINE> <DEDENT> if content == 'question': <NEW_LINE> <INDENT> dbPosts.posts.update_one({'_id':_id}, {'$inc': {'voteCount': inc}}) | handler for /ajax/vote. Store vote under user collection. | 6259905bd99f1b3c44d06c9f |
@adapter(IClient) <NEW_LINE> @implementer(ILocalRoleProvider) <NEW_LINE> class ClientLocalRolesProvider(object): <NEW_LINE> <INDENT> def __init__(self, client): <NEW_LINE> <INDENT> self.context = client <NEW_LINE> <DEDENT> def getRoles(self, principal_id): <NEW_LINE> <INDENT> if principal_id == self.context.getId(): <NEW_LINE> <INDENT> return ("CountryManager",) <NEW_LINE> <DEDENT> return () <NEW_LINE> <DEDENT> def getAllRoles(self): <NEW_LINE> <INDENT> return [(self.context.getId(), ("CountryManager",))] | `borg.localrole` provider for :obj:`IClient` instances.
This local role provider gives the client user itself the
`CountryManager` local role. This allows publication of surveys
inside the client since the publication machinery always
runs under the client user. | 6259905b91f36d47f223198e |
class LinearMove(Movable): <NEW_LINE> <INDENT> def __init__(self, **options): <NEW_LINE> <INDENT> Movable.__init__(self, **options) <NEW_LINE> self.__speed = options.get("speed", 5) <NEW_LINE> angle = options.get("angle", 0) <NEW_LINE> self.__angle = 2 * math.pi - math.radians(angle) <NEW_LINE> self.__dx = 0 <NEW_LINE> self.__dy = 0 <NEW_LINE> <DEDENT> def delta_move(self): <NEW_LINE> <INDENT> angle = self.__angle <NEW_LINE> delta_x = self.speed * math.cos(angle) <NEW_LINE> delta_y = self.speed * math.sin(angle) <NEW_LINE> return (delta_x, delta_y) <NEW_LINE> <DEDENT> def flip_horizontal_movement(self, **_): <NEW_LINE> <INDENT> self.__angle = (2 * math.pi + (math.pi - self.__angle)) % (2 * math.pi) <NEW_LINE> <DEDENT> def flip_vertical_movement(self, **_): <NEW_LINE> <INDENT> self.__angle = (2 * math.pi + (-1 * self.__angle)) % (2 * math.pi) <NEW_LINE> <DEDENT> @property <NEW_LINE> def speed(self): <NEW_LINE> <INDENT> return self.__speed <NEW_LINE> <DEDENT> @speed.setter <NEW_LINE> def speed(self, value): <NEW_LINE> <INDENT> self.__speed = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def angle(self): <NEW_LINE> <INDENT> return math.degrees(self.__angle) <NEW_LINE> <DEDENT> @angle.setter <NEW_LINE> def angle(self, value): <NEW_LINE> <INDENT> self.__angle = 2 * math.pi - math.radians(value) | Move an object linearly. | 6259905bf7d966606f7493b7 |
class FruitEditView(LoginRequiredMixin, UpdateView): <NEW_LINE> <INDENT> model = models.Fruit <NEW_LINE> template_name = 'fruits/fruit_edit.html' <NEW_LINE> fields = ('name', 'price') <NEW_LINE> login_url = 'login' | 果物マスタ編集ビュー | 6259905bbe8e80087fbc0682 |
class GlusterNFSVolHelper(GlusterNFSHelper): <NEW_LINE> <INDENT> def __init__(self, execute, config_object, **kwargs): <NEW_LINE> <INDENT> self.gluster_manager = kwargs.pop('gluster_manager') <NEW_LINE> super(GlusterNFSHelper, self).__init__(execute, config_object, **kwargs) <NEW_LINE> <DEDENT> def _get_vol_exports(self): <NEW_LINE> <INDENT> export_vol = self.gluster_manager.get_gluster_vol_option( NFS_RPC_AUTH_ALLOW) <NEW_LINE> return export_vol.split(',') if export_vol else [] <NEW_LINE> <DEDENT> def _manage_access(self, access_type, access_to, cbk): <NEW_LINE> <INDENT> if access_type != 'ip': <NEW_LINE> <INDENT> raise exception.InvalidShareAccess('only ip access type allowed') <NEW_LINE> <DEDENT> export_vol_list = self._get_vol_exports() <NEW_LINE> if cbk(export_vol_list, access_to): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if export_vol_list: <NEW_LINE> <INDENT> argseq = (('volume', 'set', self.gluster_manager.volume, NFS_RPC_AUTH_ALLOW, ','.join(export_vol_list)), ('volume', 'reset', self.gluster_manager.volume, NFS_RPC_AUTH_REJECT)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> argseq = (('volume', 'reset', self.gluster_manager.volume, NFS_RPC_AUTH_ALLOW), ('volume', 'set', self.gluster_manager.volume, NFS_RPC_AUTH_REJECT, '*')) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> for args in argseq: <NEW_LINE> <INDENT> self.gluster_manager.gluster_call(*args) <NEW_LINE> <DEDENT> <DEDENT> except exception.ProcessExecutionError as exc: <NEW_LINE> <INDENT> LOG.error(_LE("Error in gluster volume set: %s"), exc.stderr) <NEW_LINE> raise <NEW_LINE> <DEDENT> <DEDENT> def allow_access(self, base, share, access): <NEW_LINE> <INDENT> def cbk(explist, host): <NEW_LINE> <INDENT> if host in explist: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> explist.append(host) <NEW_LINE> <DEDENT> self._manage_access(access['access_type'], access['access_to'], cbk) <NEW_LINE> <DEDENT> def deny_access(self, base, share, access): <NEW_LINE> <INDENT> def cbk(explist, host): 
<NEW_LINE> <INDENT> if host not in explist: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> explist.remove(host) <NEW_LINE> <DEDENT> self._manage_access(access['access_type'], access['access_to'], cbk) | Manage shares with Gluster-NFS server, volume mapped variant. | 6259905b009cb60464d02b34 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.