code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
---|---|---|
class LatticeError(Exception): <NEW_LINE> <INDENT> pass | Exception for impossible lattice parameters.
| 625990643617ad0b5ee0787c |
class GlobalRecoveryEvent(Event): <NEW_LINE> <INDENT> def __init__(self, message, refs, add_to_ceph_s, start_epoch, active_clean_num): <NEW_LINE> <INDENT> super().__init__(str(uuid.uuid4()), message, refs, add_to_ceph_s) <NEW_LINE> self._add_to_ceph_s = add_to_ceph_s <NEW_LINE> self._progress = 0.0 <NEW_LINE> self._start_epoch = start_epoch <NEW_LINE> self._active_clean_num = active_clean_num <NEW_LINE> self._refresh() <NEW_LINE> <DEDENT> def global_event_update_progress(self, pg_dump): <NEW_LINE> <INDENT> pgs = pg_dump['pg_stats'] <NEW_LINE> new_active_clean_num = 0 <NEW_LINE> for pg in pgs: <NEW_LINE> <INDENT> if int(pg['reported_epoch']) < int(self._start_epoch): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> state = pg['state'] <NEW_LINE> states = state.split("+") <NEW_LINE> if "active" in states and "clean" in states: <NEW_LINE> <INDENT> new_active_clean_num += 1 <NEW_LINE> <DEDENT> <DEDENT> total_pg_num = len(pgs) <NEW_LINE> if self._active_clean_num != new_active_clean_num: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._progress = float(new_active_clean_num) / total_pg_num <NEW_LINE> <DEDENT> except ZeroDivisionError: <NEW_LINE> <INDENT> self._progress = 0.0 <NEW_LINE> <DEDENT> <DEDENT> self._refresh() <NEW_LINE> <DEDENT> @property <NEW_LINE> def progress(self): <NEW_LINE> <INDENT> return self._progress | An event whoese completion is determined by active+clean/total_pg_num | 625990647cff6e4e811b7173 |
class TestNsxLibIPSet(nsxlib_testcase.NsxClientTestCase): <NEW_LINE> <INDENT> def test_get_ipset_reference(self): <NEW_LINE> <INDENT> mock_ip_set = uuidutils.generate_uuid() <NEW_LINE> result = self.nsxlib.ip_set.get_ipset_reference( mock_ip_set) <NEW_LINE> expected = { 'target_id': mock_ip_set, 'target_type': const.IP_SET } <NEW_LINE> self.assertEqual(expected, result) <NEW_LINE> <DEDENT> def test_create_ip_set(self): <NEW_LINE> <INDENT> fake_ip_set = test_constants.FAKE_IP_SET.copy() <NEW_LINE> data = { 'display_name': fake_ip_set['display_name'], 'ip_addresses': fake_ip_set['ip_addresses'], 'description': 'ipset-desc', 'tags': [] } <NEW_LINE> with mock.patch.object(self.nsxlib.client, 'create') as create: <NEW_LINE> <INDENT> self.nsxlib.ip_set.create( fake_ip_set['display_name'], 'ipset-desc', ip_addresses=fake_ip_set['ip_addresses']) <NEW_LINE> resource = 'ip-sets' <NEW_LINE> create.assert_called_with(resource, data) <NEW_LINE> <DEDENT> <DEDENT> def test_delete_ip_set(self): <NEW_LINE> <INDENT> with mock.patch.object(self.nsxlib.client, 'delete') as delete: <NEW_LINE> <INDENT> fake_ip_set = test_constants.FAKE_IP_SET.copy() <NEW_LINE> self.nsxlib.ip_set.delete(fake_ip_set['id']) <NEW_LINE> delete.assert_called_with('ip-sets/%s' % fake_ip_set['id']) <NEW_LINE> <DEDENT> <DEDENT> def test_update_ip_set(self): <NEW_LINE> <INDENT> fake_ip_set = test_constants.FAKE_IP_SET.copy() <NEW_LINE> new_ip_addresses = ['10.0.0.0'] <NEW_LINE> data = { 'id': fake_ip_set['id'], 'display_name': fake_ip_set['display_name'], 'ip_addresses': new_ip_addresses, 'resource_type': 'IPSet' } <NEW_LINE> with mock.patch.object(self.nsxlib.client, 'get', return_value=fake_ip_set): <NEW_LINE> <INDENT> with mock.patch.object(self.nsxlib.client, 'update') as update: <NEW_LINE> <INDENT> self.nsxlib.ip_set.update( fake_ip_set['id'], ip_addresses=new_ip_addresses) <NEW_LINE> resource = 'ip-sets/%s' % fake_ip_set['id'] <NEW_LINE> update.assert_called_with(resource, data) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_update_ip_set_empty_ip_addresses(self): <NEW_LINE> <INDENT> fake_ip_set = test_constants.FAKE_IP_SET.copy() <NEW_LINE> new_ip_addresses = [] <NEW_LINE> data = { 'id': fake_ip_set['id'], 'display_name': fake_ip_set['display_name'], 'ip_addresses': new_ip_addresses, 'resource_type': 'IPSet' } <NEW_LINE> with mock.patch.object(self.nsxlib.client, 'get', return_value=fake_ip_set): <NEW_LINE> <INDENT> with mock.patch.object(self.nsxlib.client, 'update') as update: <NEW_LINE> <INDENT> self.nsxlib.ip_set.update( fake_ip_set['id'], ip_addresses=new_ip_addresses) <NEW_LINE> resource = 'ip-sets/%s' % fake_ip_set['id'] <NEW_LINE> update.assert_called_with(resource, data) | Tests for vmware_nsxlib.v3.security.NsxLibIPSet | 625990648a43f66fc4bf38bc |
class Dīc(MuxCommand): <NEW_LINE> <INDENT> key = "dīc" <NEW_LINE> aliases = ['dic'] <NEW_LINE> locks = "cmd:all()" <NEW_LINE> help_category = "Iussa Latīna" <NEW_LINE> auto_help = True <NEW_LINE> def func(self): <NEW_LINE> <INDENT> caller = self.caller <NEW_LINE> if not self.args: <NEW_LINE> <INDENT> caller.msg("Quid dīcere velis?") <NEW_LINE> return <NEW_LINE> <DEDENT> speech = self.args <NEW_LINE> speech = caller.at_before_say(speech) <NEW_LINE> if not speech: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> caller.at_say(speech, msg_self=True) | Speak as your character
Usage:
dīc <message>
Talk to those in your current location | 625990648e71fb1e983bd1f2 |
class SummaryAutosave(GetEventMixinWithAccessCheck, View): <NEW_LINE> <INDENT> def post(self, request, **kwargs): <NEW_LINE> <INDENT> if 'id' in request.POST: <NEW_LINE> <INDENT> return self.do_update(request) <NEW_LINE> <DEDENT> return self.do_create(request) <NEW_LINE> <DEDENT> def do_create(self, request): <NEW_LINE> <INDENT> result_type = request.POST.get('result_type') <NEW_LINE> result_id = request.POST.get('result_id') <NEW_LINE> content = request.POST.get('content', '') <NEW_LINE> try: <NEW_LINE> <INDENT> if result_type == 'labseventresult': <NEW_LINE> <INDENT> result = LabsEventResult.objects.select_related('block').get(id=result_id) <NEW_LINE> assert result.block.event_id == self.event.id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = None <NEW_LINE> <DEDENT> <DEDENT> except (AssertionError, ValueError, TypeError, ObjectDoesNotExist): <NEW_LINE> <INDENT> return JsonResponse({}, status=400) <NEW_LINE> <DEDENT> summary = Summary.objects.create(author=request.user, result=result, content=content, event=self.event) <NEW_LINE> return JsonResponse({'summary_id': summary.id}) <NEW_LINE> <DEDENT> def do_update(self, request): <NEW_LINE> <INDENT> summary_id = request.POST.get('id') <NEW_LINE> try: <NEW_LINE> <INDENT> summary = Summary.objects.get(id=summary_id) <NEW_LINE> assert summary.is_draft and summary.author_id == request.user.id and summary.event_id == self.event.id <NEW_LINE> <DEDENT> except (Summary.DoesNotExist, ValueError, TypeError, AssertionError): <NEW_LINE> <INDENT> return JsonResponse({}, status=400) <NEW_LINE> <DEDENT> summary.content = request.POST.get('content', '') <NEW_LINE> summary.save(update_fields=['content']) <NEW_LINE> return JsonResponse({'summary_id': summary.id}) | автосохранение черновика конспекта | 625990648e7ae83300eea7bb |
class LO(rdfSubject): <NEW_LINE> <INDENT> rdf_type = lom.LearningObject <NEW_LINE> title = rdfSingle(dcterms.title) <NEW_LINE> identifier = rdfSingle(dcterms.identifier) <NEW_LINE> description = rdfSingle(dcterms.description) <NEW_LINE> subject = rdfMultiple(dcterms.subject) | Representacao de Objeto de Aprendizagem | 625990640a50d4780f706956 |
class BankMixin(ProviderMixin): <NEW_LINE> <INDENT> @property <NEW_LINE> def processor_token_id(self): <NEW_LINE> <INDENT> return get_broker().processor_backend.token_id <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(BankMixin, self).get_context_data(**kwargs) <NEW_LINE> context.update(self.provider.get_deposit_context()) <NEW_LINE> return context | Adds bank information to the context. | 625990644e4d562566373b34 |
class SiteDaoRedis(SiteDaoBase, RedisDaoBase): <NEW_LINE> <INDENT> def insert(self, site: Site, **kwargs): <NEW_LINE> <INDENT> hash_key = self.key_schema.site_hash_key(site.id) <NEW_LINE> site_ids_key = self.key_schema.site_ids_key() <NEW_LINE> client = kwargs.get('pipeline', self.redis) <NEW_LINE> client.hset(hash_key, mapping=FlatSiteSchema().dump(site)) <NEW_LINE> client.sadd(site_ids_key, site.id) <NEW_LINE> <DEDENT> def insert_many(self, *sites: Site, **kwargs) -> None: <NEW_LINE> <INDENT> for site in sites: <NEW_LINE> <INDENT> self.insert(site, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def find_by_id(self, site_id: int, **kwargs) -> Site: <NEW_LINE> <INDENT> hash_key = self.key_schema.site_hash_key(site_id) <NEW_LINE> site_hash = self.redis.hgetall(hash_key) <NEW_LINE> if not site_hash: <NEW_LINE> <INDENT> raise SiteNotFound() <NEW_LINE> <DEDENT> return FlatSiteSchema().load(site_hash) <NEW_LINE> <DEDENT> def find_all(self, **kwargs) -> Set[Site]: <NEW_LINE> <INDENT> hash_key = self.key_schema.site_ids_key() <NEW_LINE> client = kwargs.get('pipeline', self.redis) <NEW_LINE> site_ids = client.smembers(hash_key) <NEW_LINE> site_hashes = [] <NEW_LINE> for site_id in site_ids: <NEW_LINE> <INDENT> key = self.key_schema.site_hash_key(site_id) <NEW_LINE> site_hash = client.hgetall(key) <NEW_LINE> site_hashes.append(site_hash) <NEW_LINE> <DEDENT> return {FlatSiteSchema().load(site_hash) for site_hash in site_hashes} | SiteDaoRedis persists Site models to Redis.
This class allows persisting (and querying for) Sites in Redis. | 62599064cb5e8a47e493cd1b |
class TestPath(modulespecific.ModuleSpecificTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.basic_repo = self.test_module.BasicRepository() <NEW_LINE> self.repo = self.basic_repo.repo() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.basic_repo.teardown() | Test the Path interface. | 6259906467a9b606de547639 |
class PluginDiscoveryJSONFile(PanoptesDiscoveryPlugin): <NEW_LINE> <INDENT> def run(self, context): <NEW_LINE> <INDENT> assert context and isinstance(context, PanoptesPluginContext), 'context must be a PanoptesPluginContext' <NEW_LINE> conf = context.config <NEW_LINE> logger = context.logger <NEW_LINE> config_file = None <NEW_LINE> try: <NEW_LINE> <INDENT> config_file = conf['main']['config_file'] <NEW_LINE> with open(config_file) as f: <NEW_LINE> <INDENT> resource_specs = json.load(f) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise PanoptesDiscoveryPluginError( 'Error while attempting to parse JSON from file {}: {}'.format(config_file, repr(e)) ) <NEW_LINE> <DEDENT> resources = PanoptesResourceSet() <NEW_LINE> num_successes = 0 <NEW_LINE> num_failures = 0 <NEW_LINE> for resource_spec in resource_specs: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> resource = PanoptesResource.resource_from_dict(resource_spec) <NEW_LINE> resources.add(resource) <NEW_LINE> num_successes += 1 <NEW_LINE> logger.debug('Added resource {} from JSON file {}'.format(resource, config_file)) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.debug('Error while attempting to create a PanoptesResource from file {}: {}'.format( config_file, repr(e))) <NEW_LINE> num_failures += 1 <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> if num_successes > 0: <NEW_LINE> <INDENT> logger.info('Tried to read {} resources from {}, {} failed'.format(num_successes + num_failures, config_file, num_failures)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.error('Error while attempting to create PanoptesResources from {}.'.format(config_file)) <NEW_LINE> raise PanoptesDiscoveryPluginError( 'Error during lookup for PanoptesResource from file {}.'.format(config_file)) <NEW_LINE> <DEDENT> return resources | Standalone discovery plugin to populate PanoptesResources from a JSON file. | 625990646e29344779b01d7d |
class TempleDetailParser(html.parser.HTMLParser): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> html.parser.HTMLParser.__init__(self) <NEW_LINE> self.data = [] <NEW_LINE> self.pictureLink = ' ' <NEW_LINE> <DEDENT> def handle_data(self, data): <NEW_LINE> <INDENT> self.data.append(data) <NEW_LINE> <DEDENT> def handle_starttag(self, tag, attr): <NEW_LINE> <INDENT> if tag == 'img': <NEW_LINE> <INDENT> self.pictureLink = attr[0][1] <NEW_LINE> <DEDENT> <DEDENT> def getResult(self): <NEW_LINE> <INDENT> return self.data <NEW_LINE> <DEDENT> def getPictureLink(self): <NEW_LINE> <INDENT> return self.pictureLink <NEW_LINE> <DEDENT> def clearOldResult(self): <NEW_LINE> <INDENT> self.data = [] | Returns a list of all data collected when called. | 62599064460517430c432beb |
class Window(Gtk.Window): <NEW_LINE> <INDENT> __gtype_name__ = "Window" <NEW_LINE> def __new__(cls): <NEW_LINE> <INDENT> builder = get_builder('CatfishWindow') <NEW_LINE> new_object = builder.get_object("catfish_window") <NEW_LINE> new_object.finish_initializing(builder) <NEW_LINE> return new_object <NEW_LINE> <DEDENT> def finish_initializing(self, builder): <NEW_LINE> <INDENT> self.builder = builder <NEW_LINE> self.ui = builder.get_ui(self, True) <NEW_LINE> self.AboutDialog = None <NEW_LINE> self.sidebar = self.builder.get_object('sidebar') <NEW_LINE> button = Gtk.MenuButton() <NEW_LINE> button.set_size_request(32, 32) <NEW_LINE> image = Gtk.Image.new_from_icon_name("emblem-system-symbolic", Gtk.IconSize.MENU) <NEW_LINE> button.set_image(image) <NEW_LINE> popup = builder.get_object('appmenu') <NEW_LINE> popup.set_property("halign", Gtk.Align.CENTER) <NEW_LINE> button.set_popup(popup) <NEW_LINE> box = builder.get_object('appmenu_placeholder') <NEW_LINE> box.add(button) <NEW_LINE> button.show_all() <NEW_LINE> <DEDENT> def on_mnu_about_activate(self, widget, data=None): <NEW_LINE> <INDENT> if self.AboutDialog is not None: <NEW_LINE> <INDENT> about = self.AboutDialog() <NEW_LINE> about.run() <NEW_LINE> about.destroy() <NEW_LINE> <DEDENT> <DEDENT> def on_destroy(self, widget, data=None): <NEW_LINE> <INDENT> self.search_engine.stop() <NEW_LINE> self.settings.write() <NEW_LINE> Gtk.main_quit() <NEW_LINE> <DEDENT> def on_catfish_window_window_state_event(self, widget, event): <NEW_LINE> <INDENT> self.window_is_fullscreen = bool(event.new_window_state & Gdk.WindowState.FULLSCREEN) <NEW_LINE> <DEDENT> def on_catfish_window_key_press_event(self, widget, event): <NEW_LINE> <INDENT> key_name = Gdk.keyval_name(event.keyval) <NEW_LINE> if key_name == 'F9': <NEW_LINE> <INDENT> self.sidebar_toggle_menu.activate() <NEW_LINE> return True <NEW_LINE> <DEDENT> if key_name == 'F11': <NEW_LINE> <INDENT> if self.window_is_fullscreen: <NEW_LINE> <INDENT> self.unfullscreen() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fullscreen() <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> return False | This class is meant to be subclassed by CatfishWindow. It provides
common functions and some boilerplate. | 625990641f5feb6acb164319 |
class Entity(object): <NEW_LINE> <INDENT> def __init__(self, classes=None, applications=None, parameters=None, uri=None, name=None): <NEW_LINE> <INDENT> if classes is None: classes = Classes() <NEW_LINE> self._set_classes(classes) <NEW_LINE> if applications is None: applications = Applications() <NEW_LINE> self._set_applications(applications) <NEW_LINE> if parameters is None: parameters = Parameters() <NEW_LINE> self._set_parameters(parameters) <NEW_LINE> self._uri = uri or '' <NEW_LINE> self._name = name or '' <NEW_LINE> <DEDENT> name = property(lambda s: s._name) <NEW_LINE> uri = property(lambda s: s._uri) <NEW_LINE> classes = property(lambda s: s._classes) <NEW_LINE> applications = property(lambda s: s._applications) <NEW_LINE> parameters = property(lambda s: s._parameters) <NEW_LINE> def _set_classes(self, classes): <NEW_LINE> <INDENT> if not isinstance(classes, Classes): <NEW_LINE> <INDENT> raise TypeError('Entity.classes cannot be set to ' 'instance of type %s' % type(classes)) <NEW_LINE> <DEDENT> self._classes = classes <NEW_LINE> <DEDENT> def _set_applications(self, applications): <NEW_LINE> <INDENT> if not isinstance(applications, Applications): <NEW_LINE> <INDENT> raise TypeError('Entity.applications cannot be set to ' 'instance of type %s' % type(applications)) <NEW_LINE> <DEDENT> self._applications = applications <NEW_LINE> <DEDENT> def _set_parameters(self, parameters): <NEW_LINE> <INDENT> if not isinstance(parameters, Parameters): <NEW_LINE> <INDENT> raise TypeError('Entity.parameters cannot be set to ' 'instance of type %s' % type(parameters)) <NEW_LINE> <DEDENT> self._parameters = parameters <NEW_LINE> <DEDENT> def merge(self, other): <NEW_LINE> <INDENT> self._classes.merge_unique(other._classes) <NEW_LINE> self._applications.merge_unique(other._applications) <NEW_LINE> self._parameters.merge(other._parameters) <NEW_LINE> self._name = other.name <NEW_LINE> self._uri = other.uri <NEW_LINE> <DEDENT> def interpolate(self): <NEW_LINE> <INDENT> self._parameters.interpolate() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, type(self)) and self._applications == other._applications and self._classes == other._classes and self._parameters == other._parameters and self._name == other._name and self._uri == other._uri <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s(%r, %r, %r, uri=%r, name=%r)" % (self.__class__.__name__, self.classes, self.applications, self.parameters, self.uri, self.name) <NEW_LINE> <DEDENT> def as_dict(self): <NEW_LINE> <INDENT> return {'classes': self._classes.as_list(), 'applications': self._applications.as_list(), 'parameters': self._parameters.as_dict() } | A collection of Classes, Parameters, and Applications, mainly as a wrapper
for merging. The name and uri of an Entity will be updated to the name and
uri of the Entity that is being merged. | 6259906444b2445a339b74f7 |
class ConnectToken(Resource): <NEW_LINE> <INDENT> keys = ['token', 'email', 'created', 'used', 'expires', 'callback_url', 'first_name', 'last_name', 'account'] <NEW_LINE> def __init__(self, parent, defn): <NEW_LINE> <INDENT> super(ConnectToken, self).__init__( parent, 'connect_tokens/{token}', defn ) <NEW_LINE> if 'account' in defn: <NEW_LINE> <INDENT> if defn['account']: <NEW_LINE> <INDENT> self.account = Account(self.parent, defn['account']) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get(self): <NEW_LINE> <INDENT> self.__init__(self.parent, self._request_uri('')) <NEW_LINE> return True <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> status = self._request_uri('', method='DELETE') <NEW_LINE> return bool(status['success']) | Class to represent the connect_token resource.
Properties:
token: string - Id of the connect_token
email: string - email address specified on token creation
created: integer - Unix timestamp of the connect_token was created
used: integer - Unix time this token was been used. 0 means it no
account has been created with this token yet
expires: mixed - Unix time this token will expire and be purged. Once
the token is used, this property will be set to false
callback_url: string - URL of your app we'll redirect the browser to
when the account is created
first_name: string - First name specified on token creation
last_name: string - Last name specified on token creation
account: Account object | 62599064435de62698e9d537 |
class GtmMonitorSmtp(GtmMonitorSmtpSchema): <NEW_LINE> <INDENT> cli_command = "/mgmt/tm/gtm/monitor/smtp" <NEW_LINE> def rest(self): <NEW_LINE> <INDENT> response = self.device.get(self.cli_command) <NEW_LINE> response_json = response.json() <NEW_LINE> if not response_json: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> return response_json | To F5 resource for /mgmt/tm/gtm/monitor/smtp
| 6259906429b78933be26ac5b |
class FibreChannelTestCase(DriverTestCase): <NEW_LINE> <INDENT> driver_name = "cinder.volume.driver.FibreChannelDriver" <NEW_LINE> def test_initialize_connection(self): <NEW_LINE> <INDENT> self.driver = driver.FibreChannelDriver() <NEW_LINE> self.driver.do_setup(None) <NEW_LINE> self.assertRaises(NotImplementedError, self.driver.initialize_connection, {}, {}) | Test Case for FibreChannelDriver | 625990642ae34c7f260ac816 |
class CourseAccessRedirect(Redirect): <NEW_LINE> <INDENT> pass | Redirect raised when user does not have access to a course. | 625990643617ad0b5ee0787e |
class Bitbar(Configuration): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Bitbar, self).__init__() | Bitbar is a class which represents an instance of Bitbar.
In a distinction from BitbarProject, methods implemented in
Bitbar can be called without requiring a project id. | 6259906445492302aabfdc0a |
class FakeSession(object): <NEW_LINE> <INDENT> def __init__(self, md5_error=False, connection_error=False): <NEW_LINE> <INDENT> self.s3 = {} <NEW_LINE> self.service = FakeService(self) <NEW_LINE> self.md5_error = md5_error <NEW_LINE> self.connection_error = connection_error <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> return {'region': 'us-west-2'} <NEW_LINE> <DEDENT> def get_service(self, service='s3'): <NEW_LINE> <INDENT> return self.service <NEW_LINE> <DEDENT> def emit(self, *args, **kwargs): <NEW_LINE> <INDENT> pass | This class drives the operations for the unit tests for the plugin by
providing an emulation of botocore's session module. It is by no
means a complete emulation of the session module. The class also
keeps track of dictionary that tracks an emulated state of s3.
This feature allows the unit tests to preform commands on the
emulated s3 profile and actually affect the emulated s3 profile
without ever affecting an actual s3 profile or contacting s3.
:var self.s3: This holds the current state of the emulated s3
profile. The variable is ordered such that the top level keys are
bucket names. Each bucket name is a key to a dictionary of
s3 objects. The key's of the s3 objects are keys to another
dictionary of relevant info about the object like its data,
last modified time, size, and etag.
:type self.s3: Dictionary. A sample form of the dictionary with
a single object and bucket is
{'mybucket': {'mykey': {'Body': "This is a test.",
'Size': 15,
'LastModified': '2013-07-15T17:03:43.000Z',
'ETag': 'ad35657fshafq4tg46'}}}
:var self.service: This is a mock serice to emulate botocore's
session module
:param md5_error: If true, some operations will raise an exception
signaling the md5's do not match
:param connection_error: If true, some operations will raise an exception
signalling that there was a connection_error. | 625990647cff6e4e811b7175 |
class Compartment: <NEW_LINE> <INDENT> def __init__(self, index: float, volume: float, in_func, out_func) -> None: <NEW_LINE> <INDENT> self.input_funcs = [in_func] if in_func is not None else [] <NEW_LINE> self.output_funcs = [out_func] if out_func is not None else [] <NEW_LINE> self.index = index <NEW_LINE> self.volume = volume <NEW_LINE> <DEDENT> def differential_eq(self, t: float, q: list) -> float: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return sum([func(t, q) for func in self.input_funcs]) - sum( [func(t, q) for func in self.output_funcs] ) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> raise TypeError("All inputs and outputs must be functions which take 2 arguments: time t and mass distribution vector q.") | Class to represent a pharmacological compartment, to be used in generating
a graph of compartments in the PKModel class. Functionality is to sum
up the appropriate partial differential equation for this compartment.
Fields:
- index: Index of this Compartment within the PKModel
- volume: Volume of this Compartment
- input_funcs: List of all input functions leading into this compartment
- output_funcs: List of all output function leading from this compartment
Methods:
- __init__: Initialise all fields of the class
- differential_eq: Sum of inputs and outputs to arrive at RHS of dq_i/dt | 6259906416aa5153ce401c0b |
class ProjectSearchViaDatabase(object): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> self._database = Database(config) <NEW_LINE> <DEDENT> def set_config(self, config): <NEW_LINE> <INDENT> self._database.set_config(config) <NEW_LINE> <DEDENT> def set_alternate_connection(self, conn): <NEW_LINE> <INDENT> self._database.set_alternate_connection(conn) <NEW_LINE> <DEDENT> def get_matching_projects(self, keyword): <NEW_LINE> <INDENT> conn = self._database.get_connection() <NEW_LINE> cursor = conn.cursor() <NEW_LINE> res = [] <NEW_LINE> try: <NEW_LINE> <INDENT> if keyword is None: <NEW_LINE> <INDENT> logger.debug('keyword is None getting all projects') <NEW_LINE> cursor.execute("SELECT Project_id,project_name FROM Project") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cursor.execute("SELECT Project_id,project_name FROM Project " "WHERE project_name ILIKE '%%" + keyword + "%%' OR " " project_desc ILIKE '%%" + keyword + "%%'") <NEW_LINE> <DEDENT> for tuple in cursor.fetchall(): <NEW_LINE> <INDENT> res.append(str(tuple[0]) + ' ' + str(tuple[1])) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> cursor.close() <NEW_LINE> conn.commit() <NEW_LINE> conn.close() <NEW_LINE> <DEDENT> return res | Searches for Projects via Database
| 62599064f7d966606f749451 |
class Environment(object): <NEW_LINE> <INDENT> def __init__(self, name="environment"): <NEW_LINE> <INDENT> self.__name = name <NEW_LINE> self.theta_t = 0. <NEW_LINE> self.phi_t = 0. <NEW_LINE> <DEDENT> def __call__(self, theta=None, phi=None, *args, **kwargs): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.__name | Basic environment class. More specific classes can inherit from this class to let agents sense light. | 625990641b99ca40022900cd |
class GetResultSet(ResultSet): <NEW_LINE> <INDENT> def getJSONFromString(self, str): <NEW_LINE> <INDENT> return json.loads(str) <NEW_LINE> <DEDENT> def get_NewAccessToken(self): <NEW_LINE> <INDENT> return self._output.get('NewAccessToken', None) <NEW_LINE> <DEDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None) <NEW_LINE> <DEDENT> def getComment(self): <NEW_LINE> <INDENT> return GoogleComment(self.getJSONFromString(self._output.get('Response', []))) | A ResultSet with methods tailored to the values returned by the Get Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution. | 6259906463d6d428bbee3e20 |
class PublicUserApiTests(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = APIClient() <NEW_LINE> <DEDENT> def test_create_valid_user_success(self): <NEW_LINE> <INDENT> payload = { 'email': '[email protected]', 'password': 'testpass', 'name': 'Test Name' } <NEW_LINE> res = self.client.post(CREATE_USER_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_201_CREATED) <NEW_LINE> user = get_user_model().objects.get(**res.data) <NEW_LINE> self.assertTrue(user.check_password(payload['password'])) <NEW_LINE> self.assertNotIn('password', res.data) <NEW_LINE> <DEDENT> def test_user_exists(self): <NEW_LINE> <INDENT> payload = { 'email': '[email protected]', 'password': 'testpass', 'name': 'Test Name' } <NEW_LINE> create_user(**payload) <NEW_LINE> res = self.client.post(CREATE_USER_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_password_too_short(self): <NEW_LINE> <INDENT> payload = { 'email': '[email protected]', 'password': 'pw', 'name': 'Test Name' } <NEW_LINE> res = self.client.post(CREATE_USER_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> user_exists = get_user_model().objects.filter( email=payload['email'] ).exists() <NEW_LINE> self.assertFalse(user_exists) <NEW_LINE> <DEDENT> def test_create_token_for_user(self): <NEW_LINE> <INDENT> payload = { 'email': '[email protected]', 'password': 'pw', 'name': 'Test Name' } <NEW_LINE> create_user(**payload) <NEW_LINE> res = self.client.post(TOKEN_URL, payload) <NEW_LINE> self.assertIn('token', res.data) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> <DEDENT> def test_create_token_invalid_creadentials(self): <NEW_LINE> <INDENT> create_user(email='[email protected]', password='testpass') <NEW_LINE> payload = { 'email': '[email protected]', 'password': 'wrongpassword' } <NEW_LINE> res = self.client.post(TOKEN_URL, payload) <NEW_LINE> self.assertNotIn('token', res.data) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_create_token_no_user(self): <NEW_LINE> <INDENT> payload = { 'email': '[email protected]', 'password': 'testpass', 'name': 'Test Name' } <NEW_LINE> res = self.client.post(TOKEN_URL, payload) <NEW_LINE> self.assertNotIn('token', res.data) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_create_token_missing_field(self): <NEW_LINE> <INDENT> res = self.client.post(TOKEN_URL, {'email': 'one', 'password': ''}) <NEW_LINE> self.assertNotIn('token', res.data) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_retrieve_user_aunauthorized(self): <NEW_LINE> <INDENT> res = self.client.get(ME_URL) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED) | Test the users API(public) | 625990644428ac0f6e659c61 |
class ProgressBarDrawer: <NEW_LINE> <INDENT> def __init__(self, progress_image): <NEW_LINE> <INDENT> self._progress_image = progress_image <NEW_LINE> <DEDENT> def draw(self, surface, rect, percentage): <NEW_LINE> <INDENT> crop_rect = self._progress_image.get_rect() <NEW_LINE> crop_rect.width = crop_rect.width * percentage <NEW_LINE> surface.blit(self._progress_image.subsurface(crop_rect), rect) | Progress bar drawer | 625990643d592f4c4edbc60c |
class HTTPProxy( Entity, EntityCreateMixin, EntityDeleteMixin, EntityReadMixin, EntitySearchMixin, EntityUpdateMixin, ): <NEW_LINE> <INDENT> def __init__(self, server_config=None, **kwargs): <NEW_LINE> <INDENT> self._fields = { 'name': entity_fields.StringField( required=True, str_type='alpha', length=(6, 12), unique=True ), 'url': entity_fields.URLField(required=True), 'username': entity_fields.StringField(), 'password': entity_fields.StringField(), 'organization': entity_fields.OneToManyField(Organization), 'location': entity_fields.OneToManyField(Location), } <NEW_LINE> self._meta = {'api_path': 'api/v2/http_proxies'} <NEW_LINE> super().__init__(server_config, **kwargs) <NEW_LINE> <DEDENT> def update_payload(self, fields=None): <NEW_LINE> <INDENT> return {'http_proxy': super().update_payload(fields)} <NEW_LINE> <DEDENT> def create_payload(self): <NEW_LINE> <INDENT> return {'http_proxy': super().create_payload()} <NEW_LINE> <DEDENT> def read(self, entity=None, attrs=None, ignore=None, params=None): <NEW_LINE> <INDENT> if ignore is None: <NEW_LINE> <INDENT> ignore = set() <NEW_LINE> <DEDENT> ignore.add('password') <NEW_LINE> ignore.add('organization') <NEW_LINE> ignore.add('location') <NEW_LINE> return super().read(entity, attrs, ignore, params) | A representation of a HTTP Proxy entity. | 625990640a50d4780f706957 |
class TableType(enum.Enum): <NEW_LINE> <INDENT> NEUTRON_CONTINUOUS = 'c' <NEW_LINE> NEUTRON_DISCRETE = 'd' <NEW_LINE> THERMAL_SCATTERING = 't' <NEW_LINE> DOSIMETRY = 'y' <NEW_LINE> PHOTOATOMIC = 'p' <NEW_LINE> PHOTONUCLEAR = 'u' <NEW_LINE> PROTON = 'h' <NEW_LINE> DEUTERON = 'o' <NEW_LINE> TRITON = 'r' <NEW_LINE> HELIUM3 = 's' <NEW_LINE> ALPHA = 'a' <NEW_LINE> @classmethod <NEW_LINE> def from_suffix(cls, suffix): <NEW_LINE> <INDENT> for member in cls: <NEW_LINE> <INDENT> if suffix.endswith(member.value): <NEW_LINE> <INDENT> return member <NEW_LINE> <DEDENT> <DEDENT> raise ValueError("Suffix '{}' has no corresponding ACE table type." .format(suffix)) | Type of ACE data table. | 625990644e4d562566373b36 |
class ActionCurves(object): <NEW_LINE> <INDENT> def __init__(self, curves=None): <NEW_LINE> <INDENT> super(ActionCurves, self).__init__() <NEW_LINE> if not curves: <NEW_LINE> <INDENT> curves = cmds.keyframe(q=True, selected=True, name=True) <NEW_LINE> <DEDENT> self.curves = curves <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.curves) | docstring for ActionCurves | 625990644f88993c371f10b6 |
class Enclosure(object): <NEW_LINE> <INDENT> def __init__(self, headers=()): <NEW_LINE> <INDENT> self.headers = Headers(headers) <NEW_LINE> self.content = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def sender(self): <NEW_LINE> <INDENT> return self.headers.sender <NEW_LINE> <DEDENT> @property <NEW_LINE> def receivers(self): <NEW_LINE> <INDENT> return self.headers.receivers <NEW_LINE> <DEDENT> def mime_object(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def mime(self): <NEW_LINE> <INDENT> mime = self.mime_object() <NEW_LINE> self.headers.prepare(mime) <NEW_LINE> return mime <NEW_LINE> <DEDENT> def string(self): <NEW_LINE> <INDENT> return self.mime().as_string() | Base class for Enclosure objects to inherit from.
An enclosure can be sent on it's own or wrapped
inside an Envelope object.
:param headers: Iterable of headers to include. | 62599064f548e778e596ccb9 |
class NoEnvironmentError(Exception): <NEW_LINE> <INDENT> pass | exception raised when no environment is matched | 62599064e76e3b2f99fda12f |
@Operations.register_operation("create_unique_constraint") <NEW_LINE> @BatchOperations.register_operation( "create_unique_constraint", "batch_create_unique_constraint" ) <NEW_LINE> @AddConstraintOp.register_add_constraint("unique_constraint") <NEW_LINE> class CreateUniqueConstraintOp(AddConstraintOp): <NEW_LINE> <INDENT> constraint_type = "unique" <NEW_LINE> def __init__( self, constraint_name: Optional[str], table_name: str, columns: Sequence[str], schema: Optional[str] = None, **kw ) -> None: <NEW_LINE> <INDENT> self.constraint_name = constraint_name <NEW_LINE> self.table_name = table_name <NEW_LINE> self.columns = columns <NEW_LINE> self.schema = schema <NEW_LINE> self.kw = kw <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_constraint( cls, constraint: "Constraint" ) -> "CreateUniqueConstraintOp": <NEW_LINE> <INDENT> constraint_table = sqla_compat._table_for_constraint(constraint) <NEW_LINE> uq_constraint = cast("UniqueConstraint", constraint) <NEW_LINE> kw: dict = {} <NEW_LINE> if uq_constraint.deferrable: <NEW_LINE> <INDENT> kw["deferrable"] = uq_constraint.deferrable <NEW_LINE> <DEDENT> if uq_constraint.initially: <NEW_LINE> <INDENT> kw["initially"] = uq_constraint.initially <NEW_LINE> <DEDENT> kw.update(uq_constraint.dialect_kwargs) <NEW_LINE> return cls( uq_constraint.name, constraint_table.name, [c.name for c in uq_constraint.columns], schema=constraint_table.schema, **kw, ) <NEW_LINE> <DEDENT> def to_constraint( self, migration_context: Optional["MigrationContext"] = None ) -> "UniqueConstraint": <NEW_LINE> <INDENT> schema_obj = schemaobj.SchemaObjects(migration_context) <NEW_LINE> return schema_obj.unique_constraint( self.constraint_name, self.table_name, self.columns, schema=self.schema, **self.kw, ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create_unique_constraint( cls, operations: "Operations", constraint_name: Optional[str], table_name: str, columns: Sequence[str], schema: Optional[str] = None, **kw ) -> Any: <NEW_LINE> <INDENT> op = cls(constraint_name, table_name, columns, schema=schema, **kw) <NEW_LINE> return operations.invoke(op) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def batch_create_unique_constraint( cls, operations: "BatchOperations", constraint_name: str, columns: Sequence[str], **kw ) -> Any: <NEW_LINE> <INDENT> kw["schema"] = operations.impl.schema <NEW_LINE> op = cls(constraint_name, operations.impl.table_name, columns, **kw) <NEW_LINE> return operations.invoke(op) | Represent a create unique constraint operation. | 6259906491af0d3eaad3b558 |
@provider(IFormFieldProvider) <NEW_LINE> class IMaybeMustRead(model.Schema): <NEW_LINE> <INDENT> must_read = schema.Bool( title=_( u'label_must_read_authenticated', default=u'All users must read this' ), required=False, default=False, ) | Choice whether this object MUST be read.
Only makes sense in combination with a read tracking behavior. | 62599064baa26c4b54d509d4 |
class InvalidFilePathException(OrigamiException): <NEW_LINE> <INDENT> STATUS_CODE = 502 | File not found for the path provided | 62599064be8e80087fbc07b8 |
class HypernetApiService(WsgiService): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def create(cls, app_name='hypernet'): <NEW_LINE> <INDENT> cfg.CONF.log_opt_values(LOG, std_logging.DEBUG) <NEW_LINE> service = cls(app_name) <NEW_LINE> return service | Class for hypernet-api service. | 6259906499cbb53fe6832614 |
class InternetExplorerRedirectMiddleware: <NEW_LINE> <INDENT> def _parse_major_ie_version_from_user_agent(self, user_agent): <NEW_LINE> <INDENT> search_result = _ie_version_regex.search(user_agent) <NEW_LINE> if search_result: <NEW_LINE> <INDENT> return int(search_result.groups()[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def process_request(self, request): <NEW_LINE> <INDENT> if not hasattr(settings, 'IE_VERSION_MINIMUM'): <NEW_LINE> <INDENT> logger.warning('InternetExplorerRedirectMiddleware is loaded ' 'but IE_VERSION_MINIMUM was not found in settings.') <NEW_LINE> return None <NEW_LINE> <DEDENT> if not hasattr(settings, 'IE_VERSION_UNSUPPORTED_REDIRECT_PATH'): <NEW_LINE> <INDENT> logger.warning('InternetExplorerRedirectMiddleware is loaded ' 'but IE_VERSION_UNSUPPORTED_REDIRECT_PATH was ' 'not found in settings.') <NEW_LINE> return None <NEW_LINE> <DEDENT> if 'HTTP_USER_AGENT' not in request.META: <NEW_LINE> <INDENT> logger.warning('The request did not include an HTTP_USER_AGENT') <NEW_LINE> return None <NEW_LINE> <DEDENT> if 'PATH_INFO' not in request.META: <NEW_LINE> <INDENT> logger.warning('The request did not include PATH_INFO') <NEW_LINE> return None <NEW_LINE> <DEDENT> request.ie_version = self._parse_major_ie_version_from_user_agent( request.META['HTTP_USER_AGENT']) <NEW_LINE> if request.ie_version is not None: <NEW_LINE> <INDENT> request.from_ie = True <NEW_LINE> if request.ie_version < settings.IE_VERSION_MINIMUM: <NEW_LINE> <INDENT> path = request.META['PATH_INFO'] <NEW_LINE> redirect_path = settings.IE_VERSION_UNSUPPORTED_REDIRECT_PATH <NEW_LINE> if path != redirect_path: <NEW_LINE> <INDENT> return HttpResponseRedirect(redirect_path) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> request.from_ie = False | Sets `from_ie` and `ie_version` on the request. If the `ie_version` is
less than `settings.IE_VERSION_MINIMUM` the response redirects to
`settings.IE_VERSION_UNSUPPORTED_REDIRECT_PATH` | 62599064fff4ab517ebcef51 |
class NotificationList(ListResource): <NEW_LINE> <INDENT> def __init__(self, version, service_sid): <NEW_LINE> <INDENT> super(NotificationList, self).__init__(version) <NEW_LINE> self._solution = {'service_sid': service_sid, } <NEW_LINE> self._uri = '/Services/{service_sid}/Notifications'.format(**self._solution) <NEW_LINE> <DEDENT> def create(self, body=values.unset, priority=values.unset, ttl=values.unset, title=values.unset, sound=values.unset, action=values.unset, data=values.unset, apn=values.unset, gcm=values.unset, sms=values.unset, facebook_messenger=values.unset, fcm=values.unset, segment=values.unset, alexa=values.unset, to_binding=values.unset, identity=values.unset, tag=values.unset): <NEW_LINE> <INDENT> data = values.of({ 'Identity': serialize.map(identity, lambda e: e), 'Tag': serialize.map(tag, lambda e: e), 'Body': body, 'Priority': priority, 'Ttl': ttl, 'Title': title, 'Sound': sound, 'Action': action, 'Data': serialize.object(data), 'Apn': serialize.object(apn), 'Gcm': serialize.object(gcm), 'Sms': serialize.object(sms), 'FacebookMessenger': serialize.object(facebook_messenger), 'Fcm': serialize.object(fcm), 'Segment': serialize.map(segment, lambda e: e), 'Alexa': serialize.object(alexa), 'ToBinding': serialize.map(to_binding, lambda e: e), }) <NEW_LINE> payload = self._version.create( 'POST', self._uri, data=data, ) <NEW_LINE> return NotificationInstance(self._version, payload, service_sid=self._solution['service_sid'], ) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Twilio.Notify.V1.NotificationList>' | PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. | 62599064f548e778e596ccba |
class Mint(AptGetInstall, rosdep.base_rosdep.RosdepBaseOS): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.mint_detector = roslib.os_detect.Mint() <NEW_LINE> self.version_map = {'9':'10.04', '8':'9.10', '7':'9.04', '6':'8.10', '5':'8.04'} <NEW_LINE> <DEDENT> def get_version(self): <NEW_LINE> <INDENT> return self.version_map[self.mint_detector.get_version()] <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return 'ubuntu' <NEW_LINE> <DEDENT> def check_presence(self): <NEW_LINE> <INDENT> return self.mint_detector.check_presence() <NEW_LINE> <DEDENT> pass | This is an implementation of a standard interface for
interacting with rosdep. Mint is closely coupled to Ubuntu, it
will masquerade as ubuntu for the purposes of rosdep. | 625990640c0af96317c578f7 |
class CollDeexcRateCoeff(ProcessingPlasmaProperty): <NEW_LINE> <INDENT> outputs = ("coll_deexc_coeff",) <NEW_LINE> latex_name = ("c_{ul}",) <NEW_LINE> def calculate(self, thermal_lte_level_boltzmann_factor, coll_exc_coeff): <NEW_LINE> <INDENT> level_lower_index = coll_exc_coeff.index.droplevel("level_number_upper") <NEW_LINE> level_upper_index = coll_exc_coeff.index.droplevel("level_number_lower") <NEW_LINE> n_lower_prop = thermal_lte_level_boltzmann_factor.loc[ level_lower_index ].values <NEW_LINE> n_upper_prop = thermal_lte_level_boltzmann_factor.loc[ level_upper_index ].values <NEW_LINE> coll_deexc_coeff = coll_exc_coeff * n_lower_prop / n_upper_prop <NEW_LINE> return coll_deexc_coeff | Attributes
----------
coll_deexc_coeff : pandas.DataFrame, dtype float
Rate coefficient for collisional deexcitation. | 6259906432920d7e50bc7777 |
class Bucket(Object): <NEW_LINE> <INDENT> def __init__( self, id=None, name=None, status=None, user=None, created=None, storage=None, transfer=None, pubkeys=None, publicPermissions=None, encryptionKey=None, index=None ): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.name = name <NEW_LINE> self.status = status <NEW_LINE> self.user = user <NEW_LINE> self.storage = storage <NEW_LINE> self.transfer = transfer <NEW_LINE> self.pubkeys = pubkeys <NEW_LINE> self.publicPermissions = publicPermissions <NEW_LINE> self.encryptionKey = encryptionKey <NEW_LINE> self.index = index <NEW_LINE> if created is not None: <NEW_LINE> <INDENT> self.created = datetime.fromtimestamp( strict_rfc3339.rfc3339_to_timestamp(created)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.created = None | Storage bucket.
A bucket is a logical grouping of files
which the user can assign permissions and limits to.
Attributes:
id (str): unique identifier.
name (str): name.
status (str): bucket status (Active, ...).
user (str): user email address.
created (:py:class:`datetime.datetime`): time when the bucket was created.
storage (int): storage limit (in GB).
transfer (int): transfer limit (in GB).
pubkeys (): | 625990643eb6a72ae038bd90 |
class Map(object): <NEW_LINE> <INDENT> n = None <NEW_LINE> m = None <NEW_LINE> units = {} <NEW_LINE> maze = None <NEW_LINE> def __init__(self, n, m): <NEW_LINE> <INDENT> self.n, self.m = n, m <NEW_LINE> self.maze = [['_'] * m for i in range(n)] | General visible map | 625990647047854f46340ae5 |
class FilesHandler(OutputHandler): <NEW_LINE> <INDENT> def __init__(self, gc, blobs_root): <NEW_LINE> <INDENT> OutputHandler.__init__(self) <NEW_LINE> self.gc = gc <NEW_LINE> self.blobs_root = blobs_root <NEW_LINE> self.message = _("Finding blobs ... {et} {ed}") <NEW_LINE> <DEDENT> def outputStat(self, h): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if not self.gc.quiet: <NEW_LINE> <INDENT> self.gc.eta.increment() <NEW_LINE> ProgressReporter.increment(self.message.format( et = self.gc.eta.eta_str() , ed = self.gc.eta.eta_delta_str())) <NEW_LINE> <DEDENT> depot_path = h['depotFile'] <NEW_LINE> deleted = 1 if h['action'] in ['delete','move/delete'] else 0 <NEW_LINE> sha1 = depot_path.replace(self.blobs_root,'') <NEW_LINE> sha1 = sha1.replace('/','') <NEW_LINE> if deleted: <NEW_LINE> <INDENT> self.gc.sql_insert_object(DELETED_BLOBS, depot_path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.gc.sql_insert_object(BLOBS, sha1, depot_path) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> LOG.exception("FilesHandler:outputStat {}".format(str(e))) <NEW_LINE> <DEDENT> return OutputHandler.HANDLED | OutputHandler for p4 files; prevents memory overrun of stat data. | 625990642c8b7c6e89bd4f21 |
class _BottleNeckLayer(nn.Module): <NEW_LINE> <INDENT> def __init__( self, num_intput_features: int, growth_rate: int, bn_size: int, drop_rate: float, memory_efficient: bool = False ) -> None: <NEW_LINE> <INDENT> super(_BottleNeckLayer, self).__init__() <NEW_LINE> self.norm1: nn.BatchNorm2d <NEW_LINE> self.add_module('norm1', nn.BatchNorm2d(num_intput_features)) <NEW_LINE> self.relu1: nn.ReLU <NEW_LINE> self.add_module('relu1', nn.ReLU(inplace = True)) <NEW_LINE> self.conv1: nn.Conv2d <NEW_LINE> self.add_module('conv1', nn.Conv2d(num_intput_features, growth_rate * bn_size, kernel_size = 1, stride = 1, bias = False)) <NEW_LINE> self.norm2: nn.BatchNorm2d <NEW_LINE> self.add_module('norm2', nn.BatchNorm2d(growth_rate * bn_size)) <NEW_LINE> self.relu2: nn.ReLU <NEW_LINE> self.add_module('relu2', nn.ReLU(inplace = True)) <NEW_LINE> self.conv2: nn.Conv2d <NEW_LINE> self.add_module('conv2', nn.Conv2d(bn_size * growth_rate, growth_rate, kernel_size = 3, stride = 1, padding = 1, bias = False)) <NEW_LINE> self.drop_rate = float(drop_rate) <NEW_LINE> self.memory_efficient = memory_efficient <NEW_LINE> <DEDENT> def bn_function(self, inputs: List[Tensor]) -> Tensor: <NEW_LINE> <INDENT> concated_features = torch.cat(inputs, 1) <NEW_LINE> bottleneck_output = self.conv1(self.relu1(self.norm1(concated_features))) <NEW_LINE> return bottleneck_output <NEW_LINE> <DEDENT> def forward(self, input: Tensor) -> Tensor: <NEW_LINE> <INDENT> if isinstance(input, Tensor): <NEW_LINE> <INDENT> prev_features = [input] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prev_features = input <NEW_LINE> <DEDENT> bottleneck_output = self.bn_function(prev_features) <NEW_LINE> new_features = self.conv2(self.relu2(self.norm2(bottleneck_output))) <NEW_LINE> if self.drop_rate > 0: <NEW_LINE> <INDENT> new_features = F.dropout(new_features, p = self.drop_rate, training = self.training) <NEW_LINE> <DEDENT> return new_features | Architecture: BN-ReLU-Conv(1x1)-BN-ReLU-Conv(3x3) of H_l layer
Let each 1x1 convolution produce 4*growth_rate feature-maps. | 62599064ac7a0e7691f73c16 |
class Groups(BaseDictObject): <NEW_LINE> <INDENT> changeNotificationName = "Groups.Changed" <NEW_LINE> beginUndoNotificationName = "Groups.BeginUndo" <NEW_LINE> endUndoNotificationName = "Groups.EndUndo" <NEW_LINE> beginRedoNotificationName = "Groups.BeginRedo" <NEW_LINE> endRedoNotificationName = "Groups.EndRedo" <NEW_LINE> setItemNotificationName = "Groups.GroupSet" <NEW_LINE> deleteItemNotificationName = "Groups.GroupDeleted" <NEW_LINE> clearNotificationName = "Groups.Cleared" <NEW_LINE> updateNotificationName = "Groups.Updated" <NEW_LINE> representationFactories = { "defcon.groups.kerningSide1Groups" : dict( factory=kerningSide1GroupsRepresentationFactory, destructiveNotifications=("Groups.Changed") ), "defcon.groups.kerningSide2Groups" : dict( factory=kerningSide2GroupsRepresentationFactory, destructiveNotifications=("Groups.Changed") ), "defcon.groups.kerningGlyphToSide1Group" : dict( factory=glyphToKerningSide1GroupsRepresentationFactory, destructiveNotifications=("Groups.Changed") ), "defcon.groups.kerningGlyphToSide2Group" : dict( factory=glyphToKerningSide2GroupsRepresentationFactory, destructiveNotifications=("Groups.Changed") ), } <NEW_LINE> def __init__(self, font=None): <NEW_LINE> <INDENT> self._font = None <NEW_LINE> if font is not None: <NEW_LINE> <INDENT> self._font = weakref.ref(font) <NEW_LINE> <DEDENT> super(Groups, self).__init__() <NEW_LINE> self.beginSelfNotificationObservation() <NEW_LINE> <DEDENT> def getParent(self): <NEW_LINE> <INDENT> return self.font <NEW_LINE> <DEDENT> def _get_font(self): <NEW_LINE> <INDENT> if self._font is not None: <NEW_LINE> <INDENT> return self._font() <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> font = property(_get_font, doc="The :class:`Font` that this object belongs to.") <NEW_LINE> def endSelfNotificationObservation(self): <NEW_LINE> <INDENT> super(Groups, self).endSelfNotificationObservation() <NEW_LINE> self._font = None | This object contains all of the groups in a font.
**This object posts the following notifications:**
- Groups.Changed
- Groups.BeginUndo
- Groups.EndUndo
- Groups.BeginRedo
- Groups.EndRedo
- Groups.GroupSet
- Groups.GroupDeleted
- Groups.Cleared
- Groups.Updated
This object behaves like a dict. The keys are group names and the
values are lists of glyph names::
{
"myGroup" : ["a", "b"],
"myOtherGroup" : ["a.alt", "g.alt"],
}
The API for interacting with the data is the same as a standard dict.
For example, to get a list of all group names::
groupNames = groups.keys()
To get all groups including the glyph lists::
for groupName, glyphList in groups.items():
To get the glyph list for a particular group name::
glyphList = groups["myGroup"]
To set the glyph list for a particular group name::
groups["myGroup"] = ["x", "y", "z"]
And so on.
**Note:** You should not modify the group list and expect the object to
know about it. For example, this could cause your changes to be lost::
glyphList = groups["myGroups"]
glyphList.append("n")
To make sure the change is noticed, reset the list into the object::
glyphList = groups["myGroups"]
glyphList.append("n")
groups["myGroups"] = glyphList
This may change in the future. | 6259906445492302aabfdc0d |
class TestRender: <NEW_LINE> <INDENT> def test(self): <NEW_LINE> <INDENT> class TestTable(tables.MemoryTable): <NEW_LINE> <INDENT> public_name = tables.Column(model_rel='private_name') <NEW_LINE> def render_public_name(self, data): <NEW_LINE> <INDENT> return "%s:%s" % (data['private_name'], data['additional']) <NEW_LINE> <DEDENT> <DEDENT> table = TestTable([{'private_name': 'FOO', 'additional': 'BAR'}]) <NEW_LINE> assert table.rows[0]['public_name'] == 'FOO:BAR' <NEW_LINE> <DEDENT> def test_not_sorted(self): <NEW_LINE> <INDENT> class TestTable(tables.MemoryTable): <NEW_LINE> <INDENT> foo = tables.Column() <NEW_LINE> def render_foo(self, data): <NEW_LINE> <INDENT> return -data['foo'] <NEW_LINE> <DEDENT> <DEDENT> table = TestTable([{'foo': 1}, {'foo': 2}], order_by='asc') <NEW_LINE> assert [r['foo'] for r in table.rows] == [-1, -2] | Test use of the render_* methods.
| 625990644a966d76dd5f0627 |
class SemesterCreateView(BSModalCreateView): <NEW_LINE> <INDENT> template_name = "semester/create.html" <NEW_LINE> form_class = TermModelForm <NEW_LINE> success_message = 'Success: Semester was added.' <NEW_LINE> def get_success_url(self): <NEW_LINE> <INDENT> return reverse_lazy('school:manage_semester', kwargs={'school_id':self.request.user.school.pk}) <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> if self.request.is_ajax(): <NEW_LINE> <INDENT> instance = form.save(commit=False) <NEW_LINE> instance.school = self.request.user.school <NEW_LINE> instance.save() <NEW_LINE> <DEDENT> return redirect(self.get_success_url()) | module doc | 625990644e4d562566373b39 |
@dataclasses.dataclass <NEW_LINE> class CreatorsConverter(sourdough.Converter): <NEW_LINE> <INDENT> base: str = 'creator' <NEW_LINE> parameters: Dict[str, Any] = dataclasses.field(default_factory = dict) <NEW_LINE> alternatives: Tuple[Type] = None <NEW_LINE> def validate(self, item: Any, instance: object) -> object: <NEW_LINE> <INDENT> new_creators = [] <NEW_LINE> for creator in item: <NEW_LINE> <INDENT> converter = instance.initialize_converter( name = 'creator', converter = 'creator') <NEW_LINE> new_creators.append(converter.validate( item = [creator, 'worker'], instance = instance)) <NEW_LINE> <DEDENT> return new_creators | Type converter for Creators.
Args:
base (str):
parameters (Dict[str, Any]):
alternatives (Tuple[Type])
| 625990641f5feb6acb16431d |
class InlineSearchInput(ForeignKeyRawIdWidget): <NEW_LINE> <INDENT> widget_template = None <NEW_LINE> search_path = '../foreignkey_autocomplete/' <NEW_LINE> admin_site = site <NEW_LINE> class Media: <NEW_LINE> <INDENT> css = { 'all': ('autocomplete/css/jquery.autocomplete.css',) } <NEW_LINE> js = ( 'autocomplete/js/jquery.bgiframe.min.js', 'autocomplete/js/jquery.ajaxQueue.js', 'autocomplete/js/jquery.autocomplete.js', ) <NEW_LINE> <DEDENT> def label_for_value(self, value): <NEW_LINE> <INDENT> key = self.rel.get_related_field().name <NEW_LINE> obj = self.rel.to._default_manager.get(**{key: value}) <NEW_LINE> return Truncate(obj).words(14) <NEW_LINE> <DEDENT> def __init__(self, rel, search_fields, attrs=None, admin_site=site): <NEW_LINE> <INDENT> self.search_fields = search_fields <NEW_LINE> self.admin_site = admin_site <NEW_LINE> super(InlineSearchInput, self).__init__(rel, admin_site, attrs ) <NEW_LINE> <DEDENT> def render(self, name, value, attrs=None): <NEW_LINE> <INDENT> if attrs is None: <NEW_LINE> <INDENT> attrs = {} <NEW_LINE> <DEDENT> output = [super(InlineSearchInput, self).render(name, value, attrs)] <NEW_LINE> opts = self.rel.to._meta <NEW_LINE> app_label = opts.app_label <NEW_LINE> model_name = opts.object_name.lower() <NEW_LINE> related_url = '../../../%s/%s/' % (app_label, model_name) <NEW_LINE> params = self.url_parameters() <NEW_LINE> if params: <NEW_LINE> <INDENT> url = '?' + '&'.join(['%s=%s' % (k, v) for k, v in params.items()]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> url = '' <NEW_LINE> <DEDENT> if not attrs.has_key('class'): <NEW_LINE> <INDENT> attrs['class'] = 'vForeignKeyRawIdAdminField' <NEW_LINE> <DEDENT> output = [forms.TextInput.render(self, name, value, attrs)] <NEW_LINE> if value: <NEW_LINE> <INDENT> label = self.label_for_value(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> label = u'' <NEW_LINE> <DEDENT> context = { 'url': url, 'related_url': related_url, 'admin_media_prefix': settings.STATIC_URL, 'search_path': self.search_path, 'search_fields': ','.join(self.search_fields), 'model_name': model_name, 'app_label': app_label, 'label': label, 'name': name, } <NEW_LINE> output.append(render_to_string(self.widget_template or ( '%s/%s/inline_searchinput.html' % (app_label, model_name), '%s/inline_searchinput.html' % app_label, 'admin/autocomplete/inline_searchinput.html', ), context)) <NEW_LINE> output.reverse() <NEW_LINE> return mark_safe(u''.join(output)) | A Widget for displaying ForeignKeys in an autocomplete search input
instead in a <select> box. | 62599064d7e4931a7ef3d71e |
class Reuss(AveragingScheme): <NEW_LINE> <INDENT> def average_bulk_moduli(self, volumes, bulk_moduli, shear_moduli): <NEW_LINE> <INDENT> return reuss_average_function(volumes, bulk_moduli) <NEW_LINE> <DEDENT> def average_shear_moduli(self, volumes, bulk_moduli, shear_moduli): <NEW_LINE> <INDENT> return reuss_average_function(volumes, shear_moduli) | Class for computing the Reuss (iso-stress) bound for elastic properties.
This derives from :class:`burnman.averaging_schemes.averaging_scheme`, and implements
the :func:`burnman.averaging_schemes.averaging_scheme.average_bulk_moduli` and
:func:`burnman.averaging_schemes.averaging_scheme.average_shear_moduli` functions. | 62599064a8ecb0332587294a |
class GridBuilder: <NEW_LINE> <INDENT> def __init__(self, parameters: Dict[Any, Any]): <NEW_LINE> <INDENT> if parameters is None or not isinstance(parameters, dict) or len(parameters) == 0: <NEW_LINE> <INDENT> raise ValueError("Parameters should be a valid non-empty dict") <NEW_LINE> <DEDENT> self.__parameters = parameters <NEW_LINE> self.__combinations = -1 <NEW_LINE> <DEDENT> def combinations(self) -> int: <NEW_LINE> <INDENT> prod = 1 <NEW_LINE> for key in self.__parameters: <NEW_LINE> <INDENT> row = self.__parameters[key] <NEW_LINE> prod *= len(row) <NEW_LINE> <DEDENT> return prod <NEW_LINE> <DEDENT> def random(self) -> Iterable[dict]: <NEW_LINE> <INDENT> result = list() <NEW_LINE> for space in self.sequenial(): <NEW_LINE> <INDENT> result.append(space) <NEW_LINE> <DEDENT> random.shuffle(result) <NEW_LINE> for v in result: <NEW_LINE> <INDENT> yield v <NEW_LINE> <DEDENT> <DEDENT> def sequenial(self) -> Iterable[dict]: <NEW_LINE> <INDENT> if self.__combinations < 0: <NEW_LINE> <INDENT> self.__combinations = self.combinations() <NEW_LINE> <DEDENT> keys = self.__parameters.keys() <NEW_LINE> if len(keys) == 1: <NEW_LINE> <INDENT> for key in keys: <NEW_LINE> <INDENT> for v in self.__parameters[key]: <NEW_LINE> <INDENT> yield {key: v} <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> counters = dict() <NEW_LINE> for key in keys: <NEW_LINE> <INDENT> counters[key] = 0 <NEW_LINE> <DEDENT> for primary in keys: <NEW_LINE> <INDENT> result = dict() <NEW_LINE> filtered_keys = list() <NEW_LINE> for key in keys: <NEW_LINE> <INDENT> if key == primary: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> filtered_keys.append(key) <NEW_LINE> <DEDENT> for pv in self.__parameters[primary]: <NEW_LINE> <INDENT> result[primary] = pv <NEW_LINE> counters = dict() <NEW_LINE> for key in keys: <NEW_LINE> <INDENT> counters[key] = 0 <NEW_LINE> <DEDENT> combinations_left = 1 <NEW_LINE> for incremental in filtered_keys: <NEW_LINE> <INDENT> combinations_left *= len(self.__parameters[incremental]) <NEW_LINE> <DEDENT> key_index = 0 <NEW_LINE> for i in range(combinations_left): <NEW_LINE> <INDENT> index = i <NEW_LINE> for r in range(len(filtered_keys) - 1, 0, -1): <NEW_LINE> <INDENT> key = filtered_keys[r] <NEW_LINE> counters[key] = index % len(self.__parameters[key]) <NEW_LINE> index //= len(self.__parameters[key]) <NEW_LINE> <DEDENT> counters[filtered_keys[0]] = index <NEW_LINE> for secondary in filtered_keys: <NEW_LINE> <INDENT> result[secondary] = self.__parameters[secondary][counters[secondary]] <NEW_LINE> <DEDENT> yield result.copy() <NEW_LINE> <DEDENT> <DEDENT> break | GridBuilder class provides utilities for building grid of all possible params, in a form suitable for hyperparameters search | 62599064be8e80087fbc07ba |
class AggregatedRunsByState(Model): <NEW_LINE> <INDENT> _attribute_map = { 'runs_count': {'key': 'runsCount', 'type': 'int'}, 'state': {'key': 'state', 'type': 'object'} } <NEW_LINE> def __init__(self, runs_count=None, state=None): <NEW_LINE> <INDENT> super(AggregatedRunsByState, self).__init__() <NEW_LINE> self.runs_count = runs_count <NEW_LINE> self.state = state | AggregatedRunsByState.
:param runs_count:
:type runs_count: int
:param state:
:type state: object | 6259906444b2445a339b74f9 |
class VerticalDivider(Widget): <NEW_LINE> <INDENT> __slots__ = ["_required_height"] <NEW_LINE> def __init__(self, height=Widget.FILL_COLUMN): <NEW_LINE> <INDENT> super(VerticalDivider, self).__init__(None, tab_stop=False) <NEW_LINE> self._required_height = height <NEW_LINE> <DEDENT> def process_event(self, event): <NEW_LINE> <INDENT> return event <NEW_LINE> <DEDENT> def update(self, frame_no): <NEW_LINE> <INDENT> (color, attr, background) = self._frame.palette["borders"] <NEW_LINE> vert = u"│" if self._frame.canvas.unicode_aware else "|" <NEW_LINE> for i in range(self._h): <NEW_LINE> <INDENT> self._frame.canvas.print_at(vert, self._x, self._y + i, color, attr, background) <NEW_LINE> <DEDENT> <DEDENT> def reset(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def required_height(self, offset, width): <NEW_LINE> <INDENT> return self._required_height <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self._value | A vertical divider for separating columns.
This widget should be put into a column of its own in the Layout. | 625990642ae34c7f260ac819 |
class bm_mt_reset_default_entry_result(object): <NEW_LINE> <INDENT> def __init__(self, ouch=None,): <NEW_LINE> <INDENT> self.ouch = ouch <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.ouch = InvalidTableOperation() <NEW_LINE> self.ouch.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('bm_mt_reset_default_entry_result') <NEW_LINE> if self.ouch is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('ouch', TType.STRUCT, 1) <NEW_LINE> self.ouch.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- ouch | 625990647d43ff2487427fa9 |
class ParseWithErrorHandlingTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_sanity(self): <NEW_LINE> <INDENT> database.clean() <NEW_LINE> text = "The fox is brown." <NEW_LINE> result = t.parse_with_error_handling(text) <NEW_LINE> expected_result = t.parser.raw_parse(text) <NEW_LINE> self.failUnless(result == expected_result) | Test the parse_with_error_handling method.
| 6259906432920d7e50bc7778 |
class NetworkManagerMethodException(NetworkManagerException): <NEW_LINE> <INDENT> pass | Required method of object does not exist exception | 62599064f548e778e596ccbc |
class FromPackageLoader: <NEW_LINE> <INDENT> pkg_name: str <NEW_LINE> search_paths: Tuple[str, ...] <NEW_LINE> def __init__(self, pkg_name: str, search_paths: Tuple[str, ...]=("", )) -> None: <NEW_LINE> <INDENT> self.pkg_name = pkg_name <NEW_LINE> self.search_paths = search_paths <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s(%r, %r)" % (type(self).__name__, self.pkg_name, self.search_paths) <NEW_LINE> <DEDENT> def __call__(self, base_path: Union[None, str, PackageResource], grammar_path: str) -> Tuple[PackageResource, str]: <NEW_LINE> <INDENT> if base_path is None: <NEW_LINE> <INDENT> to_try = self.search_paths <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not isinstance(base_path, PackageResource) or base_path.pkg_name != self.pkg_name: <NEW_LINE> <INDENT> raise IOError() <NEW_LINE> <DEDENT> to_try = [base_path.path] <NEW_LINE> <DEDENT> err = None <NEW_LINE> for path in to_try: <NEW_LINE> <INDENT> full_path = os.path.join(path, grammar_path) <NEW_LINE> try: <NEW_LINE> <INDENT> text: Optional[str] = pkgutil.get_data(self.pkg_name, full_path) <NEW_LINE> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> err = e <NEW_LINE> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return PackageResource(self.pkg_name, full_path), (text.decode() if text else '') <NEW_LINE> <DEDENT> <DEDENT> raise IOError('Cannot find grammar in given paths') from err | Provides a simple way of creating custom import loaders that load from packages via ``pkgutil.get_data`` instead of using `open`.
This allows them to be compatible even from within zip files.
Relative imports are handled, so you can just freely use them.
pkg_name: The name of the package. You can probably provide `__name__` most of the time.
search_paths: All the paths that will be searched on absolute imports. | 625990643eb6a72ae038bd92
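A sketch of how a loader like the one above is typically invoked (this matches Lark's custom import-loader protocol; the package and file names are illustrative):

    # Resolve 'common.lark' from the 'grammars' directory of the 'lark' package.
    loader = FromPackageLoader('lark', ('grammars',))
    resource, text = loader(None, 'common.lark')  # base_path=None -> try search_paths
    print(resource)  # PackageResource(pkg_name='lark', path='grammars/common.lark')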
@plugins.register <NEW_LINE> class SlrEccentricityParser(SinexParser): <NEW_LINE> <INDENT> max_line_width = 100 <NEW_LINE> def setup_parser(self): <NEW_LINE> <INDENT> return (self.slr_site_id, self.slr_site_eccentricity) <NEW_LINE> <DEDENT> @property <NEW_LINE> def slr_site_id(self): <NEW_LINE> <INDENT> return SinexBlock( marker="SITE/ID", fields=( SinexField("site_code", 1, "U4"), SinexField("point_code", 6, "U2"), SinexField("domes", 9, "U5"), SinexField("marker", 14, "U4"), SinexField("obs_code", 19, "U1"), SinexField("description", 21, "U22", "utf8"), SinexField("approx_lon", 44, "f8", "dms2deg"), SinexField("approx_lat", 56, "f8", "dms2deg"), SinexField("approx_height", 68, "f8"), SinexField("cdp_sod", 80, "U8"), ), parser=self.parse_site_id, ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def slr_site_eccentricity(self): <NEW_LINE> <INDENT> return SinexBlock( marker="SITE/ECCENTRICITY", fields=( SinexField("site_code", 1, "U4"), SinexField("point_code", 6, "U2"), SinexField("soln", 9, "U4"), SinexField("obs_code", 14, "U1"), SinexField("start_time", 16, "O", "epoch"), SinexField("end_time", 29, "O", "epoch"), SinexField("vector_type", 42, "U3"), SinexField("vector_1", 46, "f8"), SinexField("vector_2", 55, "f8"), SinexField("vector_3", 64, "f8"), SinexField("cdp_sod", 80, "U8"), ), parser=self.parse_site_eccentricity, ) <NEW_LINE> <DEDENT> def parse_site_id(self, data): <NEW_LINE> <INDENT> for d in data: <NEW_LINE> <INDENT> site_key = d["site_code"] <NEW_LINE> self.data.setdefault(site_key, dict()) <NEW_LINE> self.data[site_key] = dict( antenna_id=d["site_code"], marker=d["marker"], domes=d["domes"], name=d["description"], cdp_sod=d["cdp_sod"], ) <NEW_LINE> <DEDENT> <DEDENT> def parse_site_eccentricity(self, data): <NEW_LINE> <INDENT> for d in data: <NEW_LINE> <INDENT> start_time = datetime.min if d["start_time"] is None else d["start_time"] <NEW_LINE> end_time = datetime.max if d["end_time"] is None else d["end_time"] <NEW_LINE> key = (start_time, end_time) <NEW_LINE> if d["vector_type"] == "UNE": <NEW_LINE> <INDENT> self.data[d["site_code"]].setdefault(key, {}).update( dict(vector=(d["vector_3"], d["vector_2"], d["vector_1"]), coord_type="ENU") ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.data[d["site_code"]].setdefault(key, {}).update( dict(vector=(d["vector_1"], d["vector_2"], d["vector_3"]), coord_type=d["vector_type"]) ) | A parser for reading data from ITRF files in SNX format
| 625990647d847024c075db0a |
class TestVersion(test_utils.TestCase): <NEW_LINE> <INDENT> fixtures = ['base/addon_3615'] <NEW_LINE> def test_compatible_apps(self): <NEW_LINE> <INDENT> v = Version.objects.get(pk=81551) <NEW_LINE> assert amo.FIREFOX in v.compatible_apps, "Missing Firefox >_<" <NEW_LINE> <DEDENT> def test_supported_platforms(self): <NEW_LINE> <INDENT> v = Version.objects.get(pk=81551) <NEW_LINE> assert amo.PLATFORM_ALL in v.supported_platforms <NEW_LINE> <DEDENT> def test_major_minor(self): <NEW_LINE> <INDENT> v = Version(version='3.0.12b2') <NEW_LINE> eq_(v.major, 3) <NEW_LINE> eq_(v.minor1, 0) <NEW_LINE> eq_(v.minor2, 12) <NEW_LINE> eq_(v.minor3, None) <NEW_LINE> eq_(v.alpha, 'b') <NEW_LINE> eq_(v.alpha_ver, 2) <NEW_LINE> v = Version(version='3.6.1apre2+') <NEW_LINE> eq_(v.major, 3) <NEW_LINE> eq_(v.minor1, 6) <NEW_LINE> eq_(v.minor2, 1) <NEW_LINE> eq_(v.alpha, 'a') <NEW_LINE> eq_(v.pre, 'pre') <NEW_LINE> eq_(v.pre_ver, 2) <NEW_LINE> v = Version(version='') <NEW_LINE> eq_(v.major, None) <NEW_LINE> eq_(v.minor1, None) <NEW_LINE> eq_(v.minor2, None) <NEW_LINE> eq_(v.minor3, None) <NEW_LINE> <DEDENT> def test_has_files(self): <NEW_LINE> <INDENT> v = Version.objects.get(pk=81551) <NEW_LINE> assert v.has_files, 'Version with files not recognized.' <NEW_LINE> v.files.all().delete() <NEW_LINE> v = Version.objects.get(pk=81551) <NEW_LINE> assert not v.has_files, 'Version without files not recognized.' <NEW_LINE> <DEDENT> def _get_version(self, status): <NEW_LINE> <INDENT> v = Version() <NEW_LINE> v.all_files = [mock.Mock()] <NEW_LINE> v.all_files[0].status = status <NEW_LINE> return v <NEW_LINE> <DEDENT> def test_is_unreviewed(self): <NEW_LINE> <INDENT> assert self._get_version(amo.STATUS_UNREVIEWED).is_unreviewed <NEW_LINE> assert self._get_version(amo.STATUS_PENDING).is_unreviewed <NEW_LINE> assert not self._get_version(amo.STATUS_PUBLIC).is_unreviewed | Test methods of the version class. | 62599064009cb60464d02c6b |
class GradientAnisotropicDiffusion(miapy_fltr.IFilter): <NEW_LINE> <INDENT> def __init__(self, time_step: float=0.125, conductance: int=3, conductance_scaling_update_interval: int=1, no_iterations: int=5): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.time_step = time_step <NEW_LINE> self.conductance = conductance <NEW_LINE> self.conductance_scaling_update_interval = conductance_scaling_update_interval <NEW_LINE> self.no_iterations = no_iterations <NEW_LINE> <DEDENT> def execute(self, image: sitk.Image, params: miapy_fltr.IFilterParams=None) -> sitk.Image: <NEW_LINE> <INDENT> return sitk.GradientAnisotropicDiffusion(sitk.Cast(image, sitk.sitkFloat32), self.time_step, self.conductance, self.conductance_scaling_update_interval, self.no_iterations) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'GradientAnisotropicDiffusion:\n' ' time_step: {self.time_step}\n' ' conductance: {self.conductance}\n' ' conductance_scaling_update_interval: {self.conductance_scaling_update_interval}\n' ' no_iterations: {self.no_iterations}\n' .format(self=self) | Represents a gradient anisotropic diffusion filter. | 62599064462c4b4f79dbd13a |
@patch("dakara_player.media_player.vlc.METADATA_KEYS_COUNT", 10) <NEW_LINE> class SetMetadataTestCase(TestCase): <NEW_LINE> <INDENT> def test_set_first(self): <NEW_LINE> <INDENT> media = MagicMock() <NEW_LINE> media.get_meta.return_value = None <NEW_LINE> set_metadata(media, {"data": "value"}) <NEW_LINE> media.set_meta.assert_called_with(0, '{"data": "value"}') <NEW_LINE> <DEDENT> def test_set_second(self): <NEW_LINE> <INDENT> media = MagicMock() <NEW_LINE> media.get_meta.side_effect = ["value", None] <NEW_LINE> set_metadata(media, {"data": "value"}) <NEW_LINE> media.set_meta.assert_called_with(1, '{"data": "value"}') <NEW_LINE> <DEDENT> def test_set_fail(self): <NEW_LINE> <INDENT> media = MagicMock() <NEW_LINE> media.get_meta.return_value = "value" <NEW_LINE> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> set_metadata(media, {"data": "value"}) | Test the set_metadata function. | 62599064e76e3b2f99fda133 |
@register_command <NEW_LINE> class CanaryCommand(GenericCommand): <NEW_LINE> <INDENT> _cmdline_ = "canary" <NEW_LINE> _syntax_ = _cmdline_ <NEW_LINE> @only_if_gdb_running <NEW_LINE> def do_invoke(self, argv: List[str]) -> None: <NEW_LINE> <INDENT> self.dont_repeat() <NEW_LINE> has_canary = checksec(get_filepath())["Canary"] <NEW_LINE> if not has_canary: <NEW_LINE> <INDENT> warn("This binary was not compiled with SSP.") <NEW_LINE> return <NEW_LINE> <DEDENT> res = gef.session.canary <NEW_LINE> if not res: <NEW_LINE> <INDENT> err("Failed to get the canary") <NEW_LINE> return <NEW_LINE> <DEDENT> canary, location = res <NEW_LINE> info(f"The canary of process {gef.session.pid} is at {location:#x}, value is {canary:#x}") <NEW_LINE> return | Shows the canary value of the current process. | 62599064097d151d1a2c279f |
class QuantileAT(base.BaseEstimator): <NEW_LINE> <INDENT> def __init__(self, gamma=0.5, alpha=1., verbose=0, max_iter=1000, kernel_type='linear', kernel_param=1, loss_function='logistic', opt_type = 'SGD', opt_params={'learning_rate': 1e-8}, classes_=[]): <NEW_LINE> <INDENT> self.gamma = gamma <NEW_LINE> self.alpha = alpha <NEW_LINE> self.verbose = verbose <NEW_LINE> self.max_iter = max_iter <NEW_LINE> self.kernel_type = kernel_type <NEW_LINE> self.kernel_param = kernel_param <NEW_LINE> self.loss_function = loss_function <NEW_LINE> self.opt_type = opt_type <NEW_LINE> self.opt_params = opt_params <NEW_LINE> self.scale = 1 <NEW_LINE> self.classes_ = classes_ <NEW_LINE> <DEDENT> def fit(self, X, y, sample_weight=None): <NEW_LINE> <INDENT> _y = np.array(y).astype(np.int) <NEW_LINE> if np.abs(_y - y).sum() > 0.1: <NEW_LINE> <INDENT> raise ValueError('y must only contain integer values') <NEW_LINE> <DEDENT> if(self.classes_ == []): <NEW_LINE> <INDENT> self.classes_ = np.unique(y) <NEW_LINE> <DEDENT> self.n_class_ = self.classes_.max() - self.classes_.min() + 1 <NEW_LINE> self.train_set = X <NEW_LINE> K = transform_kernel(X, X, self.kernel_type, self.kernel_param) <NEW_LINE> self.scale = 1.0 <NEW_LINE> K = K * self.scale <NEW_LINE> y_tmp = y - y.min() <NEW_LINE> self.coef_, self.theta_ = threshold_fit_quantile( K, y_tmp, self.alpha, self.gamma, self.n_class_, self.kernel_type, self.loss_function, self.opt_type, self.opt_params, mode='AE', verbose=self.verbose, max_iter=self.max_iter, sample_weight=sample_weight) <NEW_LINE> return self <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> K = transform_kernel(X, self.train_set, self.kernel_type, self.kernel_param, scale=self.scale) <NEW_LINE> return threshold_predict(K, self.coef_, self.theta_) + self.classes_.min() <NEW_LINE> <DEDENT> def predict_proba(self, X): <NEW_LINE> <INDENT> K = transform_kernel(X, self.train_set, self.kernel_type, self.kernel_param, scale=self.scale) <NEW_LINE> return threshold_proba(K, self.coef_, self.theta_) <NEW_LINE> <DEDENT> def score(self, X, y, sample_weight=None): <NEW_LINE> <INDENT> K = transform_kernel(X, self.train_set, self.kernel_type, self.kernel_param, scale=self.scale) <NEW_LINE> pred = self.predict(K) <NEW_LINE> return metrics.accuracy_score( pred, y, sample_weight=sample_weight) | Classifier that implements the ordinal logistic model for quantile estimation | 625990648da39b475be0491d |
@registry.expose(["Strategy", "MultiArmedBandit"]) <NEW_LINE> class MultiArmedBandit_ListISingleAssetStrategyISingleAssetStrategyIAccountIAccountFloatFloatFloatListFloatListFloat(ISingleAssetStrategy,_MultiarmedBandit2_Impl): <NEW_LINE> <INDENT> def __init__(self, strategies = None, account = None, weight = None, normalizer = None, corrector = None): <NEW_LINE> <INDENT> from marketsim import rtti <NEW_LINE> from marketsim.gen._out.strategy.weight.trader._trader_traderefficiencytrend import trader_TraderEfficiencyTrend_Float as _strategy_weight_trader_trader_TraderEfficiencyTrend_Float <NEW_LINE> from marketsim.gen._out.strategy._noise import Noise_IEventSideIObservableIOrder as _strategy_Noise_IEventSideIObservableIOrder <NEW_LINE> from marketsim.gen._out.strategy.account.inner._inner_virtualmarket import inner_VirtualMarket_ as _strategy_account_inner_inner_VirtualMarket_ <NEW_LINE> from marketsim.gen._out.strategy.weight.f._f_atanpow import f_AtanPow_Float as _strategy_weight_f_f_AtanPow_Float <NEW_LINE> from marketsim.gen._out.strategy.weight.array._array_identityl import array_IdentityL_ as _strategy_weight_array_array_IdentityL_ <NEW_LINE> self.strategies = strategies if strategies is not None else [_strategy_Noise_IEventSideIObservableIOrder()] <NEW_LINE> self.account = account if account is not None else _strategy_account_inner_inner_VirtualMarket_() <NEW_LINE> self.weight = weight if weight is not None else _strategy_weight_trader_trader_TraderEfficiencyTrend_Float() <NEW_LINE> self.normalizer = normalizer if normalizer is not None else _strategy_weight_f_f_AtanPow_Float() <NEW_LINE> self.corrector = corrector if corrector is not None else _strategy_weight_array_array_IdentityL_() <NEW_LINE> rtti.check_fields(self) <NEW_LINE> _MultiarmedBandit2_Impl.__init__(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def label(self): <NEW_LINE> <INDENT> return repr(self) <NEW_LINE> <DEDENT> _properties = { 'strategies' : listOf(ISingleAssetStrategy), 'account' : IFunctionIAccountISingleAssetStrategy, 'weight' : IFunctionIFunctionfloatIAccount, 'normalizer' : IFunctionIFunctionfloatIFunctionfloat, 'corrector' : IFunctionIFunctionlistOffloatlistOffloat } <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "MultiArmedBandit(%(strategies)s, %(account)s, %(weight)s, %(normalizer)s, %(corrector)s)" % self.__dict__ | In some moments of time the efficiency of the strategies is evaluated
These efficiencies are mapped into weights using the *weight* and *normalizer*
functions for every strategy, and the *corrector* for the whole collection of weights.
These weights are used to randomly choose a strategy to run for the next quantum of time.
All other strategies are suspended. | 6259906476e4537e8c3f0cb6
class DummyDocument(object): <NEW_LINE> <INDENT> def __init__(self, reporter): <NEW_LINE> <INDENT> self.reporter = reporter | Used where the signature requires a docutils.node.Document but only its reporter
is being used.
As of the current Sphinx (2.3.1), calls to env.get_doctree() (which would get
us said docutils.node.Document) result in pickle.load() calls that are expensive
enough to dominate the runtime entirely when the number of endpoints and references
is large enough. | 625990644e4d562566373b3b
class GitItem(ItemModel): <NEW_LINE> <INDENT> _attribute_map = { '_links': {'key': '_links', 'type': 'ReferenceLinks'}, 'content': {'key': 'content', 'type': 'str'}, 'content_metadata': {'key': 'contentMetadata', 'type': 'FileContentMetadata'}, 'is_folder': {'key': 'isFolder', 'type': 'bool'}, 'is_sym_link': {'key': 'isSymLink', 'type': 'bool'}, 'path': {'key': 'path', 'type': 'str'}, 'url': {'key': 'url', 'type': 'str'}, 'commit_id': {'key': 'commitId', 'type': 'str'}, 'git_object_type': {'key': 'gitObjectType', 'type': 'object'}, 'latest_processed_change': {'key': 'latestProcessedChange', 'type': 'GitCommitRef'}, 'object_id': {'key': 'objectId', 'type': 'str'}, 'original_object_id': {'key': 'originalObjectId', 'type': 'str'} } <NEW_LINE> def __init__(self, _links=None, content=None, content_metadata=None, is_folder=None, is_sym_link=None, path=None, url=None, commit_id=None, git_object_type=None, latest_processed_change=None, object_id=None, original_object_id=None): <NEW_LINE> <INDENT> super(GitItem, self).__init__(_links=_links, content=content, content_metadata=content_metadata, is_folder=is_folder, is_sym_link=is_sym_link, path=path, url=url) <NEW_LINE> self.commit_id = commit_id <NEW_LINE> self.git_object_type = git_object_type <NEW_LINE> self.latest_processed_change = latest_processed_change <NEW_LINE> self.object_id = object_id <NEW_LINE> self.original_object_id = original_object_id | GitItem.
:param _links:
:type _links: :class:`ReferenceLinks <git.v4_1.models.ReferenceLinks>`
:param content:
:type content: str
:param content_metadata:
:type content_metadata: :class:`FileContentMetadata <git.v4_1.models.FileContentMetadata>`
:param is_folder:
:type is_folder: bool
:param is_sym_link:
:type is_sym_link: bool
:param path:
:type path: str
:param url:
:type url: str
:param commit_id: SHA1 of commit item was fetched at
:type commit_id: str
:param git_object_type: Type of object (Commit, Tree, Blob, Tag, ...)
:type git_object_type: object
:param latest_processed_change: Shallow ref to commit that last changed this item. Only populated if latestProcessedChange is requested. May not be accurate if the latest change is not yet cached
:type latest_processed_change: :class:`GitCommitRef <git.v4_1.models.GitCommitRef>`
:param object_id: Git object id
:type object_id: str
:param original_object_id: Git object id
:type original_object_id: str | 62599064dd821e528d6da51b |
class TopicStoriesFeed(StoriesFeed): <NEW_LINE> <INDENT> def get_object(self, request, slug): <NEW_LINE> <INDENT> return get_object_or_404(Category, categorytranslation__slug=slug) <NEW_LINE> <DEDENT> def title(self, obj): <NEW_LINE> <INDENT> return "%s %s %s" % (settings.STORYBASE_SITE_NAME, obj.name, _("Stories")) <NEW_LINE> <DEDENT> def description(self, obj): <NEW_LINE> <INDENT> return _("Recent ") + obj.name + _(" stories from ") + settings.STORYBASE_SITE_NAME <NEW_LINE> <DEDENT> def link(self, obj): <NEW_LINE> <INDENT> return "%s?topics=%s" % (reverse('explore_stories'), obj.pk) <NEW_LINE> <DEDENT> def items(self, obj): <NEW_LINE> <INDENT> return Story.objects.exclude(source__relation_type='connected').published().filter(topics=obj).order_by('-published')[:25] | Generates a feed of the 25 most recently published stories in a particular
topic.
The topic is passed to the feed via a ``slug`` keyword argument in the URL
configuration for the feed. | 625990643539df3088ecd9d2 |
class xi2P(mcfit): <NEW_LINE> <INDENT> def __init__(self, r, l=0, deriv=0, q=1.5, **kwargs): <NEW_LINE> <INDENT> self.l = l <NEW_LINE> MK = kernels.Mellin_SphericalBesselJ(l, deriv) <NEW_LINE> mcfit.__init__(self, r, MK, q, **kwargs) <NEW_LINE> self.prefac *= self.x**3 <NEW_LINE> phase = (-1 if l & 2 else 1) * (1j if l & 1 else 1) <NEW_LINE> self.postfac *= (2*pi)**1.5 / phase | Correlation function to power spectrum, also radial profile to its
Fourier transform.
Parameters
----------
r : see `x` in :class:`mcfit.mcfit`
l : int
order
See :class:`mcfit.mcfit` | 6259906444b2445a339b74fa |
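A usage sketch for the transform above; mcfit instances are callable, and the input profile here is illustrative:

    import numpy as np
    r = np.logspace(-2, 3, num=60, endpoint=False)
    xi = 1.0 / (1.0 + r * r)     # illustrative correlation function
    k, P = xi2P(r, l=0)(xi)      # returns the output grid and the transform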
class ExecutePreprocessor(Preprocessor, NotebookClient): <NEW_LINE> <INDENT> def __init__(self, **kw): <NEW_LINE> <INDENT> nb = kw.get('nb') <NEW_LINE> Preprocessor.__init__(self, nb=nb, **kw) <NEW_LINE> NotebookClient.__init__(self, nb, **kw) <NEW_LINE> <DEDENT> def _check_assign_resources(self, resources): <NEW_LINE> <INDENT> if resources or not hasattr(self, 'resources'): <NEW_LINE> <INDENT> self.resources = resources <NEW_LINE> <DEDENT> <DEDENT> def preprocess(self, nb, resources=None, km=None): <NEW_LINE> <INDENT> NotebookClient.__init__(self, nb, km) <NEW_LINE> self._check_assign_resources(resources) <NEW_LINE> self.execute() <NEW_LINE> return self.nb, self.resources <NEW_LINE> <DEDENT> async def async_execute_cell( self, cell: NotebookNode, cell_index: int, execution_count: Optional[int] = None, store_history: bool = False) -> NotebookNode: <NEW_LINE> <INDENT> self.store_history = store_history <NEW_LINE> cell, resources = self.preprocess_cell(cell, self.resources, cell_index) <NEW_LINE> if execution_count and cell.cell_type == 'code' and cell.source.strip(): <NEW_LINE> <INDENT> cell['execution_count'] = execution_count <NEW_LINE> <DEDENT> return cell, resources <NEW_LINE> <DEDENT> def preprocess_cell(self, cell, resources, index, **kwargs): <NEW_LINE> <INDENT> self._check_assign_resources(resources) <NEW_LINE> cell = run_sync(NotebookClient.async_execute_cell)(self, cell, index, store_history=self.store_history) <NEW_LINE> return cell, self.resources | Executes all the cells in a notebook | 62599064be8e80087fbc07bc |
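The standard nbconvert pattern for driving the preprocessor above, as a sketch (the notebook paths are illustrative):

    import nbformat
    nb = nbformat.read('example.ipynb', as_version=4)
    ep = ExecutePreprocessor(timeout=600, kernel_name='python3')
    nb, resources = ep.preprocess(nb, {'metadata': {'path': '.'}})
    nbformat.write(nb, 'executed.ipynb')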
class TestGetFactoryName(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.board = 'x86-alex' <NEW_LINE> self.version_string = '0.12.433.269/stable' <NEW_LINE> <DEDENT> def testFactoryUseDefaultNaming(self): <NEW_LINE> <INDENT> expected = (IMAGE_SERVER_PREFIX + '/stable-channel/x86-alex/0.12.433.269', ['chromeos-factory', '0.12.433.269', 'x86-alex', '.zip']) <NEW_LINE> actual = cb_name_lib.GetFactoryName(self.board, self.version_string, 0) <NEW_LINE> self.assertEqual(expected, actual) <NEW_LINE> <DEDENT> def testFactoryUseAlternativeNamingOne(self): <NEW_LINE> <INDENT> expected = (IMAGE_SERVER_PREFIX + '/stable-channel/x86-alex-rc/' + '0.12.433.269', ['chromeos-factory', '0.12.433.269', 'x86-alex', '.zip']) <NEW_LINE> actual = cb_name_lib.GetFactoryName(self.board, self.version_string, 1) <NEW_LINE> self.assertEqual(expected, actual) <NEW_LINE> <DEDENT> def testFactoryUseAlternativeNamingTwo(self): <NEW_LINE> <INDENT> mod_prefix = '/'.join(IMAGE_SERVER_PREFIX.split('/')[:-1]) <NEW_LINE> expected = (mod_prefix + '/stable-channel/x86-alex/0.12.433.269', ['chromeos-factory', '0.12.433.269', 'x86-alex', '.zip']) <NEW_LINE> actual = cb_name_lib.GetFactoryName(self.board, self.version_string, 2) <NEW_LINE> self.assertEqual(expected, actual) <NEW_LINE> <DEDENT> def testFactoryUseAlternativeNamingThree(self): <NEW_LINE> <INDENT> expected = (IMAGE_GSD_PREFIX + '/stable-channel/x86-alex/0.12.433.269', ['chromeos-factory', '0.12.433.269', 'x86-alex', '.zip']) <NEW_LINE> actual = cb_name_lib.GetFactoryName(self.board, self.version_string, 3) <NEW_LINE> self.assertEqual(expected, actual) | Tests related to GetFactoryName. | 625990649c8ee82313040d22 |
class Env(dict): <NEW_LINE> <INDENT> def __init__(self, options): <NEW_LINE> <INDENT> dict.__init__(self) <NEW_LINE> self.options = options <NEW_LINE> <DEDENT> def _report_and_set_hdlmake_var(self, name): <NEW_LINE> <INDENT> def _get(name): <NEW_LINE> <INDENT> assert not name.startswith("HDLMAKE_") <NEW_LINE> assert isinstance(name, six.string_types) <NEW_LINE> name = name.upper() <NEW_LINE> return os.environ.get("HDLMAKE_%s" % name) <NEW_LINE> <DEDENT> name = name.upper() <NEW_LINE> val = _get(name) <NEW_LINE> if val: <NEW_LINE> <INDENT> logging.debug('Environmental variable HDLMAKE_%s is set: "%s".', name, val) <NEW_LINE> self[name.lower()] = val <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.warning("Environmental variable HDLMAKE_%s is not set.", name) <NEW_LINE> self[name.lower()] = None <NEW_LINE> return False | The Env (Environment) is a dictionary containing the environmental
variables related to HDLMake, for proper use in the Python code. | 6259906491f36d47f2231a29
class TrafficEventType(Enum): <NEW_LINE> <INDENT> NORMAL_DRIVING = 0 <NEW_LINE> COLLISION_STATIC = 1 <NEW_LINE> COLLISION_VEHICLE = 2 <NEW_LINE> COLLISION_PEDESTRIAN = 3 <NEW_LINE> ROUTE_DEVIATION = 4 <NEW_LINE> ROUTE_COMPLETION = 5 <NEW_LINE> ROUTE_COMPLETED = 6 <NEW_LINE> TRAFFIC_LIGHT_INFRACTION = 7 <NEW_LINE> WRONG_WAY_INFRACTION = 8 <NEW_LINE> ON_SIDEWALK_INFRACTION = 9 <NEW_LINE> STOP_INFRACTION = 10 | This enum represents different traffic events that occur during driving. | 6259906445492302aabfdc10 |
class Command(object): <NEW_LINE> <INDENT> _default_verbosity = False <NEW_LINE> @classmethod <NEW_LINE> def set_default_verbosity(cls, flag): <NEW_LINE> <INDENT> cls._default_verbosity = flag <NEW_LINE> <DEDENT> def __init__(self, command_string, verbose=None): <NEW_LINE> <INDENT> self._verbose = verbose if verbose is not None else Command._default_verbosity <NEW_LINE> self._command_string = command_string <NEW_LINE> <DEDENT> def __call__(self, ins=None, outs=None): <NEW_LINE> <INDENT> ins = self._format_args(ins) <NEW_LINE> outs = self._format_args(outs) <NEW_LINE> command = self._command_string.format(ins=ins, outs=outs) <NEW_LINE> self._run_command(command) <NEW_LINE> <DEDENT> def _format_args(self, args): <NEW_LINE> <INDENT> if args is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if isinstance(args, list): <NEW_LINE> <INDENT> return '" "'.join([str(i) for i in args]) <NEW_LINE> <DEDENT> elif not isinstance(args, str): <NEW_LINE> <INDENT> return str(args) <NEW_LINE> <DEDENT> return args <NEW_LINE> <DEDENT> def _run_command(self, command): <NEW_LINE> <INDENT> if self._verbose: <NEW_LINE> <INDENT> print(command) <NEW_LINE> <DEDENT> os.system(command) <NEW_LINE> <DEDENT> def each(self, ins=None, outs=None): <NEW_LINE> <INDENT> for i in range(len(ins)): <NEW_LINE> <INDENT> ins_i = ins[i] <NEW_LINE> outs_i = outs[i] if isinstance(outs, list) else outs <NEW_LINE> self(ins_i, outs_i) | Command represents a reusable shell command.
Examples creating a command:
concat = Command('cat "{ins}" > "{outs}"')
concat2 = Command('cat "{ins}" > "{ins}.2"')
IMPORTANT: Always enclose placeholders in double quotes.
Example calls:
concat('f1', 'f2') # copies f1 to f2
concat(['f1', 'f2'], 'f3') # lists, concats f1 and f2 into f3
concat(pathlib.Path('f1'), 'f2') # you may pass Paths.
concat(o1, o2) # o1 and o2 will be converted to a string via str().
concat(Path('.').glob('*.txt'), 'out.txt') # globbing
concat(outs='f3', ins=['f1', 'f2']) # named parameters
concat2('f1') # copies f1 to f1.2 ; optional parameters
concat2.each(['f1', 'f2']) # copies f1 to f1.2 and f2 to f2.2
concat.each(['f1', 'f2'], ['f3', 'f4']) # copies f1 to f3 and f2 to f4 | 625990644428ac0f6e659c66 |
class MessageEventDefinition(NamedEventDefinition): <NEW_LINE> <INDENT> def __init__(self, name, payload=None, result_var=None): <NEW_LINE> <INDENT> super(MessageEventDefinition, self).__init__(name) <NEW_LINE> self.payload = payload <NEW_LINE> self.result_var = result_var <NEW_LINE> <DEDENT> def catch(self, my_task, event_definition): <NEW_LINE> <INDENT> if event_definition.result_var is None: <NEW_LINE> <INDENT> result_var = f'{my_task.task_spec.name}_Response' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result_var = event_definition.result_var <NEW_LINE> <DEDENT> my_task.internal_data[event_definition.name] = result_var, event_definition.payload <NEW_LINE> super(MessageEventDefinition, self).catch(my_task, event_definition) <NEW_LINE> <DEDENT> def throw(self, my_task): <NEW_LINE> <INDENT> result = my_task.workflow.script_engine.evaluate(my_task, self.payload) <NEW_LINE> event = MessageEventDefinition(self.name, payload=result, result_var=self.result_var) <NEW_LINE> self._throw(event, my_task.workflow, my_task.workflow.outer_workflow) <NEW_LINE> <DEDENT> def reset(self, my_task): <NEW_LINE> <INDENT> my_task.internal_data.pop(self.name, None) <NEW_LINE> super(MessageEventDefinition, self).reset(my_task) <NEW_LINE> <DEDENT> def serialize(self): <NEW_LINE> <INDENT> retdict = super(MessageEventDefinition, self).serialize() <NEW_LINE> retdict['payload'] = self.payload <NEW_LINE> retdict['result_var'] = self.result_var <NEW_LINE> return retdict | Message Events have both a name and a payload. | 62599064442bda511e95d8f4 |
class HDAWGChannelGrouping(Enum): <NEW_LINE> <INDENT> CHAN_GROUP_4x2 = 0 <NEW_LINE> CHAN_GROUP_2x4 = 1 <NEW_LINE> CHAN_GROUP_1x8 = 2 <NEW_LINE> def group_size(self) -> int: <NEW_LINE> <INDENT> return { HDAWGChannelGrouping.CHAN_GROUP_4x2: 2, HDAWGChannelGrouping.CHAN_GROUP_2x4: 4, HDAWGChannelGrouping.CHAN_GROUP_1x8: 8 }[self] | How many independent sequencers should run on the AWG and how the outputs should be grouped by sequencer. | 62599064f7d966606f749454 |
class Device(cir.Element): <NEW_LINE> <INDENT> category = "Sources" <NEW_LINE> devType = "isin" <NEW_LINE> numTerms = 2 <NEW_LINE> isDCSource = True <NEW_LINE> isTDSource = True <NEW_LINE> isFDSource = True <NEW_LINE> sourceOutput = (0, 1) <NEW_LINE> paramDict = dict( idc = ('DC current', 'A', float, 0.), mag = ('Amplitude', 'A', float, 0.), acmag = ('Amplitude for AC analysis only', 'A', float, None), phase = ('Phase', 'degrees', float, 0.), freq = ('Frequency', 'Hz', float, 1e3) ) <NEW_LINE> def __init__(self, instanceName): <NEW_LINE> <INDENT> cir.Element.__init__(self, instanceName) <NEW_LINE> <DEDENT> def process_params(self): <NEW_LINE> <INDENT> if self.acmag == None: <NEW_LINE> <INDENT> self._acmag = self.mag <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._acmag = self.acmag <NEW_LINE> <DEDENT> self._omega = 2. * np.pi * self.freq <NEW_LINE> self._phase = self.phase * np.pi / 180. <NEW_LINE> <DEDENT> def get_DCsource(self): <NEW_LINE> <INDENT> return self.idc <NEW_LINE> <DEDENT> def get_TDsource(self, time): <NEW_LINE> <INDENT> return self.mag * np.cos(self._omega * time + self._phase) <NEW_LINE> <DEDENT> def get_FDsource(self): <NEW_LINE> <INDENT> fvec = np.array([self.freq]) <NEW_LINE> currentVec = np.array([cm.rect(self.mag, self._phase)]) <NEW_LINE> return (fvec, currentVec) <NEW_LINE> <DEDENT> def get_AC(self): <NEW_LINE> <INDENT> return cm.rect(self._acmag, self._phase) | (Co-)Sinusoidal current source
------------------------------
Connection diagram::
,---, iout
0 o-------( --> )---------o 1
'---'
iout = idc + mag * cos(2 * pi * freq * t + phase)
This source works for time and frequency domain. For AC analysis,
the 'acmag' parameter is provided. By default acmag = mag.
Netlist example::
isin:i1 gnd 4 idc=2mA amp=2mA freq=1GHz phase=90 | 6259906467a9b606de54763d |
class CompressionHeader: <NEW_LINE> <INDENT> LANGUAGES = ['German', 'English', 'Italian', 'French', 'Spanish', 'Dutch', 'Swedish', 'Danish', 'Portuguese', 'Finnish', 'Norwegian', 'Greek', 'Turkish', 'Hungarian', 'Polish', 'Language unspecified'] <NEW_LINE> def __init__(self, bytes): <NEW_LINE> <INDENT> octet = bytes.pop(0) <NEW_LINE> self.compression_language_context = (octet & 0x78) >> 3 <NEW_LINE> self.punctuation_processing = bool(octet & 0x4) <NEW_LINE> self.keyword_processing = bool(octet & 0x2) <NEW_LINE> self.character_group_processing = bool(octet & 0x1) <NEW_LINE> while octet & 0x80: <NEW_LINE> <INDENT> octet = bytes.pop(0) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '; '.join([ 'Language: %s' % self.LANGUAGES[self.compression_language_context], 'Punctuation: %s' % self.punctuation_processing, 'Keywords: %s' % self.keyword_processing, 'Character Groups: %s' % self.character_group_processing, ]) | Parse the compression header from the byte stream, consuming its
bytes as it goes. | 62599064e76e3b2f99fda135 |
class OfficialLoglossObjective: <NEW_LINE> <INDENT> def calc_ders_range(self, preds, targets, weights=None): <NEW_LINE> <INDENT> assert len(preds) == len(targets) <NEW_LINE> if weights is not None: <NEW_LINE> <INDENT> assert len(weights) == len(preds) <NEW_LINE> <DEDENT> result = [] <NEW_LINE> for index in range(len(targets)): <NEW_LINE> <INDENT> e = np.exp(preds[index]) <NEW_LINE> p = e / (1 + e) <NEW_LINE> der1 = targets[index] - p <NEW_LINE> der2 = -p * (1 - p) <NEW_LINE> if weights is not None: <NEW_LINE> <INDENT> der1 *= weights[index] <NEW_LINE> der2 *= weights[index] <NEW_LINE> <DEDENT> result.append((der1, der2)) <NEW_LINE> <DEDENT> return result | An official Catboost example | 62599064498bea3a75a5919b |
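A sketch of plugging the objective above into CatBoost; the hyperparameters and training data are assumed:

    from catboost import CatBoostClassifier
    model = CatBoostClassifier(
        iterations=100,
        loss_function=OfficialLoglossObjective(),  # custom objective object
        eval_metric='Logloss',
    )
    # model.fit(X_train, y_train)  # X_train / y_train assumed to exist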
class Body(Model): <NEW_LINE> <INDENT> def __init__(self, time_series: List[float]=None): <NEW_LINE> <INDENT> self.swagger_types = { 'time_series': List[float] } <NEW_LINE> self.attribute_map = { 'time_series': 'time_series' } <NEW_LINE> self._time_series = time_series <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dikt) -> 'Body': <NEW_LINE> <INDENT> return util.deserialize_model(dikt, cls) <NEW_LINE> <DEDENT> @property <NEW_LINE> def time_series(self) -> List[float]: <NEW_LINE> <INDENT> return self._time_series <NEW_LINE> <DEDENT> @time_series.setter <NEW_LINE> def time_series(self, time_series: List[float]): <NEW_LINE> <INDENT> self._time_series = time_series | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62599064d486a94d0ba2d6ff |
class CleanDict(Dict): <NEW_LINE> <INDENT> def __init__(self, key_schema, value_schema, max_length=None, **kwargs): <NEW_LINE> <INDENT> super(CleanDict, self).__init__(**kwargs) <NEW_LINE> self.key_schema = key_schema <NEW_LINE> self.value_schema = value_schema <NEW_LINE> self.max_length = max_length <NEW_LINE> <DEDENT> def clean(self, value): <NEW_LINE> <INDENT> value = super(CleanDict, self).clean(value) <NEW_LINE> if self.max_length and len(value) > self.max_length: <NEW_LINE> <INDENT> raise ValidationError('Dict is too long.') <NEW_LINE> <DEDENT> errors = {} <NEW_LINE> data = {} <NEW_LINE> for key, value in value.items(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cleaned_key = self.key_schema.clean(key) <NEW_LINE> <DEDENT> except ValidationError as e: <NEW_LINE> <INDENT> errors[key] = e.args and e.args[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cleaned_value = self.value_schema.clean(value) <NEW_LINE> <DEDENT> except ValidationError as e: <NEW_LINE> <INDENT> errors[key] = e.args and e.args[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data[cleaned_key] = cleaned_value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if errors: <NEW_LINE> <INDENT> raise ValidationError({'errors': errors}) <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def serialize(self, value): <NEW_LINE> <INDENT> if not value: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> return { self.key_schema.serialize(key): self.value_schema.serialize(value) for key, value in value.items() } | A dictionary in which both keys and values are validated with separate schema fields. | 6259906499cbb53fe6832619 |
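A usage sketch for the field above, assuming the library provides companion String/Integer fields with the same clean()/serialize() protocol:

    ages = CleanDict(key_schema=String(), value_schema=Integer(), max_length=100)
    data = ages.clean({'alice': 30, 'bob': 25})  # raises ValidationError on bad entries
    payload = ages.serialize(data)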
class TestExceptions(cros_test_lib.MockOutputTestCase): <NEW_LINE> <INDENT> def _SystemExit(self): <NEW_LINE> <INDENT> sys.stdout.write(_GREETING) <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> def _KeyboardInterrupt(self): <NEW_LINE> <INDENT> sys.stdout.write(_GREETING) <NEW_LINE> raise KeyboardInterrupt() <NEW_LINE> <DEDENT> def _BadPickler(self): <NEW_LINE> <INDENT> return self._BadPickler <NEW_LINE> <DEDENT> def _VerifyExceptionRaised(self, fn, exc_type): <NEW_LINE> <INDENT> for task in (lambda: parallel.RunTasksInProcessPool(fn, [[]]), lambda: parallel.RunParallelSteps([fn])): <NEW_LINE> <INDENT> output_str = ex_str = ex = None <NEW_LINE> with self.OutputCapturer() as capture: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> task() <NEW_LINE> <DEDENT> except parallel.BackgroundFailure as ex: <NEW_LINE> <INDENT> output_str = capture.GetStdout() <NEW_LINE> ex_str = str(ex) <NEW_LINE> <DEDENT> <DEDENT> self.assertTrue(exc_type in [x.type for x in ex.exc_infos]) <NEW_LINE> self.assertEqual(output_str, _GREETING) <NEW_LINE> self.assertTrue(str(exc_type) in ex_str) <NEW_LINE> <DEDENT> <DEDENT> def testExceptionRaising(self): <NEW_LINE> <INDENT> self.StartPatcher(BackgroundTaskVerifier()) <NEW_LINE> self._VerifyExceptionRaised(self._KeyboardInterrupt, KeyboardInterrupt) <NEW_LINE> self._VerifyExceptionRaised(self._SystemExit, SystemExit) <NEW_LINE> <DEDENT> def testFailedPickle(self): <NEW_LINE> <INDENT> with self.assertRaises(cPickle.PicklingError): <NEW_LINE> <INDENT> parallel.RunTasksInProcessPool(self._SystemExit, [self._SystemExit]) <NEW_LINE> <DEDENT> <DEDENT> def testFailedPickleOnReturn(self): <NEW_LINE> <INDENT> with self.assertRaises(parallel.BackgroundFailure): <NEW_LINE> <INDENT> parallel.RunParallelSteps([self._BadPickler], return_values=True) | Test cases where child processes raise exceptions. | 625990642ae34c7f260ac81e |
class ObjFromDict(object): <NEW_LINE> <INDENT> def __init__(self, d): <NEW_LINE> <INDENT> for a, b in d.iteritems(): <NEW_LINE> <INDENT> if isinstance(b, (list, tuple)): <NEW_LINE> <INDENT> setattr(self, a, [ObjFromDict(x) if isinstance(x, dict) else x for x in b]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> setattr(self, a, ObjFromDict(b) if isinstance(b, dict) else b) | Takes a dictionary and returns an object that can use dot notation | 6259906476e4537e8c3f0cb9 |
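A quick illustration of the wrapper above (note the source uses Python 2's iteritems(); under Python 3 this would be items()):

    cfg = ObjFromDict({'db': {'host': 'localhost', 'port': 5432}, 'tags': [{'k': 'v'}]})
    print(cfg.db.host)    # 'localhost'
    print(cfg.tags[0].k)  # 'v'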
class AddItemHandler(APIHandler): <NEW_LINE> <INDENT> level = UserLevel.kernel <NEW_LINE> async def process(self, uid, data): <NEW_LINE> <INDENT> uid = int(uid) <NEW_LINE> proset = await get_proset(self.user, uid) <NEW_LINE> if proset is None: <NEW_LINE> <INDENT> return 'Error' <NEW_LINE> <DEDENT> problem_uid = int(data['problem_uid']) <NEW_LINE> problem = await model.problem.get(problem_uid) <NEW_LINE> if problem is None: <NEW_LINE> <INDENT> return 'Error' <NEW_LINE> <DEDENT> proitem = await proset.add(problem, True) <NEW_LINE> if proitem is None: <NEW_LINE> <INDENT> return 'Error' <NEW_LINE> <DEDENT> await model.scoring.change_problem(problem.uid) <NEW_LINE> return proitem.uid | Add problem item handler. | 6259906455399d3f05627c56 |
class PaymentDetails(Model): <NEW_LINE> <INDENT> _attribute_map = { 'total': {'key': 'total', 'type': 'PaymentItem'}, 'display_items': {'key': 'displayItems', 'type': '[PaymentItem]'}, 'shipping_options': {'key': 'shippingOptions', 'type': '[PaymentShippingOption]'}, 'modifiers': {'key': 'modifiers', 'type': '[PaymentDetailsModifier]'}, 'error': {'key': 'error', 'type': 'str'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(PaymentDetails, self).__init__(**kwargs) <NEW_LINE> self.total = kwargs.get('total', None) <NEW_LINE> self.display_items = kwargs.get('display_items', None) <NEW_LINE> self.shipping_options = kwargs.get('shipping_options', None) <NEW_LINE> self.modifiers = kwargs.get('modifiers', None) <NEW_LINE> self.error = kwargs.get('error', None) | Provides information about the requested transaction.
:param total: Contains the total amount of the payment request
:type total: ~botframework.connector.models.PaymentItem
:param display_items: Contains line items for the payment request that the
user agent may display
:type display_items: list[~botframework.connector.models.PaymentItem]
:param shipping_options: A sequence containing the different shipping
options for the user to choose from
:type shipping_options:
list[~botframework.connector.models.PaymentShippingOption]
:param modifiers: Contains modifiers for particular payment method
identifiers
:type modifiers:
list[~botframework.connector.models.PaymentDetailsModifier]
:param error: Error description
:type error: str | 625990640c0af96317c578fa |
class draw_thing(): <NEW_LINE> <INDENT> def __init__(self, drawings, name, padding_x=0, padding_y=0): <NEW_LINE> <INDENT> self.drawings = drawings <NEW_LINE> self.padding_x = padding_x <NEW_LINE> self.padding_y = padding_y <NEW_LINE> noun_keyword_offset = { 'fish': 200, 'bird': -200, } <NEW_LINE> if name in noun_keyword_offset: <NEW_LINE> <INDENT> self.padding_y = self.padding_y + noun_keyword_offset[name] <NEW_LINE> <DEDENT> self.name = name <NEW_LINE> self.lines = [] <NEW_LINE> <DEDENT> def animate(self): <NEW_LINE> <INDENT> window.update() <NEW_LINE> for i in range(0, self.drawings.drawing_count): <NEW_LINE> <INDENT> time.sleep(0.05) <NEW_LINE> self.draw(drawing=self.drawings.get_drawing(index=i)) <NEW_LINE> if i < (self.drawings.drawing_count - 1): <NEW_LINE> <INDENT> self.erase() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def draw(self, drawing=0): <NEW_LINE> <INDENT> self.lines = [] <NEW_LINE> for stroke in drawing.strokes: <NEW_LINE> <INDENT> x_last = 0 <NEW_LINE> y_last = 0 <NEW_LINE> index = 0 <NEW_LINE> for x, y in stroke: <NEW_LINE> <INDENT> x = x + self.padding_x <NEW_LINE> y = y + self.padding_y <NEW_LINE> if index > 0: <NEW_LINE> <INDENT> self.lines.append(canvas.create_line(x_last, y_last, x, y, width=5, cap=tkinter.ROUND, join=tkinter.ROUND)) <NEW_LINE> <DEDENT> x_last = x <NEW_LINE> y_last = y <NEW_LINE> index = index + 1 <NEW_LINE> window.update() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def erase(self): <NEW_LINE> <INDENT> for line_id in self.lines: <NEW_LINE> <INDENT> canvas.after(50, canvas.delete, line_id) <NEW_LINE> window.update() | draws and animates objects
also stores the canvas objects to be deleted later | 6259906432920d7e50bc777d |
class LockError(LockingError): <NEW_LINE> <INDENT> pass | Raised when a lock cannot be created due to a conflicting lock.
Instances of this class have a ``lock`` attribute which is a
:class:`substanced.locking.Lock` object, representing the conflicting
lock. | 625990647047854f46340aea |
class Command(BaseCommand): <NEW_LINE> <INDENT> def handle(self, *args, **options): <NEW_LINE> <INDENT> self.stdout.write('waiting database...') <NEW_LINE> db_conn = None <NEW_LINE> while not db_conn: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> db_conn = connections['default'] <NEW_LINE> <DEDENT> except OperationalError: <NEW_LINE> <INDENT> self.stdout.write("Database unavailable, waiting 1 second...") <NEW_LINE> time.sleep(1) <NEW_LINE> <DEDENT> <DEDENT> self.stdout.write(self.style.SUCCESS('Database available')) | Django command to pause execution until database is available | 62599064a17c0f6771d5d741 |
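The command above is invoked by the name of its module file, commonly wait_for_db (an assumption here), e.g. before migrations in a container entrypoint:

    python manage.py wait_for_db && python manage.py migrate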
class Query(object): <NEW_LINE> <INDENT> def __init__(self, model): <NEW_LINE> <INDENT> self.model = model <NEW_LINE> <DEDENT> def get_by_model(self , name): <NEW_LINE> <INDENT> self.model_query = self.model.get(self.model.model == str(name)) <NEW_LINE> return self.model_query | docstring for Query | 625990643eb6a72ae038bd96 |
class SiteTags(Table): <NEW_LINE> <INDENT> def __init__(self, api, site_tag_filter = None, columns = None): <NEW_LINE> <INDENT> Table.__init__(self, api, SiteTag, columns) <NEW_LINE> sql = "SELECT %s FROM view_site_tags WHERE True" % ", ".join(self.columns) <NEW_LINE> if site_tag_filter is not None: <NEW_LINE> <INDENT> if isinstance(site_tag_filter, (list, tuple, set, int)): <NEW_LINE> <INDENT> site_tag_filter = Filter(SiteTag.fields, {'site_tag_id': site_tag_filter}) <NEW_LINE> <DEDENT> elif isinstance(site_tag_filter, dict): <NEW_LINE> <INDENT> site_tag_filter = Filter(SiteTag.fields, site_tag_filter) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise PLCInvalidArgument("Wrong site setting filter %r"%site_tag_filter) <NEW_LINE> <DEDENT> sql += " AND (%s) %s" % site_tag_filter.sql(api) <NEW_LINE> <DEDENT> self.selectall(sql) | Representation of row(s) from the site_tag table in the
database. | 625990651f037a2d8b9e5406 |
class ClientsSettings(FeatureSettings): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ClientsSettings, self).__init__(*args, **kwargs) <NEW_LINE> self.image_dir = None <NEW_LINE> self.provisioning_server = None <NEW_LINE> <DEDENT> def read(self, reader): <NEW_LINE> <INDENT> self.image_dir = reader.get( 'clients', 'image_dir', '/opt/robottelo/images') <NEW_LINE> self.provisioning_server = reader.get( 'clients', 'provisioning_server') <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> validation_errors = [] <NEW_LINE> if self.provisioning_server is None: <NEW_LINE> <INDENT> validation_errors.append( '[clients] provisioning_server option must be provided.') <NEW_LINE> <DEDENT> return validation_errors | Clients settings definitions. | 62599064cc0a2c111447c66b |
class GroupSchemaCustom( OktaObject ): <NEW_LINE> <INDENT> def __init__(self, config=None): <NEW_LINE> <INDENT> super().__init__(config) <NEW_LINE> if config: <NEW_LINE> <INDENT> self.id = config["id"] if "id" in config else None <NEW_LINE> self.properties = config["properties"] if "properties" in config else None <NEW_LINE> self.required = OktaCollection.form_list( config["required"] if "required" in config else [], str ) <NEW_LINE> self.type = config["type"] if "type" in config else None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.id = None <NEW_LINE> self.properties = None <NEW_LINE> self.required = [] <NEW_LINE> self.type = None <NEW_LINE> <DEDENT> <DEDENT> def request_format(self): <NEW_LINE> <INDENT> parent_req_format = super().request_format() <NEW_LINE> current_obj_format = { "id": self.id, "properties": self.properties, "required": self.required, "type": self.type } <NEW_LINE> parent_req_format.update(current_obj_format) <NEW_LINE> return parent_req_format | A class for GroupSchemaCustom objects. | 6259906401c39578d7f142d0 |
class NullDev(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def __get__(self, instance, owner): <NEW_LINE> <INDENT> print("(%s) invoking get method, inst = %r, owner = %r" % (self.name, instance, owner)) <NEW_LINE> return None <NEW_LINE> <DEDENT> def __set__(self, instance, value): <NEW_LINE> <INDENT> print("(%s) involing set method with value %r" % (self.name, value)) <NEW_LINE> instance.lst_data = value | data descriptor that will get/set None | 625990654f88993c371f10ba |
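A sketch showing the data descriptor above attached to a class; the owning class name is hypothetical, while the lst_data attribute it writes to is as in the source:

    class Host(object):
        chan = NullDev('chan')  # descriptor instance lives on the class

    h = Host()
    h.chan = [1, 2, 3]   # __set__ prints a message and stores the value on h.lst_data
    print(h.chan)        # __get__ prints a message and returns None
    print(h.lst_data)    # [1, 2, 3]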
class SchedulerTestBase(object): <NEW_LINE> <INDENT> _native = init_native() <NEW_LINE> def _create_work_dir(self): <NEW_LINE> <INDENT> work_dir = safe_mkdtemp() <NEW_LINE> self.addCleanup(safe_rmtree, work_dir) <NEW_LINE> return work_dir <NEW_LINE> <DEDENT> def mk_fs_tree(self, build_root_src=None, ignore_patterns=None, work_dir=None): <NEW_LINE> <INDENT> work_dir = work_dir or self._create_work_dir() <NEW_LINE> build_root = os.path.join(work_dir, 'build_root') <NEW_LINE> if build_root_src is not None: <NEW_LINE> <INDENT> shutil.copytree(build_root_src, build_root, symlinks=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> os.makedirs(build_root) <NEW_LINE> <DEDENT> return FileSystemProjectTree(build_root, ignore_patterns=ignore_patterns) <NEW_LINE> <DEDENT> def mk_scheduler(self, rules=None, project_tree=None, work_dir=None, include_trace_on_error=True): <NEW_LINE> <INDENT> rules = rules or [] <NEW_LINE> goals = {} <NEW_LINE> work_dir = work_dir or self._create_work_dir() <NEW_LINE> project_tree = project_tree or self.mk_fs_tree(work_dir=work_dir) <NEW_LINE> return LocalScheduler(work_dir, goals, rules, project_tree, self._native, include_trace_on_error=include_trace_on_error) <NEW_LINE> <DEDENT> def context_with_scheduler(self, scheduler, *args, **kwargs): <NEW_LINE> <INDENT> return self.context(*args, scheduler=scheduler, **kwargs) <NEW_LINE> <DEDENT> def execute(self, scheduler, product, *subjects): <NEW_LINE> <INDENT> request = scheduler.execution_request([product], subjects) <NEW_LINE> result = scheduler.execute(request) <NEW_LINE> if result.error: <NEW_LINE> <INDENT> raise result.error <NEW_LINE> <DEDENT> states = [state for _, state in result.root_products] <NEW_LINE> if any(type(state) is not Return for state in states): <NEW_LINE> <INDENT> with temporary_file_path(cleanup=False, suffix='.dot') as dot_file: <NEW_LINE> <INDENT> scheduler.visualize_graph_to_file(request, dot_file) <NEW_LINE> raise ValueError('At least one request failed: {}. Visualized as {}'.format(states, dot_file)) <NEW_LINE> <DEDENT> <DEDENT> return list(state.value for state in states) <NEW_LINE> <DEDENT> def execute_expecting_one_result(self, scheduler, product, subject): <NEW_LINE> <INDENT> request = scheduler.execution_request([product], [subject]) <NEW_LINE> result = scheduler.execute(request) <NEW_LINE> if result.error: <NEW_LINE> <INDENT> raise result.error <NEW_LINE> <DEDENT> states = [state for _, state in result.root_products] <NEW_LINE> self.assertEqual(len(states), 1) <NEW_LINE> return states[0] <NEW_LINE> <DEDENT> def execute_raising_throw(self, scheduler, product, subject): <NEW_LINE> <INDENT> resulting_value = self.execute_expecting_one_result(scheduler, product, subject) <NEW_LINE> self.assertTrue(type(resulting_value) is Throw) <NEW_LINE> raise resulting_value.exc | A mixin for classes (tests, presumably) which need to create temporary schedulers.
TODO: In the medium term, this should be part of pants_test.base_test.BaseTest. | 625990651b99ca40022900d1 |
class VisualizeAll(Command): <NEW_LINE> <INDENT> def do(self): <NEW_LINE> <INDENT> browser = utils.shell_split(self.model.history_browser()) <NEW_LINE> utils.fork(browser + ['--all']) | Visualize all branches. | 625990653539df3088ecd9d5 |
class Strategy: <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def fit(self, data: List, labels: List) -> None: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def predict(self, data: List) -> List: <NEW_LINE> <INDENT> raise NotImplementedError | Base class for all classification strategies. | 62599065d6c5a102081e385d |
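A minimal concrete subclass sketch for the abstract base above; the majority-class logic is purely illustrative:

    from collections import Counter
    from typing import List

    class MajorityClassStrategy(Strategy):
        """Always predicts the most common training label."""
        def fit(self, data: List, labels: List) -> None:
            self.majority = Counter(labels).most_common(1)[0][0]
        def predict(self, data: List) -> List:
            return [self.majority for _ in data]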
class AddProjectTreeNodeCommand(BaseProjectTreeUndoCommand): <NEW_LINE> <INDENT> undo_id = gen_undo_id() <NEW_LINE> def __init__(self, project_tree_controller, node, parent_node, parent=None): <NEW_LINE> <INDENT> super().__init__(project_tree_controller, parent) <NEW_LINE> if node.parent is not None: <NEW_LINE> <INDENT> raise ValueError( QCoreApplication.translate('AddProjectTreeNodeCommand', 'Cannot add a node that already exists in the project tree.')) <NEW_LINE> <DEDENT> self._new_node = node <NEW_LINE> self._parent_node = parent_node <NEW_LINE> self.setText(QCoreApplication.translate('AddProjectTreeNodeCommand', "Create '{}'").format(node.name)) <NEW_LINE> <DEDENT> def redo(self): <NEW_LINE> <INDENT> self.project_tree_controller.add_node(self._new_node, self._parent_node) <NEW_LINE> <DEDENT> def undo(self): <NEW_LINE> <INDENT> self.project_tree_controller.delete_node(self._new_node) | QUndoCommand class for adding a new node to the project tree. | 62599065796e427e5384feae |
class QuotaErrorMsg(basestring): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_api_name(): <NEW_LINE> <INDENT> return "quota-error-msg" | Information about the single quota error. | 625990654f6381625f19a040 |
class ServerGroupsManager(base.ManagerWithFind): <NEW_LINE> <INDENT> resource_class = ServerGroup <NEW_LINE> def list(self, all_projects=False): <NEW_LINE> <INDENT> all = '?all_projects' if all_projects else '' <NEW_LINE> return self._list('/os-server-groups%s' % all, 'server_groups') <NEW_LINE> <DEDENT> def get(self, id): <NEW_LINE> <INDENT> return self._get('/os-server-groups/%s' % id, 'server_group') <NEW_LINE> <DEDENT> def delete(self, id): <NEW_LINE> <INDENT> self._delete('/os-server-groups/%s' % id) <NEW_LINE> <DEDENT> def create(self, **kwargs): <NEW_LINE> <INDENT> body = {'server_group': kwargs} <NEW_LINE> return self._create('/os-server-groups', body, 'server_group') | Manage :class:`ServerGroup` resources. | 625990657b25080760ed887d |
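How the manager above is reached through an authenticated novaclient Client, as a sketch; the credentials and the group policy are assumed:

    # 'client' is an authenticated novaclient.client.Client instance (assumed).
    group = client.server_groups.create(name='my-group', policies=['anti-affinity'])
    for g in client.server_groups.list(all_projects=False):
        print(g.id)
    client.server_groups.delete(group.id)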
class ShowPtpParentSchema(MetaParser): <NEW_LINE> <INDENT> schema = { 'ptp_parent_property': { 'parent_clock': { 'identity': str, 'port_number': int, 'observed_parent_offset': int, 'phase_change_rate': str, }, 'grandmaster_clock': { 'identity': str, 'gd_class': int, 'accuracy': str, 'offset': str, 'priority1': int, 'priority2': int } }, } | Schema for 'show ptp parent'
| 6259906556b00c62f0fb4004 |
class AppleMusic(APIBase): <NEW_LINE> <INDENT> def __init__(self, proxies=None): <NEW_LINE> <INDENT> super().__init__(root='https://api.music.apple.com/v1/', proxies=proxies) <NEW_LINE> <DEDENT> def generate_token(self, secret_key, key_id, team_id, session_length=12): <NEW_LINE> <INDENT> alg = 'ES256' <NEW_LINE> headers = { 'alg': alg, 'kid': key_id } <NEW_LINE> payload = { 'iss': team_id, 'iat': int(datetime.now().timestamp()), 'exp': int((datetime.now() + timedelta(hours=session_length)).timestamp()) } <NEW_LINE> token = jwt.encode(payload, secret_key, algorithm=alg, headers=headers) <NEW_LINE> self.token_str = token.decode() <NEW_LINE> <DEDENT> def charts(self, storefront='us', chart=None, types=None, l=None, genre=None, limit=None, offset=None): <NEW_LINE> <INDENT> url = self.root + 'catalog/{}/charts'.format(storefront) <NEW_LINE> if types: <NEW_LINE> <INDENT> type_str = ','.join(types) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> type_str = None <NEW_LINE> <DEDENT> return self._get(url, types=type_str, chart=chart, l=l, genre=genre, limit=limit, offset=offset) | This class inherits from API Base. This is an authenticated API that needs a token. | 625990652ae34c7f260ac81f |
class AddProviderForm(forms.Form): <NEW_LINE> <INDENT> name = forms.CharField(label='Provider Name', required=False) <NEW_LINE> email = forms.EmailField(label='Provider Email', required=False) <NEW_LINE> phone_no = forms.CharField(label='Provider Phone', required=False) <NEW_LINE> language = forms.CharField(label='Provider Language', required=False) <NEW_LINE> currency= forms.CharField(label='Provider Currency', required=False) <NEW_LINE> def save(self, force_insert=False, force_update=False, commit=True): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> obj = Provider.objects.get(name=self.data['name'], email=self.data['email'], phone_no=self.data['phone_no'], language=self.data['language'], currency=self.data['currency']) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> obj = Provider(name=self.data['name'], email=self.data['email'], phone_no=self.data['phone_no'], language=self.data['language'], currency=self.data['currency']) <NEW_LINE> <DEDENT> obj.save() | Form to save/edit a Service area. | 625990654428ac0f6e659c6a |
class GainVals16(GetSpotValue, ArraySpotValue): <NEW_LINE> <INDENT> _nParam = SpotCamConstant.GAINVALS16 <NEW_LINE> _ctype = ctypes.c_short*257 <NEW_LINE> _dtype = np.int16 | The allowable gain values for 10-16 bit per channel still image
capture. | 6259906516aa5153ce401c15 |