code (string, 4–4.48k chars) | docstring (string, 1–6.45k chars) | _id (string, 24 chars)
---|---|---|
class CustomTopo(Topo): <NEW_LINE> <INDENT> def __init__(self, **opts): <NEW_LINE> <INDENT> Topo.__init__(self, **opts) <NEW_LINE> s = [] <NEW_LINE> s.append(self.addSwitch('s1')) <NEW_LINE> s.append(self.addSwitch('s2')) <NEW_LINE> s.append(self.addSwitch('s3')) <NEW_LINE> s.append(self.addSwitch('s4')) <NEW_LINE> s.append(self.addSwitch('s5')) <NEW_LINE> s.append(self.addSwitch('s6')) <NEW_LINE> self.addLink(s[0], s[1], bw=10) <NEW_LINE> self.addLink(s[1], s[2], bw=10) <NEW_LINE> self.addLink(s[2], s[0], bw=10) <NEW_LINE> self.addLink(s[0], s[3], bw=5) <NEW_LINE> self.addLink(s[1], s[4], bw=5) <NEW_LINE> self.addLink(s[2], s[5], bw=5) <NEW_LINE> h1 = self.addHost('h1', mac='00:00:00:00:00:01') <NEW_LINE> h2 = self.addHost('h2', mac='00:00:00:00:00:02') <NEW_LINE> h3 = self.addHost('h3', mac='00:00:00:00:00:03') <NEW_LINE> h4 = self.addHost('h4', mac='00:00:00:00:00:04') <NEW_LINE> h5 = self.addHost('h5', mac='00:00:00:00:00:05') <NEW_LINE> h6 = self.addHost('h6', mac='00:00:00:00:00:06') <NEW_LINE> self.addLink(s[3], h1, bw=1) <NEW_LINE> self.addLink(s[3], h2, bw=1) <NEW_LINE> self.addLink(s[4], h3, bw=1) <NEW_LINE> self.addLink(s[4], h4, bw=1) <NEW_LINE> self.addLink(s[5], h5, bw=1) <NEW_LINE> self.addLink(s[5], h6, bw=1) | Simple Data Center Topology | 62599042d99f1b3c44d0696d |
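A minimal launch sketch for the `CustomTopo` row above, assuming a Mininet installation; the `bw=` link arguments only take effect when the net is built with `TCLink`, and the script body here is illustrative, not part of the dataset row.

```python
# Minimal sketch, assuming Mininet is installed and CustomTopo (above) is in scope.
from mininet.net import Mininet
from mininet.link import TCLink

net = Mininet(topo=CustomTopo(), link=TCLink)  # TCLink honors the bw= arguments
net.start()
net.pingAll()  # exercise the 6-switch / 6-host data-center topology
net.stop()
```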
class ConfigError(JukeboxException): <NEW_LINE> <INDENT> pass | Jukebox Exception that is raised when some error occurs concerning Config files. | 625990426fece00bbacccc81 |
class CloudBioLinux(Edition): <NEW_LINE> <INDENT> def __init__(self, env): <NEW_LINE> <INDENT> Edition.__init__(self,env) <NEW_LINE> self.name = "CloudBioLinux Edition" <NEW_LINE> self.short_name = "cloudbiolinux" <NEW_LINE> <DEDENT> def post_install(self, pkg_install=[]): <NEW_LINE> <INDENT> _freenx_scripts(self.env) <NEW_LINE> if 'cloudman' in pkg_install: <NEW_LINE> <INDENT> _configure_cloudman(self.env) | Specific customizations for CloudBioLinux builds.
| 6259904276d4e153a661dbdc |
class ClearSessionsCommandOptions(management.CommandOptions): <NEW_LINE> <INDENT> args = base.args <NEW_LINE> help = base.help <NEW_LINE> option_list = base.option_list[ len(management.BaseCommandOptions.option_list):] <NEW_LINE> option_groups = ( ("[clearsessions options]", "These options will be passed to clearsessions.", option_list, ),) if option_list else () <NEW_LINE> actions = ("clearsessions",) <NEW_LINE> def handle_clearsessions(self, *args, **options): <NEW_LINE> <INDENT> return self.call_command("clearsessions", *args, **options) | ClearSessions command options. | 6259904221bff66bcd723f3a |
class FormChecker(BaseChecker): <NEW_LINE> <INDENT> __implements__ = IAstroidChecker <NEW_LINE> name = "django-form-checker" <NEW_LINE> msgs = { f"W{BASE_ID}04": ( "Use explicit fields instead of exclude in ModelForm", "modelform-uses-exclude", "Prevents accidentally allowing users to set fields, especially when adding new fields to a Model", ) } <NEW_LINE> @check_messages("modelform-uses-exclude") <NEW_LINE> def visit_classdef(self, node): <NEW_LINE> <INDENT> if not node_is_subclass(node, "django.forms.models.ModelForm", ".ModelForm"): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> meta = _get_child_meta(node) <NEW_LINE> if not meta: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for child in meta.get_children(): <NEW_LINE> <INDENT> if not isinstance(child, Assign) or not isinstance(child.targets[0], AssignName): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if child.targets[0].name == "exclude": <NEW_LINE> <INDENT> self.add_message(f"W{BASE_ID}04", node=child) <NEW_LINE> break | Django model checker. | 6259904294891a1f408ba05e |
class ThreadHandlerHeader(object): <NEW_LINE> <INDENT> def __init__(self, key, trigger_frequency, process_entry): <NEW_LINE> <INDENT> self.key = key <NEW_LINE> self.trigger_frequency = trigger_frequency <NEW_LINE> self.process_entry = process_entry | ThreadHandlerHeader is a data structure representing key Thread Handler features.
It is passed to the Timer instance and, later on, to the Scheduler's running function as an argument | 6259904230c21e258be99ad6 |
class GetJSON(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def get_json(cls, entity): <NEW_LINE> <INDENT> return cls().dump(entity).data | docstring for GetJSON | 6259904266673b3332c316c9 |
class HomeViewSetRetrieveTests(HomeViewSetBaseTests): <NEW_LINE> <INDENT> def test_retrieve_home_obj(self): <NEW_LINE> <INDENT> home_obj = self.create_home_obj() <NEW_LINE> response = self.retrieve_obj( url=self.detail_url, data=self.data, pk=home_obj.pk, ) <NEW_LINE> self.assertEqual( response.status_code, status.HTTP_200_OK, "#THV-R01: Retrieve one home object failed" ) <NEW_LINE> self.assertEqual( response.json(), self.get_home_obj_url(self.data), "#THV-R01: Retrieve data is not same as the posted data" ) <NEW_LINE> <DEDENT> def test_retrieve_multiple_home_obj(self): <NEW_LINE> <INDENT> home_objs = HomeFactory.create_batch(10) <NEW_LINE> response = self.retrieve_obj( url=self.list_url ) <NEW_LINE> self.assertEqual( response.status_code, status.HTTP_200_OK, "#THV-R02: Retrieve home objects failed" ) <NEW_LINE> json_data = response.json() <NEW_LINE> self.assertEqual( json_data['count'], 10, "#THV-R02: Total count of home object is not correct" ) | Test Case Code Format: #THV-R00
Test cases for retrieving an existing home object | 6259904224f1403a92686234 |
class SlicingAdapter(Adapter): <NEW_LINE> <INDENT> __slots__ = ["start", "stop", "step"] <NEW_LINE> def __init__(self, subcon, start, stop = None): <NEW_LINE> <INDENT> Adapter.__init__(self, subcon) <NEW_LINE> self.start = start <NEW_LINE> self.stop = stop <NEW_LINE> <DEDENT> def _encode(self, obj, context): <NEW_LINE> <INDENT> if self.start is None: <NEW_LINE> <INDENT> return obj <NEW_LINE> <DEDENT> return [None] * self.start + obj <NEW_LINE> <DEDENT> def _decode(self, obj, context): <NEW_LINE> <INDENT> return obj[self.start:self.stop] | Adapter for slicing a list (getting a slice from that list)
:param subcon: the subcon to slice
:param start: start index
:param stop: stop index (or None for up-to-end)
:param step: step (or None for every element) | 625990420fa83653e46f61aa |
class PluginInstanceDetail(generics.RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> http_method_names = ['get', 'put', 'delete'] <NEW_LINE> serializer_class = PluginInstanceSerializer <NEW_LINE> queryset = PluginInstance.objects.all() <NEW_LINE> permission_classes = (permissions.IsAuthenticated, IsOwnerOrChrisOrReadOnly,) <NEW_LINE> def retrieve(self, request, *args, **kwargs): <NEW_LINE> <INDENT> response = super(PluginInstanceDetail, self).retrieve(request, *args, **kwargs) <NEW_LINE> template_data = {'title': '', 'status': ''} <NEW_LINE> return services.append_collection_template(response, template_data) <NEW_LINE> <DEDENT> def update(self, request, *args, **kwargs): <NEW_LINE> <INDENT> data = self.request.data <NEW_LINE> data.pop('gpu_limit', None) <NEW_LINE> data.pop('number_of_workers', None) <NEW_LINE> data.pop('cpu_limit', None) <NEW_LINE> data.pop('memory_limit', None) <NEW_LINE> return super(PluginInstanceDetail, self).update(request, *args, **kwargs) <NEW_LINE> <DEDENT> def perform_update(self, serializer): <NEW_LINE> <INDENT> if 'status' in self.request.data: <NEW_LINE> <INDENT> instance = self.get_object() <NEW_LINE> if instance.status != 'cancelled': <NEW_LINE> <INDENT> descendants = instance.get_descendant_instances() <NEW_LINE> if instance.status == 'started': <NEW_LINE> <INDENT> cancel_plugin_instance.delay(instance.id) <NEW_LINE> <DEDENT> for plg_inst in descendants: <NEW_LINE> <INDENT> plg_inst.status = 'cancelled' <NEW_LINE> plg_inst.save() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> super(PluginInstanceDetail, self).perform_update(serializer) <NEW_LINE> <DEDENT> def destroy(self, request, *args, **kwargs): <NEW_LINE> <INDENT> instance = self.get_object() <NEW_LINE> descendants = instance.get_descendant_instances() <NEW_LINE> if instance.status == 'started': <NEW_LINE> <INDENT> cancel_plugin_instance(instance.id) <NEW_LINE> <DEDENT> for plg_inst in descendants: <NEW_LINE> <INDENT> if plg_inst.status not in ('finishedSuccessfully', 'finishedWithError', 'cancelled'): <NEW_LINE> <INDENT> plg_inst.status = 'cancelled' <NEW_LINE> plg_inst.save() <NEW_LINE> <DEDENT> <DEDENT> return super(PluginInstanceDetail, self).destroy(request, *args, **kwargs) | A plugin instance view. | 6259904207f4c71912bb0702 |
class TestTrapRockpage(TestwithMocking): <NEW_LINE> <INDENT> def test_TrapRockPage_read(self): <NEW_LINE> <INDENT> trap_rock_page = TrapRockPage(mocked=True) <NEW_LINE> assert trap_rock_page is not None <NEW_LINE> status = trap_rock_page.fetch_taplist(brewery='Trap Rock') <NEW_LINE> assert not status <NEW_LINE> <DEDENT> def test_TrapRockPage_ssml(self): <NEW_LINE> <INDENT> trap_rock_page = TrapRockPage(mocked=True) <NEW_LINE> assert trap_rock_page is not None <NEW_LINE> status = trap_rock_page.fetch_taplist(brewery='Trap Rock') <NEW_LINE> assert not status <NEW_LINE> ssml = trap_rock_page.ssml_taplist() <NEW_LINE> assert ssml <NEW_LINE> file_name = data_dir() + trap_rock_page._brewery_name.replace(' ', '') + '.SSML' <NEW_LINE> file_pointer = open(file_name, mode='r', encoding='utf8') <NEW_LINE> tst_data = file_pointer.read() <NEW_LINE> file_pointer.close() <NEW_LINE> assert tst_data == ssml <NEW_LINE> <DEDENT> def test_TrapRockPage_cached(self): <NEW_LINE> <INDENT> traprock_page = TrapRockPage(mocked=True) <NEW_LINE> from_cache = traprock_page.fetch_taplist(brewery="Trap Rock") <NEW_LINE> assert not from_cache <NEW_LINE> traprock_page.ssml_taplist() <NEW_LINE> from_cache = traprock_page.fetch_taplist(brewery="Trap Rock") <NEW_LINE> assert from_cache | test for the departed soles web scraping page | 6259904226068e7796d4dc16 |
class OrganisationUserListSerializer(CustomUserListSerializer): <NEW_LINE> <INDENT> organisation_name = serializers.SerializerMethodField() <NEW_LINE> size = serializers.SerializerMethodField() <NEW_LINE> description = serializers.SerializerMethodField() <NEW_LINE> sports = serializers.SerializerMethodField() <NEW_LINE> phone_number = serializers.CharField() <NEW_LINE> team_ownerships = serializers.SerializerMethodField() <NEW_LINE> def get_team_ownerships(self, obj): <NEW_LINE> <INDENT> request = self.context.get('request') <NEW_LINE> teams = [] <NEW_LINE> if obj.user_type == USER_TYPE_ORG and request: <NEW_LINE> <INDENT> teams = obj.team_ownership.all() <NEW_LINE> <DEDENT> return TeamMembershipOwnershipListSerializer(teams, many=True, context={"request": request}).data <NEW_LINE> <DEDENT> def get_organisation_name(self, obj): <NEW_LINE> <INDENT> return obj.organisation.name <NEW_LINE> <DEDENT> def get_size(self, obj): <NEW_LINE> <INDENT> return obj.organisation.size <NEW_LINE> <DEDENT> def get_description(self, obj): <NEW_LINE> <INDENT> return obj.organisation.description <NEW_LINE> <DEDENT> def get_sports(self, obj): <NEW_LINE> <INDENT> return obj.organisation.sports <NEW_LINE> <DEDENT> class Meta(CustomUserListSerializer.Meta): <NEW_LINE> <INDENT> fields = CustomUserListSerializer.Meta.fields + ('size', 'description', 'sports', 'phone_number', 'organisation_name', 'team_ownerships') | Serializer to list organisation profile. | 62599042e76e3b2f99fd9cdb |
class IterMultipleComponents(object): <NEW_LINE> <INDENT> def __init__(self, stream, key=None, number_components=None): <NEW_LINE> <INDENT> substreams = collections.defaultdict(stream.__class__) <NEW_LINE> for tr in stream: <NEW_LINE> <INDENT> k = (tr.id[:-1], str(tr.stats[key]) if key is not None else None) <NEW_LINE> substreams[k].append(tr) <NEW_LINE> <DEDENT> n = number_components <NEW_LINE> self.substreams = [s for _, s in sorted(substreams.items()) if n is None or len(s) == n or (not isinstance(n, int) and len(s) in n)] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.substreams) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for s in self.substreams: <NEW_LINE> <INDENT> yield s | Return iterable to iterate over associated components of a stream.
:param stream: Stream with different, possibly many traces. It is
split into substreams with the same seed id (only last character
i.e. component may vary)
:type key: str or None
:param key: Additionally, the stream is grouped by the values of
the given stats entry to differentiate between e.g. different events
(for example key='starttime', key='onset')
:type number_components: int, tuple of ints or None
:param number_components: Only iterate through substreams with
matching number of components. | 6259904229b78933be26aa2b |
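A usage sketch for `IterMultipleComponents`, assuming ObsPy is installed; `read()` with no arguments returns ObsPy's built-in three-component example stream, so the grouping yields a single substream.

```python
# Sketch, assuming ObsPy and the IterMultipleComponents class above.
from obspy import read

stream = read()  # ObsPy's example stream: EHZ/EHN/EHE traces of one station
for substream in IterMultipleComponents(stream, key='starttime', number_components=3):
    print(substream)  # one substream holding all three components
```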
class TestSuite: <NEW_LINE> <INDENT> def test_missing_reputation(self): <NEW_LINE> <INDENT> assert is_healthy() <NEW_LINE> delete_reputation(get_ip()) <NEW_LINE> r = simple_request() <NEW_LINE> assert r.status_code == 200 <NEW_LINE> assert r.text == "the backend!\n" <NEW_LINE> <DEDENT> def test_good_reputation(self): <NEW_LINE> <INDENT> time.sleep(7) <NEW_LINE> assert is_healthy() <NEW_LINE> update_reputation(90, get_ip()) <NEW_LINE> r = simple_request() <NEW_LINE> assert r.status_code == 200 <NEW_LINE> assert r.text == "the backend!\n" <NEW_LINE> <DEDENT> def test_bad_reputation(self): <NEW_LINE> <INDENT> time.sleep(7) <NEW_LINE> assert is_healthy() <NEW_LINE> update_reputation(0, get_ip()) <NEW_LINE> r = simple_request() <NEW_LINE> assert r.status_code == 429 | Basic Test Suite to be run on blocking mode iprepd-nginx | 62599042e64d504609df9d39 |
class TestRingBinarySensorSetup(unittest.TestCase): <NEW_LINE> <INDENT> DEVICES = [] <NEW_LINE> def add_entities(self, devices, action): <NEW_LINE> <INDENT> for device in devices: <NEW_LINE> <INDENT> self.DEVICES.append(device) <NEW_LINE> <DEDENT> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> if os.path.isfile(self.cache): <NEW_LINE> <INDENT> os.remove(self.cache) <NEW_LINE> <DEDENT> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> self.hass = get_test_home_assistant() <NEW_LINE> self.cache = get_test_config_dir(base_ring.DEFAULT_CACHEDB) <NEW_LINE> self.config = { 'username': 'foo', 'password': 'bar', 'monitored_conditions': ['ding', 'motion'], } <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.hass.stop() <NEW_LINE> self.cleanup() <NEW_LINE> <DEDENT> @requests_mock.Mocker() <NEW_LINE> def test_binary_sensor(self, mock): <NEW_LINE> <INDENT> mock.post('https://oauth.ring.com/oauth/token', text=load_fixture('ring_oauth.json')) <NEW_LINE> mock.post('https://api.ring.com/clients_api/session', text=load_fixture('ring_session.json')) <NEW_LINE> mock.get('https://api.ring.com/clients_api/ring_devices', text=load_fixture('ring_devices.json')) <NEW_LINE> mock.get('https://api.ring.com/clients_api/dings/active', text=load_fixture('ring_ding_active.json')) <NEW_LINE> mock.get('https://api.ring.com/clients_api/doorbots/987652/health', text=load_fixture('ring_doorboot_health_attrs.json')) <NEW_LINE> base_ring.setup(self.hass, VALID_CONFIG) <NEW_LINE> ring.setup_platform(self.hass, self.config, self.add_entities, None) <NEW_LINE> for device in self.DEVICES: <NEW_LINE> <INDENT> device.update() <NEW_LINE> if device.name == 'Front Door Ding': <NEW_LINE> <INDENT> self.assertEqual('on', device.state) <NEW_LINE> self.assertEqual('America/New_York', device.device_state_attributes['timezone']) <NEW_LINE> <DEDENT> elif device.name == 'Front Door Motion': <NEW_LINE> <INDENT> self.assertEqual('off', device.state) <NEW_LINE> self.assertEqual('motion', device.device_class) <NEW_LINE> <DEDENT> self.assertIsNone(device.entity_picture) <NEW_LINE> self.assertEqual(ATTRIBUTION, device.device_state_attributes['attribution']) | Test the Ring Binary Sensor platform. | 62599042d4950a0f3b1117a9 |
class NodeClientFactory(pb.PBClientFactory): <NEW_LINE> <INDENT> node = None <NEW_LINE> def __init__(self, node, manager): <NEW_LINE> <INDENT> self.node = node <NEW_LINE> self.connection_manager = manager <NEW_LINE> pb.PBClientFactory.__init__(self) <NEW_LINE> <DEDENT> def clientConnectionLost(self, connector, reason): <NEW_LINE> <INDENT> with self.connection_manager._lock: <NEW_LINE> <INDENT> node = self.node <NEW_LINE> node.ref = None <NEW_LINE> for i in range(node.cores): <NEW_LINE> <INDENT> w_key = '%s:%s:%i' % (node.host, node.port, i) <NEW_LINE> del self.connection_manager.workers[w_key] <NEW_LINE> self.connection_manager.emit('WORKER_DISCONNECTED', w_key) <NEW_LINE> <DEDENT> <DEDENT> self.connection_manager.reconnect_nodes(True); <NEW_LINE> pb.PBClientFactory.clientConnectionLost(self, connector, reason) | Subclassing of PBClientFactory to add auto-reconnect via Master's reconnection code.
This factory is specific to the master acting as a client of a Node. | 62599042c432627299fa426a |
class FunctionalTests_AcmeDnsServerAccount(AppTest): <NEW_LINE> <INDENT> def _get_one(self): <NEW_LINE> <INDENT> focus_item = ( self.ctx.dbSession.query(model_objects.AcmeDnsServerAccount) .order_by(model_objects.AcmeDnsServerAccount.id.asc()) .first() ) <NEW_LINE> assert focus_item is not None <NEW_LINE> return focus_item, focus_item.id <NEW_LINE> <DEDENT> @routes_tested( ("admin:acme_dns_server_accounts", "admin:acme_dns_server_accounts_paginated") ) <NEW_LINE> def test_list_html(self): <NEW_LINE> <INDENT> res = self.testapp.get( "/.well-known/admin/acme-dns-server-accounts", status=200 ) <NEW_LINE> res = self.testapp.get( "/.well-known/admin/acme-dns-server-accounts/1", status=200 ) <NEW_LINE> <DEDENT> @routes_tested( ( "admin:acme_dns_server_accounts|json", "admin:acme_dns_server_accounts_paginated|json", ) ) <NEW_LINE> def test_list_json(self): <NEW_LINE> <INDENT> res = self.testapp.get( "/.well-known/admin/acme-dns-server-accounts.json", status=200 ) <NEW_LINE> assert "AcmeDnsServerAccounts" in res.json <NEW_LINE> res = self.testapp.get( "/.well-known/admin/acme-dns-server-accounts/1.json", status=200 ) <NEW_LINE> assert "AcmeDnsServerAccounts" in res.json <NEW_LINE> <DEDENT> @routes_tested(("admin:acme_dns_server_account:focus",)) <NEW_LINE> def test_focus_html(self): <NEW_LINE> <INDENT> (focus_item, focus_id) = self._get_one() <NEW_LINE> res = self.testapp.get( "/.well-known/admin/acme-dns-server-account/%s" % focus_id, status=200 ) <NEW_LINE> <DEDENT> @routes_tested(("admin:acme_dns_server_account:focus|json",)) <NEW_LINE> def test_focus_json(self): <NEW_LINE> <INDENT> (focus_item, focus_id) = self._get_one() <NEW_LINE> res = self.testapp.get( "/.well-known/admin/acme-dns-server-account/%s.json" % focus_id, status=200 ) <NEW_LINE> assert "AcmeDnsServerAccount" in res.json <NEW_LINE> assert res.json["AcmeDnsServerAccount"]["id"] == focus_item.id | python -m unittest tests.test_pyramid_app.FunctionalTests_AcmeDnsServerAccount | 6259904210dbd63aa1c71ea9 |
class HaystackFilter(BaseFilterBackend): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_request_filters(request): <NEW_LINE> <INDENT> return request.GET.copy() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def build_filter(view, filters=None): <NEW_LINE> <INDENT> terms = [] <NEW_LINE> exclude_terms = [] <NEW_LINE> if filters is None: <NEW_LINE> <INDENT> filters = {} <NEW_LINE> <DEDENT> for param, value in filters.items(): <NEW_LINE> <INDENT> excluding_term = False <NEW_LINE> param_parts = param.split("__") <NEW_LINE> base_param = param_parts[0] <NEW_LINE> negation_keyword = getattr(settings, "DRF_HAYSTACK_NEGATION_KEYWORD", "not") <NEW_LINE> if len(param_parts) > 1 and param_parts[1] == negation_keyword: <NEW_LINE> <INDENT> excluding_term = True <NEW_LINE> param = param.replace("__%s" % negation_keyword, "") <NEW_LINE> <DEDENT> if view.serializer_class: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if hasattr(view.serializer_class.Meta, "field_aliases"): <NEW_LINE> <INDENT> old_base = base_param <NEW_LINE> base_param = view.serializer_class.Meta.field_aliases.get(base_param, base_param) <NEW_LINE> param = param.replace(old_base, base_param) <NEW_LINE> <DEDENT> fields = getattr(view.serializer_class.Meta, "fields", []) <NEW_LINE> exclude = getattr(view.serializer_class.Meta, "exclude", []) <NEW_LINE> search_fields = getattr(view.serializer_class.Meta, "search_fields", []) <NEW_LINE> if ((fields or search_fields) and base_param not in chain(fields, search_fields)) or base_param in exclude or not value: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> raise ImproperlyConfigured("%s must implement a Meta class." % view.serializer_class.__class__.__name__) <NEW_LINE> <DEDENT> <DEDENT> tokens = [token.strip() for token in value.split(view.lookup_sep)] <NEW_LINE> field_queries = [] <NEW_LINE> for token in tokens: <NEW_LINE> <INDENT> if token: <NEW_LINE> <INDENT> field_queries.append(view.query_object((param, token))) <NEW_LINE> <DEDENT> <DEDENT> term = six.moves.reduce(operator.or_, filter(lambda x: x, field_queries)) <NEW_LINE> if excluding_term: <NEW_LINE> <INDENT> exclude_terms.append(term) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> terms.append(term) <NEW_LINE> <DEDENT> <DEDENT> terms = six.moves.reduce(operator.and_, filter(lambda x: x, terms)) if terms else [] <NEW_LINE> exclude_terms = six.moves.reduce(operator.and_, filter(lambda x: x, exclude_terms)) if exclude_terms else [] <NEW_LINE> return terms, exclude_terms <NEW_LINE> <DEDENT> def filter_queryset(self, request, queryset, view): <NEW_LINE> <INDENT> applicable_filters, applicable_exclusions = self.build_filter(view, filters=self.get_request_filters(request)) <NEW_LINE> if applicable_filters: <NEW_LINE> <INDENT> queryset = queryset.filter(applicable_filters) <NEW_LINE> <DEDENT> if applicable_exclusions: <NEW_LINE> <INDENT> queryset = queryset.exclude(applicable_exclusions) <NEW_LINE> <DEDENT> return queryset | A filter backend that compiles a haystack compatible
filtering query. | 6259904271ff763f4b5e8a71 |
class IUnauthorizedEvent(IObjectEvent): <NEW_LINE> <INDENT> pass | An Event that's called during the Challenge phase of the PAS process
Allows custom handling of access to the object | 6259904273bcbd0ca4bcb55c |
class Nature(TableBase): <NEW_LINE> <INDENT> __tablename__ = 'natures' <NEW_LINE> __singlename__ = 'nature' <NEW_LINE> id = Column(Integer, primary_key=True, nullable=False, info=dict(description="A numeric ID")) <NEW_LINE> identifier = Column(Unicode(8), nullable=False, info=dict(description="An identifier", format='identifier')) <NEW_LINE> decreased_stat_id = Column(Integer, ForeignKey('stats.id'), nullable=False, info=dict(description="ID of the stat that this nature decreases by 10% (if decreased_stat_id is the same, the effects cancel out)")) <NEW_LINE> increased_stat_id = Column(Integer, ForeignKey('stats.id'), nullable=False, info=dict(description="ID of the stat that this nature increases by 10% (if decreased_stat_id is the same, the effects cancel out)")) <NEW_LINE> hates_flavor_id = Column(Integer, ForeignKey('contest_types.id'), nullable=False, info=dict(description=u"ID of the Berry flavor the Pokémon hates (if likes_flavor_id is the same, the effects cancel out)")) <NEW_LINE> likes_flavor_id = Column(Integer, ForeignKey('contest_types.id'), nullable=False, info=dict(description=u"ID of the Berry flavor the Pokémon likes (if hates_flavor_id is the same, the effects cancel out)")) <NEW_LINE> @property <NEW_LINE> def is_neutral(self): <NEW_LINE> <INDENT> return self.increased_stat_id == self.decreased_stat_id | A nature a Pokémon can have, such as Calm or Brave
| 62599042d53ae8145f91972f |
class Scasb(X86InstructionBase): <NEW_LINE> <INDENT> def __init__(self, prefix, mnemonic, operands, architecture_mode): <NEW_LINE> <INDENT> super(Scasb, self).__init__(prefix, mnemonic, operands, architecture_mode) <NEW_LINE> <DEDENT> @property <NEW_LINE> def source_operands(self): <NEW_LINE> <INDENT> return [ ] <NEW_LINE> <DEDENT> @property <NEW_LINE> def destination_operands(self): <NEW_LINE> <INDENT> return [ ] | Representation of Scasb x86 instruction. | 6259904296565a6dacd2d8f3 |
class SQLiteClient(BaseRDMSClient): <NEW_LINE> <INDENT> data_table_cls = DataTable <NEW_LINE> result_cls = Result <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(SQLiteClient, self).__init__(*args, **kwargs) <NEW_LINE> self.connection = Database.connect(**self._conn_params) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _conn_params(self): <NEW_LINE> <INDENT> params = { 'database': self.query_server['name'], 'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES, } <NEW_LINE> if self.query_server['options']: <NEW_LINE> <INDENT> params.update(self.query_server['options']) <NEW_LINE> <DEDENT> params['check_same_thread'] = False <NEW_LINE> return params <NEW_LINE> <DEDENT> def use(self, database): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def execute_statement(self, statement): <NEW_LINE> <INDENT> cursor = self.connection.cursor() <NEW_LINE> cursor.execute(statement) <NEW_LINE> self.connection.commit() <NEW_LINE> if cursor.description: <NEW_LINE> <INDENT> columns = [column[0] for column in cursor.description] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> columns = [] <NEW_LINE> <DEDENT> return self.data_table_cls(cursor, columns) <NEW_LINE> <DEDENT> def get_databases(self): <NEW_LINE> <INDENT> return [self._conn_params['database']] <NEW_LINE> <DEDENT> def get_tables(self, database, table_names=[]): <NEW_LINE> <INDENT> cursor = self.connection.cursor() <NEW_LINE> query = "SELECT name FROM sqlite_master WHERE type='table'" <NEW_LINE> if table_names: <NEW_LINE> <INDENT> clause = ' OR '.join(["tablename LIKE '%%%(table)s%%'" % {'table': table} for table in table_names]) <NEW_LINE> query += ' AND (%s)' % clause <NEW_LINE> <DEDENT> cursor.execute(query) <NEW_LINE> self.connection.commit() <NEW_LINE> return [row[0] for row in cursor.fetchall()] <NEW_LINE> <DEDENT> def get_columns(self, database, table, names_only=True): <NEW_LINE> <INDENT> cursor = self.connection.cursor() <NEW_LINE> cursor.execute("PRAGMA table_info(%s)" % table) <NEW_LINE> self.connection.commit() <NEW_LINE> if names_only: <NEW_LINE> <INDENT> columns = [row[1] for row in cursor.fetchall()] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> columns = [dict(name=row[1], type=row[2], comment='') for row in cursor.fetchall()] <NEW_LINE> <DEDENT> return columns <NEW_LINE> <DEDENT> def get_sample_data(self, database, table, column=None, limit=100): <NEW_LINE> <INDENT> column = '`%s`' % column if column else '*' <NEW_LINE> statement = 'SELECT %s FROM `%s` LIMIT %d' % (column, table, limit) <NEW_LINE> return self.execute_statement(statement) | Same API as Beeswax | 62599042b57a9660fecd2d4d |
class threaded_cached_property(cached_property): <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> super(threaded_cached_property, self).__init__(func) <NEW_LINE> self.lock = threading.RLock() <NEW_LINE> <DEDENT> def __get__(self, obj, cls): <NEW_LINE> <INDENT> if obj is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> obj_dict = obj.__dict__ <NEW_LINE> name = self.func.__name__ <NEW_LINE> with self.lock: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return obj_dict[name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return obj_dict.setdefault(name, self.func(obj)) | A cached_property version for use in environments where multiple threads
might concurrently try to access the property. | 6259904223e79379d538d7d0 |
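A usage sketch for `threaded_cached_property`, assuming the `cached_property` base class it extends is importable; `Report` and `totals` are illustrative names, not from the dataset row.

```python
# Sketch: concurrent first access is serialized by the RLock, so the
# property body runs once. Report/totals are hypothetical names.
import threading

class Report:
    @threaded_cached_property
    def totals(self):
        return sum(range(10**6))  # expensive computation, cached after first call

r = Report()
threads = [threading.Thread(target=lambda: r.totals) for _ in range(4)]
for t in threads: t.start()
for t in threads: t.join()
```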
class UserProfile(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> phone_regex = RegexValidator(regex=r'(\+?\d{1,3})?[-\s]?[6789]\d{9}', message = "Phone number entered is invalid") <NEW_LINE> corporate_email = models.EmailField(max_length=255, unique=True) <NEW_LINE> email = models.EmailField(max_length=255, unique=True) <NEW_LINE> name = models.CharField(max_length=255) <NEW_LINE> phone_no = models.CharField(validators=[phone_regex], max_length=17, blank=True) <NEW_LINE> image = models.ImageField(upload_to="users/", null=True, blank=True) <NEW_LINE> is_active = models.BooleanField(default=True) <NEW_LINE> is_staff = models.BooleanField(default=False) <NEW_LINE> date_joined = models.DateTimeField(auto_now_add=True) <NEW_LINE> last_login = models.DateTimeField(null=True) <NEW_LINE> objects = UserProfileManager() <NEW_LINE> USERNAME_FIELD = 'email' <NEW_LINE> REQUIRED_FIELDS = ['corporate_email','name'] <NEW_LINE> def get_full_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_short_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.email | Represents the User Profile inside our system. | 6259904229b78933be26aa2c |
class TVBExporter(ABCExporter): <NEW_LINE> <INDENT> OPERATION_FOLDER_PREFIX = "Operation_" <NEW_LINE> def get_supported_types(self): <NEW_LINE> <INDENT> return [model.DataType] <NEW_LINE> <DEDENT> def get_label(self): <NEW_LINE> <INDENT> return "TVB Format" <NEW_LINE> <DEDENT> def export(self, data, export_folder, project): <NEW_LINE> <INDENT> download_file_name = self.get_export_file_name(data) <NEW_LINE> files_helper = FilesHelper() <NEW_LINE> if self.is_data_a_group(data): <NEW_LINE> <INDENT> all_datatypes = self._get_all_data_types_arr(data) <NEW_LINE> if all_datatypes is None or len(all_datatypes) == 0: <NEW_LINE> <INDENT> raise ExportException("Could not export a data type group with no data") <NEW_LINE> <DEDENT> zip_file = os.path.join(export_folder, download_file_name) <NEW_LINE> operation_folders = [] <NEW_LINE> for data_type in all_datatypes: <NEW_LINE> <INDENT> operation_folder = files_helper.get_operation_folder(project.name, data_type.fk_from_operation) <NEW_LINE> operation_folders.append(operation_folder) <NEW_LINE> <DEDENT> files_helper.zip_folders(zip_file, operation_folders, self.OPERATION_FOLDER_PREFIX) <NEW_LINE> return download_file_name, zip_file, True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> project_folder = files_helper.get_project_folder(project) <NEW_LINE> data_file = os.path.join(project_folder, data.get_storage_file_path()) <NEW_LINE> return download_file_name, data_file, False <NEW_LINE> <DEDENT> <DEDENT> def get_export_file_extension(self, data): <NEW_LINE> <INDENT> if self.is_data_a_group(data): <NEW_LINE> <INDENT> return "zip" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "h5" | This exporter simply provides for download data in TVB format | 625990428e05c05ec3f6f7c4 |
class Solution: <NEW_LINE> <INDENT> def mergeKLists(self, lists): <NEW_LINE> <INDENT> if not lists: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> hp = [] <NEW_LINE> for lst in lists: <NEW_LINE> <INDENT> if lst: <NEW_LINE> <INDENT> heapq.heappush(hp, (lst.val, lst)) <NEW_LINE> <DEDENT> <DEDENT> cur = dummy_head = ListNode(-1) <NEW_LINE> while hp: <NEW_LINE> <INDENT> val, node = heapq.heappop(hp) <NEW_LINE> cur.next = node <NEW_LINE> cur = cur.next <NEW_LINE> if node.next: <NEW_LINE> <INDENT> node = node.next <NEW_LINE> heapq.heappush(hp, (node.val, node)) <NEW_LINE> <DEDENT> <DEDENT> return dummy_head.next | @param lists: a list of ListNode
@return: The head of one sorted list. | 6259904207d97122c4217f72 |
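A driver sketch for the `mergeKLists` row; the `ListNode` class and `build()` helper below are assumptions matching the docstring, since the row imports its node type (and `heapq`) from elsewhere.

```python
# Sketch; ListNode and build() are hypothetical stand-ins for the source module's types.
import heapq

class ListNode:
    def __init__(self, val):
        self.val, self.next = val, None

def build(values):
    head = cur = ListNode(values[0])
    for v in values[1:]:
        cur.next = ListNode(v)
        cur = cur.next
    return head

merged = Solution().mergeKLists([build([1, 4, 7]), build([2, 5]), build([3, 6])])
while merged:
    print(merged.val)  # 1 2 3 4 5 6 7
    merged = merged.next
```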
class CommentPKView(generics.RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> queryset = Comment.objects.all().order_by("-create_at") <NEW_LINE> serializer_class = CommentSerializer <NEW_LINE> permission_classes = (IsAuthenticated, IsAuthor) | Interactions with the Comment Model that requires the comment PK
Comment deletion, update, or single retrieval | 6259904223e79379d538d7d1 |
class FileField(BaseField): <NEW_LINE> <INDENT> proxy_class = GridFSProxy <NEW_LINE> def __init__(self, db_alias=DEFAULT_CONNECTION_NAME, collection_name="fs", **kwargs): <NEW_LINE> <INDENT> super(FileField, self).__init__(**kwargs) <NEW_LINE> self.collection_name = collection_name <NEW_LINE> self.db_alias = db_alias <NEW_LINE> <DEDENT> def __get__(self, instance, owner): <NEW_LINE> <INDENT> if instance is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> grid_file = instance._data.get(self.name) <NEW_LINE> if not isinstance(grid_file, self.proxy_class): <NEW_LINE> <INDENT> grid_file = self.get_proxy_obj(key=self.name, instance=instance) <NEW_LINE> instance._data[self.name] = grid_file <NEW_LINE> <DEDENT> if not grid_file.key: <NEW_LINE> <INDENT> grid_file.key = self.name <NEW_LINE> grid_file.instance = instance <NEW_LINE> <DEDENT> return grid_file <NEW_LINE> <DEDENT> def __set__(self, instance, value): <NEW_LINE> <INDENT> key = self.name <NEW_LINE> if ((hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, str_types)): <NEW_LINE> <INDENT> grid_file = instance._data.get(self.name) <NEW_LINE> if grid_file: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> grid_file.delete() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> instance._data[key] = self.get_proxy_obj( key=key, instance=instance) <NEW_LINE> instance._data[key].put(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> instance._data[key] = value <NEW_LINE> <DEDENT> instance._mark_as_changed(key) <NEW_LINE> <DEDENT> def get_proxy_obj(self, key, instance, db_alias=None, collection_name=None): <NEW_LINE> <INDENT> if db_alias is None: <NEW_LINE> <INDENT> db_alias = self.db_alias <NEW_LINE> <DEDENT> if collection_name is None: <NEW_LINE> <INDENT> collection_name = self.collection_name <NEW_LINE> <DEDENT> return self.proxy_class(key=key, instance=instance, db_alias=db_alias, collection_name=collection_name) <NEW_LINE> <DEDENT> def to_mongo(self, value): <NEW_LINE> <INDENT> if isinstance(value, self.proxy_class) and value.grid_id is not None: <NEW_LINE> <INDENT> return value.grid_id <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def to_python(self, value): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> return self.proxy_class(value, collection_name=self.collection_name, db_alias=self.db_alias) <NEW_LINE> <DEDENT> <DEDENT> def validate(self, value): <NEW_LINE> <INDENT> if value.grid_id is not None: <NEW_LINE> <INDENT> if not isinstance(value, self.proxy_class): <NEW_LINE> <INDENT> self.error('FileField only accepts GridFSProxy values') <NEW_LINE> <DEDENT> if not isinstance(value.grid_id, ObjectId): <NEW_LINE> <INDENT> self.error('Invalid GridFSProxy value') | A GridFS storage field.
.. versionadded:: 0.4
.. versionchanged:: 0.5 added optional size param for read
.. versionchanged:: 0.6 added db_alias for multidb support | 62599042be383301e0254aeb |
class UserViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): <NEW_LINE> <INDENT> queryset = User.objects.all() <NEW_LINE> serializer_class = UserSerializer <NEW_LINE> permission_classes = (IsOwnerOrReadOnly,) <NEW_LINE> def create(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.serializer_class = CreateUserSerializer <NEW_LINE> self.permission_classes = (AllowAny,) <NEW_LINE> return super(UserViewSet, self).create(request, *args, **kwargs) | Creates, Updates, and retrieves User accounts | 6259904226238365f5fade2c |
class Context(object): <NEW_LINE> <INDENT> def __init__(self, key): <NEW_LINE> <INDENT> self.key = key <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> return self.key | Used for mocking up context type objects. | 6259904250485f2cf55dc258 |
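The mock resolves every missing attribute to its key, as the assertions in this sketch illustrate.

```python
# Sketch: any attribute lookup on the mock returns the stored key.
ctx = Context('sentinel')
assert ctx.anything == 'sentinel'
assert ctx.user_id == 'sentinel'
```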
class RHSimple(RH): <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> RH.__init__(self) <NEW_LINE> self.func = func <NEW_LINE> <DEDENT> def _process(self): <NEW_LINE> <INDENT> rv = self.func() <NEW_LINE> return rv <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def wrap_function(cls, func): <NEW_LINE> <INDENT> @wraps(func) <NEW_LINE> def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> return cls(partial(func, *args, **kwargs)).process() <NEW_LINE> <DEDENT> return wrapper | A simple RH that calls a function to build the response.
The preferred way to use this class is by using the
`RHSimple.wrap_function` decorator.
:param func: A function returning HTML | 6259904291af0d3eaad3b0f7 |
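A decorator sketch for `RHSimple.wrap_function`, assuming the `RH` base class from the surrounding codebase is importable; `serve_greeting` is a hypothetical name.

```python
# Sketch; assumes the RH base class used above is importable.
@RHSimple.wrap_function
def serve_greeting(name):
    return '<h1>Hello %s</h1>' % name

# Calling serve_greeting('world') wraps the call in an RHSimple and runs
# the full RH process() pipeline to build the response.
```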
class AdminaccountsaddpendinguserProvider(object): <NEW_LINE> <INDENT> openapi_types = { 'name': 'str' } <NEW_LINE> attribute_map = { 'name': 'name' } <NEW_LINE> def __init__(self, name=None): <NEW_LINE> <INDENT> self._name = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> if name is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `name`, must not be `None`") <NEW_LINE> <DEDENT> self._name = name <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, AdminaccountsaddpendinguserProvider): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually. | 62599042d6c5a102081e33f9 |
class KernelWrapper(object): <NEW_LINE> <INDENT> def __init__(self, wrapped): <NEW_LINE> <INDENT> self.wrapped=wrapped <NEW_LINE> <DEDENT> def __call__(self, inst1, inst2): <NEW_LINE> <INDENT> return self.wrapped(inst1, inst2) | A base class for kernel function wrappers.
:param wrapped: a kernel function to wrap | 62599042507cdc57c63a6070 |
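A usage sketch: wrapping a plain dot-product kernel in `KernelWrapper`.

```python
# Sketch: a dot-product kernel wrapped by the base class.
dot = KernelWrapper(lambda a, b: sum(x * y for x, y in zip(a, b)))
assert dot([1, 2], [3, 4]) == 11  # 1*3 + 2*4
```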
class TensorboardXWriter(EventWriter): <NEW_LINE> <INDENT> def __init__(self, log_dir: str, window_size: int = 20, **kwargs): <NEW_LINE> <INDENT> self._window_size = window_size <NEW_LINE> from tensorboardX import SummaryWriter <NEW_LINE> self._writer = SummaryWriter(log_dir, **kwargs) <NEW_LINE> self._last_write = -1 <NEW_LINE> <DEDENT> def write(self): <NEW_LINE> <INDENT> storage = get_event_storage() <NEW_LINE> new_last_write = self._last_write <NEW_LINE> for k, (v, iter) in storage.latest_with_smoothing_hint(self._window_size).items(): <NEW_LINE> <INDENT> if iter > self._last_write: <NEW_LINE> <INDENT> self._writer.add_scalar(k, v, iter) <NEW_LINE> new_last_write = max(new_last_write, iter) <NEW_LINE> <DEDENT> <DEDENT> self._last_write = new_last_write <NEW_LINE> if len(storage._vis_data) >= 1: <NEW_LINE> <INDENT> for img_name, img, step_num in storage._vis_data: <NEW_LINE> <INDENT> self._writer.add_image(img_name, img, step_num) <NEW_LINE> <DEDENT> storage.clear_images() <NEW_LINE> <DEDENT> if len(storage._histograms) >= 1: <NEW_LINE> <INDENT> for params in storage._histograms: <NEW_LINE> <INDENT> self._writer.add_histogram_raw(**params) <NEW_LINE> <DEDENT> storage.clear_histograms() <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> if hasattr(self, "_writer"): <NEW_LINE> <INDENT> self._writer.close() | Write all scalars to a tensorboard file. | 6259904273bcbd0ca4bcb55e |
class ClusterRunner(Runner): <NEW_LINE> <INDENT> default_time = '1-00:00:00' <NEW_LINE> default_steps = [ {'process_samples': {}}, {'combine_reads': {}}, {'run_uparse': {}}, {'otu_uparse.taxonomy_silva.assign': {}}, {'otu_uparse.taxonomy_silva.make_otu_table': {}}, {'otu_uparse.taxonomy_silva.make_otu_table_norm': {}}, {'otu_uparse.taxonomy_silva.make_plots': {}}, {'otu_uparse.taxonomy_silva.stats.nmds.run': {}}, {'otu_uparse.taxonomy_silva.make_filtered_centers': {}}, {'otu_uparse.taxonomy_silva.comp_phyla.make_taxa_table': {}}, {'otu_uparse.taxonomy_silva.comp_phyla.make_plots': {}}, {'otu_uparse.taxonomy_silva.comp_phyla.stats.nmds.run': {}}, {'otu_uparse.taxonomy_silva.comp_tips.make_taxa_table': {}}, {'otu_uparse.taxonomy_silva.comp_tips.make_plots': {}}, {'otu_uparse.seqenv.run': {}}, ] <NEW_LINE> def __init__(self, parent): <NEW_LINE> <INDENT> self.parent, self.cluster = parent, parent <NEW_LINE> self.samples = parent.samples <NEW_LINE> <DEDENT> def run_slurm(self, steps=None, **kwargs): <NEW_LINE> <INDENT> if self.cluster.name == 'test': <NEW_LINE> <INDENT> kwargs['time'] = '01:00:00' <NEW_LINE> kwargs['email'] = '/dev/null' <NEW_LINE> <DEDENT> command = ["steps = %s" % steps] <NEW_LINE> command += ["name = '%s'" % self.cluster.name] <NEW_LINE> command += ["cluster = getattr(illumitag.clustering.favorites, name)"] <NEW_LINE> command += ["cluster.run(steps)"] <NEW_LINE> if 'time' not in kwargs: kwargs['time'] = self.default_time <NEW_LINE> if 'email' not in kwargs: kwargs['email'] = None <NEW_LINE> if 'dependency' not in kwargs: kwargs['dependency'] = 'singleton' <NEW_LINE> job_name = "cluster_%s" % self.cluster.name <NEW_LINE> self.parent.slurm_job = SLURMJob(command, self.parent.p.logs_dir, job_name=job_name, **kwargs) <NEW_LINE> return self.parent.slurm_job.run() | Will run stuff on an cluster | 6259904245492302aabfd7af |
class SHA1Hasher(_CryptographyHasher): <NEW_LINE> <INDENT> ENCRYPT_FIRST = False <NEW_LINE> PROVIDER = hashes.SHA1() <NEW_LINE> def __init__(self, iv): <NEW_LINE> <INDENT> super(SHA1Hasher, self).__init__(iv) | SHA1 hash, full length digest. | 6259904273bcbd0ca4bcb55f |
class LatLongField(forms.MultiValueField): <NEW_LINE> <INDENT> widget = LatLongWidget <NEW_LINE> srid = 4326 <NEW_LINE> default_error_messages = { 'invalid_latitude': ('Enter a valid latitude.'), 'invalid_longitude': ('Enter a valid longitude.'), } <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> fields = (forms.FloatField(min_value=-90, max_value=90), forms.FloatField(min_value=-180, max_value=180)) <NEW_LINE> super(LatLongField, self).__init__(fields, *args, **kwargs) <NEW_LINE> <DEDENT> def compress(self, data_list): <NEW_LINE> <INDENT> if data_list: <NEW_LINE> <INDENT> if data_list[0] in validators.EMPTY_VALUES: <NEW_LINE> <INDENT> raise forms.ValidationError(self.error_messages['invalid_latitude']) <NEW_LINE> <DEDENT> if data_list[1] in validators.EMPTY_VALUES: <NEW_LINE> <INDENT> raise forms.ValidationError(self.error_messages['invalid_longitude']) <NEW_LINE> <DEDENT> srid_str = 'SRID=%d' % self.srid <NEW_LINE> point_str = 'POINT(%f %f)' % tuple(reversed(data_list)) <NEW_LINE> return ';'.join([srid_str, point_str]) <NEW_LINE> <DEDENT> return None | custom field that takes in a lat and long | 62599042a8ecb033258724e5 |
class TestInlineResponse20099PhoneNumbers(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testInlineResponse20099PhoneNumbers(self): <NEW_LINE> <INDENT> pass | InlineResponse20099PhoneNumbers unit test stubs | 62599042e76e3b2f99fd9cdf |
class FrictionData(clawdata.ClawData): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(FrictionData, self).__init__() <NEW_LINE> self.add_attribute('variable_friction',False) <NEW_LINE> self.add_attribute('friction_regions',[]) <NEW_LINE> self.add_attribute('friction_files',[]) <NEW_LINE> <DEDENT> def write(self, out_file='friction.data', data_source='setrun.py'): <NEW_LINE> <INDENT> self.open_data_file(out_file,data_source) <NEW_LINE> self.data_write('variable_friction',description="(method for setting variable friction)") <NEW_LINE> self.data_write() <NEW_LINE> if self.variable_friction: <NEW_LINE> <INDENT> self.data_write(value=len(self.friction_regions), alt_name='num_friction_regions', description="(Friction Regions)") <NEW_LINE> self.data_write() <NEW_LINE> for region in self.friction_regions: <NEW_LINE> <INDENT> self.data_write(value=region[0],alt_name="lower") <NEW_LINE> self.data_write(value=region[1],alt_name="upper") <NEW_LINE> self.data_write(value=region[2],alt_name="depths") <NEW_LINE> self.data_write(value=region[3],alt_name="manning_coefficients") <NEW_LINE> self.data_write() <NEW_LINE> <DEDENT> self.data_write(value=len(self.friction_files), alt_name='num_friction_files') <NEW_LINE> for friction_file in self.friction_files: <NEW_LINE> <INDENT> self._out_file.write("'%s' %s\n " % friction_file) <NEW_LINE> <DEDENT> <DEDENT> self.close_data_file() | Data class representing variable friction parameters and data sources | 6259904230dc7b76659a0b04 |
class TopicDetail(generics.RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> queryset = Topic.objects.all() <NEW_LINE> serializer_class = TopicSerializer | Retrieve, update or delete a topic instance | 6259904223849d37ff85238e |
class OperationResourceServiceSpecification(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationResourceMetricSpecification]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(OperationResourceServiceSpecification, self).__init__(**kwargs) <NEW_LINE> self.metric_specifications = kwargs.get('metric_specifications', None) | Service specification.
:param metric_specifications: List of metric specifications.
:type metric_specifications:
list[~azure.mgmt.storagesync.models.OperationResourceMetricSpecification] | 62599042596a897236128f19 |
class ExprCaseFilter(object): <NEW_LINE> <INDENT> implements(ICaseFilter) <NEW_LINE> def __init__(self, expr): <NEW_LINE> <INDENT> self.expr = expr <NEW_LINE> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> state = self.__dict__.copy() <NEW_LINE> state['_code'] = None <NEW_LINE> <DEDENT> def __setstate__(self, state): <NEW_LINE> <INDENT> self.__dict__.update(state) <NEW_LINE> self._code = compile(self._expr, '<string>', 'eval') <NEW_LINE> <DEDENT> @property <NEW_LINE> def expr(self): <NEW_LINE> <INDENT> return self._expr <NEW_LINE> <DEDENT> @expr.setter <NEW_LINE> def expr(self, value): <NEW_LINE> <INDENT> self._expr = value <NEW_LINE> try: <NEW_LINE> <INDENT> self._code = compile(self._expr, '<string>', 'eval') <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> raise type(exc)("Can't compile expression %r: %s" % (self._expr, exc)) <NEW_LINE> <DEDENT> <DEDENT> def select(self, seqno, case): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return eval(self._code, _EXPR_DICT, dict(seqno=seqno, case=case)) <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> raise type(exc)("Can't evaluate expression %r: %s" % (self._expr, exc)) | Select based on a boolean Python expression of the case data or ``seqno``.
Case data is accessed by ``case['name']``
expr: string
Boolean expression referring to the case data or ``seqno``.
Examples:
- select failed cases ``'case.msg'``.
- select first 3 cases: ``'seqno < 3'``.
- select case with 'param' between 2 and 2.5: ``'case["param"] > 2 and case["param"] < 2.5'``. | 62599042e64d504609df9d3b |
class IF(object): <NEW_LINE> <INDENT> def __init__(self, conditional, action = None, delete_clause = ()): <NEW_LINE> <INDENT> if type(conditional) == list and action == None: <NEW_LINE> <INDENT> return self.__init__(*conditional) <NEW_LINE> <DEDENT> if isinstance(action, str): <NEW_LINE> <INDENT> action = [ action ] <NEW_LINE> <DEDENT> self._conditional = conditional <NEW_LINE> self._action = action <NEW_LINE> self._delete_clause = delete_clause <NEW_LINE> <DEDENT> def apply(self, rules, apply_only_one=False, verbose=False): <NEW_LINE> <INDENT> new_rules = set(rules) <NEW_LINE> old_rules_count = len(new_rules) <NEW_LINE> bindings = RuleExpression().test_term_matches( self._conditional, new_rules) <NEW_LINE> for k in bindings: <NEW_LINE> <INDENT> for a in self._action: <NEW_LINE> <INDENT> new_rules.add( populate(a, k) ) <NEW_LINE> if len(new_rules) != old_rules_count: <NEW_LINE> <INDENT> if verbose: <NEW_LINE> <INDENT> print(("Rule:", self)) <NEW_LINE> print(("Added:", populate(a, k))) <NEW_LINE> <DEDENT> if apply_only_one: <NEW_LINE> <INDENT> return tuple(sorted(new_rules)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for d in self._delete_clause: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> new_rules.remove( populate(d, k) ) <NEW_LINE> if len(new_rules) != old_rules_count: <NEW_LINE> <INDENT> if verbose: <NEW_LINE> <INDENT> print(("Rule:", self)) <NEW_LINE> print(("Deleted:", populate(d, k))) <NEW_LINE> <DEDENT> if apply_only_one: <NEW_LINE> <INDENT> return tuple(sorted(new_rules)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return tuple(sorted(new_rules)) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "IF(%s, %s)" % (str(self._conditional), str(self._action)) <NEW_LINE> <DEDENT> def antecedent(self): <NEW_LINE> <INDENT> return self._conditional <NEW_LINE> <DEDENT> def consequent(self): <NEW_LINE> <INDENT> return self._action <NEW_LINE> <DEDENT> __repr__ = __str__ | A conditional rule.
This should have the form IF( antecedent, THEN(consequent) ),
or IF( antecedent, THEN(consequent), DELETE(delete_clause) ).
The antecedent is an expression or AND/OR tree with variables
in it, determining under what conditions the rule can fire.
The consequent is an expression or list of expressions that
will be added when the rule fires. Variables can be filled in
from the antecedent.
The delete_clause is an expression or list of expressions
that will be deleted when the rule fires. Again, variables
can be filled in from the antecedent. | 62599042097d151d1a2c233d |
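A construction-only sketch for `IF`; `apply()` is not exercised because it depends on the `RuleExpression` and `populate` helpers that are not shown in this row.

```python
# Sketch: construction and __str__ only; apply() needs helpers not shown here.
rule = IF('(?x) is a bird', '(?x) can fly')
print(rule)  # IF((?x) is a bird, ['(?x) can fly'])
```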
class Graph: <NEW_LINE> <INDENT> def __init__(self, filename=None): <NEW_LINE> <INDENT> self._graph = [] <NEW_LINE> if filename: <NEW_LINE> <INDENT> with open(filename, "r") as fw: <NEW_LINE> <INDENT> rows = fw.read().splitlines() <NEW_LINE> for row in rows: <NEW_LINE> <INDENT> self._graph.append([int(each) for each in row.split(" ")]) <NEW_LINE> <DEDENT> <DEDENT> self._graph = np.array(self._graph) <NEW_LINE> <DEDENT> <DEDENT> def visualize(self): <NEW_LINE> <INDENT> self._adj_matrix = nx.from_numpy_matrix(self._graph) <NEW_LINE> nx.draw_networkx(self._adj_matrix, with_labels=True) <NEW_LINE> plt.show() <NEW_LINE> <DEDENT> def repr(self): <NEW_LINE> <INDENT> print(self._graph) <NEW_LINE> <DEDENT> def bfs(self, source_node): <NEW_LINE> <INDENT> edges = nx.bfs_edges(nx.from_numpy_matrix(self._graph), source_node) <NEW_LINE> return edges | This class represents the graph, which is read from a file.
The file contains the adjacency matrix representation of the graph. | 625990426fece00bbacccc87 |
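An input-format sketch for `Graph`: a whitespace-separated adjacency matrix, one row per line. This assumes a NetworkX version that still provides `from_numpy_matrix` (removed in 3.0), which `bfs` and `visualize` rely on; `triangle.txt` is a hypothetical file name.

```python
# Sketch; triangle.txt is a hypothetical input file.
with open('triangle.txt', 'w') as fw:
    fw.write('0 1 1\n1 0 1\n1 1 0\n')

g = Graph('triangle.txt')
g.repr()                 # prints the 3x3 numpy adjacency matrix
edges = list(g.bfs(0))   # BFS edges from node 0, e.g. [(0, 1), (0, 2)]
```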
class Payline(models.Model): <NEW_LINE> <INDENT> def __unicode__(self): <NEW_LINE> <INDENT> return str(self.id) <NEW_LINE> <DEDENT> id = models.AutoField(primary_key=True, db_column='iPAYLINE_ID') <NEW_LINE> pay_run = models.ForeignKey('PayRun', db_column='iPAY_RUN_ID', null=True, default=0) <NEW_LINE> cycle = models.ForeignKey('Cycle', db_column='iCYCLE_ID') <NEW_LINE> finance_code = models.ForeignKey('FinanceCode', db_column='iFINANCE_CODE_ID') <NEW_LINE> location = models.ForeignKey('Location', db_column='iLOCATION_ID') <NEW_LINE> contractor = models.ForeignKey('Contractor', db_column='iUSER_ID', verbose_name='Payroll Number') <NEW_LINE> manual = models.BooleanField(db_column='bDELETEABLE', default=True) <NEW_LINE> held = models.BooleanField(db_column='bHELD', default=False) <NEW_LINE> calculated = models.BooleanField(db_column='bCALCULATED', default=False) <NEW_LINE> dummy = models.BooleanField(db_column='bDUMMY', default=False) <NEW_LINE> perm_held = models.BooleanField(db_column='bPERM_HELD', default=False) <NEW_LINE> active = models.BooleanField(db_column='bACTIVE', default=True) <NEW_LINE> value = models.FloatField(db_column='nVALUE') <NEW_LINE> gst_value = models.FloatField(db_column='nGST_VALUE', null=True, blank=True) <NEW_LINE> details = models.TextField(db_column='vcDETAILS') <NEW_LINE> grouping = models.CharField(max_length=50, db_column='vcGROUPING') <NEW_LINE> reference_payline = models.ForeignKey('self', db_column='iREFERENCE_PAY_ID', null=True) <NEW_LINE> external_record_id = models.IntegerField(db_column='iEXTERNAL_RECORD_ID', null=True, blank=True, verbose_name='Pay Id:') <NEW_LINE> application_id = models.CharField(max_length=8000, db_column='vcAPPLICATION_ID', null=True, blank=True) <NEW_LINE> query_string = models.CharField(max_length=8000, db_column='vcQUERY_STRING') <NEW_LINE> last_altered = models.DateTimeField(editable=False, auto_now=True, db_column='dLAST_ALTERED') <NEW_LINE> last_altered_by = models.CharField(max_length=50, editable=False, db_column='vcLAST_ALTERED_BY') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = '[pays].[XTB_Payline]' <NEW_LINE> application_name = 'pays' <NEW_LINE> alias = 'default' <NEW_LINE> ordering = ['contractor__payroll', '-cycle__id', 'finance_code__description', 'application_id', 'details'] <NEW_LINE> app_label = 'pays.models' <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def payroll_file(cycle_id, project): <NEW_LINE> <INDENT> cur = connections[project+'_pays_default'].cursor() <NEW_LINE> cur.execute('exec [pays].[XSP_Get_Aurion_Payroll_File] @cycleId=\'{0}\';commit'.format(int(cycle_id))) <NEW_LINE> results = dictfetchall(cur) <NEW_LINE> cur.close() <NEW_LINE> return results | * [pays].[XTB_Payline]
* default alias | 6259904215baa72349463267 |
class Stats(): <NEW_LINE> <INDENT> def __init__(self, vals): <NEW_LINE> <INDENT> self.__calcStats(vals) <NEW_LINE> self.vals = vals <NEW_LINE> <DEDENT> def __calcStats(self, vals): <NEW_LINE> <INDENT> self.N = len(vals) <NEW_LINE> self.min, self.max = np.min(vals), np.max(vals) <NEW_LINE> p = np.percentile(vals, [25, 50, 75]) <NEW_LINE> self.q25, self.median, self.q75 = p[0], p[1], p[2] <NEW_LINE> self.mean = np.mean(vals) <NEW_LINE> self.std = np.std(vals) <NEW_LINE> self.var = np.var(vals) <NEW_LINE> <DEDENT> def percLess(self, val): <NEW_LINE> <INDENT> cnt = 0.0 <NEW_LINE> for v in self.vals: <NEW_LINE> <INDENT> if v <= val: cnt += 1 <NEW_LINE> <DEDENT> return cnt/self.N <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'N %d, min %.4f , q25 %.4f, med %.4f, avg %.4f, q75 %.4f, max %.4f ; std %.4f' % (self.N, self.min, self.q25, self.median, self.mean, self.q75, self.max, self.std) | Calculates, stores and renders for printing statistical measures
of a list of numbers. | 625990421d351010ab8f4df4 |
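A usage sketch for `Stats` (assumes NumPy, as the row itself does).

```python
# Sketch of the Stats row above.
s = Stats([1.0, 2.0, 3.0, 4.0, 5.0])
print(s)              # N 5, min 1.0000, ..., med 3.0000, avg 3.0000, ...
print(s.percLess(3))  # 0.6 -- three of the five values are <= 3
```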
class TestExtensionsCollector(object): <NEW_LINE> <INDENT> def setup(self): <NEW_LINE> <INDENT> self.workbench = Workbench() <NEW_LINE> self.workbench.register(CoreManifest()) <NEW_LINE> self.workbench.register(ErrorsManifest()) <NEW_LINE> self.workbench.register(ExtensionManifest()) <NEW_LINE> <DEDENT> def test_registation1(self, exopy_qtbot): <NEW_LINE> <INDENT> c = Contributor1() <NEW_LINE> self.workbench.register(c) <NEW_LINE> plugin = self.workbench.get_plugin(PLUGIN_ID) <NEW_LINE> assert 'contrib1.contrib' in plugin.contribs.contributions <NEW_LINE> self.workbench.unregister(c.id) <NEW_LINE> assert 'contrib1.contrib' not in plugin.contribs.contributions <NEW_LINE> assert not plugin.contribs._extensions <NEW_LINE> plugin.contribs.stop() <NEW_LINE> assert not plugin.contribs.contributions <NEW_LINE> <DEDENT> def test_registration2(self, exopy_qtbot): <NEW_LINE> <INDENT> self.workbench.register(Contributor2()) <NEW_LINE> plugin = self.workbench.get_plugin(PLUGIN_ID) <NEW_LINE> c = Contributor1() <NEW_LINE> self.workbench.register(c) <NEW_LINE> assert 'contrib1.contrib' in plugin.contribs.contributions <NEW_LINE> self.workbench.unregister(c.id) <NEW_LINE> assert 'contrib1.contrib' not in plugin.contribs.contributions <NEW_LINE> <DEDENT> def test_factory(self, exopy_qtbot): <NEW_LINE> <INDENT> c = Contributor2() <NEW_LINE> self.workbench.register(c) <NEW_LINE> plugin = self.workbench.get_plugin(PLUGIN_ID) <NEW_LINE> assert 'contrib2.contrib' in plugin.contribs.contributions <NEW_LINE> self.workbench.unregister(c.id) <NEW_LINE> assert 'contrib2.contrib' not in plugin.contribs.contributions <NEW_LINE> <DEDENT> @pytest.mark.ui <NEW_LINE> def test_errors1(self, exopy_qtbot): <NEW_LINE> <INDENT> self.workbench.register(Contributor1()) <NEW_LINE> self.workbench.register(Contributor1(id='bis')) <NEW_LINE> self.workbench.register(Contributor1(id='ter')) <NEW_LINE> with handle_dialog(exopy_qtbot): <NEW_LINE> <INDENT> self.workbench.get_plugin(PLUGIN_ID) <NEW_LINE> <DEDENT> <DEDENT> @pytest.mark.ui <NEW_LINE> def test_check_errors2(self, exopy_qtbot): <NEW_LINE> <INDENT> self.workbench.register(Contributor3()) <NEW_LINE> with handle_dialog(exopy_qtbot): <NEW_LINE> <INDENT> self.workbench.get_plugin(PLUGIN_ID) <NEW_LINE> <DEDENT> <DEDENT> def test_check_errors3(self, exopy_qtbot): <NEW_LINE> <INDENT> self.workbench.register(Contributor4()) <NEW_LINE> with handle_dialog(exopy_qtbot): <NEW_LINE> <INDENT> self.workbench.get_plugin(PLUGIN_ID) <NEW_LINE> <DEDENT> <DEDENT> def test_declared_by(self): <NEW_LINE> <INDENT> c = Contributor1() <NEW_LINE> self.workbench.register(c) <NEW_LINE> plugin = self.workbench.get_plugin(PLUGIN_ID) <NEW_LINE> assert plugin.contribs.contributed_by('contrib1.contrib') is c.extensions[0] | Test the ExtensionsCollector behaviour.
| 6259904215baa72349463268 |
class RangeUnsatisfiable(HTTPException): <NEW_LINE> <INDENT> code = 416 <NEW_LINE> description = ( '<p>The server cannot satisfy the request range(s).</p>' ) | *416* `Range Unsatisfiable`
The status code returned if the server is unable to satisfy the requested range. | 6259904282261d6c5273082f
class PlugVIPPort(BaseNetworkTask): <NEW_LINE> <INDENT> def execute(self, amphora, amphorae_network_config): <NEW_LINE> <INDENT> vrrp_port = amphorae_network_config.get(amphora.id).vrrp_port <NEW_LINE> LOG.debug('Plugging VIP VRRP port ID: %(port_id)s into compute ' 'instance: %(compute_id)s.', {'port_id': vrrp_port.id, 'compute_id': amphora.compute_id}) <NEW_LINE> self.network_driver.plug_port(amphora, vrrp_port) <NEW_LINE> <DEDENT> def revert(self, result, amphora, amphorae_network_config, *args, **kwargs): <NEW_LINE> <INDENT> vrrp_port = None <NEW_LINE> try: <NEW_LINE> <INDENT> vrrp_port = amphorae_network_config.get(amphora.id).vrrp_port <NEW_LINE> self.network_driver.unplug_port(amphora, vrrp_port) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> LOG.warning('Failed to unplug vrrp port: %(port)s from amphora: ' '%(amp)s', {'port': vrrp_port.id, 'amp': amphora.id}) | Task to plug a VIP into a compute instance. | 6259904245492302aabfd7b1 |
class CollectAndDestroyGraftingNet(nn.Module): <NEW_LINE> <INDENT> def __init__(self, screen_channels, screen_resolution): <NEW_LINE> <INDENT> super(CollectAndDestroyGraftingNet, self).__init__() <NEW_LINE> self.conv_select = nn.Conv2d(screen_channels, 16, kernel_size=(5, 5), stride=1, padding=2) <NEW_LINE> self.conv_collect = nn.Conv2d(screen_channels, 16, kernel_size=(5, 5), stride=1, padding=2) <NEW_LINE> self.conv_destroy = nn.Conv2d(screen_channels, 16, kernel_size=(5, 5), stride=1, padding=2) <NEW_LINE> self.conv2 = nn.Conv2d(49, 32, kernel_size=(3, 3), stride=1, padding=1) <NEW_LINE> self.select_unit_policy = nn.Conv2d(32, 1, kernel_size=(1, 1)) <NEW_LINE> self.collect_policy = nn.Conv2d(32, 1, kernel_size=(1, 1)) <NEW_LINE> self.destroy_policy = nn.Conv2d(32, 1, kernel_size=(1, 1)) <NEW_LINE> self.non_spatial_branch = nn.Linear(screen_resolution[0] * screen_resolution[1] * 32, 256) <NEW_LINE> self.value = nn.Linear(256, 1) <NEW_LINE> self.select_task_policy = nn.Linear(256, 2) <NEW_LINE> <DEDENT> def forward(self, x, action_features, type): <NEW_LINE> <INDENT> select_x = F.relu(self.conv_select(x)) <NEW_LINE> collect_x = F.relu(self.conv_collect(x)) <NEW_LINE> destroy_x = F.relu(self.conv_destroy(x)) <NEW_LINE> concat_feature_layers = torch.cat([select_x, collect_x, destroy_x, action_features], dim=1) <NEW_LINE> x = F.relu(self.conv2(concat_feature_layers)) <NEW_LINE> if type == 0: <NEW_LINE> <INDENT> select_unit_branch = self.select_unit_policy(x) <NEW_LINE> select_unit_branch = select_unit_branch.view(select_unit_branch.shape[0], -1) <NEW_LINE> spatial_prob = nn.functional.softmax(select_unit_branch, dim=1) <NEW_LINE> <DEDENT> elif type == 1: <NEW_LINE> <INDENT> collect_branch = self.collect_policy(x) <NEW_LINE> collect_branch = collect_branch.view(collect_branch.shape[0], -1) <NEW_LINE> spatial_prob = nn.functional.softmax(collect_branch, dim=1) <NEW_LINE> <DEDENT> elif type == 2: <NEW_LINE> <INDENT> destroy_branch = self.destroy_policy(x) <NEW_LINE> destroy_branch = destroy_branch.view(destroy_branch.shape[0], -1) <NEW_LINE> spatial_prob = nn.functional.softmax(destroy_branch, dim=1) <NEW_LINE> <DEDENT> non_spatial_represenatation = F.relu(self.non_spatial_branch(x.view(-1))) <NEW_LINE> value = self.value(non_spatial_represenatation) <NEW_LINE> select_task_prob = F.softmax(self.select_task_policy(non_spatial_represenatation)) <NEW_LINE> return spatial_prob, value, select_task_prob | Multitask model with 1 shared hidden layers
task1:
select unit and task policy
task2:
collect mineral shards
task3:
attack buildings | 62599042379a373c97d9a300 |
class DNSHandler(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.cache = LRUCache(1000) <NEW_LINE> <DEDENT> def get(self, host, default=None): <NEW_LINE> <INDENT> addr = self.cache.get(host, None) <NEW_LINE> if addr: <NEW_LINE> <INDENT> return addr <NEW_LINE> <DEDENT> addrinfo = socket.getaddrinfo(host, 80, 0, 0, socket.SOL_TCP) <NEW_LINE> af, socktype, proto, canonname, sockaddr = addrinfo[0] <NEW_LINE> self.cache[host] = sockaddr[0] <NEW_LINE> return sockaddr[0] | Cache DNS Names - this is plugged into the Tornado Async Client | 625990423eb6a72ae038b937 |
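A hedged usage sketch for the DNSHandler row; ``LRUCache`` is assumed to come from a cache library such as pylru, and the hostname is illustrative:
>>> dns = DNSHandler()
>>> addr = dns.get('example.com')   # resolved once via getaddrinfo, then served from the LRU cache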
class Talea(AbjadValueObject): <NEW_LINE> <INDENT> __documentation_section__ = 'Specifiers' <NEW_LINE> __slots__ = ( '_counts', '_denominator', ) <NEW_LINE> def __init__( self, counts=(1,), denominator=16, ): <NEW_LINE> <INDENT> counts = self._to_tuple(counts) <NEW_LINE> assert isinstance(counts, tuple) <NEW_LINE> assert all(isinstance(x, int) for x in counts) <NEW_LINE> self._counts = counts <NEW_LINE> assert mathtools.is_nonnegative_integer_power_of_two(denominator) <NEW_LINE> self._denominator = denominator <NEW_LINE> <DEDENT> def __eq__(self, expr): <NEW_LINE> <INDENT> from abjad.tools import systemtools <NEW_LINE> return systemtools.StorageFormatManager.compare(self, expr) <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> counts = datastructuretools.CyclicTuple(self.counts) <NEW_LINE> if isinstance(item, int): <NEW_LINE> <INDENT> count = counts[item] <NEW_LINE> return mathtools.NonreducedFraction(count, self.denominator) <NEW_LINE> <DEDENT> elif isinstance(item, slice): <NEW_LINE> <INDENT> counts = counts[item] <NEW_LINE> result = [mathtools.NonreducedFraction(count, self.denominator) for count in counts] <NEW_LINE> return result <NEW_LINE> <DEDENT> raise ValueError(item) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> from abjad.tools import systemtools <NEW_LINE> hash_values = systemtools.StorageFormatManager.get_hash_values(self) <NEW_LINE> return hash(hash_values) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for count in self.counts: <NEW_LINE> <INDENT> duration = durationtools.Duration(count, self.denominator) <NEW_LINE> yield duration <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.counts) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _to_tuple(expr): <NEW_LINE> <INDENT> if isinstance(expr, list): <NEW_LINE> <INDENT> expr = tuple(expr) <NEW_LINE> <DEDENT> return expr <NEW_LINE> <DEDENT> @property <NEW_LINE> def counts(self): <NEW_LINE> <INDENT> return self._counts <NEW_LINE> <DEDENT> @property <NEW_LINE> def denominator(self): <NEW_LINE> <INDENT> return self._denominator | Talea.
.. container:: example
::
>>> talea = rhythmmakertools.Talea(
... counts=(2, 1, 3, 2, 4, 1, 1),
... denominator=16,
... )
.. container:: example
::
>>> talea[2]
NonreducedFraction(3, 16)
.. container:: example
::
>>> for nonreduced_fraction in talea[3:9]:
... nonreduced_fraction
...
NonreducedFraction(2, 16)
NonreducedFraction(4, 16)
NonreducedFraction(1, 16)
NonreducedFraction(1, 16)
NonreducedFraction(2, 16)
NonreducedFraction(1, 16) | 625990428a349b6b4368751f |
class SuitcaseCopyError(SuitcaseException): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> self.message_prefix = "Suitcase File Copy Error:" <NEW_LINE> SuitcaseException.__init__(self, value) | Exception raised when there's a problem with copying files | 62599042d7e4931a7ef3d34c |
class ParamType(str, Enum): <NEW_LINE> <INDENT> ini_cndtn = "initial condition" <NEW_LINE> param = "parameter" | These are the possible values of the ``param_type`` column in the ``parameters``
table of the ``simulations.db`` database. | 62599042b830903b9686ede5
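Because ``ParamType`` subclasses both ``str`` and ``Enum``, members round-trip through their database strings — a quick sketch:
>>> ParamType.param.value
'parameter'
>>> ParamType('initial condition') is ParamType.ini_cndtn
True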
class _CacheControl(UpdateDictMixin, dict): <NEW_LINE> <INDENT> no_cache = cache_property('no-cache', '*', None) <NEW_LINE> no_store = cache_property('no-store', None, bool) <NEW_LINE> max_age = cache_property('max-age', -1, int) <NEW_LINE> no_transform = cache_property('no-transform', None, None) <NEW_LINE> def __init__(self, values=(), on_update=None): <NEW_LINE> <INDENT> dict.__init__(self, values or ()) <NEW_LINE> self.on_update = on_update <NEW_LINE> self.provided = values is not None <NEW_LINE> <DEDENT> def _get_cache_value(self, key, empty, type): <NEW_LINE> <INDENT> if type is bool: <NEW_LINE> <INDENT> return key in self <NEW_LINE> <DEDENT> if key in self: <NEW_LINE> <INDENT> value = self[key] <NEW_LINE> if value is None: <NEW_LINE> <INDENT> return empty <NEW_LINE> <DEDENT> elif type is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = type(value) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return value <NEW_LINE> <DEDENT> <DEDENT> def _set_cache_value(self, key, value, type): <NEW_LINE> <INDENT> if type is bool: <NEW_LINE> <INDENT> if value: <NEW_LINE> <INDENT> self[key] = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.pop(key, None) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> self.pop(key) <NEW_LINE> <DEDENT> elif value is True: <NEW_LINE> <INDENT> self[key] = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self[key] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _del_cache_value(self, key): <NEW_LINE> <INDENT> if key in self: <NEW_LINE> <INDENT> del self[key] <NEW_LINE> <DEDENT> <DEDENT> def to_header(self): <NEW_LINE> <INDENT> return dump_header(self) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.to_header() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<%s %r>' % ( self.__class__.__name__, self.to_header() ) | Subclass of a dict that stores values for a Cache-Control header. It
has accessors for all the cache-control directives specified in RFC 2616.
The class does not differentiate between request and response directives.
Because the cache-control directives in the HTTP header use dashes, the
Python descriptors use underscores instead.
To get a header of the :class:`CacheControl` object again you can convert
the object into a string or call the :meth:`to_header` method. If you plan
to subclass it and add your own items, have a look at the source code for
that class.
.. versionchanged:: 0.4
Setting `no_cache` or `private` to boolean `True` will set the implicit
none-value which is ``*``:
>>> cc = ResponseCacheControl()
>>> cc.no_cache = True
>>> cc
<ResponseCacheControl 'no-cache'>
>>> cc.no_cache
'*'
>>> cc.no_cache = None
>>> cc
<ResponseCacheControl ''>
In versions before 0.5 the behavior documented here affected the now
no longer existing `CacheControl` class. | 6259904216aa5153ce4017c4 |
class ServerErrorMockGovDelivery(MockGovDelivery): <NEW_LINE> <INDENT> def handle(self, method, *args, **kwargs): <NEW_LINE> <INDENT> response = super(ServerErrorMockGovDelivery, self).handle( method, *args, **kwargs ) <NEW_LINE> response.status_code = 500 <NEW_LINE> return response | Mock class for testing the GovDelivery API.
Behaves like MockGovDelivery but returns a failing response that contains
an HTTP status code of 500 | 625990428c3a8732951f7831 |
class TestQtProgressBar(QtTestAssistant, progress_bar.TestProgressBar): <NEW_LINE> <INDENT> def get_value(self, widget): <NEW_LINE> <INDENT> return widget.value() <NEW_LINE> <DEDENT> def get_minimum(self, widget): <NEW_LINE> <INDENT> return widget.minimum() <NEW_LINE> <DEDENT> def get_maximum(self, widget): <NEW_LINE> <INDENT> return widget.maximum() | QtProgressBar tests.
| 6259904271ff763f4b5e8a77 |
class NT_FE_CONSOLE_PROPS(object): <NEW_LINE> <INDENT> def __new__(cls): <NEW_LINE> <INDENT> raise Exception('This class just for typing, can not be instanced!') <NEW_LINE> <DEDENT> @property <NEW_LINE> def Signature(self)->'Any': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> def CodePage(self)->'Any': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> def Size(self)->'Any': <NEW_LINE> <INDENT> pass | Dictionary containing information for a NT_FE_CONSOLE_PROPS struct | 6259904230c21e258be99ade |
class FunctionNameExistsError(Error): <NEW_LINE> <INDENT> def __init__(self,message): <NEW_LINE> <INDENT> self.message = message | Raised when an attempt is made to create a function with a non-unique name. | 6259904282261d6c52730830
class LiveQueryJob(BaseQueryJob): <NEW_LINE> <INDENT> def __init__(self, query_id, base_url, repository, user_token): <NEW_LINE> <INDENT> super().__init__(query_id, base_url, repository, user_token) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> headers = self._default_user_headers <NEW_LINE> endpoint = "dataspaces/{}/queryjobs/{}".format(self.repository, self.query_id) <NEW_LINE> self.webcaller.call_rest("delete", endpoint, headers) <NEW_LINE> <DEDENT> except HumioHTTPException: <NEW_LINE> <INDENT> pass | Manages a live queryjob | 6259904245492302aabfd7b3 |
class Ratings(models.Model): <NEW_LINE> <INDENT> article = models.ForeignKey(Article, on_delete=models.CASCADE) <NEW_LINE> """user_id of the person who rates the article has to be stored""" <NEW_LINE> user_id = models.IntegerField() <NEW_LINE> """this column takes the rating/score given by a user for an article.""" <NEW_LINE> rating = models.IntegerField() | This class enables authenticated users to rate articles on a scale of 1 to 5
and average ratings to be returned for every article. It also allows authenticated
users to re-rate articles. | 6259904273bcbd0ca4bcb563 |
class ILinkIntegrityNotificationException(Interface): <NEW_LINE> <INDENT> pass | an exception indicating a prevented link integrity breach | 6259904223e79379d538d7d6 |
class Array(Type): <NEW_LINE> <INDENT> def __init__(self, length, baseType): <NEW_LINE> <INDENT> self.length = length <NEW_LINE> self.baseType = baseType <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, Array): <NEW_LINE> <INDENT> return self.length == other.length and self.baseType == other.baseType <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def encode(self): <NEW_LINE> <INDENT> return "[{0}{1}]".format(self.length, self.baseType.encode()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Array({0!r}, {1!r})".format(self.length, self.baseType) <NEW_LINE> <DEDENT> def sizeof(self, is64bit=False): <NEW_LINE> <INDENT> return self.baseType.sizeof(is64bit) * self.length <NEW_LINE> <DEDENT> def alignof(self, is64bit=False): <NEW_LINE> <INDENT> return self.baseType.alignof(is64bit) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash((self.length, self.baseType)) | Fixed-length array type. An array is a contiguously allocated memory for a
single base type.
.. attribute:: length
Length of the array. It could be zero.
.. attribute:: baseType
The base :class:`Type` of the array.
| 62599042b57a9660fecd2d54 |
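A usage sketch for the Array row, assuming a companion scalar ``Type`` from the same type system — here a hypothetical ``Int32`` whose ``sizeof()`` returns 4 and whose ``encode()`` returns ``'i'``:
>>> arr = Array(10, Int32())
>>> arr.sizeof()
40
>>> arr.encode()
'[10i]'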
class EnvironmentList(collections.MutableSequence): <NEW_LINE> <INDENT> def __init__(self, environment): <NEW_LINE> <INDENT> env = os.getenv(environment) <NEW_LINE> self.env = [] if env is None else env.split(os.pathsep) <NEW_LINE> self.name = environment <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '{}'.format(self.env) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{}({}({}))'.format(self.__class__.__name__, self.name, self.env) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.env) <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> return self.env[item] <NEW_LINE> <DEDENT> def __setitem__(self, idx, item): <NEW_LINE> <INDENT> self.env[idx] = str(item) <NEW_LINE> self.update() <NEW_LINE> <DEDENT> def __delitem__(self, item): <NEW_LINE> <INDENT> self.env.remove(str(item)) <NEW_LINE> self.update() <NEW_LINE> <DEDENT> def insert(self, idx, value): <NEW_LINE> <INDENT> self.env.insert(idx, str(value)) <NEW_LINE> self.update() <NEW_LINE> <DEDENT> def append(self, value): <NEW_LINE> <INDENT> if str(value) in self.env: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.env.append(str(value)) <NEW_LINE> <DEDENT> self.update() <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> os.environ[self.name] = os.pathsep.join(self.env) | Converts the system environment | 625990428a43f66fc4bf346a |
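A minimal sketch for the EnvironmentList row — every mutation re-joins the list and writes it back into ``os.environ``:
>>> paths = EnvironmentList('PATH')
>>> paths.append('/opt/tools/bin')   # no-op if the entry is already present
>>> os.environ['PATH'].split(os.pathsep)[-1]
'/opt/tools/bin'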
class PermissionAcceptedRequest(Request): <NEW_LINE> <INDENT> deserialized_types = { 'object_type': 'str', 'request_id': 'str', 'timestamp': 'datetime', 'locale': 'str', 'body': 'ask_sdk_model.events.skillevents.permission_body.PermissionBody', 'event_creation_time': 'datetime', 'event_publishing_time': 'datetime' } <NEW_LINE> attribute_map = { 'object_type': 'type', 'request_id': 'requestId', 'timestamp': 'timestamp', 'locale': 'locale', 'body': 'body', 'event_creation_time': 'eventCreationTime', 'event_publishing_time': 'eventPublishingTime' } <NEW_LINE> def __init__(self, request_id=None, timestamp=None, locale=None, body=None, event_creation_time=None, event_publishing_time=None): <NEW_LINE> <INDENT> self.__discriminator_value = "AlexaSkillEvent.SkillPermissionAccepted" <NEW_LINE> self.object_type = self.__discriminator_value <NEW_LINE> super(PermissionAcceptedRequest, self).__init__(object_type=self.__discriminator_value, request_id=request_id, timestamp=timestamp, locale=locale) <NEW_LINE> self.body = body <NEW_LINE> self.event_creation_time = event_creation_time <NEW_LINE> self.event_publishing_time = event_publishing_time <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.deserialized_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x.value if isinstance(x, Enum) else x, value )) <NEW_LINE> <DEDENT> elif isinstance(value, Enum): <NEW_LINE> <INDENT> result[attr] = value.value <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else (item[0], item[1].value) if isinstance(item[1], Enum) else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, PermissionAcceptedRequest): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | :param request_id: Represents the unique identifier for the specific request.
:type request_id: (optional) str
:param timestamp: Provides the date and time when Alexa sent the request as an ISO 8601 formatted string. Used to verify the request when hosting your skill as a web service.
:type timestamp: (optional) datetime
:param locale: A string indicating the user’s locale. For example: en-US. This value is only provided with certain request types.
:type locale: (optional) str
:param body:
:type body: (optional) ask_sdk_model.events.skillevents.permission_body.PermissionBody
:param event_creation_time:
:type event_creation_time: (optional) datetime
:param event_publishing_time:
:type event_publishing_time: (optional) datetime | 62599042d164cc6175822250 |
@implementer(ICommandLineScript) <NEW_LINE> class CAScript(object): <NEW_LINE> <INDENT> def main(self, reactor, options): <NEW_LINE> <INDENT> if options.subCommand is not None: <NEW_LINE> <INDENT> return maybeDeferred(options.subOptions.run) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return options.opt_help() | Command-line script for ``flocker-ca``. | 6259904223e79379d538d7d7 |
class BidirectionalCallback(object): <NEW_LINE> <INDENT> swagger_types = { 'send_alert_actions': 'bool', 'bidirectional_callback_type': 'str' } <NEW_LINE> attribute_map = { 'send_alert_actions': 'sendAlertActions', 'bidirectional_callback_type': 'bidirectional-callback-type' } <NEW_LINE> def __init__(self, send_alert_actions=None, bidirectional_callback_type=None): <NEW_LINE> <INDENT> self._send_alert_actions = None <NEW_LINE> self._bidirectional_callback_type = None <NEW_LINE> self.discriminator = None <NEW_LINE> if send_alert_actions is not None: <NEW_LINE> <INDENT> self.send_alert_actions = send_alert_actions <NEW_LINE> <DEDENT> if bidirectional_callback_type is not None: <NEW_LINE> <INDENT> self.bidirectional_callback_type = bidirectional_callback_type <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def send_alert_actions(self): <NEW_LINE> <INDENT> return self._send_alert_actions <NEW_LINE> <DEDENT> @send_alert_actions.setter <NEW_LINE> def send_alert_actions(self, send_alert_actions): <NEW_LINE> <INDENT> self._send_alert_actions = send_alert_actions <NEW_LINE> <DEDENT> @property <NEW_LINE> def bidirectional_callback_type(self): <NEW_LINE> <INDENT> return self._bidirectional_callback_type <NEW_LINE> <DEDENT> @bidirectional_callback_type.setter <NEW_LINE> def bidirectional_callback_type(self, bidirectional_callback_type): <NEW_LINE> <INDENT> allowed_values = ["datadog-callback", "circonus-callback", "connect-wise-callback", "desk-callback", "es-watcher-callback", "freshdesk-callback", "hipchat-callback", "marid-callback", "logic-monitor-callback", "mattermost-callback", "slack-callback", "solarwinds-web-helpdesk-callback", "stackdriver-callback", "status-io-callback"] <NEW_LINE> if bidirectional_callback_type not in allowed_values: <NEW_LINE> <INDENT> raise ValueError( "Invalid value for `bidirectional_callback_type` ({0}), must be one of {1}" .format(bidirectional_callback_type, allowed_values) ) <NEW_LINE> <DEDENT> self._bidirectional_callback_type = bidirectional_callback_type <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, BidirectionalCallback): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62599042097d151d1a2c2341 |
class ApplicationGatewayBackendAddressPool(SubResource): <NEW_LINE> <INDENT> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'backend_ip_configurations': {'key': 'properties.backendIPConfigurations', 'type': '[NetworkInterfaceIPConfiguration]'}, 'backend_addresses': {'key': 'properties.backendAddresses', 'type': '[ApplicationGatewayBackendAddress]'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } <NEW_LINE> def __init__(self, id=None, backend_ip_configurations=None, backend_addresses=None, provisioning_state=None, name=None, etag=None, type=None): <NEW_LINE> <INDENT> super(ApplicationGatewayBackendAddressPool, self).__init__(id=id) <NEW_LINE> self.backend_ip_configurations = backend_ip_configurations <NEW_LINE> self.backend_addresses = backend_addresses <NEW_LINE> self.provisioning_state = provisioning_state <NEW_LINE> self.name = name <NEW_LINE> self.etag = etag <NEW_LINE> self.type = type | Backend Address Pool of an application gateway.
:param id: Resource ID.
:type id: str
:param backend_ip_configurations: Collection of references to IPs defined
in network interfaces.
:type backend_ip_configurations:
list[~azure.mgmt.network.v2017_06_01.models.NetworkInterfaceIPConfiguration]
:param backend_addresses: Backend addresses
:type backend_addresses:
list[~azure.mgmt.network.v2017_06_01.models.ApplicationGatewayBackendAddress]
:param provisioning_state: Provisioning state of the backend address pool
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param name: Resource that is unique within a resource group. This name
can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
:param type: Type of the resource.
:type type: str | 62599042d4950a0f3b1117ad |
class CreateDemandCallback(object): <NEW_LINE> <INDENT> def __init__(self, locations): <NEW_LINE> <INDENT> self.matrix = locations <NEW_LINE> <DEDENT> def Demand(self, from_node, to_node): <NEW_LINE> <INDENT> return self.matrix[from_node][3] | Create callback to get demands at each location. | 62599042d99f1b3c44d06977 |
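A small sketch for the CreateDemandCallback row — each location row carries its demand in column 3, and the ``to_node`` argument is ignored; the data below is illustrative:
>>> locations = [[40.7, -74.0, 'depot', 0], [40.8, -73.9, 'stop', 7]]
>>> demand = CreateDemandCallback(locations)
>>> demand.Demand(1, 0)
7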
class PkgResourcesDirDiscoveryRegistry(ModuleAutoDiscoveryRegistry): <NEW_LINE> <INDENT> def _discover_module(self, pkg): <NEW_LINE> <INDENT> if resource_isdir(pkg, self.module_name): <NEW_LINE> <INDENT> for filename in resource_listdir(pkg, self.module_name): <NEW_LINE> <INDENT> self.register(os.path.join( os.path.dirname(import_string(pkg).__file__), self.module_name, filename) ) | Specialized ``ModuleAutoDiscoveryRegistry`` that will search a list of
Python packages in an ``ImportPathRegistry`` or ``ModuleRegistry`` for
a specific resource directory and register all files found in the
directories. By default the list of Python packages is read from the
``packages`` registry namespace. | 6259904250485f2cf55dc25e |
class ListGroupsForUserResultSet(ResultSet): <NEW_LINE> <INDENT> def getJSONFromString(self, str): <NEW_LINE> <INDENT> return json.loads(str) <NEW_LINE> <DEDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None) | A ResultSet with methods tailored to the values returned by the ListGroupsForUser Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution. | 62599042ec188e330fdf9b74 |
class Application: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def run(): <NEW_LINE> <INDENT> TestApp.run() <NEW_LINE> film_repo = FilmFileRepository("filme.txt") <NEW_LINE> client_repo = ClientFileRepository("clienti.txt") <NEW_LINE> rent_repo = RentFileRepository("rents.txt") <NEW_LINE> film_validator = FilmValidator() <NEW_LINE> client_validator = ClientValidator() <NEW_LINE> rent_validator = RentValidator() <NEW_LINE> film_ctr = FilmController(film_repo, film_validator) <NEW_LINE> client_ctr = ClientController(client_repo, client_validator) <NEW_LINE> rent_ctr = RentController(rent_repo, film_repo, client_repo, rent_validator) <NEW_LINE> console = Console(film_ctr, client_ctr, rent_ctr) <NEW_LINE> console.run() | Builder for the application | 6259904221bff66bcd723f44 |
class Solid(aocutils.brep.base.BaseObject): <NEW_LINE> <INDENT> def __init__(self, topods_solid): <NEW_LINE> <INDENT> if not isinstance(topods_solid, OCC.TopoDS.TopoDS_Solid): <NEW_LINE> <INDENT> msg = 'need a TopoDS_Solid, got a %s' % topods_solid.__class__ <NEW_LINE> logger.critical(msg) <NEW_LINE> raise aocutils.exceptions.WrongTopologicalType(msg) <NEW_LINE> <DEDENT> assert not topods_solid.IsNull() <NEW_LINE> aocutils.brep.base.BaseObject.__init__(self, topods_solid, 'solid') <NEW_LINE> <DEDENT> @property <NEW_LINE> def topods_solid(self): <NEW_LINE> <INDENT> return self._wrapped_instance <NEW_LINE> <DEDENT> def shells(self): <NEW_LINE> <INDENT> return (aocutils.brep.shell.Shell(sh) for sh in aocutils.topology.Topo(self._wrapped_instance)) | Solid class
Parameters
----------
topods_solid : OCC.TopoDS.TopoDS_Solid | 62599042d6c5a102081e33ff |
class LoggingTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_init(self): <NEW_LINE> <INDENT> d = RunInTempDir() <NEW_LINE> dfs = _DelayFileStream('foo') <NEW_LINE> self.assertEqual(dfs.filename, os.path.join(d.tmpdir, 'foo')) <NEW_LINE> self.assertEqual(dfs.stream, None) <NEW_LINE> self.assertEqual(os.path.exists('foo'), False) <NEW_LINE> dfs.flush() <NEW_LINE> self.assertEqual(os.path.exists('foo'), False) <NEW_LINE> print >> dfs, "test text" <NEW_LINE> dfs.flush() <NEW_LINE> self.assertEqual(os.path.exists('foo'), True) <NEW_LINE> contents = open('foo').read() <NEW_LINE> self.assertEqual(contents, 'test text\n') | Check job logging | 62599042c432627299fa426e |
class _NotFound(HTTPError): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> status = '404 Not Found' <NEW_LINE> headers = {'Content-Type': 'text/html'} <NEW_LINE> HTTPError.__init__(self, status, headers, _Pretty.handle404()) | `404 Not Found` error. | 6259904207d97122c4217f79 |
class TaskQueue(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.prio_task_cnt = [0] * 10 <NEW_LINE> self.prio_task_list = [[] for _ in range(10)] <NEW_LINE> self.total_task_cnt = 0 <NEW_LINE> <DEDENT> def normalize_priority(self, score): <NEW_LINE> <INDENT> return int(score * 1000) % 10 <NEW_LINE> <DEDENT> def en_queue(self, task): <NEW_LINE> <INDENT> task.time_enqueue = time.time() <NEW_LINE> pri = self.normalize_priority(task.score) <NEW_LINE> self.prio_task_list[pri].append(task) <NEW_LINE> self.prio_task_cnt[pri] += 1 <NEW_LINE> self.total_task_cnt += 1 <NEW_LINE> <DEDENT> def de_queue(self): <NEW_LINE> <INDENT> for pri in range(9, -1, -1): <NEW_LINE> <INDENT> if self.prio_task_cnt[pri] != 0: <NEW_LINE> <INDENT> self.prio_task_cnt[pri] -= 1 <NEW_LINE> self.total_task_cnt -= 1 <NEW_LINE> page = self.prio_task_list[pri].pop(0) <NEW_LINE> page.time_dequeue = time.time() <NEW_LINE> page.queue_duration = page.time_dequeue - page.time_enqueue <NEW_LINE> return page <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> self.prio_task_cnt = [0] * 10 <NEW_LINE> self.prio_task_list = [[] for _ in range(10)] | Global crawler task queue shared by the worker and the page crawler.
Input: the page crawler writes tasks into the queue
Output: the worker fetches a task and assigns it to a page crawler | 625990424e696a045264e78e
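A hedged usage sketch for the TaskQueue row, assuming task objects expose a ``score`` float that the normalizer maps into one of ten priority buckets:
>>> tq = TaskQueue()
>>> tq.en_queue(task)      # task.score decides the bucket
>>> page = tq.de_queue()   # highest non-empty bucket is drained first
>>> tq.total_task_cnt
0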
class ChangeBrotliSettingRequest(JDCloudRequest): <NEW_LINE> <INDENT> def __init__(self, parameters, header=None, version="v1"): <NEW_LINE> <INDENT> super(ChangeBrotliSettingRequest, self).__init__( '/zones/{zone_identifier}/settings$$brotli', 'PATCH', header, version) <NEW_LINE> self.parameters = parameters | When the client requesting an asset supports the brotli compression algorithm, Starshield (星盾) will serve the brotli-compressed version of the asset. | 6259904282261d6c52730831
class PacketReceiver(StateController): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._recvd_count = None <NEW_LINE> StateController.__init__(self) <NEW_LINE> <DEDENT> def get_received_pps(self): <NEW_LINE> <INDENT> return self._recvd_count / self.get_running_time() | Base class for any packet receivers. | 6259904245492302aabfd7b5 |
class FrameworkFactory(object): <NEW_LINE> <INDENT> __singleton = None <NEW_LINE> @classmethod <NEW_LINE> def get_framework(cls, properties=None): <NEW_LINE> <INDENT> if cls.__singleton is None: <NEW_LINE> <INDENT> cls.__singleton = Framework(properties) <NEW_LINE> <DEDENT> return cls.__singleton <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def is_framework_running(cls, framework=None): <NEW_LINE> <INDENT> if framework is None: <NEW_LINE> <INDENT> return cls.__singleton is not None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return cls.__singleton == framework <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def delete_framework(cls, framework): <NEW_LINE> <INDENT> if cls.__singleton is framework: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> framework.stop() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> _logger.exception("Error stopping the framework") <NEW_LINE> <DEDENT> bundles = framework.get_bundles() <NEW_LINE> for bundle in bundles: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> bundle.uninstall() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> _logger.exception("Error uninstalling bundle %s", bundle.get_symbolic_name()) <NEW_LINE> <DEDENT> <DEDENT> framework._dispatcher.clear() <NEW_LINE> cls.__singleton = None <NEW_LINE> return True <NEW_LINE> <DEDENT> return False | A framework factory | 6259904273bcbd0ca4bcb565 |
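A minimal sketch of the FrameworkFactory singleton contract from the row above:
>>> fw = FrameworkFactory.get_framework()
>>> fw is FrameworkFactory.get_framework()   # repeated calls return the same instance
True
>>> FrameworkFactory.delete_framework(fw)    # stops the framework and clears the singleton
True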
class FoodEaten(UniverseEvent): <NEW_LINE> <INDENT> def __init__(self, food_pos): <NEW_LINE> <INDENT> self.food_pos = food_pos <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'FoodEaten(%s)' % repr(self.food_pos) | Signifies that food has been eaten.
Parameters
----------
food_pos : tuple of (int, int)
position of the eaten food | 6259904223e79379d538d7d8 |
class HiveQueryTask(BaseHadoopJobTask): <NEW_LINE> <INDENT> def query(self): <NEW_LINE> <INDENT> raise RuntimeError("Must implement query!") <NEW_LINE> <DEDENT> def hiverc(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def job_runner(self): <NEW_LINE> <INDENT> return HiveQueryRunner() | Task to run a hive query
| 625990428a43f66fc4bf346c |
class ExecutorRequirement(PyEnum): <NEW_LINE> <INDENT> RECONSTRUCTABLE_PIPELINE = "RECONSTRUCTABLE_PIPELINE" <NEW_LINE> RECONSTRUCTABLE_JOB = "RECONSTRUCTABLE_PIPELINE" <NEW_LINE> NON_EPHEMERAL_INSTANCE = "NON_EPHEMERAL_INSTANCE" <NEW_LINE> PERSISTENT_OUTPUTS = "PERSISTENT_OUTPUTS" | An ExecutorDefinition can include a list of requirements that the system uses to
check whether the executor will be able to work for a particular job/pipeline execution. | 6259904226068e7796d4dc20 |
class Merge(AFNICommand): <NEW_LINE> <INDENT> _cmd = '3dmerge' <NEW_LINE> input_spec = MergeInputSpec <NEW_LINE> output_spec = AFNICommandOutputSpec | Merge or edit volumes using AFNI 3dmerge command
For complete details, see the `3dmerge Documentation.
<https://afni.nimh.nih.gov/pub/dist/doc/program_help/3dmerge.html>`_
Examples
========
>>> from nipype.interfaces import afni
>>> merge = afni.Merge()
>>> merge.inputs.in_files = ['functional.nii', 'functional2.nii']
>>> merge.inputs.blurfwhm = 4
>>> merge.inputs.doall = True
>>> merge.inputs.out_file = 'e7.nii'
>>> merge.cmdline # doctest: +IGNORE_UNICODE
'3dmerge -1blur_fwhm 4 -doall -prefix e7.nii functional.nii functional2.nii'
>>> res = merge.run() # doctest: +SKIP | 6259904223849d37ff852395 |
class Migration(migrations.Migration): <NEW_LINE> <INDENT> dependencies = [ ('djangae_contenttypes', '0001_patch_contenttypes_migrations'), ('contenttypes', '0001_initial'), ] <NEW_LINE> operations = [ AlterFieldInOtherApp( app_label='contenttypes', model_name='contenttype', name='id', field=models.BigIntegerField(auto_created=True, blank=True, primary_key=True, serialize=False, verbose_name='ID'), ), ] | Migration that changes the `id` field in the DJANGO ContentType app, so that foreign keys
which point to it will allow 64 bit ints. This then allows those foreign keys to work
with the IDs returned by our SimulatedContentTypeManager. | 625990426fece00bbacccc8d |
class TestWorker(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def run_test(container_id, value): <NEW_LINE> <INDENT> cmd = ['docker exec -it {0} echo "I am container {0}!, this is message: {1}"' .format(container_id, value)] <NEW_LINE> process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) <NEW_LINE> for line in process.stdout: <NEW_LINE> <INDENT> print(line.decode('utf-8')[:-2]) <NEW_LINE> <DEDENT> process.wait() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def container(container_id): <NEW_LINE> <INDENT> while not TEST_QUEUE.empty(): <NEW_LINE> <INDENT> LOCK.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> value = TEST_QUEUE.get(block=False) <NEW_LINE> time.sleep(0.5) <NEW_LINE> <DEDENT> except queue.Empty: <NEW_LINE> <INDENT> print("Queue empty ):") <NEW_LINE> LOCK.release() <NEW_LINE> return <NEW_LINE> <DEDENT> print("\nProcessing: {0}\n".format(value)) <NEW_LINE> LOCK.release() <NEW_LINE> TestWorker.run_test(container_id, value) | This class is executed by each container | 62599042d6c5a102081e3401
class Architecture(object): <NEW_LINE> <INDENT> command_base = 'architecture' | Manipulates Foreman's architecture. | 62599042004d5f362081f953 |
class FixedIntervalLoopingCall(LoopingCallBase): <NEW_LINE> <INDENT> def start(self, interval, initial_delay=None): <NEW_LINE> <INDENT> self._running = True <NEW_LINE> done = event.Event() <NEW_LINE> def _inner(): <NEW_LINE> <INDENT> if initial_delay: <NEW_LINE> <INDENT> greenthread.sleep(initial_delay) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> while self._running: <NEW_LINE> <INDENT> start = _ts() <NEW_LINE> self.f(*self.args, **self.kw) <NEW_LINE> end = _ts() <NEW_LINE> if not self._running: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> delay = end - start - interval <NEW_LINE> if delay > 0: <NEW_LINE> <INDENT> LOG.warning(_LW('task %(func_name)r run outlasted ' 'interval by %(delay).2f sec'), {'func_name': self.f, 'delay': delay}) <NEW_LINE> <DEDENT> greenthread.sleep(-delay if delay < 0 else 0) <NEW_LINE> <DEDENT> <DEDENT> except LoopingCallDone as e: <NEW_LINE> <INDENT> self.stop() <NEW_LINE> done.send(e.retvalue) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> LOG.exception(_LE('in fixed duration looping call')) <NEW_LINE> done.send_exception(*sys.exc_info()) <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> done.send(True) <NEW_LINE> <DEDENT> <DEDENT> self.done = done <NEW_LINE> greenthread.spawn_n(_inner) <NEW_LINE> return self.done | A fixed interval looping call. | 625990426e29344779b0192f |
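A hedged usage sketch for the FixedIntervalLoopingCall row, assuming the oslo-style ``LoopingCallBase`` constructor (not shown in this row) that stores the callable and its arguments:
>>> def poll_status():
...     pass                                      # hypothetical periodic work
>>> timer = FixedIntervalLoopingCall(poll_status)
>>> done = timer.start(interval=5.0, initial_delay=1.0)
>>> timer.stop()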
class NeuralNetwork: <NEW_LINE> <INDENT> def __init__(self, __input_nodes, __hidden_nodes, __output_nodes, __learning_rate): <NEW_LINE> <INDENT> self.i_nodes = __input_nodes <NEW_LINE> self.h_nodes = __hidden_nodes <NEW_LINE> self.o_nodes = __output_nodes <NEW_LINE> self.lr = __learning_rate <NEW_LINE> self.w_ih = numpy.random.normal(0.0, pow(self.h_nodes, -0.5), (self.h_nodes, self.i_nodes)) <NEW_LINE> self.w_ho = numpy.random.normal(0.0, pow(self.o_nodes, -0.5), (self.o_nodes, self.h_nodes)) <NEW_LINE> self.activation_function = lambda x: scipy.special.expit(x) <NEW_LINE> <DEDENT> def query(self, inputs_list): <NEW_LINE> <INDENT> __inputs = numpy.array(inputs_list, ndmin=2).T <NEW_LINE> hidden_inputs = numpy.dot(self.w_ih, __inputs) <NEW_LINE> hidden_outputs = self.activation_function(hidden_inputs) <NEW_LINE> final_inputs = numpy.dot(self.w_ho, hidden_outputs) <NEW_LINE> final_outputs = self.activation_function(final_inputs) <NEW_LINE> return [__inputs, hidden_outputs, final_outputs] <NEW_LINE> <DEDENT> def train(self, inputs_list, targets_list): <NEW_LINE> <INDENT> signals = self.query(inputs_list) <NEW_LINE> __inputs, hidden_outputs, final_outputs = signals <NEW_LINE> __targets = numpy.array(targets_list, ndmin=2).T <NEW_LINE> output_errors = __targets - final_outputs <NEW_LINE> hidden_errors = numpy.dot(self.w_ho.T, output_errors) <NEW_LINE> self.w_ho += self.lr * numpy.dot(output_errors * final_outputs * (1 - final_outputs), numpy.transpose(hidden_outputs)) <NEW_LINE> self.w_ih += self.lr * numpy.dot(hidden_errors * hidden_outputs * (1 - hidden_outputs), numpy.transpose(__inputs)) | Artificial neural network | 625990420fa83653e46f61b6
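A small usage sketch for the NeuralNetwork row — a 3-4-2 network trained on a single sample, with targets kept inside (0, 1) to suit the sigmoid output:
>>> nn = NeuralNetwork(3, 4, 2, 0.1)
>>> nn.train([0.9, 0.1, 0.2], [0.99, 0.01])
>>> inputs, hidden, outputs = nn.query([0.9, 0.1, 0.2])
>>> outputs.shape
(2, 1)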
class NamsTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_first_last_name(self): <NEW_LINE> <INDENT> formatted_name = get_formatted_name('janis', 'joplin') <NEW_LINE> self.assertEqual(formatted_name, 'Janis Joplin') <NEW_LINE> <DEDENT> def test_first_last_middle_name(self): <NEW_LINE> <INDENT> formatted_name = get_formatted_name('janis', 'joplin', 'lourla') <NEW_LINE> self.assertEqual(formatted_name, 'Janis Lourla Joplin') <NEW_LINE> <DEDENT> def test_first_last_name_failour(self): <NEW_LINE> <INDENT> formatted_name = get_formatted_name('janis', 'joplin', 'lourla') <NEW_LINE> self.assertNotEqual(formatted_name, 'Janis lourla Joplin') | Tests for name_function.py | 6259904273bcbd0ca4bcb567
class AuthMetadataPluginCallback(six.with_metaclass(abc.ABCMeta)): <NEW_LINE> <INDENT> def __call__(self, metadata, error): <NEW_LINE> <INDENT> raise NotImplementedError() | Callback object received by a metadata plugin. | 62599042711fe17d825e160b |
class RelaxAndPhononWork(Work): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def from_scf_input(cls, scf_input, volumes, ngqpt, with_becs, optcell, ionmov, edos_ngkpt=None): <NEW_LINE> <INDENT> work = cls() <NEW_LINE> work.initial_scf_input = scf_input <NEW_LINE> work.ngqpt = np.reshape(ngqpt, (3,)) <NEW_LINE> work.edos_ngkpt = edos_ngkpt if edos_ngkpt is None else np.reshape(edos_ngkpt, (3,)) <NEW_LINE> work.volumes = np.array(volumes) <NEW_LINE> work.with_becs = with_becs <NEW_LINE> relax_template = scf_input.deepcopy() <NEW_LINE> relax_template.pop_tolerances() <NEW_LINE> relax_template.set_vars(tolvrs=1e-10, toldff=1.e-6, optcell=optcell, ionmov=ionmov) <NEW_LINE> if optcell is not None and optcell != 0: <NEW_LINE> <INDENT> relax_template.set_vars_ifnotin(ecutsm=0.5, dilatmx=1.05) <NEW_LINE> <DEDENT> work.relax_tasks_vol = [] <NEW_LINE> for new_volume in work.volumes: <NEW_LINE> <INDENT> new_structure = relax_template.structure.scale_lattice(new_volume) <NEW_LINE> new_input = relax_template.new_with_structure(new_structure) <NEW_LINE> task = work.register_relax_task(new_input) <NEW_LINE> work.relax_tasks_vol.append(task) <NEW_LINE> <DEDENT> work.ph_works_vol = len(volumes) * [None] <NEW_LINE> work.edos_work = None <NEW_LINE> return work <NEW_LINE> <DEDENT> def on_all_ok(self): <NEW_LINE> <INDENT> if self.edos_ngkpt is not None: self.edos_work = Work() <NEW_LINE> for ivol, task in enumerate(self.relax_tasks_vol): <NEW_LINE> <INDENT> relaxed_structure = task.get_final_structure() <NEW_LINE> scf_input = self.initial_scf_input.new_with_structure(relaxed_structure) <NEW_LINE> ph_work = PhononWork.from_scf_input(scf_input, self.ngqpt, is_ngqpt=True, tolerance=None, with_becs=self.with_becs, ddk_tolerance=None) <NEW_LINE> self.ph_works_vol[ivol] = ph_work <NEW_LINE> if self.edos_ngkpt is not None: <NEW_LINE> <INDENT> edos_input = scf_input.make_edos_input(self.edos_ngkpt) <NEW_LINE> self.edos_work.register_nscf_task(edos_input, deps={ph_work[0]: "DEN"}) <NEW_LINE> <DEDENT> self.flow.register_work(ph_work) <NEW_LINE> <DEDENT> if self.edos_ngkpt is not None: self.flow.register_work(self.edos_work) <NEW_LINE> self.flow.allocate() <NEW_LINE> self.flow.build_and_pickle_dump() <NEW_LINE> return super().on_all_ok() | This work performs a structural relaxation for different volumes, then it uses
the relaxed structures to compute phonons, BECS and the dielectric tensor with DFPT.
.. rubric:: Inheritance Diagram
.. inheritance-diagram:: RelaxAndPhononWork | 6259904223849d37ff852397 |
class EnumLogLevel(object): <NEW_LINE> <INDENT> DEBUG = 10 <NEW_LINE> INFO = 20 <NEW_LINE> Environment = 21 <NEW_LINE> ENVIRONMENT = Environment <NEW_LINE> RESOURCE = 22 <NEW_LINE> WARNING = 30 <NEW_LINE> ERROR = 40 <NEW_LINE> ASSERT = 41 <NEW_LINE> CRITICAL = 60 <NEW_LINE> APPCRASH = 61 <NEW_LINE> TESTTIMEOUT = 62 <NEW_LINE> RESNOTREADY = 69 | Log levels
| 6259904226068e7796d4dc22 |
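The levels are plain integers, so they order and compare as usual — a quick sketch:
>>> EnumLogLevel.APPCRASH > EnumLogLevel.ERROR
True
>>> EnumLogLevel.ENVIRONMENT == EnumLogLevel.Environment
True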
class FeedView(View): <NEW_LINE> <INDENT> def dispatch(self, request, city_name, rubric_name=None, filter_name='last'): <NEW_LINE> <INDENT> rubric = None <NEW_LINE> if rubric_name: <NEW_LINE> <INDENT> rubric = get_object_or_404(ArticleRubric.objects.select_related('donc_data'), name=rubric_name, city__name=city_name) <NEW_LINE> <DEDENT> context = { 'rubric': rubric, 'group_filter': filter_name, 'adplate': AdStartPlate(city_name).get_context() } <NEW_LINE> return render(request, 'gorod/feed.html', context) | Show the list of articles for a given city and rubric. | 62599042d53ae8145f91973a
class SetGraderRedirectView(LoginRequiredMixin, generic.RedirectView): <NEW_LINE> <INDENT> pattern_name = "submissions:grading" <NEW_LINE> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> SetGraderFormset = modelformset_factory(Feedback, form=forms.SetGraderMeForm) <NEW_LINE> formset = SetGraderFormset(request.POST) <NEW_LINE> if formset.is_valid(): <NEW_LINE> <INDENT> for form in formset.forms: <NEW_LINE> <INDENT> if form.cleaned_data.get("check_this"): <NEW_LINE> <INDENT> feedback_obj = form.save(commit=False) <NEW_LINE> feedback_obj.grader = request.user <NEW_LINE> feedback_obj.save() <NEW_LINE> <DEDENT> <DEDENT> messages.success(request, "Submissions added to my grading list") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> messages.error(request, "Invalid form") <NEW_LINE> <DEDENT> return self.get(request, *args, **kwargs) | Adds the submission to the user's grading list. | 62599042a79ad1619776b35c
class ReferencePath(nx.DiGraph): <NEW_LINE> <INDENT> def __init__(self, root): <NEW_LINE> <INDENT> nx.DiGraph.__init__(self) <NEW_LINE> self.root = root <NEW_LINE> self.add_node(root) <NEW_LINE> <DEDENT> def __print_node(self, node): <NEW_LINE> <INDENT> string = '{}'.format(node) <NEW_LINE> suc = self.successors(node) <NEW_LINE> if len(suc) > 0: <NEW_LINE> <INDENT> string += '[{}]->'.format(self.get_edge_data(node, suc[0])['label']) <NEW_LINE> return string + self.__print_node(suc[0]) <NEW_LINE> <DEDENT> return string <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__print_node(self.root) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__str__() <NEW_LINE> <DEDENT> def __equal_shape(self, node1, node2): <NEW_LINE> <INDENT> if node1.size != node2.size: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for k in node1.pointers.keys(): <NEW_LINE> <INDENT> if node2.pointers.get(k, None) == None: <NEW_LINE> <INDENT> word = struct.unpack('<I', node2.data[k:k + _WORD_SZ_])[0] <NEW_LINE> if word != 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def normalize(self): <NEW_LINE> <INDENT> sig = nx.DiGraph() <NEW_LINE> out_node = self.root <NEW_LINE> in_node = self.successors(out_node) <NEW_LINE> out_name = '{}({})'.format(out_node.name, out_node.size) <NEW_LINE> sig.add_node(out_name) <NEW_LINE> gen = services.get_all_the_letters() <NEW_LINE> while len(in_node) > 0: <NEW_LINE> <INDENT> if not isinstance(in_node[0], DataStructure): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> offset = self.get_edge_data(out_node, in_node[0])['label'] <NEW_LINE> if self.__equal_shape(out_node, in_node[0]): <NEW_LINE> <INDENT> sig.add_edge(out_name, out_name, label=offset) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> in_name = '{}({})'.format(next(gen), in_node[0].size) <NEW_LINE> sig.add_edge(out_name, in_name, label=offset) <NEW_LINE> out_name = in_name <NEW_LINE> <DEDENT> out_node = in_node[0] <NEW_LINE> in_node = self.successors(out_node) <NEW_LINE> <DEDENT> return sig | Represents a reference path in memory. | 6259904221a7993f00c67245 |
class TestBasics(SummerTestCase): <NEW_LINE> <INDENT> def test_runthru(self): <NEW_LINE> <INDENT> self.s.readline("hello 10 nums or 20 not.\n") <NEW_LINE> self.s.readline("again 2 nums but not -54.345e") <NEW_LINE> <DEDENT> def test_basic_summing(self): <NEW_LINE> <INDENT> self.ensure_input("<@sum = 0>") <NEW_LINE> self.s.readline("i've got 99 problems") <NEW_LINE> self.s.readline("but summer ain't 1 of them.") <NEW_LINE> self.ensure_input( "<@sum = 100>--") <NEW_LINE> self.s.readline("reduce -20") <NEW_LINE> self.ensure_input( "<@sum = 80>--") <NEW_LINE> <DEDENT> def test_accumulation(self): <NEW_LINE> <INDENT> self.s.readline("-- 120") <NEW_LINE> self.s.readline("0.5 --") <NEW_LINE> self.s.readline(";-- 120") <NEW_LINE> self.s.readline(";160") <NEW_LINE> self.s.readline("150;") <NEW_LINE> self.ensure_input("<@sum = 150>") <NEW_LINE> <DEDENT> def test_arithmetics(self): <NEW_LINE> <INDENT> self.ensure_input("<10/5 = 2>") <NEW_LINE> self.ensure_input("<123+7 = 130>") <NEW_LINE> self.ensure_input("<9*4 = 36>") <NEW_LINE> self.ensure_input("<(100/3)*9 = 300>") <NEW_LINE> self.ensure_input("<1+1+1+1+1+1 = 6>") <NEW_LINE> <DEDENT> def test_expr_updating(self): <NEW_LINE> <INDENT> self.assert_input("<12/3 =>", "<12/3 = 4>") <NEW_LINE> self.assert_input("<12/3 = >", "<12/3 = 4>") <NEW_LINE> self.assert_input("<12/3 = aybabtu>", "<12/3 = 4>") <NEW_LINE> self.assert_input("<12/3 = 5>", "<12/3 = 4>") <NEW_LINE> self.assert_input("<1004-4+10 = 4443>", "<1004-4+10 = 1010>") | test the basic functionality: evaluating calculations and summings. | 625990426fece00bbacccc8f |
class DataSourceBitStampFile(DataSourceBitStampBase, DataSourceCSVReader): <NEW_LINE> <INDENT> def __init__(self, broker, filename='assets/bitstampUSD.csv.gz', start=None, stop=None): <NEW_LINE> <INDENT> super(DataSourceBitStampFile,self).__init__(broker) <NEW_LINE> self._ts_start = dt2epock(date_parse(start)) if start is not None else None <NEW_LINE> self._ts_stop = dt2epock(date_parse(stop)) if stop is not None else None <NEW_LINE> self._csv = self._openCsv(filename) <NEW_LINE> self._fake_trade_id = 0 <NEW_LINE> self._last_exec_ts = None <NEW_LINE> self._fake_msec = None <NEW_LINE> if self._ts_start is None: <NEW_LINE> <INDENT> row = next(self._csv) <NEW_LINE> self._ts_start = self._makeTradeFromRow(row).ts_update <NEW_LINE> self._fp.seek(0) <NEW_LINE> <DEDENT> <DEDENT> def getStartTs(self): <NEW_LINE> <INDENT> return self._ts_start <NEW_LINE> <DEDENT> def _makeTradeFromRow(self, row): <NEW_LINE> <INDENT> ts_exec = int(row[0]) <NEW_LINE> if self._last_exec_ts != ts_exec: <NEW_LINE> <INDENT> self._fake_msec = 0 <NEW_LINE> self._last_exec_ts = ts_exec <NEW_LINE> <DEDENT> self._fake_trade_id += 1 <NEW_LINE> self._fake_msec += 0.0000001 <NEW_LINE> return Trade( exchange_id = self.EXCHANGE_ID, price = round_dec_down_to_n_places(D(row[1]), 2), volume = round_dec_down_to_n_places(D(row[2]), 8), ts_exec = ts_exec, ts_update = ts_exec + self._fake_msec, trade_id = self._fake_trade_id ) <NEW_LINE> <DEDENT> def fetch(self, ts_start=None, ts_end=None): <NEW_LINE> <INDENT> self._fp.seek(0) <NEW_LINE> for row in self._csv: <NEW_LINE> <INDENT> ts_exec = int(row[0]) <NEW_LINE> if self._ts_start and ts_exec < self._ts_start: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if self._ts_stop and ts_exec > self._ts_stop: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> yield self._makeTradeFromRow(row) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def run(self, limit=None): <NEW_LINE> <INDENT> n_published = 0 <NEW_LINE> for row in self._csv: <NEW_LINE> <INDENT> ts_exec = int(row[0]) <NEW_LINE> if ts_exec < self._ts_start: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if self._ts_stop is not None and ts_exec > self._ts_stop: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> t = self._makeTradeFromRow(row) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.broker.publish(self.channel, t) <NEW_LINE> n_published += 1 <NEW_LINE> if limit is not None and n_published >= limit: <NEW_LINE> <INDENT> break | Ingest data from CSV files of BitStamp trades. These come from
http://api.bitcoincharts.com/v1/csv/ | 62599042287bf620b6272ec4 |
class TestDataset(unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> dirs = [join(basedir, d) for d in dataset_dirs] <NEW_LINE> dir_tbts = filter(lambda x: exists(x[0]) and exists(join(x[0], x[1])), zip(dirs, tbt_files)) <NEW_LINE> cls.datasets = [CSVDataset(d, tbt) for d, tbt in dir_tbts] <NEW_LINE> cls.datasets += [CSVDataset('tests/test_dataset', 'disengagement_results.csv')] <NEW_LINE> <DEDENT> def test_dataset(self): <NEW_LINE> <INDENT> for ds in self.datasets: <NEW_LINE> <INDENT> print(ds.directory) <NEW_LINE> print(ds.tbt.data.dtype) <NEW_LINE> print(ds.list_gazedatas()) <NEW_LINE> self.assertEqual(len(ds.list_gazedatas()), 1) <NEW_LINE> <DEDENT> <DEDENT> def test_iterate_trials(self): <NEW_LINE> <INDENT> from icllib.dataset import TrialIterator <NEW_LINE> for ds in self.datasets: <NEW_LINE> <INDENT> ti = TrialIterator(ds) <NEW_LINE> for t in ti: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @unittest.skip("not really a test") <NEW_LINE> def test_common_names(self): <NEW_LINE> <INDENT> list_of_names = [set(ds.tbt.names) for ds in self.datasets] <NEW_LINE> common_names = set.intersection(*list_of_names) <NEW_LINE> print(common_names) <NEW_LINE> for ds in self.datasets: <NEW_LINE> <INDENT> print("Unique names in %s: %s" % (ds.directory, set(ds.tbt.names) - common_names)) <NEW_LINE> print("Stimulus names :%s" % np.unique(ds.tbt.data['stimulus'])) <NEW_LINE> print("Userdefined names :%s" % np.unique(ds.tbt.data['stimulus'])) <NEW_LINE> <DEDENT> list_of_gznames = [set(ds.get_gazedata(ds.list_gazedatas()[0]).data.dtype.names) for ds in self.datasets] <NEW_LINE> common_gznames = set.intersection(*list_of_gznames) <NEW_LINE> print(common_gznames) <NEW_LINE> for gzn, datasetname in zip(list_of_gznames, dataset_dirs): <NEW_LINE> <INDENT> print("Unique gazedata header names in %s: %s" % (datasetname, gzn - common_gznames)) | Unit test for Dataset module. | 62599042d10714528d69effb |
class iterqueue(object): <NEW_LINE> <INDENT> def __init__(self, queue, maxcount=None): <NEW_LINE> <INDENT> self.queue = queue <NEW_LINE> self.maxcount = maxcount <NEW_LINE> self.count = 0 <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> while (self.maxcount is None) or (self.count < self.maxcount): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> yield self.queue.get_nowait() <NEW_LINE> self.count += 1 <NEW_LINE> <DEDENT> except Empty: <NEW_LINE> <INDENT> break | Iterate a queue until a maximum number of messages are read or the queue is empty.
It exposes an attribute "count" with the number of messages read.
>>> from Queue import Queue
>>> q = Queue()
>>> for x in xrange(10):
... q.put(x)
>>> qiter = iterqueue(q)
>>> list(qiter)
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> qiter.count
10
>>> for x in xrange(10):
... q.put(x)
>>> qiter = iterqueue(q, maxcount=4)
>>> list(qiter)
[0, 1, 2, 3]
>>> qiter.count
4 | 6259904266673b3332c316d7 |
class FileDTO(Model): <NEW_LINE> <INDENT> _validation = { 'file_name': {'required': True, 'max_length': 200, 'min_length': 1}, 'file_uri': {'required': True}, } <NEW_LINE> _attribute_map = { 'file_name': {'key': 'fileName', 'type': 'str'}, 'file_uri': {'key': 'fileUri', 'type': 'str'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(FileDTO, self).__init__(**kwargs) <NEW_LINE> self.file_name = kwargs.get('file_name', None) <NEW_LINE> self.file_uri = kwargs.get('file_uri', None) | DTO to hold details of uploaded files.
All required parameters must be populated in order to send to Azure.
:param file_name: Required. File name. Supported file types are ".tsv",
".pdf", ".txt", ".docx", ".xlsx".
:type file_name: str
:param file_uri: Required. Public URI of the file.
:type file_uri: str | 6259904215baa72349463270 |
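A minimal instantiation sketch for the FileDTO row, with placeholder values (the msrest-style model takes keyword arguments):
>>> dto = FileDTO(file_name='faq.tsv', file_uri='https://example.com/faq.tsv')
>>> dto.file_name
'faq.tsv'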