code (stringlengths 4-4.48k) | docstring (stringlengths 1-6.45k) | _id (stringlengths 24) |
---|---|---|
class Monedero(models.Model): <NEW_LINE> <INDENT> nombre = models.CharField(max_length=50) <NEW_LINE> usuario = models.ForeignKey(User, on_delete=models.CASCADE) <NEW_LINE> moneda = models.ForeignKey(Moneda, on_delete=models.CASCADE) <NEW_LINE> monto = models.DecimalField(max_digits=11, decimal_places=2, default=0) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Monedero' <NEW_LINE> verbose_name_plural = 'Monederos' <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.nombre | Model definition for Monedero. | 6259905532920d7e50bc758b |
class observe(object): <NEW_LINE> <INDENT> def __init__(self, *evtnames, **kwargs): <NEW_LINE> <INDENT> self.evtnames = evtnames <NEW_LINE> if "extname" in kwargs: <NEW_LINE> <INDENT> self.extname = kwargs["extname"] <NEW_LINE> <DEDENT> <DEDENT> def __guess_extension_name(self, modname): <NEW_LINE> <INDENT> if modname.startswith('modoboa.extensions'): <NEW_LINE> <INDENT> m = re.match(r'modoboa\.extensions\.([^\.]+)', modname) <NEW_LINE> if m: <NEW_LINE> <INDENT> return m.group(1) <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def __call__(self, f): <NEW_LINE> <INDENT> modname = inspect.getmodule(inspect.stack()[1][0]).__name__ <NEW_LINE> extname = self.extname if hasattr(self, "extname") else self.__guess_extension_name(modname) <NEW_LINE> @wraps(f) <NEW_LINE> def wrapped_f(*args, **kwargs): <NEW_LINE> <INDENT> if extname: <NEW_LINE> <INDENT> from modoboa.core.models import Extension <NEW_LINE> from modoboa.core.extensions import exts_pool <NEW_LINE> try: <NEW_LINE> <INDENT> ext = Extension.objects.get(name=extname) <NEW_LINE> <DEDENT> except Extension.DoesNotExist: <NEW_LINE> <INDENT> extdef = exts_pool.get_extension(extname) <NEW_LINE> if not extdef.always_active: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if not ext.enabled: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif not modname in settings.MODOBOA_APPS: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> return f(*args, **kwargs) <NEW_LINE> <DEDENT> for evt in self.evtnames: <NEW_LINE> <INDENT> register(evt, wrapped_f) <NEW_LINE> <DEDENT> return wrapped_f | Event observing decorator
Automatically register the decorated function to observe the given
event. If the decorated function is located in an extension, we
check before each call if the extension is enabled or not. If
that's not the case, the callback is not called.
.. note::
That's not a really optimized behaviour but I haven't found
another solution to achieve that feature.
:param evtname: the event's name | 62599055b57a9660fecd2fb6 |
class CreateCampaignRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.License = None <NEW_LINE> self.SendTime = None <NEW_LINE> self.Name = None <NEW_LINE> self.Strategies = None <NEW_LINE> self.TemplateId = None <NEW_LINE> self.CrowdID = None <NEW_LINE> self.SmsType = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.License = params.get("License") <NEW_LINE> self.SendTime = params.get("SendTime") <NEW_LINE> self.Name = params.get("Name") <NEW_LINE> if params.get("Strategies") is not None: <NEW_LINE> <INDENT> self.Strategies = [] <NEW_LINE> for item in params.get("Strategies"): <NEW_LINE> <INDENT> obj = PaasStrategy() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.Strategies.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.TemplateId = params.get("TemplateId") <NEW_LINE> self.CrowdID = params.get("CrowdID") <NEW_LINE> self.SmsType = params.get("SmsType") | CreateCampaign request parameter structure
| 625990557cff6e4e811b6f7c |
class Bot(ABC): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> async def on_turn(self, context: TurnContext): <NEW_LINE> <INDENT> raise NotImplementedError() | Represents a bot that can operate on incoming activities. | 62599055b830903b9686ef1b |
class SMPEntry: <NEW_LINE> <INDENT> def __init__(self, documentidentifier, peppolmember_id): <NEW_LINE> <INDENT> self.id = None <NEW_LINE> self.documentidentifier = documentidentifier <NEW_LINE> self.certificate_not_before = None <NEW_LINE> self.certificate_not_after = None <NEW_LINE> self.endpointurl = None <NEW_LINE> self.peppolmember_id = peppolmember_id <NEW_LINE> self.firstseen = datetime.now() <NEW_LINE> self.lastseen = None <NEW_LINE> <DEDENT> def create(self): <NEW_LINE> <INDENT> sql = 'insert into smpentries(documentidentifier, certificate_not_before, certificate_not_after, ' <NEW_LINE> sql += 'endpointurl, peppolmember_id, first_seen, last_seen) values (?,?,?,?,?,?,?)' <NEW_LINE> smpchecker.query_db(sql, [self.documentidentifier, self.certificate_not_before, self.certificate_not_after, self.endpointurl, self.peppolmember_id, self.firstseen, self.lastseen]) <NEW_LINE> self.load(self.documentidentifier, self.peppolmember_id) <NEW_LINE> <DEDENT> def reload(self): <NEW_LINE> <INDENT> self.load(self.documentidentifier, self.peppolmember_id) <NEW_LINE> <DEDENT> def load(self, documentidentifier, peppolmember_id): <NEW_LINE> <INDENT> sql = 'select id, documentidentifier, certificate_not_before, certificate_not_after,' <NEW_LINE> sql += 'endpointurl, peppolmember_id, first_seen, last_seen from smpentries where documentidentifier=? ' <NEW_LINE> sql += 'and peppolmember_id=?' <NEW_LINE> rows = smpchecker.query_db(sql, [documentidentifier, peppolmember_id]) <NEW_LINE> if len(rows) > 0: <NEW_LINE> <INDENT> row = rows.pop(0) <NEW_LINE> self.id = row[0] <NEW_LINE> self.documentidentifier = row[1] <NEW_LINE> self.certificate_not_before = datetime.strptime(row[2],'%Y-%m-%d %H:%M:%S.%f') <NEW_LINE> self.certificate_not_after = datetime.strptime(row[3],'%Y-%m-%d %H:%M:%S.%f') <NEW_LINE> self.endpointurl = row[4] <NEW_LINE> self.peppolmember_id = row[5] <NEW_LINE> if row[6] is not None: <NEW_LINE> <INDENT> self.firstseen = datetime.strptime(row[6],'%Y-%m-%d %H:%M:%S.%f') <NEW_LINE> <DEDENT> if row[7] is not None: <NEW_LINE> <INDENT> self.lastseen = datetime.strptime(row[7],'%Y-%m-%d %H:%M:%S.%f') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def exists(self): <NEW_LINE> <INDENT> for row in smpchecker.query_db('select id from smpentries where peppolmember_id=? and documentidentifier=?', [self.peppolmember_id, self.documentidentifier]): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> time = datetime.now() <NEW_LINE> smpchecker.query_db('update smpentries set last_seen=? where peppolmember_id=? and documentidentifier=?', [time, self.peppolmember_id, self.documentidentifier]) <NEW_LINE> self.lastseen = time | Object representing SMP entry | 62599055379a373c97d9a55f |
class StaffRuleCreateView( LoginRequiredMixin, PermissionRequiredMixin, CreateView ): <NEW_LINE> <INDENT> model = StaffRule <NEW_LINE> form_class = StaffRuleCreateForm <NEW_LINE> template_name = "staffrule_create.html" <NEW_LINE> login_url = "login" <NEW_LINE> permission_required = "rosters.change_roster" | Staff Rule Create View. | 625990554a966d76dd5f042c |
class ArrayManyToManyRel(ForeignObjectRel): <NEW_LINE> <INDENT> def __init__(self, field, to, field_name, related_name=None, related_query_name=None, limit_choices_to=None, symmetrical=True): <NEW_LINE> <INDENT> super(ArrayManyToManyRel, self).__init__( field, to, related_name=related_name, related_query_name=related_query_name, limit_choices_to=limit_choices_to, ) <NEW_LINE> self.model_name = to <NEW_LINE> self.field_name = field_name <NEW_LINE> self.symmetrical = symmetrical <NEW_LINE> <DEDENT> def get_join_on(self, parent_alias, lhs_col, table_alias, rhs_col): <NEW_LINE> <INDENT> return '%s.%s = ANY(%s.%s)' % ( parent_alias, lhs_col, table_alias, rhs_col, ) <NEW_LINE> <DEDENT> def set_field_name(self): <NEW_LINE> <INDENT> self.field_name = self.field_name or self.model._meta.pk.name <NEW_LINE> <DEDENT> def get_related_field(self): <NEW_LINE> <INDENT> field = self.model._meta.get_field(self.field_name) <NEW_LINE> if not field.concrete: <NEW_LINE> <INDENT> raise exceptions.FieldDoesNotExist("No related field named '%s'" % self.field_name) <NEW_LINE> <DEDENT> return field <NEW_LINE> <DEDENT> def get_lookup(self, lookup_name): <NEW_LINE> <INDENT> if lookup_name == 'in': <NEW_LINE> <INDENT> return RelatedIn <NEW_LINE> <DEDENT> elif lookup_name == 'exact': <NEW_LINE> <INDENT> return RelatedExact <NEW_LINE> <DEDENT> elif lookup_name == 'gt': <NEW_LINE> <INDENT> return RelatedGreaterThan <NEW_LINE> <DEDENT> elif lookup_name == 'gte': <NEW_LINE> <INDENT> return RelatedGreaterThanOrEqual <NEW_LINE> <DEDENT> elif lookup_name == 'lt': <NEW_LINE> <INDENT> return RelatedLessThan <NEW_LINE> <DEDENT> elif lookup_name == 'lte': <NEW_LINE> <INDENT> return RelatedLessThanOrEqual <NEW_LINE> <DEDENT> elif lookup_name == 'isnull': <NEW_LINE> <INDENT> return RelatedIsNull <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError('Related Field got invalid lookup: %s' % lookup_name) | Used by ManyToManyFields to store information about the relation.
``_meta.get_fields()`` returns this class to provide access to the field
flags for the reverse relation. | 62599055dc8b845886d54b00 |
class Command(BaseCommand): <NEW_LINE> <INDENT> aliases = ['exit'] <NEW_LINE> interactive_only = True <NEW_LINE> def handle(self, options): <NEW_LINE> <INDENT> component.get('ConsoleUI').quit() | Exit the client | 62599055baa26c4b54d507df |
class DependencyExtractorStub(object): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.Match = channel.unary_unary( '/cloud.deps.api.v1alpha.extractor.DependencyExtractor/Match', request_serializer=depscloud__api_dot_v1alpha_dot_extractor_dot_extractor__pb2.MatchRequest.SerializeToString, response_deserializer=depscloud__api_dot_v1alpha_dot_extractor_dot_extractor__pb2.MatchResponse.FromString, ) <NEW_LINE> self.Extract = channel.unary_unary( '/cloud.deps.api.v1alpha.extractor.DependencyExtractor/Extract', request_serializer=depscloud__api_dot_v1alpha_dot_extractor_dot_extractor__pb2.ExtractRequest.SerializeToString, response_deserializer=depscloud__api_dot_v1alpha_dot_extractor_dot_extractor__pb2.ExtractResponse.FromString, ) | Missing associated documentation comment in .proto file. | 625990553eb6a72ae038bb9c |
class Plugin(BasePlugin): <NEW_LINE> <INDENT> def __call__(self): <NEW_LINE> <INDENT> del self.ctx.tod_vx <NEW_LINE> del self.ctx.tod_vy <NEW_LINE> del self.ctx.frequencies <NEW_LINE> del self.ctx.strategy_coords <NEW_LINE> del self.ctx.beams <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Clean up" | Cleans up the context to avoid a memory leak | 6259905599cbb53fe683241d |
class Client(object, metaclass=ClientRegistry): <NEW_LINE> <INDENT> access_token_key = 'access_token' <NEW_LINE> shared_key = 'oauth_verifier' <NEW_LINE> access_token_url = None <NEW_LINE> authorize_url = None <NEW_LINE> base_url = None <NEW_LINE> name = None <NEW_LINE> user_info_url = None <NEW_LINE> def __init__(self, base_url=None, authorize_url=None, access_token_key=None, access_token_url=None, logger=None, request_params={}): <NEW_LINE> <INDENT> self.base_url = base_url or self.base_url <NEW_LINE> self.authorize_url = authorize_url or self.authorize_url <NEW_LINE> self.access_token_key = access_token_key or self.access_token_key <NEW_LINE> self.access_token_url = access_token_url or self.access_token_url <NEW_LINE> self.logger = logger or logging.getLogger('OAuth: %s' % self.name) <NEW_LINE> self.request_params = request_params <NEW_LINE> <DEDENT> def _get_url(self, url): <NEW_LINE> <INDENT> if self.base_url and not url.startswith(('http://', 'https://')): <NEW_LINE> <INDENT> return urljoin(self.base_url, url) <NEW_LINE> <DEDENT> return url <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s %s" % (self.name.title(), self.base_url) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<%s>" % self <NEW_LINE> <DEDENT> def request(self, method, url, params=None, headers=None, loop=None, **aio_kwargs): <NEW_LINE> <INDENT> raise NotImplementedError('Shouldnt be called.') <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def user_info(self, loop=None, **kwargs): <NEW_LINE> <INDENT> if not self.user_info_url: <NEW_LINE> <INDENT> raise NotImplementedError('The provider doesnt support user_info method.') <NEW_LINE> <DEDENT> response = yield from self.request('GET', self.user_info_url, loop=loop, **kwargs) <NEW_LINE> if response.status / 100 > 2: <NEW_LINE> <INDENT> raise web.HTTPBadRequest(reason='Failed to obtain User information. ' 'HTTP status code: %s' % response.status) <NEW_LINE> <DEDENT> data = (yield from response.json()) <NEW_LINE> user = User(**dict(self.user_parse(data))) <NEW_LINE> return user, data <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def user_parse(data): <NEW_LINE> <INDENT> yield 'id', None | Base abstract OAuth Client class. | 625990558a43f66fc4bf36c9 |
class BulletClient(object): <NEW_LINE> <INDENT> def __init__(self, connection_mode=None): <NEW_LINE> <INDENT> self._shapes = {} <NEW_LINE> if connection_mode is None: <NEW_LINE> <INDENT> self._client = pybullet.connect(pybullet.SHARED_MEMORY) <NEW_LINE> if self._client >= 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> connection_mode = pybullet.DIRECT <NEW_LINE> <DEDENT> <DEDENT> self._client = pybullet.connect(connection_mode) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> if self._client>=0: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> pybullet.disconnect(physicsClientId=self._client) <NEW_LINE> self._client = -1 <NEW_LINE> <DEDENT> except pybullet.error: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> attribute = getattr(pybullet, name) <NEW_LINE> if inspect.isbuiltin(attribute): <NEW_LINE> <INDENT> attribute = functools.partial(attribute, physicsClientId=self._client) <NEW_LINE> <DEDENT> if name=="disconnect": <NEW_LINE> <INDENT> self._client = -1 <NEW_LINE> <DEDENT> return attribute | A wrapper for pybullet to manage different clients. | 62599055d53ae8145f91999e |
class Factory: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def to_quote_view(quotes: list([Quote])): <NEW_LINE> <INDENT> return list(map(lambda quote: QuoteView.from_quote(quote), quotes)) | This Object will give us the list of quotes from the Object QuoteView | 6259905594891a1f408ba194 |
class AnacondaSetPythonBuilder(object): <NEW_LINE> <INDENT> def update_interpreter_build_system(self, cmd): <NEW_LINE> <INDENT> if get_settings( active_view(), 'auto_python_builder_enabled', True) is False: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> project = self._get_project() <NEW_LINE> if project.get('build_systems', False) is not False: <NEW_LINE> <INDENT> if type(project['build_systems']) is list: <NEW_LINE> <INDENT> done = False <NEW_LINE> current_list = project['build_systems'] <NEW_LINE> for i in range(len(current_list)): <NEW_LINE> <INDENT> build = current_list[i] <NEW_LINE> if build['name'] == 'Anaconda Python Builder': <NEW_LINE> <INDENT> current_list[i] = self._parse_tpl(cmd) <NEW_LINE> done = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if not done: <NEW_LINE> <INDENT> project['build_systems'].append(self._parse_tpl(cmd)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> sublime.message_dialog( 'Your project build_systems is messed up' ) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> project.update({ 'build_systems': [self._parse_tpl(cmd)] }) <NEW_LINE> <DEDENT> self._save_project(project) <NEW_LINE> <DEDENT> def _get_project(self): <NEW_LINE> <INDENT> return sublime.active_window().project_data() <NEW_LINE> <DEDENT> def _parse_tpl(self, cmd): <NEW_LINE> <INDENT> template_file = os.path.join( os.path.dirname(__file__), '../../', 'templates', 'python_build.tpl' ) <NEW_LINE> with open(template_file, 'r', encoding='utf8') as tplfile: <NEW_LINE> <INDENT> template = Template(tplfile.read()) <NEW_LINE> <DEDENT> cmd = cmd.replace('\\', '\\\\') <NEW_LINE> return sublime.decode_value( template.safe_substitute({'python_interpreter': cmd}) ) <NEW_LINE> <DEDENT> def _save_project(self, project_data): <NEW_LINE> <INDENT> sublime.active_window().set_project_data(project_data) | Sets or modifies the builder of the current project
| 6259905521a7993f00c674aa |
class GearmanTaskBackend(TaskBackend): <NEW_LINE> <INDENT> TASK_BATCH_SIZE = settings.BATCH_SIZE <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.client = MCPGearmanClient([settings.GEARMAN_SERVER]) <NEW_LINE> self.current_task_batches = {} <NEW_LINE> self.pending_gearman_jobs = {} <NEW_LINE> <DEDENT> def submit_task(self, job, task): <NEW_LINE> <INDENT> current_task_batch = self._get_current_task_batch(job.uuid) <NEW_LINE> if len(current_task_batch) == 0: <NEW_LINE> <INDENT> metrics.gearman_pending_jobs_gauge.inc() <NEW_LINE> <DEDENT> current_task_batch.add_task(task) <NEW_LINE> if (len(current_task_batch) % self.TASK_BATCH_SIZE) == 0: <NEW_LINE> <INDENT> self._submit_batch(job, current_task_batch) <NEW_LINE> <DEDENT> <DEDENT> def wait_for_results(self, job): <NEW_LINE> <INDENT> current_task_batch = self._get_current_task_batch(job.uuid) <NEW_LINE> if len(current_task_batch) > 0: <NEW_LINE> <INDENT> self._submit_batch(job, current_task_batch) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> pending_batches = self.pending_gearman_jobs[job.uuid] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> completed_request_count = 0 <NEW_LINE> gearman_requests = [request.pending for request in pending_batches] <NEW_LINE> self.client.wait_until_jobs_accepted(gearman_requests) <NEW_LINE> while len(pending_batches) > completed_request_count: <NEW_LINE> <INDENT> gearman_requests = self.client.wait_until_any_job_completed( gearman_requests ) <NEW_LINE> for batch in pending_batches: <NEW_LINE> <INDENT> if batch.collected: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if batch.complete or batch.failed: <NEW_LINE> <INDENT> for task in batch.update_task_results(): <NEW_LINE> <INDENT> yield task <NEW_LINE> <DEDENT> batch.collected = True <NEW_LINE> completed_request_count += 1 <NEW_LINE> metrics.gearman_active_jobs_gauge.dec() <NEW_LINE> <DEDENT> <DEDENT> gearman_requests = [ request for request in gearman_requests if request.state not in (JOB_COMPLETE, JOB_FAILED) ] <NEW_LINE> <DEDENT> del self.pending_gearman_jobs[job.uuid] <NEW_LINE> <DEDENT> def _get_current_task_batch(self, job_uuid): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.current_task_batches[job_uuid] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.current_task_batches[job_uuid] = GearmanTaskBatch() <NEW_LINE> return self.current_task_batches[job_uuid] <NEW_LINE> <DEDENT> <DEDENT> def _submit_batch(self, job, task_batch): <NEW_LINE> <INDENT> if len(task_batch) == 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> task_batch.submit(self.client, job) <NEW_LINE> metrics.gearman_active_jobs_gauge.inc() <NEW_LINE> metrics.gearman_pending_jobs_gauge.dec() <NEW_LINE> if job.uuid not in self.pending_gearman_jobs: <NEW_LINE> <INDENT> self.pending_gearman_jobs[job.uuid] = [] <NEW_LINE> <DEDENT> self.pending_gearman_jobs[job.uuid].append(task_batch) <NEW_LINE> if self.current_task_batches[job.uuid] is task_batch: <NEW_LINE> <INDENT> del self.current_task_batches[job.uuid] | Submits tasks to MCPClient via Gearman.
Tasks are batched into BATCH_SIZE groups (default 128), pickled and sent to
MCPClient. This adds some complexity but saves a lot of overhead. | 62599055f7d966606f749356 |
class OIDCAuthenticationRequestView(View): <NEW_LINE> <INDENT> http_method_names = ['get'] <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(OIDCAuthenticationRequestView, self).__init__(*args, **kwargs) <NEW_LINE> self.OIDC_OP_AUTH_ENDPOINT = import_from_settings('OIDC_OP_AUTHORIZATION_ENDPOINT') <NEW_LINE> self.OIDC_RP_CLIENT_ID = import_from_settings('OIDC_RP_CLIENT_ID') <NEW_LINE> <DEDENT> def get(self, request): <NEW_LINE> <INDENT> state = get_random_string(import_from_settings('OIDC_STATE_SIZE', 32)) <NEW_LINE> redirect_field_name = import_from_settings('OIDC_REDIRECT_FIELD_NAME', 'next') <NEW_LINE> reverse_url = import_from_settings('OIDC_AUTHENTICATION_CALLBACK_URL', 'oidc_authentication_callback') <NEW_LINE> params = { 'response_type': 'code', 'scope': import_from_settings('OIDC_RP_SCOPES', 'openid email'), 'client_id': self.OIDC_RP_CLIENT_ID, 'redirect_uri': absolutify( request, reverse(reverse_url) ), 'state': state, } <NEW_LINE> params.update(self.get_extra_params(request)) <NEW_LINE> if import_from_settings('OIDC_USE_NONCE', True): <NEW_LINE> <INDENT> nonce = get_random_string(import_from_settings('OIDC_NONCE_SIZE', 32)) <NEW_LINE> params.update({ 'nonce': nonce }) <NEW_LINE> request.session['oidc_nonce'] = nonce <NEW_LINE> <DEDENT> request.session['oidc_state'] = state <NEW_LINE> request.session['oidc_login_next'] = get_next_url(request, redirect_field_name) <NEW_LINE> query = urlencode(params) <NEW_LINE> redirect_url = '{url}?{query}'.format(url=self.OIDC_OP_AUTH_ENDPOINT, query=query) <NEW_LINE> return HttpResponseRedirect(redirect_url) <NEW_LINE> <DEDENT> def get_extra_params(self, request): <NEW_LINE> <INDENT> return import_from_settings('OIDC_AUTH_REQUEST_EXTRA_PARAMS', {}) | OIDC client authentication HTTP endpoint | 625990552ae34c7f260ac623 |
class Room(CommandHandler): <NEW_LINE> <INDENT> def __init__(self, server): <NEW_LINE> <INDENT> self.server = server <NEW_LINE> self.sessions = [] <NEW_LINE> <DEDENT> def add(self, session): <NEW_LINE> <INDENT> self.sessions.append(session) <NEW_LINE> <DEDENT> def remove(self, session): <NEW_LINE> <INDENT> self.sessions.remove(session) <NEW_LINE> <DEDENT> def broadcast(self, line): <NEW_LINE> <INDENT> for session in self.sessions: <NEW_LINE> <INDENT> session.push(line) <NEW_LINE> <DEDENT> <DEDENT> def do_logout(self, session, line): <NEW_LINE> <INDENT> raise EndSession | A generic environment that contains one or more users (sessions).
It takes care of the basic command handling and broadcasting. | 62599055435de62698e9d33f |
class DeconzDevice(DeconzBase, Entity): <NEW_LINE> <INDENT> def __init__(self, device, gateway): <NEW_LINE> <INDENT> super().__init__(device, gateway) <NEW_LINE> self.unsub_dispatcher = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def entity_registry_enabled_default(self): <NEW_LINE> <INDENT> if not self.gateway.option_allow_clip_sensor and self._device.type.startswith( "CLIP" ): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if ( not self.gateway.option_allow_deconz_groups and self._device.type == "LightGroup" ): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> async def async_added_to_hass(self): <NEW_LINE> <INDENT> self._device.register_async_callback(self.async_update_callback) <NEW_LINE> self.gateway.deconz_ids[self.entity_id] = self._device.deconz_id <NEW_LINE> self.listeners.append( async_dispatcher_connect( self.hass, self.gateway.signal_reachable, self.async_update_callback ) ) <NEW_LINE> <DEDENT> async def async_will_remove_from_hass(self) -> None: <NEW_LINE> <INDENT> self._device.remove_callback(self.async_update_callback) <NEW_LINE> del self.gateway.deconz_ids[self.entity_id] <NEW_LINE> for unsub_dispatcher in self.listeners: <NEW_LINE> <INDENT> unsub_dispatcher() <NEW_LINE> <DEDENT> <DEDENT> @callback <NEW_LINE> def async_update_callback(self, force_update=False): <NEW_LINE> <INDENT> self.async_schedule_update_ha_state() <NEW_LINE> <DEDENT> @property <NEW_LINE> def available(self): <NEW_LINE> <INDENT> return self.gateway.available and self._device.reachable <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._device.name <NEW_LINE> <DEDENT> @property <NEW_LINE> def should_poll(self): <NEW_LINE> <INDENT> return False | Representation of a deCONZ device. | 6259905516aa5153ce401a21 |
class _multimap: <NEW_LINE> <INDENT> def __init__(self, primary, secondary): <NEW_LINE> <INDENT> self._primary = primary <NEW_LINE> self._secondary = secondary <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._primary[key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return self._secondary[key] | Helper class for combining multiple mappings.
Used by .{safe_,}substitute() to combine the mapping and keyword
arguments. | 62599055097d151d1a2c25a8 |
class FolderWizardExtension(WizardExtension): <NEW_LINE> <INDENT> id = "puddle.resource.folder_wizard" <NEW_LINE> name = "Folder" <NEW_LINE> image = ImageResource("new") <NEW_LINE> wizard_class = "puddle.resource.wizard.folder_wizard:" "FolderWizard" <NEW_LINE> description = "Create a new folder resource" | Contributes a new folder wizard.
| 62599055cad5886f8bdc5b1f |
class Formatter(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.formatters = {} <NEW_LINE> <DEDENT> def add(self, formatter): <NEW_LINE> <INDENT> self.formatters[formatter.get_name()] = formatter <NEW_LINE> <DEDENT> def format(self, name, content): <NEW_LINE> <INDENT> return self.formatters[name].format(content) <NEW_LINE> <DEDENT> def get_names(self): <NEW_LINE> <INDENT> return self.formatters.keys() | Formatter class | 6259905524f1403a9268636d |
class Modeler: <NEW_LINE> <INDENT> def __init__(self, P, num_outcomes, dnn_layers, dnn_poly_degree, drop_prob, optimizer): <NEW_LINE> <INDENT> with tf.name_scope("Modeler"): <NEW_LINE> <INDENT> self._output, self._weights, self._biases = dnn_regressor( P, num_outcomes, dnn_poly_degree, dnn_layers, drop_prob) <NEW_LINE> self._optimizer = optimizer <NEW_LINE> for w in self._weights: <NEW_LINE> <INDENT> tf.add_to_collection("ModelerModelVariables", w) <NEW_LINE> <DEDENT> for b in self._biases: <NEW_LINE> <INDENT> tf.add_to_collection("ModelerModelVariables", b) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def output(self): <NEW_LINE> <INDENT> return self._output <NEW_LINE> <DEDENT> @property <NEW_LINE> def weights(self): <NEW_LINE> <INDENT> return self._weights <NEW_LINE> <DEDENT> @property <NEW_LINE> def biases(self): <NEW_LINE> <INDENT> return self._biases <NEW_LINE> <DEDENT> @property <NEW_LINE> def optimizer(self): <NEW_LINE> <INDENT> return self._optimizer <NEW_LINE> <DEDENT> @property <NEW_LINE> def trainable_vars(self): <NEW_LINE> <INDENT> return self._weights + self._biases | Stores the tf expressions related to the modeler | 6259905532920d7e50bc758d |
class FileType(object): <NEW_LINE> <INDENT> def __init__(self, mode='r', bufsize=-1, encoding=None, errors=None): <NEW_LINE> <INDENT> self._mode = mode <NEW_LINE> self._bufsize = bufsize <NEW_LINE> self._encoding = encoding <NEW_LINE> self._errors = errors <NEW_LINE> <DEDENT> def __call__(self, string): <NEW_LINE> <INDENT> if string == '-': <NEW_LINE> <INDENT> if 'r' in self._mode: <NEW_LINE> <INDENT> return _sys.stdin <NEW_LINE> <DEDENT> elif 'w' in self._mode: <NEW_LINE> <INDENT> return _sys.stdout <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg = _('argument "-" with mode %r') % self._mode <NEW_LINE> raise ValueError(msg) <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> return codecs.open(string, self._mode, self._encoding, self._errors, self._bufsize) <NEW_LINE> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> message = _("can't open '%s': %s") <NEW_LINE> raise ArgumentTypeError(message % (string, e)) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> args = self._mode, self._bufsize <NEW_LINE> kwargs = [('encoding', self._encoding), ('errors', self._errors)] <NEW_LINE> args_str = ', '.join([repr(arg) for arg in args if arg != -1] + ['%s=%r' % (kw, arg) for kw, arg in kwargs if arg is not None]) <NEW_LINE> return '%s(%s)' % (type(self).__name__, args_str) | Factory for creating file object types
Instances of FileType are typically passed as type= arguments to the
ArgumentParser add_argument() method.
Keyword Arguments:
- mode -- A string indicating how the file is to be opened. Accepts the
same values as the builtin open() function.
- bufsize -- The file's desired buffer size. Accepts the same values as
the builtin open() function.
- encoding -- The file's encoding. Accepts the same values as the
builtin open() function.
- errors -- A string indicating how encoding and decoding errors are to
be handled. Accepts the same value as the builtin open() function. | 62599055baa26c4b54d507e0 |
class bcolors: <NEW_LINE> <INDENT> HEADER = '\033[95m' <NEW_LINE> OKBLUE = '\033[94m' <NEW_LINE> OKGREEN = '\033[92m' <NEW_LINE> WARNING = '\033[93m' <NEW_LINE> FAIL = '\033[91m' <NEW_LINE> ENDC = '\033[0m' <NEW_LINE> BOLD = '\033[1m' <NEW_LINE> UNDERLINE = '\033[4m' | Colors for stdout coloring | 625990557cff6e4e811b6f7e |
class TestLP1074374(unittest.TestCase): <NEW_LINE> <INDENT> layer = RESTLayer <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.user_manager = getUtility(IUserManager) <NEW_LINE> with transaction(): <NEW_LINE> <INDENT> self.mlist = create_list('[email protected]') <NEW_LINE> self.anne = self.user_manager.create_user( '[email protected]', 'Anne Person') <NEW_LINE> <DEDENT> <DEDENT> def test_deleting_user_deletes_address(self): <NEW_LINE> <INDENT> with transaction(): <NEW_LINE> <INDENT> user_id = self.anne.user_id <NEW_LINE> <DEDENT> call_api('http://localhost:9001/3.0/users/[email protected]', method='DELETE') <NEW_LINE> self.assertIsNone(self.user_manager.get_user_by_id(user_id)) <NEW_LINE> self.assertIsNone(self.user_manager.get_user('[email protected]')) <NEW_LINE> self.assertIsNone(self.user_manager.get_address('[email protected]')) <NEW_LINE> <DEDENT> def test_deleting_user_deletes_addresses(self): <NEW_LINE> <INDENT> with transaction(): <NEW_LINE> <INDENT> self.anne.register('[email protected]') <NEW_LINE> <DEDENT> call_api('http://localhost:9001/3.0/users/[email protected]', method='DELETE') <NEW_LINE> self.assertIsNone(self.user_manager.get_user('[email protected]')) <NEW_LINE> self.assertIsNone(self.user_manager.get_user('[email protected]')) <NEW_LINE> <DEDENT> def test_lp_1074374(self): <NEW_LINE> <INDENT> with transaction(): <NEW_LINE> <INDENT> user_id = self.anne.user_id <NEW_LINE> address = list(self.anne.addresses)[0] <NEW_LINE> self.mlist.subscribe(address) <NEW_LINE> <DEDENT> call_api('http://localhost:9001/3.0/users/[email protected]', method='DELETE') <NEW_LINE> json, response = call_api('http://localhost:9001/3.0/addresses') <NEW_LINE> self.assertNotIn('entries', json) <NEW_LINE> self.assertEqual(json['total_size'], 0) <NEW_LINE> json, response = call_api('http://localhost:9001/3.0/members') <NEW_LINE> self.assertNotIn('entries', json) <NEW_LINE> self.assertEqual(json['total_size'], 0) <NEW_LINE> call_api('http://localhost:9001/3.0/users', dict( email='[email protected]', password='bbb')) <NEW_LINE> call_api('http://localhost:9001/3.0/members', dict( list_id='test.example.com', subscriber='[email protected]', role='member', pre_verified=True, pre_confirmed=True, pre_approved=True)) <NEW_LINE> json, response = call_api( 'http://localhost:9001/3.0/users/[email protected]') <NEW_LINE> self.assertNotEqual(user_id, json['user_id']) <NEW_LINE> json, response = call_api('http://localhost:9001/3.0/addresses') <NEW_LINE> self.assertEqual(json['total_size'], 1) <NEW_LINE> self.assertEqual(json['entries'][0]['email'], '[email protected]') <NEW_LINE> json, response = call_api('http://localhost:9001/3.0/members') <NEW_LINE> self.assertEqual(json['total_size'], 1) <NEW_LINE> member = json['entries'][0] <NEW_LINE> self.assertEqual( member['address'], 'http://localhost:9001/3.0/addresses/[email protected]') <NEW_LINE> self.assertEqual(member['email'], '[email protected]') <NEW_LINE> self.assertEqual(member['delivery_mode'], 'regular') <NEW_LINE> self.assertEqual(member['list_id'], 'test.example.com') <NEW_LINE> self.assertEqual(member['role'], 'member') | LP: #1074374 - deleting a user left their address records active. | 625990550a50d4780f70685d |
class ShipTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.image = pygame.image.load('img/ship.bmp') <NEW_LINE> self.ship = Ship(0, 0, self.image) <NEW_LINE> <DEDENT> def test_initialization(self): <NEW_LINE> <INDENT> self.assertEqual(self.ship.rect.centerx, 0) <NEW_LINE> self.assertEqual(self.ship.rect.bottom, 0) <NEW_LINE> <DEDENT> def test_move_right(self): <NEW_LINE> <INDENT> current_center_x = self.ship.rect.centerx <NEW_LINE> movement_step = self.ship.get_movement_step() <NEW_LINE> self.ship.move_right() <NEW_LINE> self.assertEqual(current_center_x + movement_step, self.ship.rect.centerx) <NEW_LINE> <DEDENT> def test_move_left(self): <NEW_LINE> <INDENT> current_center_x = self.ship.rect.centerx <NEW_LINE> movement_step = self.ship.get_movement_step() <NEW_LINE> self.ship.move_left() <NEW_LINE> self.assertEqual(current_center_x - movement_step, self.ship.rect.centerx) <NEW_LINE> <DEDENT> def test_is_in_bounds_left_ok(self): <NEW_LINE> <INDENT> self.assertTrue(self.ship.is_in_bounds(0, 1000)) <NEW_LINE> <DEDENT> def test_is_in_bounds_left_fail(self): <NEW_LINE> <INDENT> self.assertFalse(self.ship.is_in_bounds(500, 1000)) <NEW_LINE> <DEDENT> def test_is_in_bounds_right_ok(self): <NEW_LINE> <INDENT> self.assertTrue(self.ship.is_in_bounds(0, 1000)) <NEW_LINE> <DEDENT> def test_is_in_bounds_right_fail(self): <NEW_LINE> <INDENT> self.assertFalse(self.ship.is_in_bounds(-4, -1)) <NEW_LINE> <DEDENT> def test_die_a_little(self): <NEW_LINE> <INDENT> live_status = self.ship.get_electro_cardio_graphy() <NEW_LINE> self.ship.die_a_little() <NEW_LINE> self.assertEqual(self.ship.get_electro_cardio_graphy(), live_status - 1) <NEW_LINE> <DEDENT> def test_is_dead(self): <NEW_LINE> <INDENT> self.assertEqual(self.ship.get_electro_cardio_graphy(), 3) <NEW_LINE> self.ship.die_a_little() <NEW_LINE> self.ship.die_a_little() <NEW_LINE> self.ship.die_a_little() <NEW_LINE> self.assertTrue(self.ship.is_dead()) | Tests the Ship class of Alien Invaders game | 62599055b5575c28eb71376a |
class Permissions(object): <NEW_LINE> <INDENT> def __init__(self, create, read, update, delete): <NEW_LINE> <INDENT> self.create = create <NEW_LINE> self.read = read <NEW_LINE> self.update = update <NEW_LINE> self.delete = delete <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_user_permissions_for_category(category): <NEW_LINE> <INDENT> belongs_to_user = category.user_id == UserUtils.get_authenticated_user_id() <NEW_LINE> is_in_use = session.query(Item). filter_by(category_id=category.id). count() <NEW_LINE> return Permissions( create=True, read=True, update=belongs_to_user, delete=belongs_to_user and not is_in_use) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_user_permissions_for_item(item): <NEW_LINE> <INDENT> belongs_to_user = item.user_id == UserUtils.get_authenticated_user_id() <NEW_LINE> return Permissions( create=True, read=True, update=belongs_to_user, delete=belongs_to_user) | Determines and represents the permissions available on a given resource
in a given context | 625990556e29344779b01b87 |
class MASK_OT_set_marker_drawtype(Operator): <NEW_LINE> <INDENT> bl_idname = "mask.set_marker_drawtype" <NEW_LINE> bl_label = "Set Marker Drawtype" <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> sc = context.space_data <NEW_LINE> sc.show_marker_pattern = False <NEW_LINE> sc.show_marker_search= False <NEW_LINE> sc.show_track_path = False <NEW_LINE> return {'FINISHED'} | Don't draw markers | 62599055a17c0f6771d5d640 |
class PrivateingredientsApiTests(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.user = get_user_model().objects.create_user( '[email protected]', 'password123' ) <NEW_LINE> self.client = APIClient() <NEW_LINE> self.client.force_authenticate(self.user) <NEW_LINE> <DEDENT> def test_retrieve_ingredients(self): <NEW_LINE> <INDENT> Ingredients.objects.create(user=self.user, name='Kale') <NEW_LINE> Ingredients.objects.create(user=self.user, name='Milk') <NEW_LINE> res = self.client.get(INGREDIENTS_URL) <NEW_LINE> ingredients = Ingredients.objects.all().order_by('-name') <NEW_LINE> serializer = IngredientSerializer(ingredients, many=True) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> self.assertEqual(res.data, serializer.data) <NEW_LINE> <DEDENT> def test_ingredients_limited_to_user(self): <NEW_LINE> <INDENT> user2 = get_user_model().objects.create_user( '[email protected]', 'testpass' ) <NEW_LINE> Ingredients.objects.create(user=user2, name='banana') <NEW_LINE> ingredient = Ingredients.objects.create(user=self.user, name='nuts') <NEW_LINE> res = self.client.get(INGREDIENTS_URL) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> self.assertEqual(len(res.data), 1) <NEW_LINE> self.assertEqual(res.data[0]['name'], ingredient.name) <NEW_LINE> <DEDENT> def test_create_ingredient_successful(self): <NEW_LINE> <INDENT> payload = { 'name': "Test ingredient" } <NEW_LINE> self.client.post(INGREDIENTS_URL, payload) <NEW_LINE> exists = Ingredients.objects.filter( user=self.user, name=payload['name'] ).exists() <NEW_LINE> self.assertTrue(exists) <NEW_LINE> <DEDENT> def test_create_ingredient_invalid(self): <NEW_LINE> <INDENT> payload = {'name': ''} <NEW_LINE> res = self.client.post(INGREDIENTS_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) | Tests the private user ingredients API | 625990558e7ae83300eea5ce |
class Hitmarker(GridComponent): <NEW_LINE> <INDENT> def __init__(self, square_size, grid_rect, square_group): <NEW_LINE> <INDENT> image_name = 'hit.png' <NEW_LINE> GridComponent.__init__(self, square_size, grid_rect, square_group, image_name) | A subclass of GridComponent for displaying successful strikes on the enemy grid.
:parent: gridcomponent.GridComponent | 625990553cc13d1c6d466c7b |
class SparseSquaredHingeProblem(SparseLinearProblem): <NEW_LINE> <INDENT> @property <NEW_LINE> def _solution_class(self): <NEW_LINE> <INDENT> return SparseSquaredHingeSolution <NEW_LINE> <DEDENT> @property <NEW_LINE> def _loss_index(self): <NEW_LINE> <INDENT> return 3. <NEW_LINE> <DEDENT> def _check_data_inputs(self, A, b): <NEW_LINE> <INDENT> SparseLinearProblem._check_data_inputs(self, A, b) <NEW_LINE> check_classification_labels(b) | Class for training sparse linear models with squared hinge loss.
The optimization objective is
.. math::
\sum_i \onehalf (1 - b_i a_i^T w)_+^2 + \lambda ||w||_1 ,
where the "+" subscript denotes the rectifier function. Each label should
have value 1 or -1. | 62599055e5267d203ee6ce2c |
class InvalidThermostatState(Exception): <NEW_LINE> <INDENT> pass | Vital information is missing from the response | 62599055379a373c97d9a562 |
class PressRelease(Highlight, BaseContent): <NEW_LINE> <INDENT> security = ClassSecurityInfo() <NEW_LINE> archetype_name = 'Press Release' <NEW_LINE> meta_type = 'PressRelease' <NEW_LINE> portal_type = 'PressRelease' <NEW_LINE> allowed_content_types = [] + list(getattr(Highlight, 'allowed_content_types', [])) <NEW_LINE> filter_content_types = 0 <NEW_LINE> global_allow = 1 <NEW_LINE> immediate_view = 'base_view' <NEW_LINE> default_view = 'pressrelease_view' <NEW_LINE> suppl_views = () <NEW_LINE> typeDescription = "Press Release" <NEW_LINE> typeDescMsgId = 'description_edit_pressrelease' <NEW_LINE> _at_rename_after_creation = True <NEW_LINE> schema = PressRelease_schema <NEW_LINE> content_icon = 'press-release_icon.gif' <NEW_LINE> security.declareProtected(ModifyPortalContent, 'setThemes') <NEW_LINE> def setThemes(self, value, **kw): <NEW_LINE> <INDENT> value = [val for val in value if val] <NEW_LINE> tagging = IThemeTagging(self) <NEW_LINE> tagging.tags = value | Press release
| 625990558da39b475be04729 |
class CreateGroupRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ApplicationId = None <NEW_LINE> self.NamespaceId = None <NEW_LINE> self.GroupName = None <NEW_LINE> self.ClusterId = None <NEW_LINE> self.GroupDesc = None <NEW_LINE> self.GroupResourceType = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ApplicationId = params.get("ApplicationId") <NEW_LINE> self.NamespaceId = params.get("NamespaceId") <NEW_LINE> self.GroupName = params.get("GroupName") <NEW_LINE> self.ClusterId = params.get("ClusterId") <NEW_LINE> self.GroupDesc = params.get("GroupDesc") <NEW_LINE> self.GroupResourceType = params.get("GroupResourceType") | CreateGroup request parameter structure
| 62599055baa26c4b54d507e1 |
class Tool(benchexec.tools.template.BaseTool2): <NEW_LINE> <INDENT> REQUIRED_PATHS = ["predator", "predator-bfs", "predator-dfs", "predatorHP.py"] <NEW_LINE> def executable(self, tool_locator): <NEW_LINE> <INDENT> return tool_locator.find_executable("predatorHP.py") <NEW_LINE> <DEDENT> def name(self): <NEW_LINE> <INDENT> return "PredatorHP" <NEW_LINE> <DEDENT> def version(self, executable): <NEW_LINE> <INDENT> return self._version_from_tool(executable, use_stderr=True) <NEW_LINE> <DEDENT> def cmdline(self, executable, options, task, rlimits): <NEW_LINE> <INDENT> spec = ["--propertyfile", task.property_file] if task.property_file else [] <NEW_LINE> data_model_param = get_data_model_from_task( task, {ILP32: "--compiler-options=-m32", LP64: "--compiler-options=-m64"}, ) <NEW_LINE> if data_model_param and data_model_param not in options: <NEW_LINE> <INDENT> options += [data_model_param] <NEW_LINE> <DEDENT> return [executable] + options + spec + list(task.input_files_or_identifier) <NEW_LINE> <DEDENT> def determine_result(self, run): <NEW_LINE> <INDENT> status = "UNKNOWN" <NEW_LINE> if run.output.any_line_contains("UNKNOWN"): <NEW_LINE> <INDENT> status = result.RESULT_UNKNOWN <NEW_LINE> <DEDENT> elif run.output.any_line_contains("TRUE"): <NEW_LINE> <INDENT> status = result.RESULT_TRUE_PROP <NEW_LINE> <DEDENT> elif run.output.any_line_contains("FALSE(valid-memtrack)"): <NEW_LINE> <INDENT> status = result.RESULT_FALSE_MEMTRACK <NEW_LINE> <DEDENT> elif run.output.any_line_contains("FALSE(valid-deref)"): <NEW_LINE> <INDENT> status = result.RESULT_FALSE_DEREF <NEW_LINE> <DEDENT> elif run.output.any_line_contains("FALSE(valid-free)"): <NEW_LINE> <INDENT> status = result.RESULT_FALSE_FREE <NEW_LINE> <DEDENT> elif run.output.any_line_contains("FALSE(valid-memcleanup)"): <NEW_LINE> <INDENT> status = result.RESULT_FALSE_MEMCLEANUP <NEW_LINE> <DEDENT> elif run.output.any_line_contains("FALSE"): <NEW_LINE> <INDENT> status = result.RESULT_FALSE_REACH <NEW_LINE> <DEDENT> if status == "UNKNOWN" and run.was_timeout: <NEW_LINE> <INDENT> status = "TIMEOUT" <NEW_LINE> <DEDENT> return status | Wrapper for a Predator - Hunting Party
http://www.fit.vutbr.cz/research/groups/verifit/tools/predator-hp/ | 6259905573bcbd0ca4bcb7cf |
class Sliding_Window_Dataset(Dataset): <NEW_LINE> <INDENT> def __init__(self, data, gpudevice, sliding_window_size, sliding_window_step): <NEW_LINE> <INDENT> super(Sliding_Window_Dataset, self).__init__() <NEW_LINE> self.gpudevice = gpudevice <NEW_LINE> self.data, self.labels = data <NEW_LINE> t,d = self.data.shape <NEW_LINE> self.data = torch.from_numpy(self.data) <NEW_LINE> self.data = self.data.float() <NEW_LINE> self.data = self.data.reshape(1,t,d) <NEW_LINE> self.labels = torch.from_numpy(self.labels) <NEW_LINE> self.labels = self.labels.int() <NEW_LINE> self.sliding_window_size = sliding_window_size <NEW_LINE> self.sliding_window_step = sliding_window_step <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return int(self.data.shape[1]/self.sliding_window_size) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> lowerbound = index * self.sliding_window_step <NEW_LINE> upperbound = lowerbound + self.sliding_window_size <NEW_LINE> segment = self.data[:, lowerbound:upperbound, :] <NEW_LINE> label = torch.mode(self.labels[lowerbound:upperbound])[0].item() <NEW_LINE> return segment,label | Characterizes a dataset for PyTorch | 62599055d99f1b3c44d06bde |
class User(UserMixin, db.Model): <NEW_LINE> <INDENT> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> username = db.Column(db.String(64), index=True, unique=True) <NEW_LINE> email = db.Column(db.String(120), index=True, unique=True) <NEW_LINE> password_hash = db.Column(db.String(128)) <NEW_LINE> about_me = db.Column(db.String(140)) <NEW_LINE> last_seen = db.Column(db.DateTime, default=datetime.utcnow) <NEW_LINE> posts = db.relationship('Post', backref='author', lazy='dynamic') <NEW_LINE> followed = db.relationship( 'User', secondary=followers, primaryjoin=(followers.c.follower_id == id), secondaryjoin=(followers.c.followed_id == id), backref=db.backref('followers', lazy='dynamic'), lazy='dynamic') <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return '<User {}, {}>'.format(self.username, self.email) <NEW_LINE> <DEDENT> def set_password(self, password): <NEW_LINE> <INDENT> self.password_hash = generate_password_hash(password) <NEW_LINE> <DEDENT> def check_password(self, password): <NEW_LINE> <INDENT> current_app.logger.debug("Checking password {}".format(password)) <NEW_LINE> return check_password_hash(self.password_hash, password) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @login.user_loader <NEW_LINE> def load_user(id_): <NEW_LINE> <INDENT> return User.query.get(int(id_)) <NEW_LINE> <DEDENT> def avatar(self, size="80"): <NEW_LINE> <INDENT> digest = md5(self.email.lower().encode('utf-8')).hexdigest() <NEW_LINE> url = 'https://www.gravatar.com/avatar/{}?d=retro&s={}'.format( digest, size) <NEW_LINE> current_app.logger.debug("Get gravatar {}".format(url)) <NEW_LINE> return url <NEW_LINE> <DEDENT> def follow(self, user): <NEW_LINE> <INDENT> if not self.is_following(user): <NEW_LINE> <INDENT> self.followed.append(user) <NEW_LINE> <DEDENT> <DEDENT> def unfollow(self, user): <NEW_LINE> <INDENT> if self.is_following(user): <NEW_LINE> <INDENT> self.followed.remove(user) <NEW_LINE> <DEDENT> <DEDENT> def is_following(self, user): <NEW_LINE> <INDENT> return self.followed.filter( followers.c.followed_id == user.id).count() > 0 <NEW_LINE> <DEDENT> def followed_posts(self): <NEW_LINE> <INDENT> followed = Post.query.join( followers, (followers.c.followed_id == Post.user_id)).filter( followers.c.follower_id == self.id) <NEW_LINE> own = Post.query.filter_by(user_id=self.id) <NEW_LINE> return followed.union(own).order_by(Post.timestamp.desc()) | Represents a system User | 625990553539df3088ecd7e4 |
class Operations(object): <NEW_LINE> <INDENT> models = _models <NEW_LINE> def __init__(self, client, config, serializer, deserializer): <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self._config = config <NEW_LINE> <DEDENT> def list( self, **kwargs ): <NEW_LINE> <INDENT> cls = kwargs.pop('cls', None) <NEW_LINE> error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } <NEW_LINE> error_map.update(kwargs.pop('error_map', {})) <NEW_LINE> api_version = "2021-09-01" <NEW_LINE> accept = "application/json" <NEW_LINE> def prepare_request(next_link=None): <NEW_LINE> <INDENT> header_parameters = {} <NEW_LINE> header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') <NEW_LINE> if not next_link: <NEW_LINE> <INDENT> url = self.list.metadata['url'] <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') <NEW_LINE> request = self._client.get(url, query_parameters, header_parameters) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> url = next_link <NEW_LINE> query_parameters = {} <NEW_LINE> request = self._client.get(url, query_parameters, header_parameters) <NEW_LINE> <DEDENT> return request <NEW_LINE> <DEDENT> def extract_data(pipeline_response): <NEW_LINE> <INDENT> deserialized = self._deserialize('ResourceProviderOperationList', pipeline_response) <NEW_LINE> list_of_elem = deserialized.value <NEW_LINE> if cls: <NEW_LINE> <INDENT> list_of_elem = cls(list_of_elem) <NEW_LINE> <DEDENT> return deserialized.next_link or None, iter(list_of_elem) <NEW_LINE> <DEDENT> def get_next(next_link=None): <NEW_LINE> <INDENT> request = prepare_request(next_link) <NEW_LINE> pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) <NEW_LINE> response = pipeline_response.http_response <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) <NEW_LINE> map_error(status_code=response.status_code, response=response, error_map=error_map) <NEW_LINE> raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) <NEW_LINE> <DEDENT> return pipeline_response <NEW_LINE> <DEDENT> return ItemPaged( get_next, extract_data ) <NEW_LINE> <DEDENT> list.metadata = {'url': '/providers/Microsoft.KubernetesConfiguration/operations'} | Operations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.kubernetesconfiguration.v2021_09_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer. | 62599055e64d504609df9e6f |
class PingTask(Task): <NEW_LINE> <INDENT> executor_script = "/framework/calico_executor.py ping_task" <NEW_LINE> def __init__(self, can_ping_targets=[], cant_ping_targets=[], *args, **kwargs): <NEW_LINE> <INDENT> super(PingTask, self).__init__(*args, **kwargs) <NEW_LINE> assert can_ping_targets or cant_ping_targets, "Must provide can/t " "ping targets." <NEW_LINE> self.can_ping_targets = can_ping_targets <NEW_LINE> self.cant_ping_targets = cant_ping_targets <NEW_LINE> self.ping_status_data = {} <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> task_description = "PingTask(from=%s" % self.ip <NEW_LINE> if self.can_ping_targets: <NEW_LINE> <INDENT> task_description += ", can_ping=%s" % self.can_ping_targets <NEW_LINE> <DEDENT> if self.cant_ping_targets: <NEW_LINE> <INDENT> task_description += ", cant_ping=%s" % self.cant_ping_targets <NEW_LINE> <DEDENT> if self.netgroups: <NEW_LINE> <INDENT> task_description += ", netgroups=%s" % self.netgroups <NEW_LINE> <DEDENT> return task_description + ")" <NEW_LINE> <DEDENT> def as_new_mesos_task(self): <NEW_LINE> <INDENT> task = super(PingTask, self).as_new_mesos_task() <NEW_LINE> ping_ips = [] <NEW_LINE> for target in self.can_ping_targets: <NEW_LINE> <INDENT> ping_ips.extend(target.ip_addresses) <NEW_LINE> <DEDENT> cant_ping_ips = [] <NEW_LINE> for target in self.cant_ping_targets: <NEW_LINE> <INDENT> cant_ping_ips.extend(target.ip_addresses) <NEW_LINE> <DEDENT> if not self.default_executor: <NEW_LINE> <INDENT> task_type_label = task.labels.labels.add() <NEW_LINE> task_type_label.key = "task_type" <NEW_LINE> task_type_label.value = "ping" <NEW_LINE> can_ping_label = task.labels.labels.add() <NEW_LINE> can_ping_label.key = "can_ping" <NEW_LINE> can_ping_label.value = ",".join(ping_ips) <NEW_LINE> cant_ping_label = task.labels.labels.add() <NEW_LINE> cant_ping_label.key = "cant_ping" <NEW_LINE> cant_ping_label.value = ",".join(cant_ping_ips) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> command = " && ".join(["ping -c 1 %s" % ip for ip in ping_ips]) <NEW_LINE> task.command.value = command <NEW_LINE> <DEDENT> return task <NEW_LINE> <DEDENT> @property <NEW_LINE> def dependencies_are_met(self): <NEW_LINE> <INDENT> if self.state is not None: <NEW_LINE> <INDENT> raise Exception("PingTask has already been started") <NEW_LINE> <DEDENT> for task in self.can_ping_targets + self.cant_ping_targets: <NEW_LINE> <INDENT> if task.state != mesos_pb2.TASK_RUNNING: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True | Subclass of Task which attempts to ping a target.
Pass in a collection of can_ping_targets or cant_ping_targets, and
this PingTask will fail accordingly.
Since it can target multiple tasks, results reported by the Executor
are stored in ping_status_data, so individual pings can be checked. | 62599055cb5e8a47e493cc26 |
class CmdTime(MuxCommand): <NEW_LINE> <INDENT> key = "@time" <NEW_LINE> aliases = "@uptime" <NEW_LINE> locks = "cmd:perm(time) or perm(Players)" <NEW_LINE> help_category = "System" <NEW_LINE> def func(self): <NEW_LINE> <INDENT> table = prettytable.PrettyTable(["{wserver time statistic","{wtime"]) <NEW_LINE> table.align = 'l' <NEW_LINE> table.add_row(["Current server uptime", utils.time_format(gametime.uptime(), 3)]) <NEW_LINE> table.add_row(["Total server running time", utils.time_format(gametime.runtime(), 2)]) <NEW_LINE> table.add_row(["Total in-game time (realtime x %g)" % (gametime.TIMEFACTOR), utils.time_format(gametime.gametime(), 2)]) <NEW_LINE> table.add_row(["Server time stamp", datetime.datetime.now()]) <NEW_LINE> self.caller.msg(str(table)) | show server time statistics
Usage:
@time
List Server time statistics such as uptime
and the current time stamp. | 62599055baa26c4b54d507e2 |
class TestModel(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> unittest.TestCase.setUp(self) <NEW_LINE> config_parser = SafeConfigParser() <NEW_LINE> config_parser.read_string(TEST_CONFIG) <NEW_LINE> self.site_config = config_parser[SEC_SITE] <NEW_LINE> <DEDENT> def test_weather_model(self): <NEW_LINE> <INDENT> weather_model = WeatherModel(TEST_TEMPERATURE_VALUE, TEST_HUMIDITY_VALUE, TEST_WINDSPEED_VALUE, TEST_PRESSURE_VALUE) <NEW_LINE> self.assertEqual(weather_model.temperature, TEST_TEMPERATURE_VALUE) <NEW_LINE> self.assertEqual(weather_model.pressure, TEST_HUMIDITY_VALUE) <NEW_LINE> self.assertEqual(weather_model.humidity, TEST_WINDSPEED_VALUE) <NEW_LINE> self.assertEqual(weather_model.windspeed, TEST_PRESSURE_VALUE) <NEW_LINE> <DEDENT> def test_create_weather_model(self): <NEW_LINE> <INDENT> model = create_weather_model(self.site_config) <NEW_LINE> self.assertEqual(model.temperature, self.site_config.getfloat(OPT_TEMPERATURE)) <NEW_LINE> self.assertEqual(model.humidity, self.site_config.getfloat(OPT_HUMIDITY)) <NEW_LINE> self.assertEqual(model.pressure, self.site_config.getfloat(OPT_PRESSURE)) <NEW_LINE> self.assertEqual(model.windspeed, self.site_config.getfloat(OPT_WINDSPEED)) | Test Weather Model class
| 62599055435de62698e9d342 |
class Easy21Action(Action): <NEW_LINE> <INDENT> def __init__(self, hit: bool): <NEW_LINE> <INDENT> self.hit = hit <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'hit' if self.hit else 'stick' <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self) | Easy21 Action that can be performed on the environment state | 6259905521bff66bcd7241a4 |
class RadioThermostat(ThermostatDevice): <NEW_LINE> <INDENT> def __init__(self, device, hold_temp): <NEW_LINE> <INDENT> self.device = device <NEW_LINE> self.set_time() <NEW_LINE> self._target_temperature = None <NEW_LINE> self._current_temperature = None <NEW_LINE> self._operation = STATE_IDLE <NEW_LINE> self._name = None <NEW_LINE> self.hold_temp = hold_temp <NEW_LINE> self.update() <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return TEMP_FAHRENHEIT <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_state_attributes(self): <NEW_LINE> <INDENT> return { "fan": self.device.fmode['human'], "mode": self.device.tmode['human'] } <NEW_LINE> <DEDENT> @property <NEW_LINE> def current_temperature(self): <NEW_LINE> <INDENT> return round(self._current_temperature, 1) <NEW_LINE> <DEDENT> @property <NEW_LINE> def operation(self): <NEW_LINE> <INDENT> return self._operation <NEW_LINE> <DEDENT> @property <NEW_LINE> def target_temperature(self): <NEW_LINE> <INDENT> return round(self._target_temperature, 1) <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self._current_temperature = self.device.temp['raw'] <NEW_LINE> self._name = self.device.name['raw'] <NEW_LINE> if self.device.tmode['human'] == 'Cool': <NEW_LINE> <INDENT> self._target_temperature = self.device.t_cool['raw'] <NEW_LINE> self._operation = STATE_COOL <NEW_LINE> <DEDENT> elif self.device.tmode['human'] == 'Heat': <NEW_LINE> <INDENT> self._target_temperature = self.device.t_heat['raw'] <NEW_LINE> self._operation = STATE_HEAT <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._operation = STATE_IDLE <NEW_LINE> <DEDENT> <DEDENT> def set_temperature(self, temperature): <NEW_LINE> <INDENT> if self._operation == STATE_COOL: <NEW_LINE> <INDENT> self.device.t_cool = temperature <NEW_LINE> <DEDENT> elif self._operation == STATE_HEAT: <NEW_LINE> <INDENT> self.device.t_heat = temperature <NEW_LINE> <DEDENT> if self.hold_temp: <NEW_LINE> <INDENT> self.device.hold = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.device.hold = 0 <NEW_LINE> <DEDENT> <DEDENT> def set_time(self): <NEW_LINE> <INDENT> now = datetime.datetime.now() <NEW_LINE> self.device.time = {'day': now.weekday(), 'hour': now.hour, 'minute': now.minute} | Representation of a Radio Thermostat. | 62599055be383301e0254d2c |
class InvertedResidualRelu(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_chs, out_chs, stride=1, exp_ratio=6.0, slice=1, real_in_chs = None): <NEW_LINE> <INDENT> super(InvertedResidualRelu, self).__init__() <NEW_LINE> mid_chs = make_divisible(in_chs * exp_ratio) <NEW_LINE> if real_in_chs: <NEW_LINE> <INDENT> in_chs = real_in_chs <NEW_LINE> <DEDENT> self.has_residual = in_chs == out_chs and stride == 1 <NEW_LINE> self.conv_pw = conv1x1(in_chs, mid_chs, slice=slice) <NEW_LINE> self.bn1 = norm_layer(mid_chs, slice=slice) <NEW_LINE> self.act1 = act_layer() <NEW_LINE> self.conv_dw = conv3x3(mid_chs, mid_chs, stride, group=True, slice=slice) <NEW_LINE> self.bn2 = norm_layer(mid_chs, slice=slice) <NEW_LINE> self.act2 = act_layer() <NEW_LINE> self.conv_pwl = conv1x1(mid_chs, out_chs, slice=slice) <NEW_LINE> self.bn3 = norm_layer(out_chs, slice=slice) <NEW_LINE> self.act3 = act_layer() <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> residual = x <NEW_LINE> x = self.conv_pw(x) <NEW_LINE> x = self.bn1(x) <NEW_LINE> x = self.act1(x) <NEW_LINE> x = self.conv_dw(x) <NEW_LINE> x = self.bn2(x) <NEW_LINE> x = self.act2(x) <NEW_LINE> x = self.conv_pwl(x) <NEW_LINE> x = self.bn3(x) <NEW_LINE> if self.has_residual: <NEW_LINE> <INDENT> x += residual <NEW_LINE> <DEDENT> x = self.act3(x) <NEW_LINE> return x | Inverted residual block w/ optional SE and CondConv routing | 625990553617ad0b5ee07687 |
class NStepGRU(NStepGRUBase): <NEW_LINE> <INDENT> use_bi_direction = False <NEW_LINE> def rnn(self, *args): <NEW_LINE> <INDENT> return rnn.n_step_gru(*args) <NEW_LINE> <DEDENT> @property <NEW_LINE> def n_cells(self): <NEW_LINE> <INDENT> return 1 | __init__(self, n_layers, in_size, out_size, dropout)
Stacked Uni-directional GRU for sequences.
This link is a stacked version of a uni-directional GRU for sequences.
It calculates the hidden states of all layers at the end of each sequence,
and all hidden states of the last layer for each time step.
Unlike :func:`chainer.functions.n_step_gru`, this link automatically
sorts inputs in descending order by length and transposes the sequences.
Users just need to call the link with a list of :class:`chainer.Variable`
holding the sequences.
Args:
n_layers (int): Number of layers.
in_size (int): Dimensionality of input vectors.
out_size (int): Dimensionality of hidden states and output vectors.
dropout (float): Dropout ratio.
.. seealso::
:func:`chainer.functions.n_step_gru` | 6259905576d4e153a661dd1b |
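A minimal usage sketch for the stacked GRU record above. It is written against the equivalent built-in `chainer.links.NStepGRU` (same constructor signature), assuming the custom link mirrors its interface; per the docstring, the link is simply called with a list of variable-length sequences.

```python
import numpy as np
import chainer
import chainer.links as L

# Assumption: the custom NStepGRU above mirrors chainer.links.NStepGRU's interface.
rnn = L.NStepGRU(n_layers=2, in_size=10, out_size=16, dropout=0.1)

# One Variable per sequence; lengths may differ and need not be pre-sorted.
xs = [chainer.Variable(np.random.randn(5, 10).astype(np.float32)),
      chainer.Variable(np.random.randn(3, 10).astype(np.float32))]

hy, ys = rnn(None, xs)  # hy: final hidden states of every layer, ys: last-layer outputs per step
```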
class Meta: <NEW_LINE> <INDENT> name = 'denmark_provider' | The name of the provider. | 62599055ac7a0e7691f73a20 |
class SessionCart: <NEW_LINE> <INDENT> def __init__(self, request): <NEW_LINE> <INDENT> cart_id = request.session.get(CART_ID) <NEW_LINE> if cart_id: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cart = models.Cart.objects.get(id=cart_id, checked_out=False) <NEW_LINE> <DEDENT> except models.Cart.DoesNotExist: <NEW_LINE> <INDENT> cart = self.new(request) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> cart = self.new(request) <NEW_LINE> <DEDENT> self.cart = cart <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for item in self.cart.item_set.all(): <NEW_LINE> <INDENT> yield item <NEW_LINE> <DEDENT> <DEDENT> def new(self, request): <NEW_LINE> <INDENT> cart = models.Cart(creation_date=datetime.datetime.now()) <NEW_LINE> cart.save() <NEW_LINE> request.session[CART_ID] = cart.id <NEW_LINE> return cart <NEW_LINE> <DEDENT> def add(self, product, unit_price, quantity=1): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> item = models.Item.objects.get( cart=self.cart, product=product, ) <NEW_LINE> <DEDENT> except models.Item.DoesNotExist: <NEW_LINE> <INDENT> item = models.Item() <NEW_LINE> item.cart = self.cart <NEW_LINE> item.product = product <NEW_LINE> item.unit_price = unit_price <NEW_LINE> item.quantity = quantity <NEW_LINE> item.save() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> item.unit_price = unit_price <NEW_LINE> item.quantity = item.quantity + int(quantity) <NEW_LINE> item.save() <NEW_LINE> <DEDENT> <DEDENT> def remove(self, product): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> item = models.Item.objects.get( cart=self.cart, product=product, ) <NEW_LINE> <DEDENT> except models.Item.DoesNotExist: <NEW_LINE> <INDENT> raise ItemDoesNotExist <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> item.delete() <NEW_LINE> <DEDENT> <DEDENT> def update(self, product, quantity, unit_price=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> item = models.Item.objects.get( cart=self.cart, product=product, ) <NEW_LINE> <DEDENT> except models.Item.DoesNotExist: <NEW_LINE> <INDENT> raise ItemDoesNotExist <NEW_LINE> <DEDENT> <DEDENT> def count(self): <NEW_LINE> <INDENT> result = 0 <NEW_LINE> for item in self.cart.item_set.all(): <NEW_LINE> <INDENT> result += 1 * item.quantity <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def summary(self): <NEW_LINE> <INDENT> result = 0 <NEW_LINE> is_footwear = False <NEW_LINE> for item in self.cart.item_set.all(): <NEW_LINE> <INDENT> item_product = item.get_product() <NEW_LINE> if 'Footwear' in item_product.product_category.category_name: <NEW_LINE> <INDENT> is_footwear = True <NEW_LINE> <DEDENT> result += item.total_price <NEW_LINE> <DEDENT> if self.cart.has_voucher and self.cart.voucher_code: <NEW_LINE> <INDENT> if self.cart.voucher_code == "vou5": <NEW_LINE> <INDENT> result -= 5 <NEW_LINE> <DEDENT> if 50 < result < 75 and self.cart.voucher_code == "vou10": <NEW_LINE> <INDENT> result -= 10 <NEW_LINE> <DEDENT> if result > 75 and is_footwear and self.cart.voucher_code == "vou15": <NEW_LINE> <INDENT> result -= 15 <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> for item in self.cart.item_set.all(): <NEW_LINE> <INDENT> item.delete() <NEW_LINE> <DEDENT> self.cart.voucher_code = "" <NEW_LINE> self.cart.has_voucher = False <NEW_LINE> self.cart.save() | @change: Rename Cart to SessionCart to avoid clashes | 625990558a43f66fc4bf36cc |
class MTable(object): <NEW_LINE> <INDENT> def __init__(self, name, families): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.families = families <NEW_LINE> self._rows = {} <NEW_LINE> <DEDENT> def row(self, key): <NEW_LINE> <INDENT> return self._rows.get(key, {}) <NEW_LINE> <DEDENT> def rows(self, keys): <NEW_LINE> <INDENT> return ((k, self.row(k)) for k in keys) <NEW_LINE> <DEDENT> def put(self, key, data): <NEW_LINE> <INDENT> if key not in self._rows: <NEW_LINE> <INDENT> self._rows[key] = data <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._rows[key].update(data) <NEW_LINE> <DEDENT> <DEDENT> def delete(self, key): <NEW_LINE> <INDENT> del self._rows[key] <NEW_LINE> <DEDENT> def scan(self, filter=None, columns=[], row_start=None, row_stop=None): <NEW_LINE> <INDENT> sorted_keys = sorted(self._rows) <NEW_LINE> rows = {} <NEW_LINE> for row in sorted_keys: <NEW_LINE> <INDENT> if row_start and row < row_start: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if row_stop and row > row_stop: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> rows[row] = copy.copy(self._rows[row]) <NEW_LINE> <DEDENT> if columns: <NEW_LINE> <INDENT> ret = {} <NEW_LINE> for row in rows.keys(): <NEW_LINE> <INDENT> data = rows[row] <NEW_LINE> for key in data: <NEW_LINE> <INDENT> if key in columns: <NEW_LINE> <INDENT> ret[row] = data <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> rows = ret <NEW_LINE> <DEDENT> elif filter: <NEW_LINE> <INDENT> filters = filter.split('AND') <NEW_LINE> for f in filters: <NEW_LINE> <INDENT> g = re.search("(.*)\((.*),?\)", f) <NEW_LINE> fname = g.group(1).strip() <NEW_LINE> fargs = [s.strip().replace('\'', '') for s in g.group(2).split(',')] <NEW_LINE> m = getattr(self, fname) <NEW_LINE> if callable(m): <NEW_LINE> <INDENT> rows = m(fargs, rows) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError("%s filter is not implemented, " "you may want to add it!") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for k in sorted(rows): <NEW_LINE> <INDENT> yield k, rows[k] <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def SingleColumnValueFilter(args, rows): <NEW_LINE> <INDENT> op = args[2] <NEW_LINE> column = "%s:%s" % (args[0], args[1]) <NEW_LINE> value = args[3] <NEW_LINE> if value.startswith('binary:'): <NEW_LINE> <INDENT> value = value[7:] <NEW_LINE> <DEDENT> r = {} <NEW_LINE> for row in rows: <NEW_LINE> <INDENT> data = rows[row] <NEW_LINE> if op == '=': <NEW_LINE> <INDENT> if column in data and data[column] == value: <NEW_LINE> <INDENT> r[row] = data <NEW_LINE> <DEDENT> <DEDENT> elif op == '<=': <NEW_LINE> <INDENT> if column in data and data[column] <= value: <NEW_LINE> <INDENT> r[row] = data <NEW_LINE> <DEDENT> <DEDENT> elif op == '>=': <NEW_LINE> <INDENT> if column in data and data[column] >= value: <NEW_LINE> <INDENT> r[row] = data <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError("In-memory " "SingleColumnValueFilter " "doesn't support the %s operation " "yet" % op) <NEW_LINE> <DEDENT> <DEDENT> return r | HappyBase.Table mock
| 62599055cc0a2c111447c52c |
class ISchoolFolder(Interface): <NEW_LINE> <INDENT> contains('Products.Tutorweb.interfaces.ISchool') <NEW_LINE> title = schema.TextLine(title=_(u"Object title"), required=True) <NEW_LINE> description = schema.TextLine(title=_(u"Description"), description=_(u"A short summary of this folder")) | A folder object containing schools.
| 62599055d53ae8145f9199a2 |
class Launcher(_LauncherBase): <NEW_LINE> <INDENT> def __init__(self, protocol, port, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> if not issubclass(protocol, Protocol): <NEW_LINE> <INDENT> raise TypeError(r'Not Implemented Protocol Interface') <NEW_LINE> <DEDENT> self._settings = { r'ssl_options': kwargs.get(r'ssl_options', None), r'max_buffer_size': kwargs.get(r'max_buffer_size', None), r'read_chunk_size': kwargs.get(r'read_chunk_size', None), } <NEW_LINE> self._sockets = bind_sockets(port) <NEW_LINE> if self._process_num > 1: <NEW_LINE> <INDENT> self._process_id = fork_processes(self._process_num) <NEW_LINE> <DEDENT> AsyncIOMainLoop().install() <NEW_LINE> self._event_loop = asyncio.get_event_loop() <NEW_LINE> self._event_loop.set_debug(self._debug) <NEW_LINE> self._event_loop.add_signal_handler(signal.SIGINT, self.stop) <NEW_LINE> self._event_loop.add_signal_handler(signal.SIGTERM, self.stop) <NEW_LINE> self._server = _TCPServer(protocol, **self._settings) <NEW_LINE> if self._async_initialize: <NEW_LINE> <INDENT> self._event_loop.run_until_complete(self._async_initialize()) | Launcher for TornadoTCP
Used to simplify and unify the program startup procedure | 625990558da39b475be0472c
class TrustedFilter(filters.BaseHostFilter): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.compute_attestation = ComputeAttestation() <NEW_LINE> <DEDENT> run_filter_once_per_request = True <NEW_LINE> def host_passes(self, host_state, filter_properties): <NEW_LINE> <INDENT> instance_type = filter_properties.get('instance_type', {}) <NEW_LINE> extra = instance_type.get('extra_specs', {}) <NEW_LINE> trust = extra.get('trust:trusted_host') <NEW_LINE> host = host_state.nodename <NEW_LINE> if trust: <NEW_LINE> <INDENT> return self.compute_attestation.is_trusted(host, trust) <NEW_LINE> <DEDENT> return True | Trusted filter to support Trusted Compute Pools. | 62599055462c4b4f79dbcf45 |
class MaintenanceRecordFilterBackend(filters.BaseFilterBackend): <NEW_LINE> <INDENT> def filter_queryset(self, request, queryset, view): <NEW_LINE> <INDENT> request_params = request.query_params <NEW_LINE> active = request_params.get('active') <NEW_LINE> if isinstance(active, basestring) and active.lower() == 'true' or isinstance(active, bool) and active: <NEW_LINE> <INDENT> queryset = MaintenanceRecord.active() <NEW_LINE> <DEDENT> return queryset | Filter MaintenanceRecords using the request_user and 'query_params' | 625990557b25080760ed877f |
class FileScheme(object): <NEW_LINE> <INDENT> host = '<file>' <NEW_LINE> port = '<file>' <NEW_LINE> reason = '<none>' <NEW_LINE> def __init__(self, location): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def request(self, method, fullpath, body='', headers=None): <NEW_LINE> <INDENT> self.status = 200 <NEW_LINE> self.msg = '' <NEW_LINE> self.path = fullpath.split('?')[0] <NEW_LINE> self.method = method = method.lower() <NEW_LINE> assert method in ('get', 'put', 'delete') <NEW_LINE> if method == 'delete': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.remove(self.path) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> elif method == 'put': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> f = open(self.path, 'w') <NEW_LINE> f.write(body) <NEW_LINE> f.close() <NEW_LINE> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> self.status = 500 <NEW_LINE> self.raise_connection_error() <NEW_LINE> <DEDENT> <DEDENT> elif method == 'get': <NEW_LINE> <INDENT> if not os.path.exists(self.path): <NEW_LINE> <INDENT> self.status = 404 <NEW_LINE> self.raise_connection_error(NotFound) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def connect(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def getresponse(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def getheader(self, header): <NEW_LINE> <INDENT> if header == 'content-length': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return os.path.getsize(self.path) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def read(self, howmuch=None): <NEW_LINE> <INDENT> if self.method == 'get': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> fl = open(self.path, 'r') <NEW_LINE> if howmuch is None: <NEW_LINE> <INDENT> return fl.read() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return fl.read(howmuch) <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> self.status = 500 <NEW_LINE> self.raise_connection_error() <NEW_LINE> <DEDENT> <DEDENT> return '' <NEW_LINE> <DEDENT> def raise_connection_error(self, klass=None): <NEW_LINE> <INDENT> if klass is None: <NEW_LINE> <INDENT> klass=ConnectionError <NEW_LINE> <DEDENT> raise klass(_Params('file://' + self.path, self.method)) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> pass | Retarded scheme to local file wrapper. | 6259905591af0d3eaad3b368 |
class AutoRestUrlTestServiceConfiguration(Configuration): <NEW_LINE> <INDENT> def __init__( self, global_string_path, global_string_query=None, base_url=None, filepath=None): <NEW_LINE> <INDENT> if global_string_path is None: <NEW_LINE> <INDENT> raise ValueError('global_string_path must not be None.') <NEW_LINE> <DEDENT> if not base_url: <NEW_LINE> <INDENT> base_url = 'http://localhost' <NEW_LINE> <DEDENT> super(AutoRestUrlTestServiceConfiguration, self).__init__(base_url, filepath) <NEW_LINE> self.add_user_agent('autoresturltestservice/{}'.format(VERSION)) <NEW_LINE> self.global_string_path = global_string_path <NEW_LINE> self.global_string_query = global_string_query | Configuration for AutoRestUrlTestService
Note that all parameters used to create this instance are saved as instance
attributes.
:param global_string_path: A string value 'globalItemStringPath' that
appears in the path
:type global_string_path: str
:param global_string_query: should contain value null
:type global_string_query: str
:param str base_url: Service URL
:param str filepath: Existing config | 625990558e71fb1e983bd00a |
class Acidentes(object): <NEW_LINE> <INDENT> causa = '' <NEW_LINE> tipo = '' <NEW_LINE> quantidade_ocorrencias = 0 | Causes of accidents | 6259905532920d7e50bc7591 |
class NoAuthenticationError(Exception): <NEW_LINE> <INDENT> pass | Raised when trying to use an invalid or missing authentication token. | 62599055cad5886f8bdc5b21 |
class SiteSettingsForm(KeyValueForm): <NEW_LINE> <INDENT> def __init__(self, siteconfig, *args, **kwargs): <NEW_LINE> <INDENT> self.request = kwargs.pop('request', None) <NEW_LINE> self.siteconfig = siteconfig <NEW_LINE> super(SiteSettingsForm, self).__init__(instance=siteconfig, *args, **kwargs) <NEW_LINE> <DEDENT> def get_key_value(self, key, default=None): <NEW_LINE> <INDENT> return self.instance.get(key) <NEW_LINE> <DEDENT> def set_key_value(self, key, value): <NEW_LINE> <INDENT> self.instance.set(key, value) <NEW_LINE> <DEDENT> def save_instance(self): <NEW_LINE> <INDENT> self.instance.save() | A base form for loading/saving settings for a SiteConfiguration.
This is meant to be subclassed for different settings pages. Any fields
defined by the form will be loaded/saved automatically.
Attributes:
request (django.http.HttpRequest):
The HTTP request used for this form.
siteconfig (djblets.siteconfig.models.SiteConfiguration):
The site configuration settings are loaded from and saved to. | 625990557cff6e4e811b6f82 |
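A hypothetical subclass sketch (not from the source): the docstring states that fields defined on the form are loaded/saved automatically, which under djblets' KeyValueForm convention is assumed to mean each field name doubles as a siteconfig key.

```python
from django import forms

class GeneralSettingsForm(SiteSettingsForm):
    """Hypothetical settings page; field names are assumed to be siteconfig keys."""

    site_name = forms.CharField(label="Site name", required=True)
    enable_uploads = forms.BooleanField(label="Enable uploads", required=False)
```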
class NullSearchProvider(provider.SolrSearchProvider): <NEW_LINE> <INDENT> async def search_album_name(self, *args, **kwargs): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> async def search_artist_name(self, *args, **kwargs): <NEW_LINE> <INDENT> return [] | This is a pretty sketchy way to test these charts, but some of them have a habit of changing. This will at
least allow us to ensure that they work without the complexity of end to end testing. | 625990556e29344779b01b8b |
class Permutation: <NEW_LINE> <INDENT> def __init__(self, image=list(range(4))): <NEW_LINE> <INDENT> if not type(image) is list: raise(TypeError) <NEW_LINE> try: <NEW_LINE> <INDENT> [image.index(v) for v in range(max(image)+1)] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("ArgumentError: ", image) <NEW_LINE> <DEDENT> self.image = image <NEW_LINE> self.size = len(image) <NEW_LINE> self.repr = [list(range(self.size)),self.image] <NEW_LINE> <DEDENT> def act(self,arg): <NEW_LINE> <INDENT> if not type(arg) in [list,int]: <NEW_LINE> <INDENT> raise TypeError("type(arg)={}".format(type(arg))) <NEW_LINE> <DEDENT> if type(arg) is int: <NEW_LINE> <INDENT> if arg in self.image: <NEW_LINE> <INDENT> return self.image[arg] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return arg <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return [self.act(v) for v in arg] <NEW_LINE> <DEDENT> except IndexError as e: print(e) <NEW_LINE> <DEDENT> <DEDENT> def __mul__(self,aPerm): <NEW_LINE> <INDENT> if not isinstance(aPerm, type(self)): raise(TypeError) <NEW_LINE> return type(self)(self.act(aPerm.image)) <NEW_LINE> <DEDENT> def inverse(self): <NEW_LINE> <INDENT> inverse_image = [] <NEW_LINE> inverse_image = [self.image.index(v) for v in range(self.size)] <NEW_LINE> return type(self)(inverse_image) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "{}\n{}".format(self.repr[0],self.repr[1]) <NEW_LINE> <DEDENT> def display(self): <NEW_LINE> <INDENT> display(sympy.Matrix(self.repr)) | Permutation class | 6259905501c39578d7f141d8 |
class PoolScanTcpListener(common.PoolScanner): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(PoolScanTcpListener, self).__init__(**kwargs) <NEW_LINE> min_size = self.profile.get_obj_size("_TCP_LISTENER") <NEW_LINE> if not min_size: <NEW_LINE> <INDENT> raise RuntimeError(repr(min_size)) <NEW_LINE> <DEDENT> self.checks = [ ('PoolTagCheck', dict( tag=self.profile.get_constant("TCP_LISTENER_POOLTAG"))), ('CheckPoolSize', dict(min_size=min_size)), ('CheckPoolType', dict(non_paged=True, free=True, paged=True)), ('CheckPoolIndex', dict(value=0)), ] | PoolScanner for Tcp Listeners | 62599055435de62698e9d344 |
class tektronixMDO3052(tektronixMDO3000): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.__dict__.setdefault('_instrument_id', 'MDO3052') <NEW_LINE> super(tektronixMDO3052, self).__init__(*args, **kwargs) <NEW_LINE> self._analog_channel_count = 2 <NEW_LINE> self._digital_channel_count = 16 <NEW_LINE> self._bandwidth = 500e6 <NEW_LINE> self._output_count = 1 <NEW_LINE> self._init_channels() <NEW_LINE> self._init_outputs() | Tektronix MDO3052 IVI oscilloscope driver | 625990558e7ae83300eea5d0 |
class StringTable: <NEW_LINE> <INDENT> def __init__(self, name=None, kids=None): <NEW_LINE> <INDENT> self.name = name or u'' <NEW_LINE> self.kids = kids or [] <NEW_LINE> <DEDENT> def fromRaw(self, data, i, limit): <NEW_LINE> <INDENT> i, (cpsublen, cpwValueLength, cpwType, self.name) = parseCodePage(data, i, limit) <NEW_LINE> i = nextDWord(i) <NEW_LINE> while i < limit: <NEW_LINE> <INDENT> ss = StringStruct() <NEW_LINE> j = ss.fromRaw(data, i, limit) <NEW_LINE> i = j <NEW_LINE> self.kids.append(ss) <NEW_LINE> i = nextDWord(i) <NEW_LINE> <DEDENT> return i <NEW_LINE> <DEDENT> def toRaw(self): <NEW_LINE> <INDENT> raw_name = getRaw(self.name) <NEW_LINE> vallen = 0 <NEW_LINE> typ = 1 <NEW_LINE> sublen = 6 + len(raw_name) + 2 <NEW_LINE> tmp = [] <NEW_LINE> for kid in self.kids: <NEW_LINE> <INDENT> raw = kid.toRaw() <NEW_LINE> if len(raw) % 4: <NEW_LINE> <INDENT> raw = raw + b'\000\000' <NEW_LINE> <DEDENT> tmp.append(raw) <NEW_LINE> <DEDENT> tmp = b''.join(tmp) <NEW_LINE> sublen += len(tmp) <NEW_LINE> return (struct.pack('hhh', sublen, vallen, typ) + raw_name + b'\000\000' + tmp) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.toRaw() == other <NEW_LINE> <DEDENT> def __str__(self, indent=u''): <NEW_LINE> <INDENT> newindent = indent + u' ' <NEW_LINE> tmp = (u',\n%s' % newindent).join(str(kid) for kid in self.kids) <NEW_LINE> return (u"%sStringTable(\n%su'%s',\n%s[%s])" % (indent, newindent, self.name, newindent, tmp)) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'versioninfo.StringTable(%r, %r)' % (self.name, self.kids) | WORD wLength;
WORD wValueLength;
WORD wType;
WCHAR szKey[];
String Children[]; // list of zero or more String structures. | 6259905582261d6c5273096a |
class D3Q15Lattice(Lattice): <NEW_LINE> <INDENT> def __init__(self,Nx,Ny,Nz): <NEW_LINE> <INDENT> super(D3Q15Lattice,self).__init__(Nx,Ny,Nz) <NEW_LINE> self.ex = [0,1,-1,0,0,0,0,1,-1,1,-1,1,-1,1,-1]; self.ex = np.array(self.ex,dtype=np.float32); <NEW_LINE> self.ey = [0,0,0,1,-1,0,0,1,1,-1,-1,1,1,-1,-1]; self.ey = np.array(self.ey,dtype=np.float32); <NEW_LINE> self.ez = [0,0,0,0,0,1,-1,1,1,1,1,-1,-1,-1,-1]; self.ez = np.array(self.ez,dtype=np.float32); <NEW_LINE> self.bbSpd = [0,2,1,4,3,6,5,14,13,12,11,10,9,8,7] <NEW_LINE> self.w = [2./9.,1./9.,1./9,1./9.,1./9.,1./9.,1./9., 1./72.,1./72.,1./72.,1./72., 1./72.,1./72.,1./72.,1./72.] <NEW_LINE> self.w = np.array(self.w,dtype=np.float32) | D3Q15 Lattice | 62599055e5267d203ee6ce30 |
class Hashtag(object): <NEW_LINE> <INDENT> def __init__(self, text=None): <NEW_LINE> <INDENT> self.text = text <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def NewFromJsonDict(data): <NEW_LINE> <INDENT> return Hashtag(text=data.get('text', None)) | A class representing a twitter hashtag | 62599055596a897236129050 |
class ExceptionLoggerHook(object): <NEW_LINE> <INDENT> DEFAULT_LEVEL = logging.CRITICAL <NEW_LINE> def __init__(self, level=DEFAULT_LEVEL): <NEW_LINE> <INDENT> self.level = level <NEW_LINE> <DEDENT> def __call__(self, *args): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> logger.log(self.level, "Uncaught exception", exc_info=args) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> sys.__excepthook__(*args) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<{0}({1}) at 0x{2:x}>'.format( self.__class__.__name__, logging.getLevelName(self.level), id(self)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def level(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._loglevel <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return self.DEFAULT_LEVEL <NEW_LINE> <DEDENT> <DEDENT> @level.setter <NEW_LINE> def level(self, level): <NEW_LINE> <INDENT> if isinstance(level, int): <NEW_LINE> <INDENT> self._loglevel = level <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._loglevel = logging.getLevelName(level) | Replacement function for sys.__excepthook__.
This excepthook implementation can replace the default one to capture
unhandled exceptions with a logger. | 6259905510dbd63aa1c72119 |
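A short installation sketch for the hook above (it assumes a module-level `logger`, which the class body already references):

```python
import logging
import sys

logging.basicConfig(level=logging.DEBUG)

# Route unhandled exceptions to the logger instead of the default stderr printout.
sys.excepthook = ExceptionLoggerHook(level=logging.ERROR)

raise RuntimeError("boom")  # now logged at ERROR with the full traceback attached
```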
class UserItemsBids(APIView): <NEW_LINE> <INDENT> def get(self, request, pk, format=None): <NEW_LINE> <INDENT> user = get_user(pk) <NEW_LINE> items_id_dict = Bid.objects.filter(user=user.id).values('item_id') <NEW_LINE> item_ids = [val['item_id'] for val in items_id_dict] <NEW_LINE> item_objects = Item.objects.filter(pk__in=item_ids) <NEW_LINE> serializer = ItemSerializer(item_objects, many=True) <NEW_LINE> return Response(serializer.data) | Retrieve all the items on which a user has bid;
GET user/<user_id>/bids/items | 62599055d7e4931a7ef3d5c0 |
@dataclass <NEW_LINE> class GitExecutor(MultiExecutor): <NEW_LINE> <INDENT> repository: str = '' <NEW_LINE> version: str = '' <NEW_LINE> force: bool = False <NEW_LINE> def execute(self, **kwargs) -> Result: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.execute_executor(GitCloneExecutor( repository=self.repository, name=f'Cloning {self.repository}', **self.get_base_attributes() )) <NEW_LINE> self.execute_executor(GitCheckoutExecutor( version=self.version, force=self.force, name=f'Checking out {self.version} for {self.repository}', **self.get_base_attributes() )) <NEW_LINE> self.execute_executor(GitPullExecutor( force=self.force, name=f'Pulling {self.repository}', **self.get_base_attributes() )) <NEW_LINE> return self.result_from_executor('Updated github repository') <NEW_LINE> <DEDENT> except SingleExecutorFailedException as e: <NEW_LINE> <INDENT> return e.result | Executor that clones, checks out and pulls a repository
Args:
repository (str): The repository that will be cloned. :obj:`required`
version (str): Version of the repository. Defaults to :obj:`'master'`
force (bool): Force changes made in the repository to be discarded. Defaults to :obj:`False` | 62599055379a373c97d9a566 |
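A hypothetical call sketch; the repository URL is a placeholder and it assumes the `MultiExecutor` base supplies defaults for any fields of its own.

```python
executor = GitExecutor(
    repository="https://github.com/example/project.git",  # placeholder URL
    version="main",
    force=True,
)
result = executor.execute()  # clone -> checkout -> pull, stopping at the first failing step
```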
class PrimoControlPanelForm(ControlPanelForm): <NEW_LINE> <INDENT> implements(IPrimoControlPanelForm) <NEW_LINE> primolayout = FormFieldsets(IPrimoLayout) <NEW_LINE> primolayout.id = 'primolayout' <NEW_LINE> primolayout.label = _(u'Connexion') <NEW_LINE> form_fields = FormFieldsets(primolayout) <NEW_LINE> label = _(u"Primo Settings") <NEW_LINE> description = _(u"Settings for the Primo connector.") <NEW_LINE> form_name = _("Primo Settings") | Primo Control Panel Form | 625990558a43f66fc4bf36ce |
class PathStatus(Enum): <NEW_LINE> <INDENT> MOVING_TOWARDS_TARGET = 0 <NEW_LINE> INTERMEDIATE_NODE_REACHED = 1 <NEW_LINE> CHECKPOINT_REACHED = 2 | Enum of pathfinding status | 62599055cc0a2c111447c52e |
class overridable_property: <NEW_LINE> <INDENT> def __init__(self, fget=None, fset=None, fdel=None, doc=None): <NEW_LINE> <INDENT> self.fget = fget <NEW_LINE> self.fset = fset <NEW_LINE> self.fdel = fdel <NEW_LINE> self.__doc__ = doc <NEW_LINE> <DEDENT> def __get__(self, obj, objtype=None): <NEW_LINE> <INDENT> if obj is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> if self.fget is None: <NEW_LINE> <INDENT> raise AttributeError("unreadable attribute") <NEW_LINE> <DEDENT> if self.fget.__name__ == '<lambda>' or not self.fget.__name__: <NEW_LINE> <INDENT> return self.fget(obj) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return getattr(obj, self.fget.__name__)() <NEW_LINE> <DEDENT> <DEDENT> def __set__(self, obj, value): <NEW_LINE> <INDENT> if self.fset is None: <NEW_LINE> <INDENT> raise AttributeError("can't set attribute") <NEW_LINE> <DEDENT> if self.fset.__name__ == '<lambda>' or not self.fset.__name__: <NEW_LINE> <INDENT> self.fset(obj, value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> getattr(obj, self.fset.__name__)(value) <NEW_LINE> <DEDENT> <DEDENT> def __delete__(self, obj): <NEW_LINE> <INDENT> if self.fdel is None: <NEW_LINE> <INDENT> raise AttributeError("can't delete attribute") <NEW_LINE> <DEDENT> if self.fdel.__name__ == '<lambda>' or not self.fdel.__name__: <NEW_LINE> <INDENT> self.fdel(obj) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> getattr(obj, self.fdel.__name__)() | The same as Python's "property" attribute, but allows the accessor
methods to be overridden in subclasses. | 62599055d99f1b3c44d06be2 |
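A self-contained example of why this descriptor differs from the built-in `property`: the accessor is looked up by name on the instance at call time, so subclass overrides take effect.

```python
class Base(object):
    def get_size(self):
        return 1

    # Stored by accessor *name*, so subclasses can override get_size.
    size = overridable_property(get_size)


class Child(Base):
    def get_size(self):
        return 2


print(Base().size)   # 1
print(Child().size)  # 2 -- a plain property(get_size) would still return 1 here
```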
class InsteonDimmerDevice(InsteonEntity, LightEntity): <NEW_LINE> <INDENT> @property <NEW_LINE> def brightness(self): <NEW_LINE> <INDENT> onlevel = self._insteon_device_state.value <NEW_LINE> return int(onlevel) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return bool(self.brightness) <NEW_LINE> <DEDENT> @property <NEW_LINE> def supported_features(self): <NEW_LINE> <INDENT> return SUPPORT_BRIGHTNESS <NEW_LINE> <DEDENT> async def async_turn_on(self, **kwargs): <NEW_LINE> <INDENT> if ATTR_BRIGHTNESS in kwargs: <NEW_LINE> <INDENT> brightness = int(kwargs[ATTR_BRIGHTNESS]) <NEW_LINE> self._insteon_device_state.set_level(brightness) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._insteon_device_state.on() <NEW_LINE> <DEDENT> <DEDENT> async def async_turn_off(self, **kwargs): <NEW_LINE> <INDENT> self._insteon_device_state.off() | A Class for an Insteon device. | 6259905538b623060ffaa2f0 |
class AzulejoTestBase(unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls.screen = None <NEW_LINE> cls.keybinding_obj = None | Base setup of tests | 62599055be8e80087fbc05c3 |
class StartBackend(RunConfig): <NEW_LINE> <INDENT> command_name = "start-backend" <NEW_LINE> @staticmethod <NEW_LINE> def start_backend(*args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> wd = os.path.abspath( os.path.join(os.path.dirname(__file__), "..", "..") ) <NEW_LINE> print("Deleting existing windmill dev project") <NEW_LINE> p = subprocess.Popen( ["rm", "-rf", ".windmill-temp-project/"], cwd=wd, stdout=subprocess.PIPE, ) <NEW_LINE> p.communicate() <NEW_LINE> print("Creating new project") <NEW_LINE> p = subprocess.Popen( ["windmill", "init", "--name", ".windmill-temp-project"], cwd=wd, stdout=subprocess.PIPE, ) <NEW_LINE> p.communicate() <NEW_LINE> print("Starting dev backend") <NEW_LINE> os.chdir( os.path.abspath( os.path.join( os.path.dirname(__file__), "..", "..", ".windmill-temp-project/", ) ) ) <NEW_LINE> run_config = RunConfig(_run_dev_server=True, *args, **kwargs) <NEW_LINE> StartWebserver(run_config) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error(f"Unable to start webserver ({e}) - aborting") | Starts the backend flask server with CORS enabled
| 6259905521a7993f00c674b0 |
class FileRefDto: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRING, 'id', None, None, ), (2, TType.STRING, 'format', None, None, ), ) <NEW_LINE> def __init__(self, id=None, format=None,): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.format = format <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.id = iprot.readString(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.format = iprot.readString(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('FileRefDto') <NEW_LINE> if self.id is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('id', TType.STRING, 1) <NEW_LINE> oprot.writeString(self.id) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.format is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('format', TType.STRING, 2) <NEW_LINE> oprot.writeString(self.format) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- id
- format | 625990550fa83653e46f6426 |
class Preprocessor: <NEW_LINE> <INDENT> from ..schemata import identity <NEW_LINE> noop = identity().coerce <NEW_LINE> preprocessor = noop <NEW_LINE> def setValue(self, value, **kwds): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = self.preprocessor(value=value, node=self, **kwds) <NEW_LINE> <DEDENT> except AttributeError as error: <NEW_LINE> <INDENT> import journal <NEW_LINE> raise journal.firewall('pyre.calc').log(str(error)) <NEW_LINE> <DEDENT> return super().setValue(value=value, **kwds) <NEW_LINE> <DEDENT> def __init__(self, preprocessor=noop, **kwds): <NEW_LINE> <INDENT> self.preprocessor = preprocessor <NEW_LINE> super().__init__(**kwds) <NEW_LINE> return | A mix-in class that performs arbitrary transformations on the value of a node | 6259905507f4c71912bb097d |
class BaseConfigManager(ManagerInterface): <NEW_LINE> <INDENT> def __init__(self, path=None, *args, **kwargs): <NEW_LINE> <INDENT> self.load(path) <NEW_LINE> super(BaseConfigManager, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def list(self): <NEW_LINE> <INDENT> return self._get("") <NEW_LINE> <DEDENT> def get(self, key): <NEW_LINE> <INDENT> return self._get(key) <NEW_LINE> <DEDENT> def set(self, key, value): <NEW_LINE> <INDENT> old_value = self._get(key) <NEW_LINE> self._set(key, value, dump=True) <NEW_LINE> new_value = self._get(key) <NEW_LINE> return "key %s was changed \n %s --> %s" % (key, old_value, new_value) <NEW_LINE> <DEDENT> def _get(self, key, default={}, deliemeter=DELIMETER): <NEW_LINE> <INDENT> obj = conf_api.get(self.data, key, seps=deliemeter)[0] <NEW_LINE> if not obj: <NEW_LINE> <INDENT> LOG.debug("key %s not found in %s" % (key, self.data)) <NEW_LINE> return default <NEW_LINE> <DEDENT> return obj <NEW_LINE> <DEDENT> def _set(self, key, val, dic=None, deliemeter=DELIMETER, dump=False): <NEW_LINE> <INDENT> dumped = False <NEW_LINE> for path in glob.glob(self.config_path): <NEW_LINE> <INDENT> _data = anyconfig.load(path) <NEW_LINE> item = conf_api.get(_data, key, seps=deliemeter)[0] <NEW_LINE> if item: <NEW_LINE> <INDENT> conf_api.set_(_data, key, val, seps=deliemeter) <NEW_LINE> if dump: <NEW_LINE> <INDENT> dumped = True <NEW_LINE> return self.dump(_data, path) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not dumped: <NEW_LINE> <INDENT> raise Exception("Critical error in dump config to disk !") <NEW_LINE> <DEDENT> <DEDENT> def dump(self, data, path): <NEW_LINE> <INDENT> with open(path, "w") as f: <NEW_LINE> <INDENT> if isinstance(data, MergeableDict): <NEW_LINE> <INDENT> pyaml.dump(data.convert_to(data), f) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pyaml.dump(data, f) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def load(self, path=None): <NEW_LINE> <INDENT> self._data = anyconfig.load( path or self._config(), ignore_missing=True) <NEW_LINE> return self._data <NEW_LINE> <DEDENT> def _config(self): <NEW_LINE> <INDENT> return os.path.join(os.environ["R_CONFIG_DIR"], self.config_path) <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> if RELOAD: <NEW_LINE> <INDENT> return self.load() <NEW_LINE> <DEDENT> return self._data | base config manager
Common operations for config files.
:param config_path: path to the config file (it may instead be defined as a class property) | 62599055097d151d1a2c25ae
@Singleton <NEW_LINE> class TranscriptContainer(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._transcript_container = {} <NEW_LINE> <DEDENT> def add(self, transcript_id, transcript): <NEW_LINE> <INDENT> if type(transcript) is not Transcript: <NEW_LINE> <INDENT> raise TypeError('Transcript, TypeError, {0}'.format(transcript_id)) <NEW_LINE> <DEDENT> if self._transcript_container.has_key(transcript_id): <NEW_LINE> <INDENT> raise KeyError('Transcript, KeyError, {0}'.format(transcript_id)) <NEW_LINE> <DEDENT> self._transcript_container[transcript_id] = transcript <NEW_LINE> <DEDENT> def get(self, transcript_id): <NEW_LINE> <INDENT> return self._transcript_container[transcript_id] | classdocs | 62599055baa26c4b54d507e6 |
class TestSuiteAutoload(TestAppendingMain): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.test_runner = ipyenv.TestRunner( test_paths=(helper.get_abspath_from('nose-like-tests'),), sitelib_paths=(helper.get_abspath_from('sitelib'),), suite_autoload=True ) | Tests for test suite auto-loading. | 6259905545492302aabfda1a |
class Author(models.Model): <NEW_LINE> <INDENT> first_name = models.CharField(max_length=100) <NEW_LINE> last_name = models.CharField(max_length=100) <NEW_LINE> date_of_birth = models.DateField(null=True, blank=True) <NEW_LINE> date_of_death = models.DateField('Died', null=True, blank=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return '%s, %s' % (self.last_name, self.first_name) | Model representing an author. | 62599055b7558d58954649cc |
class OutstandingRequest(object): <NEW_LINE> <INDENT> def __init__(self, request_id, controller): <NEW_LINE> <INDENT> self.controller = controller <NEW_LINE> self.id = request_id | These represent requests on the server side that haven't completed yet. | 62599055596a897236129051 |
class AddWithLimits(ExprWithLimits): <NEW_LINE> <INDENT> def __new__(cls, function, *symbols, **assumptions): <NEW_LINE> <INDENT> function = sympify(function) <NEW_LINE> if hasattr(function, 'func') and function.func is C.Equality: <NEW_LINE> <INDENT> lhs = function.lhs <NEW_LINE> rhs = function.rhs <NEW_LINE> return C.Equality(cls(lhs, *symbols, **assumptions), cls(rhs, *symbols, **assumptions)) <NEW_LINE> <DEDENT> function = piecewise_fold(function) <NEW_LINE> if function is S.NaN: <NEW_LINE> <INDENT> return S.NaN <NEW_LINE> <DEDENT> if symbols: <NEW_LINE> <INDENT> limits, orientation = _process_limits(*symbols) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> free = function.free_symbols <NEW_LINE> if len(free) != 1: <NEW_LINE> <INDENT> raise ValueError( "specify dummy variables for %s" % function) <NEW_LINE> <DEDENT> limits, orientation = [Tuple(s) for s in free], 1 <NEW_LINE> <DEDENT> while cls == type(function): <NEW_LINE> <INDENT> limits = list(function.limits) + limits <NEW_LINE> function = function.function <NEW_LINE> <DEDENT> obj = Expr.__new__(cls, **assumptions) <NEW_LINE> arglist = [orientation*function] <NEW_LINE> arglist.extend(limits) <NEW_LINE> obj._args = tuple(arglist) <NEW_LINE> obj.is_commutative = function.is_commutative <NEW_LINE> return obj <NEW_LINE> <DEDENT> def _eval_adjoint(self): <NEW_LINE> <INDENT> if all([x.is_real for x in flatten(self.limits)]): <NEW_LINE> <INDENT> return self.func(self.function.adjoint(), *self.limits) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def _eval_conjugate(self): <NEW_LINE> <INDENT> if all([x.is_real for x in flatten(self.limits)]): <NEW_LINE> <INDENT> return self.func(self.function.conjugate(), *self.limits) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def _eval_transpose(self): <NEW_LINE> <INDENT> if all([x.is_real for x in flatten(self.limits)]): <NEW_LINE> <INDENT> return self.func(self.function.transpose(), *self.limits) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def _eval_factor(self, **hints): <NEW_LINE> <INDENT> if 1 == len(self.limits): <NEW_LINE> <INDENT> summand = self.function.factor(**hints) <NEW_LINE> if summand.is_Mul: <NEW_LINE> <INDENT> out = sift(summand.args, lambda w: w.is_commutative and not w.has(*self.variables)) <NEW_LINE> return C.Mul(*out[True])*self.func(C.Mul(*out[False]), *self.limits) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> summand = self.func(self.function, self.limits[0:-1]).factor() <NEW_LINE> if not summand.has(self.variables[-1]): <NEW_LINE> <INDENT> return self.func(1, [self.limits[-1]]).doit()*summand <NEW_LINE> <DEDENT> elif isinstance(summand, C.Mul): <NEW_LINE> <INDENT> return self.func(summand, self.limits[-1]).factor() <NEW_LINE> <DEDENT> <DEDENT> return self <NEW_LINE> <DEDENT> def _eval_expand_basic(self, **hints): <NEW_LINE> <INDENT> summand = self.function.expand(**hints) <NEW_LINE> if summand.is_Add and summand.is_commutative: <NEW_LINE> <INDENT> return C.Add(*[ self.func(i, *self.limits) for i in summand.args ]) <NEW_LINE> <DEDENT> elif summand != self.function: <NEW_LINE> <INDENT> return self.func(summand, *self.limits) <NEW_LINE> <DEDENT> return self | Represents unevaluated oriented additions.
Parent class for Integral and Sum. | 6259905510dbd63aa1c7211a |
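Since this class is the shared base of `Integral` and `Sum`, a tiny illustration through those public subclasses (standard SymPy API):

```python
from sympy import Integral, Sum, symbols

x, k, n = symbols('x k n')
print(Integral(x**2, (x, 0, 1)).doit())  # 1/3
print(Sum(k, (k, 1, n)).doit())          # n**2/2 + n/2
```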
class ConceptVariantEntry(VariantEntry): <NEW_LINE> <INDENT> def __init__(self, concept_entry_content, variant_attributes=None, type_=None, time_or_duration_ref=None, substitute_attribute=None, scheme_attribute=None, entry_group_attributes=None, note_list=None): <NEW_LINE> <INDENT> if variant_attributes is not None: <NEW_LINE> <INDENT> assert isinstance(variant_attributes, VariantAttributes) <NEW_LINE> <DEDENT> self.variant_attributes = variant_attributes <NEW_LINE> if type_ is not None: <NEW_LINE> <INDENT> assert isinstance(type_, GenericType) <NEW_LINE> <DEDENT> self.type = type_ <NEW_LINE> if time_or_duration_ref is not None: <NEW_LINE> <INDENT> assert isinstance(time_or_duration_ref, TimeRef) or isinstance(time_or_duration_ref, DurationRef) <NEW_LINE> <DEDENT> self.time_or_duration_ref = time_or_duration_ref <NEW_LINE> if substitute_attribute is not None: <NEW_LINE> <INDENT> assert isinstance(substitute_attribute, SubstituteAttribute) <NEW_LINE> <DEDENT> self.substitute_attribute = substitute_attribute <NEW_LINE> if scheme_attribute is not None: <NEW_LINE> <INDENT> assert isinstance(scheme_attribute, SchemeAttribute) <NEW_LINE> <DEDENT> self.scheme_attribute = scheme_attribute <NEW_LINE> if entry_group_attributes is not None: <NEW_LINE> <INDENT> assert isinstance(entry_group_attributes, EntryGroupAttributes) <NEW_LINE> <DEDENT> self.entry_group_attributes = entry_group_attributes <NEW_LINE> assert isinstance(concept_entry_content, ConceptEntryContent) <NEW_LINE> self.concept_entry_content = concept_entry_content <NEW_LINE> if note_list is not None: <NEW_LINE> <INDENT> assert isinstance(note_list, NoteList) <NEW_LINE> <DEDENT> self.note_list = note_list <NEW_LINE> <DEDENT> def serialize_xml(self): <NEW_LINE> <INDENT> variant_attributes_attrs = {} <NEW_LINE> if self.variant_attributes is not None: <NEW_LINE> <INDENT> variant_attributes_attrs = self.variant_attributes.serialize_xml() <NEW_LINE> <DEDENT> variant_e = E('concept', **variant_attributes_attrs) <NEW_LINE> if self.type is not None: <NEW_LINE> <INDENT> type_e = self.type.serialize_xml() <NEW_LINE> variant_e.append(type_e) <NEW_LINE> <DEDENT> if self.time_or_duration_ref: <NEW_LINE> <INDENT> time_or_duration_ref_e = self.time_or_duration_ref.serialize_xml() <NEW_LINE> variant_e.append(time_or_duration_ref_e) <NEW_LINE> <DEDENT> entry_attrs = {} <NEW_LINE> if self.substitute_attribute is not None: <NEW_LINE> <INDENT> substitute_attribute_attrs = self.substitute_attribute.serialize_xml() <NEW_LINE> entry_attrs.update(substitute_attribute_attrs) <NEW_LINE> <DEDENT> if self.scheme_attribute is not None: <NEW_LINE> <INDENT> scheme_attribute_attrs = self.scheme_attribute.serialize_xml() <NEW_LINE> entry_attrs.update(scheme_attribute_attrs) <NEW_LINE> <DEDENT> if self.entry_group_attributes is not None: <NEW_LINE> <INDENT> entry_group_attributes_attrs = self.entry_group_attributes.serialize_xml() <NEW_LINE> entry_attrs.update(entry_group_attributes_attrs) <NEW_LINE> <DEDENT> entry_e = E('entry', **entry_attrs) <NEW_LINE> concept_entry_content_elements = self.concept_entry_content.serialize_xml() <NEW_LINE> entry_e.extend(concept_entry_content_elements) <NEW_LINE> variant_e.append(entry_e) <NEW_LINE> if self.note_list is not None: <NEW_LINE> <INDENT> note_list_e = self.note_list.serialize_xml() <NEW_LINE> variant_e.append(note_list_e) <NEW_LINE> <DEDENT> return variant_e | conceptVariant |=
element xobis:concept {
variantAttributes?,
genericType?,
(timeRef | durationRef)?,
element xobis:entry { substituteAttribute?, schemeAttribute?, entryGroupAttributes?, conceptEntryContent },
noteList?
} | 625990558da39b475be0472f |
class ClassFactory(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.classes = {} <NEW_LINE> <DEDENT> def _BuildClass(self, key): <NEW_LINE> <INDENT> raise cx_Exceptions.NotImplemented() <NEW_LINE> <DEDENT> def _GenerateClass(self, className, baseClass, classDict, initArgNames): <NEW_LINE> <INDENT> if initArgNames: <NEW_LINE> <INDENT> initLines = [" self.%s = %s\n" % (n, n) for n in initArgNames] <NEW_LINE> codeString = "def __init__(self, %s):\n%s" % (", ".join(initArgNames), "".join(initLines)) <NEW_LINE> code = compile(codeString, "GeneratedClass.py", "exec") <NEW_LINE> exec(code, dict(), classDict) <NEW_LINE> <DEDENT> return type(className, (baseClass,), classDict) <NEW_LINE> <DEDENT> def GetClass(self, key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.classes[key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> cls = self.classes[key] = self._BuildClass(key) <NEW_LINE> return cls | Implements a class factory which builds classes as needed and caches
them. | 62599055097d151d1a2c25af |
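A hypothetical subclass sketch showing the intended pattern: `_BuildClass` is implemented in the subclass, and `GetClass` caches whatever it produces.

```python
class RecordFactory(ClassFactory):
    def _BuildClass(self, key):
        # key is assumed to be a tuple of attribute names, e.g. ("name", "age")
        return self._GenerateClass("Record_" + "_".join(key), object, {}, list(key))


factory = RecordFactory()
Person = factory.GetClass(("name", "age"))
p = Person("Ada", 36)
print(p.name, p.age)                                  # Ada 36
assert factory.GetClass(("name", "age")) is Person    # built once, then cached
```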
class ContextFilter(logging.Filter): <NEW_LINE> <INDENT> def __init__(self, field, value): <NEW_LINE> <INDENT> self.field = field <NEW_LINE> self.value = value <NEW_LINE> <DEDENT> def filter(self, record): <NEW_LINE> <INDENT> setattr(record, self.field, self.value) <NEW_LINE> return True | Log filter that adds a static field to a record. | 6259905599cbb53fe6832423 |
class OutlierFilter(): <NEW_LINE> <INDENT> def __init__(self, cloud, k = 50, factor = 1): <NEW_LINE> <INDENT> self._k = k <NEW_LINE> self._factor = factor <NEW_LINE> self._filter = cloud.make_statistical_outlier_filter() <NEW_LINE> self._filter.set_mean_k(self._k) <NEW_LINE> self._filter.set_std_dev_mul_thresh(self._factor) <NEW_LINE> <DEDENT> def filter(self): <NEW_LINE> <INDENT> return self._filter.filter() | Remove outliers in PCL
| 625990554e696a045264e8c4 |
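A brief usage sketch with the python-pcl bindings (the input file name is a placeholder):

```python
import pcl  # python-pcl bindings

cloud = pcl.load("scene.pcd")                 # placeholder point-cloud file
flt = OutlierFilter(cloud, k=50, factor=1.0)
clean_cloud = flt.filter()                    # cloud with statistical outliers removed
```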
class RedisCache(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._redis = redis.Redis() <NEW_LINE> self._prefix = REDIS_PREFIX <NEW_LINE> <DEDENT> def __contains__(self, key): <NEW_LINE> <INDENT> return self._redis.exists(self._prefix + key) <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> self._redis.set(self._prefix + key, pickle.dumps(value)) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return pickle.loads(self._redis.get(self._prefix + key)) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self._redis.keys()) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._redis.keys()) <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return self._redis.keys(self._prefix + "*") <NEW_LINE> <DEDENT> def values(self): <NEW_LINE> <INDENT> return [self[key] for key in self] <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> for key in self._redis.keys(self._prefix + "*"): <NEW_LINE> <INDENT> self._redis.delete(key) | Redis cache. | 62599055d53ae8145f9199a6 |
class Meta: <NEW_LINE> <INDENT> model = Post <NEW_LINE> fields = ['authors', 'summary', 'title', 'created', 'get_absolute_url'] | The Meta
Attributes:
model: Post
fields: authors, summary, title, created, get_absolute_url | 625990558da39b475be04730
class Die: <NEW_LINE> <INDENT> def __init__(self, sides): <NEW_LINE> <INDENT> self.sides = sides if sides else 6 <NEW_LINE> <DEDENT> def describe(self): <NEW_LINE> <INDENT> return 'This die has {sides} sides'.format(sides = self.sides) <NEW_LINE> <DEDENT> def roll(self): <NEW_LINE> <INDENT> return random.randint(1, self.sides) | A Multi-sided die
Instance variables:
sides -> the number of sides on the die | 62599055009cb60464d02a79
class TestArrayField(TransactionTestCase): <NEW_LINE> <INDENT> def test_tic_tac_toe(self): <NEW_LINE> <INDENT> boards = [ ["x", "o", "x", "o", "o", "x", "x", "x", "o"], [" ", " ", " ", " ", "x", " ", " ", " ", " "], [" ", " ", " ", "o", "o", " ", " ", " ", "o"], [" ", " ", " ", " ", " ", " ", " ", " ", " "], ] <NEW_LINE> for board in boards: <NEW_LINE> <INDENT> ttt = TicTacToeBoard(board=board) <NEW_LINE> ttt.save() <NEW_LINE> <DEDENT> contains = lambda c: TicTacToeBoard.sa.board.contains(array([c])) <NEW_LINE> query = TicTacToeBoard.sa.query(TicTacToeBoard.sa.id) <NEW_LINE> assert query.filter(contains("x")).count() == 2 <NEW_LINE> assert query.filter(contains("o")).count() == 2 <NEW_LINE> assert query.filter(contains(" ")).count() == 3 <NEW_LINE> <DEDENT> def test_sa_objects_fetching(self): <NEW_LINE> <INDENT> boards = [ ["x", "o", "x", "o", "o", "x", "x", "x", "o"], [" ", " ", " ", " ", "x", " ", " ", " ", " "], [" ", " ", " ", "o", "o", " ", " ", " ", "o"], [" ", " ", " ", " ", " ", " ", " ", " ", " "], ] <NEW_LINE> created_objects = [] <NEW_LINE> for board in boards: <NEW_LINE> <INDENT> ttt = TicTacToeBoard(board=board) <NEW_LINE> ttt.save() <NEW_LINE> created_objects.append(ttt) <NEW_LINE> <DEDENT> session = get_session() <NEW_LINE> test_object = session.get(TicTacToeBoard.sa, created_objects[0].id) <NEW_LINE> assert test_object.id == created_objects[0].id <NEW_LINE> assert test_object.board == boards[0] <NEW_LINE> <DEDENT> def test_sa_sql_expression_language_fetching(self): <NEW_LINE> <INDENT> boards = [ ["x", "o", "x", "o", "o", "x", "x", "x", "o"], [" ", " ", " ", " ", "x", " ", " ", " ", " "], [" ", " ", " ", "o", "o", " ", " ", " ", "o"], [" ", " ", " ", " ", " ", " ", " ", " ", " "], ] <NEW_LINE> created_objects = [] <NEW_LINE> for board in boards: <NEW_LINE> <INDENT> ttt = TicTacToeBoard(board=board) <NEW_LINE> ttt.save() <NEW_LINE> created_objects.append(ttt) <NEW_LINE> <DEDENT> query = ( select(TicTacToeBoard.sa.id, TicTacToeBoard.sa.board) .order_by(TicTacToeBoard.sa.id) .limit(10) ) <NEW_LINE> with get_engine().begin() as connection: <NEW_LINE> <INDENT> test_data = connection.execute(query) <NEW_LINE> <DEDENT> for t_data, c_object in zip(test_data, created_objects): <NEW_LINE> <INDENT> t_data_id, t_data_board = t_data <NEW_LINE> assert t_data_id == c_object.id <NEW_LINE> assert t_data_board == c_object.board | Tests that queries involving array fields can be performed. | 625990550fa83653e46f6428 |
class ContactSerializer(Schema): <NEW_LINE> <INDENT> first_name = fields.String(required=True) <NEW_LINE> last_name = fields.String(required=True) <NEW_LINE> email = fields.Email(required=True) <NEW_LINE> phone_number = fields.Integer() <NEW_LINE> company = fields.String() <NEW_LINE> address = fields.String() <NEW_LINE> street_address = fields.String(required=True) <NEW_LINE> unit_number = fields.String() <NEW_LINE> city = fields.String(required=True) <NEW_LINE> state = fields.String(required=True) <NEW_LINE> zip_code = fields.String(required=True) <NEW_LINE> country = fields.String() <NEW_LINE> notes = fields.String() <NEW_LINE> @post_load <NEW_LINE> def make_user(self, data): <NEW_LINE> <INDENT> return Contact(**data) | Class for serialize and deserialize contact data | 62599055f7d966606f74935a |
class BotProcessorConsumer(SyncConsumer): <NEW_LINE> <INDENT> def respond_query(self,event): <NEW_LINE> <INDENT> controllers.silly_print("message received by Bot Processor",event) <NEW_LINE> query_set = event['query_set'] <NEW_LINE> player_num = event['player_num'] <NEW_LINE> match_id = event['match_id'] <NEW_LINE> prime_channel_name = event['prime_channel_name'] <NEW_LINE> lorax = controllers.SearchController(query_set,player_num,match_id) <NEW_LINE> lorax.load_simulation_data() <NEW_LINE> response_set = lorax.respond_to_query_set() <NEW_LINE> message = { "type":"process.client.response", "response_set":response_set } <NEW_LINE> async_to_sync(self.channel_layer.send)(prime_channel_name, message) <NEW_LINE> controllers.silly_print("message sent by Bot Processor",message) | This consumer handles decision request from player prime.
This object is stateless, all game state data comes from db lookup
methods callable by channels:
- respond_query | 62599055e64d504609df9e72 |
class Meta: <NEW_LINE> <INDENT> ordering = ('related_user', '-active', 'occupied') | ServiceAgent | 62599055e76e3b2f99fd9f43 |
class WhatResponder(Responder): <NEW_LINE> <INDENT> def response(self, input, mood): <NEW_LINE> <INDENT> return '{}ってなに?'.format(input) | Subclass for returning a parrot-style (echo) response
| 62599055dd821e528d6da422 |
class VABSystemSecondOrderReaction(object): <NEW_LINE> <INDENT> def __init__(self, init_x, k, init_t=0): <NEW_LINE> <INDENT> if init_x > 0: <NEW_LINE> <INDENT> self._x = float(init_x) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Invalid initial concentration assignment. Must be greater than 0') <NEW_LINE> <DEDENT> self._time = init_t <NEW_LINE> self._init_t = init_t <NEW_LINE> self._k = float(k) <NEW_LINE> self._init_x = float(init_x) <NEW_LINE> <DEDENT> def update_x(self, elapsed_time): <NEW_LINE> <INDENT> x = self._x / (1. + self._k * elapsed_time * self._x) <NEW_LINE> self._x = x <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self._x = self._init_x <NEW_LINE> self._time = self._init_t | This class defines a simulated second-order chemical reaction: aA ->
products. Throughout, x is used for concentration, and k for the reaction
constant (that includes the stoichiometric coefficient, a). | 625990556e29344779b01b8f |
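A short numerical check: `update_x` applies the closed-form solution x(t) = x0 / (1 + k*t*x0) of dx/dt = -k*x**2, so repeated small steps agree with a single large one.

```python
rxn = VABSystemSecondOrderReaction(init_x=1.0, k=0.5)
for dt in (1.0, 1.0, 1.0):
    rxn.update_x(dt)
    print(rxn._x)  # 0.666..., 0.5, 0.4 -- the last value equals 1.0 / (1 + 0.5 * 3 * 1.0)
```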
class VppTestRunner(unittest.TextTestRunner): <NEW_LINE> <INDENT> @property <NEW_LINE> def resultclass(self): <NEW_LINE> <INDENT> return VppTestResult <NEW_LINE> <DEDENT> def __init__(self, keep_alive_pipe=None, descriptions=True, verbosity=1, result_pipe=None, failfast=False, buffer=False, resultclass=None, print_summary=True, **kwargs): <NEW_LINE> <INDENT> super(VppTestRunner, self).__init__(sys.stdout, descriptions, verbosity, failfast, buffer, resultclass, **kwargs) <NEW_LINE> KeepAliveReporter.pipe = keep_alive_pipe <NEW_LINE> self.orig_stream = self.stream <NEW_LINE> self.resultclass.test_framework_result_pipe = result_pipe <NEW_LINE> self.print_summary = print_summary <NEW_LINE> <DEDENT> def _makeResult(self): <NEW_LINE> <INDENT> return self.resultclass(self.stream, self.descriptions, self.verbosity, self) <NEW_LINE> <DEDENT> def run(self, test): <NEW_LINE> <INDENT> faulthandler.enable() <NEW_LINE> result = super(VppTestRunner, self).run(test) <NEW_LINE> if not self.print_summary: <NEW_LINE> <INDENT> self.stream = self.orig_stream <NEW_LINE> result.stream = self.orig_stream <NEW_LINE> <DEDENT> return result | A basic test runner implementation which prints results to standard error. | 625990557047854f46340904 |
class VendorEdit(CuvenEdit): <NEW_LINE> <INDENT> def __init__(self, parent= None, name= None, modal= 0, fl= 0): <NEW_LINE> <INDENT> CuvenEdit.__init__(self,parent) <NEW_LINE> self.wNumber.setText(self._cuvenL.getNextNumber('V')) <NEW_LINE> self._type= 'V' <NEW_LINE> self.setCaption(self.tr('Edit vendor')) <NEW_LINE> self.initFields() | Dialogue for editing vendor properties. Inherits CuvenEdit.
| 625990558e7ae83300eea5d3 |
class CommandLine(object) : <NEW_LINE> <INDENT> def __init__(self, inOpts=None) : <NEW_LINE> <INDENT> import argparse <NEW_LINE> self.parser = argparse.ArgumentParser(description = 'geneExpressionZ_groups.py - a tool to convert gene expression matrix to z-score values.', epilog = 'Please feel free to forward any questions/concerns to /dev/null', add_help = True, prefix_chars = '-', usage = '%(prog)s -o file_name -t expression_matrix -s sample_tsv -p N') <NEW_LINE> self.parser.add_argument('-o', '--output_file', action = 'store', required=False, default=sys.stdout, help='Output file name. [Default : stdout]') <NEW_LINE> self.parser.add_argument('-t', '--table_file', action = 'store', required=True, help='Input expression tableFastQ [Default: req*]') <NEW_LINE> self.parser.add_argument('-s', '--sample_groups', action = 'store', required=True, help='Sample group file (TSV) [Default: req*]') <NEW_LINE> self.parser.add_argument('-p', '--num_threads', action = 'store', required=False, default=2, type=int, help='Num of threads [Default: 2]') <NEW_LINE> if inOpts is None : <NEW_LINE> <INDENT> self.args = vars(self.parser.parse_args()) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> self.args = vars(self.parser.parse_args(inOpts)) | Handle the command line, usage and help requests.
CommandLine uses argparse, now standard in 2.7 and beyond.
It implements a standard command line argument parser with various argument options,
and a standard usage and help message.
attributes:
myCommandLine.args is a dictionary which includes each of the available command line arguments as
myCommandLine.args['option']
methods: | 6259905529b78933be26ab67 |
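The docstring above notes that parsed options land in `myCommandLine.args` as a dictionary (via `vars()`), and that an explicit option list can be passed instead of reading sys.argv. A hedged usage sketch, assuming the class is importable from a module named geneExpressionZ_groups (the name its own usage string suggests):

```python
# Hypothetical import path; the actual module location is an assumption.
from geneExpressionZ_groups import CommandLine

# Passing an explicit list as inOpts makes argparse parse these tokens
# instead of sys.argv, which is convenient for testing the parser.
cli = CommandLine(["-t", "expression_matrix.tsv",
                   "-s", "sample_groups.tsv",
                   "-p", "4"])

# vars() exposed the argparse Namespace as a dict, so options are read
# with key access rather than attribute access.
print(cli.args["table_file"])   # -> expression_matrix.tsv
print(cli.args["num_threads"])  # -> 4
```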
class KonnectedSwitch(ToggleEntity): <NEW_LINE> <INDENT> def __init__(self, device_id, zone_num, data): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> self._device_id = device_id <NEW_LINE> self._zone_num = zone_num <NEW_LINE> self._activation = self._data.get(CONF_ACTIVATION, STATE_HIGH) <NEW_LINE> self._momentary = self._data.get(CONF_MOMENTARY) <NEW_LINE> self._pause = self._data.get(CONF_PAUSE) <NEW_LINE> self._repeat = self._data.get(CONF_REPEAT) <NEW_LINE> self._state = self._boolean_state(self._data.get(ATTR_STATE)) <NEW_LINE> self._name = self._data.get(CONF_NAME) <NEW_LINE> self._unique_id = ( f"{device_id}-{self._zone_num}-{self._momentary}-" f"{self._pause}-{self._repeat}" ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_id(self) -> str: <NEW_LINE> <INDENT> return self._unique_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> @property <NEW_LINE> def panel(self): <NEW_LINE> <INDENT> device_data = self.hass.data[KONNECTED_DOMAIN][CONF_DEVICES][self._device_id] <NEW_LINE> return device_data.get("panel") <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_info(self): <NEW_LINE> <INDENT> return { "identifiers": {(KONNECTED_DOMAIN, self._device_id)}, } <NEW_LINE> <DEDENT> @property <NEW_LINE> def available(self): <NEW_LINE> <INDENT> return self.panel.available <NEW_LINE> <DEDENT> async def async_turn_on(self, **kwargs): <NEW_LINE> <INDENT> resp = await self.panel.update_switch( self._zone_num, int(self._activation == STATE_HIGH), self._momentary, self._repeat, self._pause, ) <NEW_LINE> if resp.get(ATTR_STATE) is not None: <NEW_LINE> <INDENT> self._set_state(True) <NEW_LINE> if self._momentary and resp.get(ATTR_STATE) != -1: <NEW_LINE> <INDENT> self._set_state(False) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> async def async_turn_off(self, **kwargs): <NEW_LINE> <INDENT> resp = await self.panel.update_switch( self._zone_num, int(self._activation == STATE_LOW) ) <NEW_LINE> if resp.get(ATTR_STATE) is not None: <NEW_LINE> <INDENT> self._set_state(self._boolean_state(resp.get(ATTR_STATE))) <NEW_LINE> <DEDENT> <DEDENT> def _boolean_state(self, int_state): <NEW_LINE> <INDENT> if int_state is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if int_state == 0: <NEW_LINE> <INDENT> return self._activation == STATE_LOW <NEW_LINE> <DEDENT> if int_state == 1: <NEW_LINE> <INDENT> return self._activation == STATE_HIGH <NEW_LINE> <DEDENT> <DEDENT> def _set_state(self, state): <NEW_LINE> <INDENT> self._state = state <NEW_LINE> self.async_write_ha_state() <NEW_LINE> _LOGGER.debug( "Setting status of %s actuator zone %s to %s", self._device_id, self.name, state, ) <NEW_LINE> <DEDENT> async def async_added_to_hass(self): <NEW_LINE> <INDENT> self._data["entity_id"] = self.entity_id | Representation of a Konnected switch. | 62599055379a373c97d9a56a |
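The on/off interpretation above hinges on `_boolean_state`, which maps the panel's raw 0/1 pin reading through the zone's configured activation level (active-high vs. active-low). A small standalone sketch of that mapping; the constants are stand-ins for the Konnected/Home Assistant ones.

```python
# Stand-in constants; the real values come from the integration's const module.
STATE_HIGH, STATE_LOW = "high", "low"

def boolean_state(int_state, activation):
    # None means the panel has not reported a state yet -> treat as off.
    if int_state is None:
        return False
    # An active-low zone is "on" when the pin reads 0, active-high when it reads 1.
    if int_state == 0:
        return activation == STATE_LOW
    if int_state == 1:
        return activation == STATE_HIGH

print(boolean_state(1, STATE_HIGH))  # -> True
print(boolean_state(0, STATE_HIGH))  # -> False
print(boolean_state(0, STATE_LOW))   # -> True
```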
class SingleTimeFreqExecutable(PlotExecutable): <NEW_LINE> <INDENT> time_dependent_options = ['--channel-name', '--frame-type'] | Class to be used to create workflow.Executable instances for the
pycbc_plot_singles_timefreq executable. Basically inherits directly from
PlotExecutable. | 6259905545492302aabfda1d |
class hadamard(_PYQUEST): <NEW_LINE> <INDENT> def call_interactive(self, qureg: tqureg, qubit: int) -> None: <NEW_LINE> <INDENT> quest.hadamard(qureg, qubit) <NEW_LINE> <DEDENT> def matrix(self, **kwargs) -> np.ndarray: <NEW_LINE> <INDENT> matrix = 1 / np.sqrt(2) * np.array([[1, 1], [1, -1]], dtype=complex) <NEW_LINE> return matrix | Implements Hadamard gate
.. math::
U = \frac{1}{\sqrt{2}} \begin{pmatrix}
1 & 1\\
1 & -1
\end{pmatrix}
Args:
qureg: quantum register
qubit: qubit the unitary gate is applied to | 6259905571ff763f4b5e8cf5 |
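The `matrix` method above returns the 2x2 Hadamard matrix; a quick numpy check (no QuEST bindings needed) confirms it is unitary, self-inverse, and maps |0> to the equal superposition.

```python
import numpy as np

# Same matrix as returned by hadamard.matrix() above.
H = (1 / np.sqrt(2)) * np.array([[1, 1], [1, -1]], dtype=complex)

assert np.allclose(H @ H, np.eye(2))           # self-inverse
assert np.allclose(H.conj().T @ H, np.eye(2))  # unitary
print(H @ np.array([1, 0], dtype=complex))     # -> [0.707+0j, 0.707+0j]
```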