code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class RoleModelForm(forms.ModelForm):
    """ModelForm that auto-renders the Role form (inputs styled for layui)."""

    class Meta:
        model = models.Role
        fields = ['title', ]
        widgets = {
            'title': forms.TextInput(attrs={'class': 'layui-input'}),
        }
ModelForm subclass that automatically renders the form; used by the view code below.
625990343eb6a72ae038b76f
class RoleAssignmentOwnerTransferForm(SODARForm):
    """Form for transferring the owner role assignment between users."""

    def __init__(self, project, current_user, current_owner, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.current_user = current_user
        self.project = project
        self.current_owner = current_owner
        # Candidate new owners: project members, excluding the current owner.
        self.fields['new_owner'] = SODARUserChoiceField(
            label='New owner',
            help_text='Select a member of the project to become owner.',
            scope='project',
            project=self.project,
            exclude=[self.current_owner],
        )
        self.selectable_roles = get_role_choices(
            self.project, self.current_user
        )
        # Role the outgoing owner falls back to; defaults to contributor.
        self.fields['old_owner_role'] = forms.ChoiceField(
            label='New role for {}'.format(self.current_owner.username),
            help_text='New role for the current owner. Select "Remove" in the '
            'member list to remove the user\'s membership.',
            choices=self.selectable_roles,
            initial=Role.objects.get(name=PROJECT_ROLE_CONTRIBUTOR).pk,
        )
        self.fields['project'] = forms.Field(
            widget=forms.HiddenInput(), initial=self.project.sodar_uuid
        )

    def clean_old_owner_role(self):
        """Validate the role selected for the outgoing owner."""
        # Map the submitted choice pk back to its (pk, name) tuple.
        role = next(
            (
                choice
                for choice in self.selectable_roles
                if choice[0] == int(self.cleaned_data['old_owner_role'])
            ),
            None,
        )
        try:
            role = Role.objects.get(name=role[1])
        except Role.DoesNotExist:
            raise forms.ValidationError('Selected role does not exist')
        if role.name == PROJECT_ROLE_DELEGATE:
            del_limit = getattr(settings, 'PROJECTROLES_DELEGATE_LIMIT', 1)
            if not self.current_user.has_perm(
                'projectroles.update_project_delegate', obj=self.project
            ):
                raise forms.ValidationError(
                    'Insufficient permissions for assigning a delegate role'
                )
            new_owner_role = RoleAssignment.objects.filter(
                project=self.project, user=self.cleaned_data.get('new_owner')
            ).first()
            # del_limit == 0 means "no limit"; a new owner who is already a
            # delegate does not increase the delegate count.
            if (
                del_limit != 0
                and new_owner_role
                and new_owner_role.role.name != PROJECT_ROLE_DELEGATE
                and self.project.get_delegates(
                    exclude_inherited=True
                ).count() >= del_limit
            ):
                raise forms.ValidationError(
                    'The limit ({}) of delegates for this project has '
                    'already been reached.'.format(del_limit)
                )
        return role

    def clean_new_owner(self):
        """Validate that the proposed new owner is a valid project member."""
        user = self.cleaned_data['new_owner']
        if user == self.current_owner:
            raise forms.ValidationError(
                'The new owner shouldn\'t be the current owner'
            )
        role_as = RoleAssignment.objects.get_assignment(user, self.project)
        inh_owners = [
            a.user for a in self.project.get_owners(inherited_only=True)
        ]
        # Accept users with a local role in this project or inherited owners.
        if (role_as and role_as.project != self.project) or (
            not role_as and user not in inh_owners
        ):
            raise forms.ValidationError(
                'The new owner has no roles in the project'
            )
        return user
Form for transferring owner role assignment between users
62599034a4f1c619b294f6ff
class PrePublishSquashPlugin(PrePublishPlugin):
    """Squash the built image's layers before publishing (docker-squash)."""

    key = "squash"
    is_allowed_to_fail = False

    def __init__(self, workflow, tag=None, from_base=True, from_layer=None,
                 dont_load=False, save_archive=True):
        super(PrePublishSquashPlugin, self).__init__(workflow)
        self.image = self.workflow.data.image_id
        self.tag = tag or str(self.workflow.image)
        self.from_layer = from_layer
        # With from_base=True (and no explicit layer) squash everything above
        # the base image; when building FROM scratch, squash all layers.
        if from_base and from_layer is None:
            if not self.workflow.data.dockerfile_images.base_from_scratch:
                try:
                    base_image_id = (
                        self.workflow.imageutil.base_image_inspect()['Id'])
                except KeyError:
                    self.log.error(
                        "Missing Id in inspection: '%s'",
                        self.workflow.imageutil.base_image_inspect(),
                    )
                    raise
                self.log.info(
                    "will squash from base-image: '%s'", base_image_id)
                self.from_layer = base_image_id
            else:
                self.log.info("from scratch, will squash all layers")
        self.dont_load = dont_load
        self.save_archive = save_archive

    def run(self):
        """Run the squash; skipped for flatpak builds and flagged workflows."""
        if is_flatpak_build(self.workflow):
            self.log.info('flatpak build, skipping plugin')
            return
        if getattr(self.workflow, "skip_layer_squash", False):
            return
        if self.save_archive:
            output_path = os.path.join(self.workflow.source.workdir,
                                       EXPORTED_SQUASHED_IMAGE_NAME)
            metadata = {"path": output_path}
        else:
            output_path = None
        new_id = Squash(log=self.log, image=self.image,
                        from_layer=self.from_layer, tag=self.tag,
                        output_path=output_path,
                        load_image=not self.dont_load).run()
        # docker-squash may return a bare digest; normalize to sha256:<id>.
        if ':' not in new_id:
            new_id = 'sha256:{}'.format(new_id)
        if not self.dont_load:
            self.workflow.data.image_id = new_id
        if self.save_archive:
            metadata.update(get_exported_image_metadata(
                output_path, IMAGE_TYPE_DOCKER_ARCHIVE))
            self.workflow.data.exported_image_sequence.append(metadata)
This feature requires docker-squash package to be installed in version 1.0.0rc3 or higher. Usage: A json build config file should be created with following content: ``` "prepublish_plugins": [{ "name": "squash", "args": { "tag": "SQUASH_TAG", "from_layer": "FROM_LAYER", "dont_load": false } } ] ``` The `tag` argument specifies the tag under which the new squashed image will be registered. The `from_layer` argument specifies from which layer we want to squash. Of course it's possible to override it at runtime, like this: `--substitute prepublish_plugins.squash.tag=image:squashed --substitute prepublish_plugins.squash.from_layer=asdasd2332`.
62599034ac7a0e7691f735f0
class TestDanger(TestInfotech):
    """Tests for defect danger statistics."""

    def check_stats(self, info, val_types, val_methods, counts):
        """Assert danger_stats() values and counts against expectations."""
        from oeg_infotech.base import DistItem
        from oeg_infotech import defect

        stats_values, stats_counts = info.defects.danger_stats()
        self.assert_list(stats_values[DistItem.field_typeobj], val_types)
        self.assert_list(
            stats_values[defect.Item.field_method_id], val_methods)
        for item in counts:
            assert stats_counts[item] == counts[item]

    def test_danger_stats(self):
        """Danger stats for the 1827.xml fixture."""
        from oeg_infotech import Infotech, defect, codes

        self.check_stats(
            Infotech.from_file(self.fixture('1827.xml')),
            [
                codes.Feature.METALL_LOSS,
                codes.Feature.KANAVKA_HOR,
                codes.Feature.CAVERNA,
                codes.Feature.KANAVKA_VERT,
            ],
            [
                codes.MethodsKBD.GAZNADZOR2013,
            ],
            {
                defect.Item.field_kbd: 136,
                defect.Item.field_safe_pressure: 0,
                defect.Item.field_time_limit: 136,
                defect.Item.field_safe_pressure_persent: 0,
                defect.Item.field_method_id: 136,
            },
        )
Defects danger stuff.
62599034be383301e025491c
class RegisterMatmul(object):
    """Decorator registering a Matmul implementation for a pair of
    LinearOperator classes in the module-level _MATMUL registry.

    Usage:
        @RegisterMatmul(LinOpA, LinOpB)
        def _matmul_a_b(a, b): ...
    """

    def __init__(self, lin_op_cls_a, lin_op_cls_b):
        # Registry key: the ordered pair of operator classes.
        self._key = (lin_op_cls_a, lin_op_cls_b)

    def __call__(self, matmul_fn):
        if not callable(matmul_fn):
            raise TypeError(
                "matmul_fn must be callable, received: {}".format(matmul_fn))
        if self._key in _MATMUL:
            raise ValueError(
                "Matmul({}, {}) has already been registered.".format(
                    self._key[0].__name__, self._key[1].__name__))
        _MATMUL[self._key] = matmul_fn
        return matmul_fn
Decorator to register a Matmul implementation function. Usage: @linear_operator_algebra.RegisterMatmul( lin_op.LinearOperatorIdentity, lin_op.LinearOperatorIdentity) def _matmul_identity(a, b): # Return the identity matrix.
62599034d164cc617582207a
class _unlatex:
    """Iterator converting tokenized TeX into unicode strings.

    Helper for decode(). NOTE: Python 2 iterator protocol (``next``) and
    ``unichr`` are used throughout.
    """

    def __iter__(self):
        return self

    def __init__(self, tex):
        self.tex = tuple(_tokenize(tex))
        self.pos = 0
        # Seed with a harmless token so next() can always inspect lastoutput.
        self.lastoutput = 'x'

    def __getitem__(self, n):
        # Token at offset n from the current position, or None past the end.
        p = self.pos + n
        t = self.tex
        return p < len(t) and t[p] or None

    def next(self):
        if self.pos >= len(self.tex):
            raise StopIteration
        nextoutput = self.chunk()
        # Keep a space between a control word and a following letter so the
        # output re-tokenizes the same way.
        if self.lastoutput[0] == '\\' and self.lastoutput[-1].isalpha() \
                and nextoutput[0].isalpha():
            nextoutput = ' ' + nextoutput
        self.lastoutput = nextoutput
        return nextoutput

    def chunk(self):
        """Consume one logical chunk and return its unicode translation."""
        for delta, c in self.candidates(0):
            if c in _l2u:
                self.pos += delta
                return unichr(_l2u[c])
            elif len(c) == 2 and c[1] == 'i' and (c[0], '\\i') in _l2u:
                # Accent over dotless i, e.g. \'i -> \'\i.
                self.pos += delta
                return unichr(_l2u[(c[0], '\\i')])
            elif len(c) == 1 and c[0].startswith('\\char') \
                    and c[0][5:].isdigit():
                self.pos += delta
                return unichr(int(c[0][5:]))
        # No candidate matched: emit the current token verbatim.
        self.pos += 1
        return self[-1]

    def candidates(self, offset):
        """Yield (length, candidate-token-tuple) pairs starting at offset."""
        t = self[offset]
        if t in _blacklist:
            return
        elif t == '{':
            # Braced group: candidates inside, plus the surrounding braces.
            for delta, c in self.candidates(offset + 1):
                if self[offset + delta + 1] == '}':
                    yield delta + 2, c
        elif t == '\\mbox':
            for delta, c in self.candidates(offset + 1):
                yield delta + 1, c
        elif t == '$' and self[offset + 2] == '$':
            # Single-token math: $x$.
            yield 3, (t, self[offset + 1], t)
        else:
            q = self[offset + 1]
            if q == '{' and self[offset + 3] == '}':
                # Control sequence with braced argument: \"{a}.
                yield 4, (t, self[offset + 2])
            elif q:
                yield 2, (t, q)
            yield 1, t
Convert tokenized tex into sequence of unicode strings. Helper for decode().
6259903473bcbd0ca4bcb38f
class UniformShell(pml.SubmodularShell):
    """General n-dimensional coverage shell (e.g. geo- or time-coverage)."""

    def configTypes(self):
        return dict(self.config().items(), std=float,
                    gridResolution=readArray, gridMargin=readArray)

    def createGrid(self):
        """Build a uniform grid of coordinates spanning [minPos, maxPos]."""
        self.grid = np.zeros(
            tuple(self._gridResolution) + (len(self._gridResolution),))
        it = np.nditer(self.grid, flags=['multi_index'],
                       op_flags=['readwrite'])
        while not it.finished:
            # Last index selects the coordinate axis; interpolate linearly
            # between minPos and minPos + gridSize along that axis.
            it[0] = (self.minPos[it.multi_index[-1]]
                     + it.multi_index[it.multi_index[-1]]
                     / float(self._gridResolution[it.multi_index[-1]] - 1)
                     * self.gridSize[it.multi_index[-1]])
            it.iternext()

    def prepare(self, data):
        """Compute the bounding box of the data, pad it, and build the grid."""
        isSet = False
        for datum in data:
            p = self.locate(datum)
            if not isSet:
                self.minPos = p
                self.maxPos = p
                isSet = True
            else:
                self.minPos = np.minimum(self.minPos, p)
                self.maxPos = np.maximum(self.maxPos, p)
        self.gridSize = self.maxPos - self.minPos
        logging.info(
            "The pictures are contained in a rectangle of size %f x %f"
            % tuple(self.gridSize))
        # Pad the bounding box by the configured margin on every side.
        self.minPos -= self._gridMargin * self.gridSize
        self.maxPos += self._gridMargin * self.gridSize
        self.gridSize = self.maxPos - self.minPos
        self.createGrid()
        self.std = self._std * max(self.gridSize / self._gridResolution)

    def evaluate(self, subset):
        """Score = sum over grid cells of the max Gaussian coverage by subset."""
        score = np.zeros(self.grid.shape[:-1])
        shape_tuple = (1,) * (len(self.grid.shape) - 1) + (self.grid.shape[-1],)
        for datum in subset:
            position = self.locate(datum).reshape(shape_tuple)
            p = np.tile(position, tuple(self.grid.shape[:-1]) + (1,))
            score = np.maximum(
                score,
                np.exp(-0.5 * (np.sum((self.grid - p) ** 2,
                                      len(self.grid.shape) - 1))
                       / self.std ** 2))
        return np.sum(score)

    def config(self):
        # Abstract: subclasses supply the configuration mapping.
        pass

    def filter(self, datum):
        # Abstract: subclasses decide which data to keep.
        pass

    def locate(self, datum):
        # Abstract: subclasses map a datum to an n-dimensional position.
        pass
A general coverage shell in n-dimensions. In practice, it is instantiated for small dimensions like geocoverage, timecoverage.
625990349b70327d1c57fe8d
class TranslationCall(BrowserView):
    """Browser view sending a test request to the EC eTranslation SOAP API."""

    def __call__(self):
        from zeep import Client
        from zeep.wsse.username import UsernameToken

        # NOTE(review): credentials are hard-coded and 'xxxyyyzzz' looks like
        # a placeholder -- these should come from configuration, not source.
        client = Client(
            'https://webgate.ec.europa.eu/etranslation/si/WSEndpointHandlerService?WSDL',
            wsse=UsernameToken('Marine_EEA_20180706', 'xxxyyyzzz'))
        request = {
            'priority': '5',
            'external-reference': '1',
            'caller-information': {
                'application': 'Marine_EEA_20180706',
                'username': 'dumitval'},
            'text-to-translate': 'Hausaufgabe',
            'source-language': 'DE',
            'target-languages': {'target-language': 'EN'},
            'domain': 'SPD',
            'requester-callback':
                'https://wise-test.eionet.europa.eu/translation_callback',
            'destinations': {
                'http-destination':
                    'https://wise-test.eionet.europa.eu/translation_callback'},
        }
        client.service.translate(request)
Call Translation class
6259903430c21e258be99915
class InputURLList:
    """Yields URLs from a text file listing one URL (plus attributes) per line."""

    def __init__(self, attributes):
        self._path = None
        self._encoding = None
        if not ValidateAttributes('URLLIST', attributes,
                                  ('path', 'encoding')):
            return
        self._path = attributes.get('path')
        self._encoding = attributes.get('encoding', ENC_UTF8)
        if self._path:
            self._path = encoder.MaybeNarrowPath(self._path)
            if os.path.isfile(self._path):
                output.Log('Input: From URLLIST "%s"' % self._path, 2)
            else:
                output.Error('Can not locate file: %s' % self._path)
                self._path = None
        else:
            output.Error('Urllist entries must have a "path" attribute.')

    def ProduceURLs(self, consumer):
        """Parse the URL list file and feed each URL to consumer."""
        (frame, file) = OpenFileForRead(self._path, 'URLLIST')
        if not file:
            return
        for linenum, line in enumerate(file.readlines(), 1):
            if self._encoding:
                line = encoder.WidenText(line, self._encoding)
            line = line.strip()
            # Skip blank lines and comments.
            if (not line) or line[0] == '#':
                continue
            url = URL()
            cols = [col.strip() for col in line.split(' ')]
            # First column is the location; the rest are key=value attributes.
            url.TrySetAttribute('loc', cols[0])
            for col in cols[1:]:
                if col:
                    try:
                        (attr_name, attr_val) = col.split('=', 1)
                        url.TrySetAttribute(attr_name, attr_val)
                    except ValueError:
                        output.Warn('Line %d: Unable to parse attribute: %s'
                                    % (linenum, col))
            consumer(url, False)
        file.close()
        if frame:
            frame.close()
Each Input class knows how to yield a set of URLs from a data source. This one handles a text file with a list of URLs
6259903426068e7796d4da53
class Config(object):
    """Common application configuration (server + logging defaults)."""

    DEBUG = False
    TESTING = False
    HOST = "0.0.0.0"
    PORT = 8000
    # dictConfig-style logging setup: console + 100 MB rotating file.
    LOGGING_CONFIG = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "default": {
                "format": (
                    "%(asctime)s - %(name)s [%(levelname)s]: %(message)s "
                    "%(module)s %(funcName)s %(lineno)d"
                ),
                "datefmt": "%Y-%m-%d %H:%M:%S",
            },
        },
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "formatter": "default",
                "stream": sys.stdout,
            },
            "file": {
                "class": "logging.handlers.RotatingFileHandler",
                "formatter": "default",
                "filename": f"{folder_helper.MAIN_FOLDER}/logs/log.log",
                "maxBytes": 104857600,
                "backupCount": 3,
            },
        },
        "loggers": {
            "sanic": {"level": "DEBUG", "handlers": ["console", "file"]}
        },
        "root": {
            "handlers": ["console", "file"],
            "level": "DEBUG",
        },
    }
Common configurations
6259903430c21e258be99916
class ThreadPool:
    """Pool of worker threads consuming tasks from a shared queue."""

    def __init__(self, num_threads):
        # Bounded queue: add_task blocks once num_threads tasks are pending.
        self.tasks = Queue(num_threads)
        for _ in range(num_threads):
            Worker(self.tasks)

    def add_task(self, func, *args, **kargs):
        """Queue a callable together with its positional/keyword arguments."""
        self.tasks.put((func, args, kargs))

    def wait_completion(self):
        """Block until every queued task has been processed."""
        self.tasks.join()

    def full(self):
        """Return True if the task queue is at capacity."""
        return self.tasks.full()
Pool of threads consuming tasks from a queue
625990348c3a8732951f7662
class DefaultFactory:
    """Default factory used by relatorio.

    Accepts (and ignores) any constructor arguments; calling an instance
    returns a shallow copy of the keyword arguments it receives.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __call__(self, **kwargs):
        # Return a fresh dict so callers can mutate it freely.
        return dict(kwargs)
This is the default factory used by relatorio. It just returns a copy of the data it receives
62599034b830903b9686ecff
class Unevaluated(Builtin):
    """Temporarily keeps an expression unevaluated as a function argument.

    Carries the 'HoldAllComplete' attribute so its contents are never
    touched by the evaluator.
    """

    attributes = ('HoldAllComplete',)
<dl> <dt>'Unevaluated[$expr$]' <dd>temporarily leaves $expr$ in an unevaluated form when it appears as a function argument. </dl> 'Unevaluated' is automatically removed when function arguments are evaluated: >> Sqrt[Unevaluated[x]] = Sqrt[x] >> Length[Unevaluated[1+2+3+4]] = 4 'Unevaluated' has attribute 'HoldAllComplete': >> Attributes[Unevaluated] = {HoldAllComplete, Protected} 'Unevaluated' is maintained for arguments to non-executed functions: >> f[Unevaluated[x]] = f[Unevaluated[x]] Likewise, it's kept in flattened arguments and sequences: >> Attributes[f] = {Flat}; >> f[a, Unevaluated[f[b, c]]] = f[a, Unevaluated[b], Unevaluated[c]] >> g[a, Sequence[Unevaluated[b], Unevaluated[c]]] = g[a, Unevaluated[b], Unevaluated[c]] However, unevaluated sequences are kept: >> g[Unevaluated[Sequence[a, b, c]]] = g[Unevaluated[Sequence[a, b, c]]] #> Attributes[h] = Flat; #> h[items___] := Plus[items] #> h[1, Unevaluated[Sequence[Unevaluated[2], 3]], Sequence[4, Unevaluated[5]]] = 15
625990341d351010ab8f4c23
class CreateEndpoint(show.ShowOne):
    """Create a new service endpoint (Identity v2)."""

    log = logging.getLogger(__name__ + '.CreateEndpoint')

    def get_parser(self, prog_name):
        parser = super(CreateEndpoint, self).get_parser(prog_name)
        parser.add_argument(
            'service',
            metavar='<service>',
            help=_('New endpoint service (name or ID)'),
        )
        parser.add_argument(
            '--publicurl',
            metavar='<url>',
            required=True,
            help=_('New endpoint public URL (required)'),
        )
        parser.add_argument(
            '--adminurl',
            metavar='<url>',
            help=_('New endpoint admin URL'),
        )
        parser.add_argument(
            '--internalurl',
            metavar='<url>',
            help=_('New endpoint internal URL'),
        )
        parser.add_argument(
            '--region',
            metavar='<region-id>',
            help=_('New endpoint region ID'),
        )
        return parser

    @utils.log_method(log)
    def take_action(self, parsed_args):
        identity_client = self.app.client_manager.identity
        service = common.find_service(identity_client, parsed_args.service)
        endpoint = identity_client.endpoints.create(
            parsed_args.region,
            service.id,
            parsed_args.publicurl,
            parsed_args.adminurl,
            parsed_args.internalurl,)
        # Augment the endpoint info with the resolved service name/type.
        info = {}
        info.update(endpoint._info)
        info['service_name'] = service.name
        info['service_type'] = service.type
        return zip(*sorted(six.iteritems(info)))
Create new endpoint
62599034d6c5a102081e322f
class ConnectionError(InstascrapeError):
    """Raised when Instascrape fails to reach the Instagram server."""

    def __init__(self, url: str):
        super().__init__("Failed to connect to '{0}'.".format(url))
Raised when Instascrape fails to connect to Instagram server.
6259903476d4e153a661daf6
class DatetimeDescriptor(ScalarDescriptor):
    """ScalarDescriptor for datetime values.

    Works around datetime's sorting quirks and its lack of default
    constructor parameters by sorting on the epoch-seconds string.
    """

    default_instance = datetime.datetime.fromtimestamp(0)
    type_cls = datetime.datetime

    def sortkey(self, coerced):
        # "%s" (seconds since the epoch) is a platform-specific strftime
        # extension -- NOTE(review): not portable to all libcs; confirm
        # the supported platforms.
        return super(DatetimeDescriptor, self).sortkey(coerced).strftime("%s")
Python's datetime is a special snowflake that requires some massaging. This is just like ScalarDescriptor, except it works around the weirdness in datetime's sorting behavior and absence of default parameters in the constructor.
6259903415baa723494630a4
class PickGame(object):
    """Pop-up window in the lights-out game; lets the user pick a game by number."""

    def __init__(self, parent):
        top = self.top = tk.Toplevel(parent)
        top.title('Pick a Game')
        self.choice = tk.Entry(top, width=10)
        self.maxlim = len(parent._start_confs) * 2
        self.entry_label = tk.Label(
            top,
            text='Choose a game number between '
                 '1 and {}:'.format(self.maxlim))
        self.start_btn = tk.Button(
            top, text='Start',
            command=lambda: self._start(parent, self.choice))
        self.entry_label.pack()
        self.choice.pack()
        self.start_btn.pack()

    def _start(self, parent, choice):
        """Validate the entered game number and start that game.

        BUG FIX: the original spun in ``while True`` when the number was out
        of range (updating the label but never re-reading the entry), hanging
        the UI; it also crashed on non-numeric input, and the retry message
        was missing a space ("between 1and ..."). Invalid input now updates
        the label and returns, letting the user try again.
        """
        try:
            choice = int(self.choice.get()) - 1
        except ValueError:
            self.entry_label.config(text='Retry: Number must be between 1 '
                                         'and {}:'.format(self.maxlim))
            return
        if not 0 <= choice < self.maxlim:
            self.entry_label.config(text='Retry: Number must be between 1 '
                                         'and {}:'.format(self.maxlim))
            return
        # Re-enable the parent controls and start the chosen configuration.
        parent.cbtns['new'].config(state='normal')
        parent.cbtns['pick'].config(state='normal')
        parent.cbtns['restart'].grid(column=1, row=parent.btn_per_side + 1)
        for btn in parent.lbtns.values():
            btn.config(state='normal')
        parent.get_start_layout(choice)
        parent.reset_start_conf()
        self.top.destroy()
Pop-up window in lights out game, allows selection of game by number.
6259903407d97122c4217db2
class AnalyticEuropeanStockOptionSolver(OptionSolver):
    """Closed-form Black-Scholes pricer (European stock options only)."""

    def solve_option_price(self, option):
        """Return the Black-Scholes price of a European call or put."""
        underlying = option.assets[0]
        spot = underlying.spot
        vol = underlying.vol
        risk_free = option.risk_free
        expiry = option.expiry
        strike = option.strike
        log_diff = math.log(spot / strike)
        vt = 0.5 * vol ** 2
        denom = vol * math.sqrt(expiry)
        d1 = (log_diff + (risk_free + vt) * expiry) / denom
        d2 = (log_diff + (risk_free - vt) * expiry) / denom
        discount = math.exp(-risk_free * expiry)
        # One shared expression covers calls and puts via sign flips:
        #   call: S*N(d1) - K*e^{-rT}*N(d2);  put: K*e^{-rT}*N(-d2) - S*N(-d1)
        if option.is_call:
            S, d1, K, d2 = spot, d1, -strike, d2
        else:
            S, d1, K, d2 = -spot, -d1, strike, -d2
        return S * ss.norm.cdf(d1) + K * ss.norm.cdf(d2) * discount
A Black-Scholes stock option pricer. Only works for European stock options
62599034b57a9660fecd2b8f
class NetRpcNfsd(ReadFile):
    """Parse /proc/net/rpc/nfsd into per-line dicts of named counters.

    BUG FIX: the 'fh' field list contained 'ncachedir' twice, so
    ``dict(zip(...))`` silently dropped one of the two stats; per the kernel
    nfsd statistics the second field is 'ncachenondir'.
    """

    FILENAME = ospath.join('proc', 'net', 'rpc', 'nfsd')
    KEY = 'netrpcnfsd'
    # Field names for each stat line, keyed by the line's leading tag.
    # NOTE(review): the 'io' entry lists ('null', 'compound') -- in kernel
    # docs that line is read/write byte counts; confirm against the target
    # kernel before relying on these names.
    FIELDS = {
        'net': ('packets', 'udp', 'tcp', 'tcpconn', ),
        'rpc': ('calls', 'badcalls', 'badclnt', 'badauth', 'xdrcall', ),
        'ra': ('size', 'deep10', 'deep20', 'deep30', 'deep40', 'deep50',
               'deep60', 'deep70', 'deep80', 'deep90', 'deep100',
               'notfound', ),
        'rc': ('hits', 'misses', 'nocache', ),
        'io': ('null', 'compound', ),
        'th': ('threads', 'ntimesmax', 'hist00', 'hist10', 'hist20',
               'hist30', 'hist40', 'hist50', 'hist60', 'hist70', 'hist80',
               'hist90', ),
        'fh': ('lookup', 'anon', 'ncachedir', 'ncachenondir', 'stale', ),
        'proc2': ('null', 'getattr', 'setattr', 'root', 'lookup', 'readlink',
                  'read', 'wrcache', 'write', 'create', 'remove', 'rename',
                  'link', 'symlink', 'mkdir', 'rmdir', 'readdir',
                  'fsstat', ),
        'proc3': ('null', 'getattr', 'setattr', 'lookup', 'access',
                  'readlink', 'read', 'write', 'create', 'kdir', 'symlink',
                  'mknod', 'remove', 'rmdir', 'rename', 'link', 'readdir',
                  'readdirplus', 'fsstat', 'fsinfo', 'pathconf', 'commit', ),
        'proc4': ('null', 'compound', ),
        'proc4ops': ('op0-unused', 'op1-unused', 'op2-future', 'access',
                     'close', 'commit', 'create', 'delegpurge',
                     'delegreturn', 'getattr', 'getfh', 'link', 'lock',
                     'lockt', 'locku', 'lookup', 'lookup_root', 'nverify',
                     'open', 'openattr', 'open_conf', 'open_dgrd', 'putfh',
                     'putpubfh', 'putrootfh', 'read', 'readdir', 'readlink',
                     'remove', 'rename', 'renew', 'restorefh', 'savefh',
                     'secinfo', 'setattr', 'setcltid', 'setcltidconf',
                     'verify', 'write', 'rellockowner', 'bc_ctl',
                     'bind_conn', 'exchange_id', 'create_ses',
                     'destroy_ses', 'free_stateid', 'getdirdeleg',
                     'getdevinfo', 'getdevlist', 'layoutcommit',
                     'layoutget', 'layoutreturn', 'secinfononam',
                     'sequence', 'set_ssv', 'test_stateid', 'want_deleg',
                     'destroy_clid', 'reclaim_comp', ),
    }

    def normalize(self):
        """Map each stat line to {tag: {field: value, ..., 'total': sum}}."""
        LOGGER.debug("Normalize")
        lines = self.lines
        ret = {}
        for line in lines:
            if line:
                vals = line.split()
                key = vals[0]
                ret[key] = dict(
                    zip(self.FIELDS[key],
                        [literal_eval(val)
                         for val in islice(vals, 1, None)]))
                ret[key]['total'] = sum(ret[key].values())
        return ret
NetRpcNfsd handling
625990348e05c05ec3f6f6e0
class Settings(db.Model, BaseMixin):
    """Global application settings (SQLAlchemy model)."""

    id = db.Column(db.Integer, primary_key=True)
    darkmode = db.Column(db.Boolean, default=False)
    api_key = db.Column(db.String)

    @staticmethod
    def get_api_key():
        """Return the stored API key, or '' if none is configured."""
        s = Settings.query.first()
        return s.api_key if s and s.api_key else ''
Global Application Settings. (SQL Alchemy model)
625990348a43f66fc4bf3293
@register
class ArrayContains(FunctionSignature):
    """Check if any given ``value`` is a member of ``array`` (case-folded)."""

    name = "arrayContains"
    argument_types = [TypeHint.Array, TypeHint.primitives()]
    return_value = TypeHint.Boolean
    additional_types = TypeHint.primitives()

    @classmethod
    def run(cls, array, *value):
        # Case-fold the probe values once, then scan the array.
        targets = [fold_case(v) for v in value]
        if array is None:
            return False
        return any(fold_case(item) in targets for item in array)
Check if ``value`` is a member of the array ``some_array``.
625990348a349b6b43687349
class TybaltAutoban(commands.Cog):
    """Cog that auto-bans joining members whose name matches a stored filter."""

    def __init__(self):
        super().__init__()
        self.config = Config.get_conf(self, identifier=1840351931)
        default_guild = {
            'ban_like': [],
            'ban_regex': []
        }
        self.config.register_guild(**default_guild)

    @commands.command(pass_context=True, no_pm=True, aliases=["autoban"])
    @checks.has_permissions(ban_members=True)
    async def autoban_add(self, ctx, *match):
        """Add a (lowercased) substring filter to this guild's autoban list."""
        msg = " ".join(match).lower()
        matches = await self.config.guild(ctx.guild).ban_like()
        if msg not in matches:
            matches.append(msg)
            await self.config.guild(ctx.guild).ban_like.set(matches)
            await ctx.message.channel.send(
                "Added to autoban : `{}`".format(msg))
        else:
            await ctx.message.channel.send(
                "Already in autoban : `{}`".format(msg))

    @commands.command(pass_context=True, no_pm=True)
    @checks.has_permissions(ban_members=True)
    async def autoban_del(self, ctx, *match):
        """Remove a filter from this guild's autoban list."""
        msg = " ".join(match)
        matches = await self.config.guild(ctx.guild).ban_like()
        if msg in matches:
            matches.remove(msg)
            await self.config.guild(ctx.guild).ban_like.set(matches)
            await ctx.message.channel.send(
                "Removed from autoban : `{}`".format(msg))
        else:
            await ctx.message.channel.send(
                "Not in autoban : `{}`".format(msg))

    @commands.command(pass_context=True, no_pm=True)
    @checks.has_permissions(ban_members=True)
    async def autoban_list(self, ctx, *match):
        """List this guild's autoban filters."""
        matches = await self.config.guild(ctx.guild).ban_like()
        msg = "\n".join(matches)
        await ctx.message.channel.send(
            "List of autoban filters : \n{}".format(msg))

    @commands.Cog.listener()
    async def on_member_join(self, member):
        """Ban any new member whose lowercased name contains a filter."""
        member_name = member.name.lower()
        matches = await self.config.guild(member.guild).ban_like()
        for match in matches:
            if match in member_name:
                print("Ban ! ({})".format(member.name))
                await member.ban(reason="Autoban : {}".format(match))
                break
TybaltAutoban.
6259903496565a6dacd2d812
class BrokenCtypesTest(NormalTest):
    """Simulates the many platforms without ctypes by stripping the uptime
    module of its ctypes/struct/os references before the tests run."""

    @classmethod
    def setUpClass(cls):
        uptime.ctypes = None
        del uptime.struct
        del uptime.os
It's ridiculous how many platforms don't have ctypes. This class simulates that.
62599034d10714528d69ef11
class SpecsSpreadsheet(object): <NEW_LINE> <INDENT> def __init__(self, creds_file, spreadsheet_id): <NEW_LINE> <INDENT> self.service = self.login(creds_file, spreadsheet_id) <NEW_LINE> self.spreadsheet_id = spreadsheet_id <NEW_LINE> <DEDENT> def login(self, creds_file, spreadsheet_id): <NEW_LINE> <INDENT> creds = None <NEW_LINE> SCOPES = 'https://www.googleapis.com/auth/spreadsheets' <NEW_LINE> store = file.Storage('token.json') <NEW_LINE> try: <NEW_LINE> <INDENT> creds = store.get() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if not creds or creds.invalid: <NEW_LINE> <INDENT> flow = client.flow_from_clientsecrets(creds_file, SCOPES) <NEW_LINE> flags=tools.argparser.parse_args(args=['--noauth_local_webserver']) <NEW_LINE> creds = tools.run_flow(flow, store, flags) <NEW_LINE> <DEDENT> service = build('sheets', 'v4', http=creds.authorize(Http())) <NEW_LINE> return service <NEW_LINE> <DEDENT> def get(self, range): <NEW_LINE> <INDENT> request = self.service.spreadsheets().values().get( spreadsheetId=self.spreadsheet_id, range=range) <NEW_LINE> response = request.execute() <NEW_LINE> values = response.get('values', []) <NEW_LINE> return values <NEW_LINE> <DEDENT> def clear(self, range): <NEW_LINE> <INDENT> request = self.service.spreadsheets().values().clear( spreadsheetId=self.spreadsheet_id, range=range) <NEW_LINE> response = request.execute() <NEW_LINE> <DEDENT> def bulk_add_specs(self, range, specs): <NEW_LINE> <INDENT> values = [] <NEW_LINE> for s in specs: <NEW_LINE> <INDENT> context = ", ".join(s.rule.context) <NEW_LINE> response = ", ".join(s.rule.response) <NEW_LINE> value = [context, response, s.eclat_support, s.support, s.merged_support, s.global_confidence, s.local_confidence] <NEW_LINE> values.append(value) <NEW_LINE> <DEDENT> value_range_body = { 'values': values } <NEW_LINE> request = self.service.spreadsheets().values().append( spreadsheetId=self.spreadsheet_id, range=range, valueInputOption='RAW', 
insertDataOption='OVERWRITE', body=value_range_body) <NEW_LINE> response = request.execute() <NEW_LINE> <DEDENT> def add_spec(self, range, specs): <NEW_LINE> <INDENT> context = ", ".join(association_rule.context) <NEW_LINE> response = ", ".join(association_rule.response) <NEW_LINE> values = [ [ context, response, eclat_support, support, merged_support, global_confidence, local_confidence ] ] <NEW_LINE> value_range_body = { 'values': values } <NEW_LINE> request = self.service.spreadsheets().values().append( spreadsheetid=self.spreadsheet_id, range=range, valueinputoption='RAW', insertdataoption='OVERWRITE', body=value_range_body) <NEW_LINE> response = request.execute()
Handles auth/requests for a single login + spreadsheet
625990349b70327d1c57fe91
class FocusMimics(MappingRule): <NEW_LINE> <INDENT> mapping = { "focus chrome": Mimic("switch", "to", "Google Chrome"), "focus note": Mimic("switch", "to", "notepad++"), "focus word": Mimic("switch", "to", "Microsoft Word"), "focus evernote": Mimic("switch", "to", "evernote"), "focus fire": Mimic("switch", "to", "Firefox"), "focus plore": Mimic("switch", "to", "Windows Explorer"), }
This mimics the "switch to" command from DNS to use the Dragonfly "focus" command syntax. The main definitions of the Dragonfly "focus" command are in _window_control.py.
62599034ec188e330fdf99a1
class Click(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def login_button(driver): <NEW_LINE> <INDENT> login_button = Browser._FieldHelper.find_element(driver, xpath=web_elements.login_button) <NEW_LINE> login_button.click()
The class allows the Browser to click on the various button on the page
625990340a366e3fb87ddaf3
class DashbordViewWidget(FWidget): <NEW_LINE> <INDENT> def __init__(self, parent=0, *args, **kwargs): <NEW_LINE> <INDENT> super(DashbordViewWidget, self).__init__( parent=parent, *args, **kwargs) <NEW_LINE> self.parentWidget().set_window_title("TABLEAU DE BORD") <NEW_LINE> self.parent = parent <NEW_LINE> vbox = QVBoxLayout() <NEW_LINE> self.title = FPageTitle("TABLEAU DE BORD") <NEW_LINE> self.title_alert = FBoxTitle(u"Les alertes") <NEW_LINE> jinja = Environment(loader=FileSystemLoader('static')) <NEW_LINE> template = jinja.get_template('chart.html') <NEW_LINE> cc_list = CooperativeCompanie.select().order_by('-start_date') <NEW_LINE> dataset = { "wc": self.width(), "hc": self.height(), "toal_scoop": cc_list.count(), "sc_coop_ca": cc_list.where(CooperativeCompanie.forme == "b").count(), "sc_scoops": cc_list.where(CooperativeCompanie.forme == "a").count(), "union": cc_list.where(CooperativeCompanie.forme == "bv").count(), "federation": cc_list.where(CooperativeCompanie.forme == "bf").count(), "confederation": cc_list.where(CooperativeCompanie.forme == "bc").count(), } <NEW_LINE> graph1 = template.render(base_url=ROOT_DIR, data=dataset) <NEW_LINE> template2 = jinja.get_template('table.html') <NEW_LINE> table_html = template2.render(base_url=ROOT_DIR, dataset=dataset) <NEW_LINE> web_graphic = QWebView() <NEW_LINE> web_graphic.setHtml(graph1) <NEW_LINE> tab_graphic = QVBoxLayout() <NEW_LINE> tab_graphic.addWidget(web_graphic) <NEW_LINE> web_table = QWebView() <NEW_LINE> web_table.setHtml(table_html) <NEW_LINE> tab_table = QVBoxLayout() <NEW_LINE> tab_table.addWidget(web_table) <NEW_LINE> tab_widget = tabbox((tab_graphic, u"Graphique"), (tab_table, u"Tableau")) <NEW_LINE> vbox.addWidget(self.title) <NEW_LINE> vbox.addWidget(tab_widget) <NEW_LINE> self.setLayout(vbox)
Shows the home page
6259903450485f2cf55dc08b
class Product(_messages.Message): <NEW_LINE> <INDENT> imageUri = _messages.StringField(1) <NEW_LINE> productId = _messages.StringField(2) <NEW_LINE> score = _messages.FloatField(3, variant=_messages.Variant.FLOAT)
Information about a product. . Fields: imageUri: The URI of the image which matched the query image. This field is returned only if `view` is set to `FULL` in the request. productId: Product ID. score: A confidence level on the match, ranging from 0 (no confidence) to 1 (full confidence). This field is returned only if `view` is set to `FULL` in the request.
6259903426238365f5fadc61
class ErrorLoadingConfig(Exception): <NEW_LINE> <INDENT> def __init__(self, config_file, message=None): <NEW_LINE> <INDENT> message = 'Failed in reading config file %s. Original message: %s' % (config_file, message) <NEW_LINE> Exception.__init__(self, message)
Exception class, which is used for config loading exceptions.
62599034d4950a0f3b1116c5
class BeamSearch: <NEW_LINE> <INDENT> def __init__(self, beam_size: int) -> None: <NEW_LINE> <INDENT> self._beam_size = beam_size <NEW_LINE> <DEDENT> def search(self, num_steps: int, initial_state: DecoderState, decoder_step: DecoderStep, keep_final_unfinished_states: bool = True) -> Dict[int, List[DecoderState]]: <NEW_LINE> <INDENT> finished_states: Dict[int, List[DecoderState]] = defaultdict(list) <NEW_LINE> states = [initial_state] <NEW_LINE> step_num = 1 <NEW_LINE> while states and step_num <= num_steps: <NEW_LINE> <INDENT> next_states: Dict[int, List[DecoderState]] = defaultdict(list) <NEW_LINE> grouped_state = states[0].combine_states(states) <NEW_LINE> for next_state in decoder_step.take_step(grouped_state, max_actions=self._beam_size): <NEW_LINE> <INDENT> batch_index = next_state.batch_indices[0] <NEW_LINE> if next_state.is_finished(): <NEW_LINE> <INDENT> finished_states[batch_index].append(next_state) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if step_num == num_steps and keep_final_unfinished_states: <NEW_LINE> <INDENT> finished_states[batch_index].append(next_state) <NEW_LINE> <DEDENT> next_states[batch_index].append(next_state) <NEW_LINE> <DEDENT> <DEDENT> states = [] <NEW_LINE> for batch_index, batch_states in next_states.items(): <NEW_LINE> <INDENT> states.extend(batch_states[:self._beam_size]) <NEW_LINE> <DEDENT> step_num += 1 <NEW_LINE> <DEDENT> best_states: Dict[int, List[DecoderState]] = {} <NEW_LINE> for batch_index, batch_states in finished_states.items(): <NEW_LINE> <INDENT> finished_to_sort = [(-state.score[0].item(), state) for state in batch_states] <NEW_LINE> finished_to_sort.sort(key=lambda x: x[0]) <NEW_LINE> best_states[batch_index] = [state[1] for state in finished_to_sort[:self._beam_size]] <NEW_LINE> <DEDENT> return best_states <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_params(cls, params: Params) -> 'BeamSearch': <NEW_LINE> <INDENT> beam_size = params.pop('beam_size') <NEW_LINE> return cls(beam_size=beam_size)
This class implements beam search over transition sequences given an initial ``DecoderState`` and a ``DecoderStep``, returning the highest scoring final states found by the beam (the states will keep track of the transition sequence themselves). The initial ``DecoderState`` is assumed to be `batched`. The value we return from the search is a dictionary from batch indices to ranked finished states. IMPORTANT: We assume that the ``DecoderStep`` that you are using returns possible next states in sorted order, so we do not do an additional sort inside of ``BeamSearch.search()``. If you're implementing your own ``DecoderStep``, you must ensure that you've sorted the states that you return.
6259903466673b3332c31500
class BernsteinDualSet(DualSet): <NEW_LINE> <INDENT> def __init__(self, ref_el, degree): <NEW_LINE> <INDENT> topology = ref_el.get_topology() <NEW_LINE> entity_ids = {dim: {entity_i: [] for entity_i in entities} for dim, entities in topology.items()} <NEW_LINE> inverse_topology = {vertices: (dim, entity_i) for dim, entities in topology.items() for entity_i, vertices in entities.items()} <NEW_LINE> dim = ref_el.get_spatial_dimension() <NEW_LINE> kss = mis(dim + 1, degree) <NEW_LINE> nodes = [] <NEW_LINE> for i, ks in enumerate(kss): <NEW_LINE> <INDENT> vertices, = numpy.nonzero(ks) <NEW_LINE> entity_dim, entity_i = inverse_topology[tuple(vertices)] <NEW_LINE> entity_ids[entity_dim][entity_i].append(i) <NEW_LINE> nodes.append(None) <NEW_LINE> <DEDENT> super(BernsteinDualSet, self).__init__(nodes, ref_el, entity_ids)
The dual basis for Bernstein elements.
625990346e29344779b01760
class DigitClassificationModel(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.layers = [] <NEW_LINE> self.layers.append(Layer(784, 400, True, True)) <NEW_LINE> self.layers.append(Layer(400, 80, True, True)) <NEW_LINE> self.layers.append(Layer(80, 10, True, True)) <NEW_LINE> self.batch_size = 10 <NEW_LINE> self.network = NeuralNetwork(self.layers, self.batch_size) <NEW_LINE> self.initial_learning_rate = 0.2 <NEW_LINE> self.learning_rate_update = 0.999 <NEW_LINE> self.batches_per_update = 10 <NEW_LINE> self.batches_per_accuracy_check = 1000 <NEW_LINE> <DEDENT> def run(self, x): <NEW_LINE> <INDENT> return self.network.predict(x) <NEW_LINE> <DEDENT> def get_loss(self, x, y): <NEW_LINE> <INDENT> y_predict = self.run(x) <NEW_LINE> return nn.SoftmaxLoss(y_predict, y) <NEW_LINE> <DEDENT> def train(self, dataset): <NEW_LINE> <INDENT> alpha = self.initial_learning_rate <NEW_LINE> epoch = 1 <NEW_LINE> converged = False <NEW_LINE> while not converged: <NEW_LINE> <INDENT> total_samples = 0 <NEW_LINE> total_batches = 0 <NEW_LINE> for x, y in dataset.iterate_once(self.batch_size): <NEW_LINE> <INDENT> current_loss = self.get_loss(x, y) <NEW_LINE> parameters = self.network.collectModelParameters() <NEW_LINE> step_gradients = nn.gradients(current_loss, parameters) <NEW_LINE> for parameter, gradient in zip(parameters, step_gradients): <NEW_LINE> <INDENT> parameter.update(gradient, -alpha) <NEW_LINE> <DEDENT> total_samples += self.batch_size <NEW_LINE> total_batches += 1 <NEW_LINE> if total_batches % self.batches_per_update == 0: <NEW_LINE> <INDENT> alpha *= self.learning_rate_update <NEW_LINE> <DEDENT> if total_batches % self.batches_per_accuracy_check == 0: <NEW_LINE> <INDENT> accuracy = dataset.get_validation_accuracy() <NEW_LINE> if accuracy > 0.98: <NEW_LINE> <INDENT> print("Achieved 98% accuracy: " + str(accuracy)) <NEW_LINE> converged = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> epoch += 1 <NEW_LINE> if not converged and epoch > 4: 
<NEW_LINE> <INDENT> print("Could not converge in 4 epochs. Restarting.") <NEW_LINE> epoch = 1 <NEW_LINE> alpha = self.initial_learning_rate <NEW_LINE> self.network.resetModelParameters()
A model for handwritten digit classification using the MNIST dataset. Each handwritten digit is a 28x28 pixel grayscale image, which is flattened into a 784-dimensional vector for the purposes of this model. Each entry in the vector is a floating point number between 0 and 1. The goal is to sort each digit into one of 10 classes (number 0 through 9). (See RegressionModel for more information about the APIs of different methods here. We recommend that you implement the RegressionModel before working on this part of the project.)
62599034be383301e0254922
class PasswordInputPlugin(FormFieldPlugin): <NEW_LINE> <INDENT> uid = UID <NEW_LINE> name = _("Password") <NEW_LINE> group = _("Fields") <NEW_LINE> form = PasswordInputForm <NEW_LINE> def get_form_field_instances(self, request=None, form_entry=None, form_element_entries=None, **kwargs): <NEW_LINE> <INDENT> widget_attrs = { 'class': theme.form_element_html_class, 'placeholder': self.data.placeholder, } <NEW_LINE> field_kwargs = { 'label': self.data.label, 'help_text': self.data.help_text, 'initial': self.data.initial, 'required': self.data.required, 'widget': PasswordInput(attrs=widget_attrs), } <NEW_LINE> if self.data.max_length not in (None, ''): <NEW_LINE> <INDENT> field_kwargs['max_length'] = self.data.max_length <NEW_LINE> <DEDENT> return [(self.data.name, CharField, field_kwargs)]
Password field plugin.
62599034711fe17d825e1522
class Zone: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.records = {} <NEW_LINE> <DEDENT> def add_node(self, name, record_set): <NEW_LINE> <INDENT> self.records[name] = record_set <NEW_LINE> <DEDENT> def read_master_file(self, filename): <NEW_LINE> <INDENT> with open(filename) as file: <NEW_LINE> <INDENT> lines = file.readlines() <NEW_LINE> <DEDENT> dct = {} <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> if line[0] != ";": <NEW_LINE> <INDENT> data = line.split() <NEW_LINE> if data[2] == 'NS': <NEW_LINE> <INDENT> t = Type.NS <NEW_LINE> rdata = NSRecordData(Name(data[3])) <NEW_LINE> <DEDENT> elif data[2] == 'A': <NEW_LINE> <INDENT> t = Type.A <NEW_LINE> rdata = ARecordData(data[3]) <NEW_LINE> <DEDENT> elif data[2] == 'CNAME': <NEW_LINE> <INDENT> t = Type.CNAME <NEW_LINE> rdata = CNAMERecordData(Name(data[3])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> RR = ResourceRecord(Name(data[0]), t, Class.IN, int(data[1]), rdata) <NEW_LINE> if data[0] in dct.keys() : <NEW_LINE> <INDENT> dct[data[0]].append(RR) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dct[data[0]] = [RR] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for key, value in dct.items(): <NEW_LINE> <INDENT> self.add_node(key, value)
A zone in the domain name space
6259903473bcbd0ca4bcb396
class ParamsBaseClass: <NEW_LINE> <INDENT> def to_dict(self): <NEW_LINE> <INDENT> return self.__dict__ <NEW_LINE> <DEDENT> def from_dict(self, params_dict): <NEW_LINE> <INDENT> self.__dict__.update(params_dict)
.
62599034287bf620b6272cf6
class ReplacePreview(megrok.pagelet.Pagelet): <NEW_LINE> <INDENT> grok.layer(asm.cmsui.interfaces.ICMSSkin) <NEW_LINE> grok.require('asm.cms.EditContent') <NEW_LINE> def update(self): <NEW_LINE> <INDENT> self.search = self.request.form.get('search', '') <NEW_LINE> self.found = 0 <NEW_LINE> self.results = [] <NEW_LINE> pages = [self.application] <NEW_LINE> while pages: <NEW_LINE> <INDENT> page = pages.pop() <NEW_LINE> pages.extend(page.subpages) <NEW_LINE> for edition in page.editions: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> replace = asm.cms.interfaces.IReplaceSupport(edition) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> occurrences = replace.search(self.search) <NEW_LINE> self.found += len(occurrences) <NEW_LINE> if occurrences: <NEW_LINE> <INDENT> self.results.append( {'edition': edition, 'occurrences': occurrences})
Given a users search and replace terms show a list of all matches.
625990348a43f66fc4bf3297
class Entry(models.Model): <NEW_LINE> <INDENT> topic = models.ForeignKey(Topic, on_delete=models.PROTECT) <NEW_LINE> text = models.TextField() <NEW_LINE> date_added = models.DateTimeField(auto_now_add=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = 'entries' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.text[:50] + "..."
Информация, изученная пользователем по теме
625990348c3a8732951f7667
class ApproximateQAgent(PacmanQAgent): <NEW_LINE> <INDENT> def __init__(self, extractor='IdentityExtractor', **args): <NEW_LINE> <INDENT> self.featExtractor = util.lookup(extractor, globals())() <NEW_LINE> PacmanQAgent.__init__(self, **args) <NEW_LINE> self.weights = util.Counter() <NEW_LINE> <DEDENT> def getWeights(self): <NEW_LINE> <INDENT> return self.weights <NEW_LINE> <DEDENT> def getQValue(self, state, action): <NEW_LINE> <INDENT> features = self.featExtractor.getFeatures(state, action) <NEW_LINE> qVal = 0 <NEW_LINE> for f in features: <NEW_LINE> <INDENT> new = self.getWeights()[f] * features[f] <NEW_LINE> qVal = qVal + new <NEW_LINE> <DEDENT> return qVal <NEW_LINE> <DEDENT> def update(self, state, action, nextState, reward): <NEW_LINE> <INDENT> new = reward + (self.discount * self.computeValueFromQValues(nextState)) <NEW_LINE> old = self.getQValue(state, action) <NEW_LINE> difference = new - old <NEW_LINE> features = self.featExtractor.getFeatures(state, action) <NEW_LINE> for f in features: <NEW_LINE> <INDENT> n = self.alpha * difference * features[f] <NEW_LINE> self.getWeights()[f] = self.getWeights()[f] + n <NEW_LINE> <DEDENT> <DEDENT> def final(self, state): <NEW_LINE> <INDENT> PacmanQAgent.final(self, state) <NEW_LINE> if self.episodesSoFar == self.numTraining: <NEW_LINE> <INDENT> pass
ApproximateQLearningAgent You should only have to overwrite getQValue and update. All other QLearningAgent functions should work as is.
6259903430c21e258be9991b
class GRUCell(RNNCellBase): <NEW_LINE> <INDENT> def __init__(self, input_size, hidden_size, bias=True): <NEW_LINE> <INDENT> super(GRUCell, self).__init__(input_size, hidden_size,bias, num_chunks=3) <NEW_LINE> <DEDENT> def forward(self, input, h=None): <NEW_LINE> <INDENT> self.check_forward_input(input) <NEW_LINE> if h is None: <NEW_LINE> <INDENT> h= np.zeros(input.shape[0], self.hidden_size, dtype=input.dtype) <NEW_LINE> <DEDENT> self.check_forward_hidden(input, h, '') <NEW_LINE> return gru_cell( input, h, self.W_ih, self.W_hh, self.b_ih, self.b_hh, ) <NEW_LINE> <DEDENT> def __call__(self, input, h=None): <NEW_LINE> <INDENT> return self.forward(input,h) <NEW_LINE> <DEDENT> def backward(self, dh,run,input,h_pre): <NEW_LINE> <INDENT> if h_pre is None: <NEW_LINE> <INDENT> h_pre = np.zeros(input.shape[0], self.hidden_size, dtype=input.dtype) <NEW_LINE> <DEDENT> dx,dh_pre,grads = gru_cell_back( dh,run, input, h_pre, self.W_ih, self.W_hh, self.b_ih, self.b_hh ) <NEW_LINE> for a, b in zip(self.grads,grads): <NEW_LINE> <INDENT> a+=b <NEW_LINE> <DEDENT> return dx,dh_pre,grads
egin{array}{ll} r = \sigma(W_{ir} x + b_{ir} + W_{hr} h + b_{hr}) \ z = \sigma(W_{iz} x + b_{iz} + W_{hz} h + b_{hz}) \ n = anh(W_{in} x + b_{in} + r * (W_{hn} h + b_{hn})) \ h' = (1 - z) * n + z * h \end{array} Inputs: input, hidden - **input** of shape `(batch, input_size)`: tensor containing input features - **hidden** of shape `(batch, hidden_size)`: tensor containing the initial hidden state for each element in the batch. Defaults to zero if not provided. Outputs: h' - **h'** of shape `(batch, hidden_size)`: tensor containing the next hidden state for each element in the batch Attributes: weight_ih: the learnable input-hidden weights, of shape `(3*hidden_size, input_size)` weight_hh: the learnable hidden-hidden weights, of shape `(3*hidden_size, hidden_size)` bias_ih: the learnable input-hidden bias, of shape `(3*hidden_size)` bias_hh: the learnable hidden-hidden bias, of shape `(3*hidden_size)`
62599034a8ecb0332587232c
class Teacher(models.Model): <NEW_LINE> <INDENT> teacher_name = models.CharField(max_length=40) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.teacher_name
Teacher model.
62599034d18da76e235b79d6
class Movie: <NEW_LINE> <INDENT> def __init__(self, name, room, seat, schedule): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.room = room <NEW_LINE> self.total_seat = seat <NEW_LINE> self.free_seat = random.randint(1, 50) <NEW_LINE> self.schedule = schedule <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name + "//Salle " + str(self.room) + "-" + self.schedule <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__str__() <NEW_LINE> <DEDENT> def take_seat(self): <NEW_LINE> <INDENT> if self.free_seat > 0: <NEW_LINE> <INDENT> self.free_seat -= 1 <NEW_LINE> <DEDENT> return self.free_seat
CLass créant un objet "film" contenant le nom du film la salle de projection ainsi que les horaires.
6259903421bff66bcd723d75
class EvalModelFn(beam.DoFn): <NEW_LINE> <INDENT> def __init__(self, config, checkpoint): <NEW_LINE> <INDENT> self.config = config <NEW_LINE> self.checkpoint = checkpoint <NEW_LINE> self.wrapper = None <NEW_LINE> <DEDENT> def get_wrapper(self): <NEW_LINE> <INDENT> self.wrapper = inference_utils.get_inference_wrapper( self.config, FLAGS.rules, FLAGS.target_grammar, FLAGS.verbose) <NEW_LINE> inference_utils.get_checkpoint(self.wrapper, FLAGS.model_dir, self.checkpoint) <NEW_LINE> <DEDENT> def process(self, elements): <NEW_LINE> <INDENT> if self.wrapper is None: <NEW_LINE> <INDENT> self.get_wrapper() <NEW_LINE> <DEDENT> example, fallback_prediction = elements <NEW_LINE> metrics_dict, predictions = eval_utils.eval_model( self.wrapper, [example], [fallback_prediction], verbose=FLAGS.verbose) <NEW_LINE> beam_utils.dict_to_beam_counts(metrics_dict, "EvalModel") <NEW_LINE> source = example[0] <NEW_LINE> prediction = predictions[0] <NEW_LINE> yield "%s\t%s" % (source, prediction)
Beam wrapper for the inference wrapper.
62599034b830903b9686ed01
class ShoppinglistTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.app = create_app(config_name="testing") <NEW_LINE> self.client = self.app.test_client <NEW_LINE> self.register_route = '/auth/register' <NEW_LINE> self.login_route = '/auth/login' <NEW_LINE> self.user_route = '/user/' <NEW_LINE> self.confirm_route = '/verify/' <NEW_LINE> with self.app.app_context(): <NEW_LINE> <INDENT> db.session.close() <NEW_LINE> db.drop_all() <NEW_LINE> db.create_all() <NEW_LINE> self.confirm_token=generate_token("[email protected]") <NEW_LINE> <DEDENT> <DEDENT> def register_user(self, username="thisuser", password="userpassword", email="[email protected]"): <NEW_LINE> <INDENT> user_data = { 'username': username, 'password': password, 'email': email } <NEW_LINE> return self.client().post(self.register_route, data=user_data) <NEW_LINE> <DEDENT> def login_user(self, username="thisuser", password="userpassword"): <NEW_LINE> <INDENT> user_data = { 'username': username, 'password': password } <NEW_LINE> self.client().get(self.confirm_route + '{}'.format(self.confirm_token.decode("utf-8"))) <NEW_LINE> return self.client().post(self.login_route, data=user_data) <NEW_LINE> <DEDENT> def test_user_by_id(self): <NEW_LINE> <INDENT> self.register_user() <NEW_LINE> result = self.login_user() <NEW_LINE> access_token = json.loads(result.data.decode())['access_token'] <NEW_LINE> result = self.client().get( self.user_route, headers=dict(Auth=access_token)) <NEW_LINE> self.assertEqual(result.status_code, 200) <NEW_LINE> self.assertIn('thisuser', str(result.data)) <NEW_LINE> <DEDENT> def test_user_can_be_edited_or_deleted(self): <NEW_LINE> <INDENT> self.register_user() <NEW_LINE> result = self.login_user() <NEW_LINE> access_token = json.loads(result.data.decode())['access_token'] <NEW_LINE> result = self.client().get( self.user_route, headers=dict(Auth=access_token)) <NEW_LINE> edit_reqst = self.client().put(self.user_route, headers=dict(Auth=access_token), 
data={ "username": "ouruser" }) <NEW_LINE> self.assertEqual(edit_reqst.status_code, 200) <NEW_LINE> edit_results = self.client().get(self.user_route, headers=dict(Auth=access_token)) <NEW_LINE> self.assertIn('ouruser', str(edit_results.data)) <NEW_LINE> delete_reqst = self.client().delete(self.user_route, headers=dict(Auth=access_token)) <NEW_LINE> self.assertEqual(delete_reqst.status_code, 200) <NEW_LINE> <DEDENT> def test_password_can_be_edited(self): <NEW_LINE> <INDENT> self.register_user() <NEW_LINE> result = self.login_user() <NEW_LINE> access_token = json.loads(result.data.decode())['access_token'] <NEW_LINE> result = self.client().get( self.user_route, headers=dict(Auth=access_token)) <NEW_LINE> edit_reqst = self.client().put(self.user_route, headers=dict(Auth=access_token), data={ "password": "userpassword", "new_password": "new_password" }) <NEW_LINE> self.assertEqual(edit_reqst.status_code, 200)
This class is a test case for shoppinglist
62599034be8e80087fbc018e
class CarAdapter(object): <NEW_LINE> <INDENT> def __init__(self, manual): <NEW_LINE> <INDENT> self._manual = manual <NEW_LINE> <DEDENT> def low_gear(self): <NEW_LINE> <INDENT> if type(self.gear) is int and self.gear > 1: <NEW_LINE> <INDENT> while(self.gear != 1): <NEW_LINE> <INDENT> self._manual.shift_down <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def accelerate(self): <NEW_LINE> <INDENT> self._manual.accelerate() <NEW_LINE> self._manual.shift_up() <NEW_LINE> <DEDENT> def deccelerate(self): <NEW_LINE> <INDENT> self._manual.deccelerate() <NEW_LINE> self._manual.shift_down()
Adapter class to make an manual car act like an automatic car
625990345e10d32532ce418b
class VideoIntelligenceServiceClient(object): <NEW_LINE> <INDENT> SERVICE_ADDRESS = 'videointelligence.googleapis.com' <NEW_LINE> DEFAULT_SERVICE_PORT = 443 <NEW_LINE> _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', ) <NEW_LINE> def __init__(self, channel=None, credentials=None, ssl_credentials=None, scopes=None, client_config=None, lib_name=None, lib_version='', metrics_headers=()): <NEW_LINE> <INDENT> if scopes is None: <NEW_LINE> <INDENT> scopes = self._ALL_SCOPES <NEW_LINE> <DEDENT> if client_config is None: <NEW_LINE> <INDENT> client_config = {} <NEW_LINE> <DEDENT> metrics_headers = collections.OrderedDict(metrics_headers) <NEW_LINE> metrics_headers['gl-python'] = platform.python_version() <NEW_LINE> if lib_name: <NEW_LINE> <INDENT> metrics_headers[lib_name] = lib_version <NEW_LINE> <DEDENT> metrics_headers['gapic'] = pkg_resources.get_distribution( 'google-cloud-videointelligence', ).version <NEW_LINE> defaults = api_callable.construct_settings( 'google.cloud.videointelligence.v1beta2.VideoIntelligenceService', video_intelligence_service_client_config.config, client_config, config.STATUS_CODE_NAMES, metrics_headers=metrics_headers, ) <NEW_LINE> self.video_intelligence_service_stub = config.create_stub( video_intelligence_pb2.VideoIntelligenceServiceStub, channel=channel, service_path=self.SERVICE_ADDRESS, service_port=self.DEFAULT_SERVICE_PORT, credentials=credentials, scopes=scopes, ssl_credentials=ssl_credentials) <NEW_LINE> self.operations_client = operations_client.OperationsClient( service_path=self.SERVICE_ADDRESS, channel=channel, credentials=credentials, ssl_credentials=ssl_credentials, scopes=scopes, client_config=client_config, metrics_headers=metrics_headers, ) <NEW_LINE> self._annotate_video = api_callable.create_api_call( self.video_intelligence_service_stub.AnnotateVideo, settings=defaults['annotate_video']) <NEW_LINE> <DEDENT> def annotate_video(self, input_uri, features, input_content=None, video_context=None, output_uri=None, 
location_id=None, options=None): <NEW_LINE> <INDENT> request = video_intelligence_pb2.AnnotateVideoRequest( input_uri=input_uri, features=features, input_content=input_content, video_context=video_context, output_uri=output_uri, location_id=location_id) <NEW_LINE> return google.gax._OperationFuture( self._annotate_video(request, options), self.operations_client, video_intelligence_pb2.AnnotateVideoResponse, video_intelligence_pb2.AnnotateVideoProgress, options)
Service that implements Google Cloud Video Intelligence API.
6259903463f4b57ef00865fc
class IRCNumeric(enum.Enum): <NEW_LINE> <INDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return str(self.value).zfill(3) <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return f"{self.__class__.__name__}_{self.name}"
Base class for IRC numeric enums.
6259903426238365f5fadc65
class UserList(APIView): <NEW_LINE> <INDENT> permission_classes = (permissions.AllowAny,) <NEW_LINE> def post(self, request, format=None): <NEW_LINE> <INDENT> serializer = UserSerializerWithToken(data=request.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> serializer.save() <NEW_LINE> return Response(serializer.data, status=status.HTTP_201_CREATED) <NEW_LINE> <DEDENT> return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
Create a new user. It's called 'UserList' because normally we'd have a get method here too, for retrieving a list of all User objects.
6259903421bff66bcd723d77
class NamedRegex(_BaseRegex): <NEW_LINE> <INDENT> def validate(self, value): <NEW_LINE> <INDENT> value = super(NamedRegex, self).validate(value) <NEW_LINE> return value.groupdict()
A string based type like Regex but returning named groups in dict.
6259903496565a6dacd2d815
class TropicalMaxPlusSemiring(SemiringWithThresholdABC): <NEW_LINE> <INDENT> def __init__(self, threshold): <NEW_LINE> <INDENT> if not isinstance(threshold, int): <NEW_LINE> <INDENT> raise TypeError <NEW_LINE> <DEDENT> if threshold < 0: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> self._threshold = threshold <NEW_LINE> <DEDENT> def plus(self, x, y): <NEW_LINE> <INDENT> if not ((isinstance(x, int) or x == -float('inf')) and (isinstance(y, int) or y == -float('inf'))): <NEW_LINE> <INDENT> raise TypeError <NEW_LINE> <DEDENT> if (x < 0 and x != -float('inf')) or (y < 0 and y != -float('inf')): <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> if (x > self._threshold) or (y > self._threshold): <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> return max(x, y) <NEW_LINE> <DEDENT> def prod(self, x, y): <NEW_LINE> <INDENT> if not ((isinstance(x, int) or x == -float('inf')) and (isinstance(y, int) or y == -float('inf'))): <NEW_LINE> <INDENT> raise TypeError <NEW_LINE> <DEDENT> if (x < 0 and x != -float('inf')) or (y < 0 and y != -float('inf')): <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> if (x > self._threshold) or (y > self._threshold): <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> return min(self._threshold, x + y) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def zero(): <NEW_LINE> <INDENT> return -float('inf') <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def one(): <NEW_LINE> <INDENT> return 0
A *tropical max plus semiring* is a semiring comprising the set :math:`\{0, \ldots, t\} \cup\{-\infty\}`, for some value :math:`t\in\mathbb{N} \cup \{0\}`, the threshold of the semiring, together with an operation which returns the maximum of two elements, as the additive operation and addition of integers as the multiplicative operation. *Minus infinity* is a defined as smaller than all integers, and the integer sum of minus infinity and any element of the tropical max plus semiring is minus infinity. If the integer sum of any two elements is greater than the threshold, then the product is the threshold. Args: threshold (int): The threshold of the semiring. Returns: None Raises: TypeError: If threshold is not an int. ValueError: If threshold is negative. Examples: >>> from semigroups import TropicalMaxPlusSemiring >>> TropicalMaxPlusSemiring(26).plus(7, 25) 25 >>> TropicalMaxPlusSemiring(26).prod(7, 25) 26 >>> TropicalMaxPlusSemiring(26).threshold() 26
62599034d164cc6175822084
class UserViewSet(viewsets.ReadOnlyModelViewSet): <NEW_LINE> <INDENT> queryset = User.objects.all() <NEW_LINE> serializer_class = UserSerializer
This viewset automatically provides `list` and `detail` actions
6259903407d97122c4217dba
class ProgramEnrollmentSerializer(serializers.Serializer): <NEW_LINE> <INDENT> created = serializers.DateTimeField(format=DATETIME_FORMAT) <NEW_LINE> modified = serializers.DateTimeField(format=DATETIME_FORMAT) <NEW_LINE> external_user_key = serializers.CharField() <NEW_LINE> status = serializers.CharField() <NEW_LINE> program_uuid = serializers.UUIDField() <NEW_LINE> program_course_enrollments = ProgramCourseEnrollmentSerializer(many=True) <NEW_LINE> program_name = serializers.SerializerMethodField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = ProgramEnrollment <NEW_LINE> <DEDENT> def get_program_name(self, obj): <NEW_LINE> <INDENT> program_list = get_programs_by_uuids([obj.program_uuid]) <NEW_LINE> return next(iter(program_list), {}).get('title', '')
Serializes a Program Enrollment Model object
625990349b70327d1c57fe97
class Sync(A10BaseClass): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.ERROR_MSG = "" <NEW_LINE> self.required=[] <NEW_LINE> self.b_key = "sync" <NEW_LINE> self.a10_url="/axapi/v3/configure/sync" <NEW_LINE> self.DeviceProxy = "" <NEW_LINE> self.all_partitions = "" <NEW_LINE> self.partition_name = "" <NEW_LINE> self.pwd = "" <NEW_LINE> self.auto_authentication = "" <NEW_LINE> self.address = "" <NEW_LINE> self.shared = "" <NEW_LINE> self.A10WW_type = "" <NEW_LINE> self.usr = "" <NEW_LINE> for keys, value in kwargs.items(): <NEW_LINE> <INDENT> setattr(self,keys, value)
:param all_partitions: {"default": 0, "optional": true, "type": "number", "description": "All partition configurations", "format": "flag"} :param partition_name: {"description": "Partition name", "format": "string", "minLength": 1, "optional": true, "maxLength": 128, "type": "string"} :param pwd: {"minLength": 1, "maxLength": 128, "type": "string", "optional": true, "format": "string"} :param auto_authentication: {"default": 0, "optional": true, "type": "number", "description": "Authenticate with local username and password", "format": "flag"} :param address: {"optional": true, "type": "string", "description": "Specify the destination ip address to sync", "format": "ipv4-address"} :param shared: {"default": 0, "optional": true, "type": "number", "description": "Shared partition", "format": "flag"} :param type: {"optional": true, "enum": ["running", "all"], "type": "string", "description": "'running': Sync local running to peer's running configuration; 'all': Sync local running to peer's running configuration, and local startup to peer's startup configuration; ", "format": "enum"} :param usr: {"minLength": 1, "maxLength": 128, "type": "string", "optional": true, "format": "string"} :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py` Class Description:: Sync operation. Class sync supports CRUD Operations and inherits from `common/A10BaseClass`. This class is the `"PARENT"` class for this module.` URL for this object:: `https://<Hostname|Ip address>//axapi/v3/configure/sync`.
62599034be8e80087fbc0190
class SegParagraph(_BaseList): <NEW_LINE> <INDENT> item_class = SegSentence
A list of word-segmented sentences. .. admonition:: Data Structure Examples Text format Used for :meth:`from_text` and :meth:`to_text`. .. code-block:: python [ '中文字 耶 , 啊 哈 哈 。', # Sentence 1 '「 完蛋 了 ! 」 , 畢卡索 他 想', # Sentence 2 ] List/Dict format Used for :meth:`from_list`, :meth:`to_list`, :meth:`from_dict`, and :meth:`to_dict`. .. code-block:: python [ [ '中文字', '耶', ',', '啊', '哈', '哈哈', '。', ], # Sentence 1 [ '「', '完蛋', '了', '!', '」', ',', '畢卡索', '他', '想', ], # Sentence 2 ] .. note:: This class is also used for part-of-speech tagging.
62599034ec188e330fdf99a7
class TypeHIdItem : <NEW_LINE> <INDENT> def __init__(self, size, buff, cm) : <NEW_LINE> <INDENT> self.__CM = cm <NEW_LINE> self.offset = buff.get_idx() <NEW_LINE> self.type = [] <NEW_LINE> for i in range(0, size) : <NEW_LINE> <INDENT> self.type.append( TypeIdItem( buff, cm ) ) <NEW_LINE> <DEDENT> <DEDENT> def get_type(self) : <NEW_LINE> <INDENT> return self.type <NEW_LINE> <DEDENT> def get(self, idx) : <NEW_LINE> <INDENT> try : <NEW_LINE> <INDENT> return self.type[ idx ].get_descriptor_idx() <NEW_LINE> <DEDENT> except IndexError : <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> <DEDENT> def set_off(self, off) : <NEW_LINE> <INDENT> self.offset = off <NEW_LINE> <DEDENT> def get_off(self) : <NEW_LINE> <INDENT> return self.offset <NEW_LINE> <DEDENT> def reload(self) : <NEW_LINE> <INDENT> for i in self.type : <NEW_LINE> <INDENT> i.reload() <NEW_LINE> <DEDENT> <DEDENT> def show(self) : <NEW_LINE> <INDENT> bytecode._PrintSubBanner("Type List Item") <NEW_LINE> for i in self.type : <NEW_LINE> <INDENT> i.show() <NEW_LINE> <DEDENT> <DEDENT> def get_obj(self) : <NEW_LINE> <INDENT> return [ i for i in self.type ] <NEW_LINE> <DEDENT> def get_raw(self) : <NEW_LINE> <INDENT> return ''.join(i.get_raw() for i in self.type) <NEW_LINE> <DEDENT> def get_length(self) : <NEW_LINE> <INDENT> length = 0 <NEW_LINE> for i in self.type : <NEW_LINE> <INDENT> length += i.get_length() <NEW_LINE> <DEDENT> return length
This class can parse a list of type_id_item of a dex file :param buff: a string which represents a Buff object of the list of type_id_item :type buff: Buff object :param cm: a ClassManager object :type cm: :class:`ClassManager`
6259903491af0d3eaad3af42
class AliyunConfig(models.Model): <NEW_LINE> <INDENT> key = models.CharField('阿里云登录 Key', max_length=256, null=False) <NEW_LINE> secret = models.CharField('阿里云登录 Secret', max_length=256, null=False) <NEW_LINE> region = models.CharField('阿里云 Region', max_length=256, null=False) <NEW_LINE> create_time = models.DateTimeField(auto_now_add=True) <NEW_LINE> update_time = models.DateTimeField(auto_now=True)
阿里云账号配置
62599034287bf620b6272cfb
class BalsaStringListHandler(logging.NullHandler): <NEW_LINE> <INDENT> def __init__(self, max_entries): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.max_entries = max_entries <NEW_LINE> self.strings = [] <NEW_LINE> <DEDENT> def handle(self, record): <NEW_LINE> <INDENT> self.strings.append(self.format(record)) <NEW_LINE> if len(self.strings) > self.max_entries: <NEW_LINE> <INDENT> self.strings.pop(0)
keeps a buffer of the most recent log entries
625990348c3a8732951f766c
class AbstractLossFunction(nn.Module, ABC): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def forward( self, pos_scores: FloatTensorType, neg_scores: FloatTensorType ) -> FloatTensorType: <NEW_LINE> <INDENT> pass
Calculate weighted loss of scores for positive and negative pairs. The inputs are a 1-D tensor of size P containing scores for positive pairs of entities (i.e., those among which an edge exists) and a P x N tensor containing scores for negative pairs (i.e., where no edge should exist). The pairs of entities corresponding to pos_scores[i] and to neg_scores[i,j] have at least one endpoint in common. The output is the loss value these scores induce. If the method supports weighting (as is the case for the logistic loss) all positive scores will be weighted by the same weight and so will all the negative ones.
6259903466673b3332c31506
class checl_web_url(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(checl_web_url, self).__init__() <NEW_LINE> <DEDENT> def get_url_status(self, checkurl): <NEW_LINE> <INDENT> mess = {"url": checkurl} <NEW_LINE> print(">> get_url_status %s" % checkurl) <NEW_LINE> r = requests.get(checkweburl_url, params=mess) <NEW_LINE> print("http status--------->> %s" % r.status_code) <NEW_LINE> print(r.text) <NEW_LINE> return r.status_code
docstring for checl_web_url
62599034d164cc6175822086
class UnaryOperation(object): <NEW_LINE> <INDENT> def __init__(self, tokens): <NEW_LINE> <INDENT> self.op, self.operands = tokens[0]
takes one operand, e.g. not
62599034be8e80087fbc0192
class User(AbstractUser): <NEW_LINE> <INDENT> id = models.UUIDField( primary_key=True, default=uuid.uuid4, editable=False ) <NEW_LINE> email = models.EmailField( "Email", unique=True ) <NEW_LINE> password_reset_token = models.UUIDField( "Password's reset token", null=True, blank=True, unique=True, help_text="This token will be send to email for reseting password" ) <NEW_LINE> password_reset_token_expiration_datetime = models.DateTimeField( "Password's reset token expiration datetime delta", null=True, blank=True, help_text="Used to expire password's reset token" ) <NEW_LINE> username = None <NEW_LINE> first_name = None <NEW_LINE> last_name = None <NEW_LINE> USERNAME_FIELD = "email" <NEW_LINE> REQUIRED_FIELDS = [] <NEW_LINE> objects = UserManager() <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.email
This model will be used for both admin and client users
625990346fece00bbacccabd
class marshal_with(object): <NEW_LINE> <INDENT> def __init__( self, fields, envelope=None, skip_none=False, mask=None, ordered=False ): <NEW_LINE> <INDENT> self.fields = fields <NEW_LINE> self.envelope = envelope <NEW_LINE> self.skip_none = skip_none <NEW_LINE> self.ordered = ordered <NEW_LINE> self.mask = Mask(mask, skip=True) <NEW_LINE> <DEDENT> def __call__(self, f): <NEW_LINE> <INDENT> @wraps(f) <NEW_LINE> def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> resp = f(*args, **kwargs) <NEW_LINE> mask = self.mask <NEW_LINE> if has_app_context(): <NEW_LINE> <INDENT> mask_header = current_app.config["RESTX_MASK_HEADER"] <NEW_LINE> mask = request.headers.get(mask_header) or mask <NEW_LINE> <DEDENT> if isinstance(resp, tuple): <NEW_LINE> <INDENT> data, code, headers = unpack(resp) <NEW_LINE> return ( marshal( data, self.fields, self.envelope, self.skip_none, mask, self.ordered, ), code, headers, ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return marshal( resp, self.fields, self.envelope, self.skip_none, mask, self.ordered ) <NEW_LINE> <DEDENT> <DEDENT> return wrapper
A decorator that apply marshalling to the return values of your methods. >>> from flask_restx import fields, marshal_with >>> mfields = { 'a': fields.Raw } >>> @marshal_with(mfields) ... def get(): ... return { 'a': 100, 'b': 'foo' } ... ... >>> get() OrderedDict([('a', 100)]) >>> @marshal_with(mfields, envelope='data') ... def get(): ... return { 'a': 100, 'b': 'foo' } ... ... >>> get() OrderedDict([('data', OrderedDict([('a', 100)]))]) >>> mfields = { 'a': fields.Raw, 'c': fields.Raw, 'd': fields.Raw } >>> @marshal_with(mfields, skip_none=True) ... def get(): ... return { 'a': 100, 'b': 'foo', 'c': None } ... ... >>> get() OrderedDict([('a', 100)]) see :meth:`flask_restx.marshal`
62599034d99f1b3c44d067b8
class TestDocument(Document): <NEW_LINE> <INDENT> def __init__(self, doc_text): <NEW_LINE> <INDENT> self.sents = self._parse_doc_text(doc_text) <NEW_LINE> self.mentions = self._get_mentions(self.sents) <NEW_LINE> <DEDENT> def to_conll_format(self): <NEW_LINE> <INDENT> self._change_coref_values() <NEW_LINE> lines = [] <NEW_LINE> header = '#begin document ({}); part {}\n'.format(self.file_id, self.part) <NEW_LINE> lines.append(header) <NEW_LINE> for i in range(len(self.sents)): <NEW_LINE> <INDENT> sent = self.sents[i] <NEW_LINE> for row_dict in sent: <NEW_LINE> <INDENT> row_vals = [self.file_id, str(int(self.part)), row_dict['word_num'], row_dict['word'], row_dict['pos'], row_dict['parse'], row_dict['lemma'], '-', row_dict['sense'], '-', row_dict['ne'], '-', row_dict['coref'] ] <NEW_LINE> line = '\t'.join(row_vals) + '\n' <NEW_LINE> lines.append(line) <NEW_LINE> <DEDENT> lines.append('\n') <NEW_LINE> <DEDENT> lines.append('#end document\n') <NEW_LINE> return ''.join(lines) <NEW_LINE> <DEDENT> def _change_coref_values(self): <NEW_LINE> <INDENT> for sent in self.sents: <NEW_LINE> <INDENT> for row_dict in sent: <NEW_LINE> <INDENT> row_dict['coref'] = '-' <NEW_LINE> <DEDENT> <DEDENT> for mention in self.mentions: <NEW_LINE> <INDENT> if mention.start == mention.end - 1: <NEW_LINE> <INDENT> self._set_coref(mention.sent_id, mention.start, mention.label, 'singleton' ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._set_coref(mention.sent_id, mention.start, mention.label, 'start' ) <NEW_LINE> self._set_coref(mention.sent_id, mention.end - 1, mention.label, 'end' ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _set_coref(self, sent_num, word_num, coref_id, pos): <NEW_LINE> <INDENT> row_dict = self.sents[sent_num][word_num] <NEW_LINE> old_coref = row_dict['coref'] <NEW_LINE> new_coref = None <NEW_LINE> if pos == 'singleton': <NEW_LINE> <INDENT> new_coref = '({})'.format(coref_id) <NEW_LINE> <DEDENT> elif pos == 'start': <NEW_LINE> <INDENT> new_coref = '({}'.format(coref_id) 
<NEW_LINE> <DEDENT> elif pos == 'end': <NEW_LINE> <INDENT> new_coref = '{})'.format(coref_id) <NEW_LINE> <DEDENT> if old_coref == '-': <NEW_LINE> <INDENT> row_dict['coref'] = new_coref <NEW_LINE> <DEDENT> elif pos == 'start' or (pos == 'singleton' and old_coref.startswith('(')): <NEW_LINE> <INDENT> row_dict['coref'] = '{}|{}'.format(old_coref, new_coref) <NEW_LINE> <DEDENT> elif pos == 'end' or (pos == 'singleton' and not old_coref.startswith('(')): <NEW_LINE> <INDENT> row_dict['coref'] = '{}|{}'.format(new_coref, old_coref)
A container for storing mentions extracted from a file in CONLL 2012 format. This Document subclass is used for testing, where it is more efficient to compute MentionPairs on the fly. Attributes: sents: A list of sublists of dictionaries corresponding to rows of the CONLL document. mentions: A list of Mention objects extracted from the given text. file_id: The filepath of the corresponding CONLL file, relative to the 'annotations' subdirectory in the corpus. part: The number (as a string) of this individual document within the CONLL file.
6259903476d4e153a661dafb
class UserDoesNotExistsError(QiitaWareError): <NEW_LINE> <INDENT> pass
Error used when a user does not exist
62599034d6c5a102081e323b
class NotAFileError(Exception): <NEW_LINE> <INDENT> pass
Excepción que indica que no la ruta no es un fichero
62599034287bf620b6272cfd
class Game(object): <NEW_LINE> <INDENT> class Player(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.hp = PLAYER_HP <NEW_LINE> self.mana = PLAYER_MANA <NEW_LINE> self.damage = 0 <NEW_LINE> self.armor = 0 <NEW_LINE> self.mana_spent = 0 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'player' <NEW_LINE> <DEDENT> <DEDENT> class Monster(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.hp = BOSS_STATS[0] <NEW_LINE> self.damage = BOSS_STATS[1] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'monster' <NEW_LINE> <DEDENT> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> self.winner = None <NEW_LINE> self.player = self.Player() <NEW_LINE> self.monster = self.Monster() <NEW_LINE> self.poison = 0 <NEW_LINE> self.shield = 0 <NEW_LINE> self.recharge = 0 <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def makegame(cls, game=None): <NEW_LINE> <INDENT> if game is None: <NEW_LINE> <INDENT> return cls() <NEW_LINE> <DEDENT> new_game = cls() <NEW_LINE> new_game.winner = game.winner <NEW_LINE> new_game.poison = game.poison <NEW_LINE> new_game.shield = game.shield <NEW_LINE> new_game.recharge = game.recharge <NEW_LINE> new_game.player.hp = game.player.hp <NEW_LINE> new_game.player.mana = game.player.mana <NEW_LINE> new_game.player.damage = game.player.damage <NEW_LINE> new_game.player.armor = game.player.armor <NEW_LINE> new_game.player.mana_spent = game.player.mana_spent <NEW_LINE> new_game.monster.hp = game.monster.hp <NEW_LINE> return new_game
Game state object. Represents the state of Game at some instance.
6259903494891a1f408b9f84
class OfferToAgent(OfferAspect): <NEW_LINE> <INDENT> agents = models.ManyToManyField('rea.Agent') <NEW_LINE> reason = models.TextField()
Theoretically offer a unique set of Agents an Offer Aspect based on some kind of activity. For example, if the Agent has had this Offer or a Related Offer in their Quote/Cart before then perhaps offer them a better deal? Note: these are best created with an algorithm. Rule-based Offer Aspects will be better applicable. Combine this with other Offer Aspects such as: OfferDiscount OfferValidUntil To give limited time offers based on
6259903471ff763f4b5e88ad
class Path: <NEW_LINE> <INDENT> def __init__(self, lst: list = []) -> None: <NEW_LINE> <INDENT> self.states = lst <NEW_LINE> <DEDENT> def contains(self, state: State) -> bool: <NEW_LINE> <INDENT> if state in self.states: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def getTerminalState(self) -> State: <NEW_LINE> <INDENT> return self.states[-1] <NEW_LINE> <DEDENT> def add(self, state: State) -> bool: <NEW_LINE> <INDENT> if not self.contains(state): <NEW_LINE> <INDENT> self.states.append(state) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def getLength(self) -> int: <NEW_LINE> <INDENT> return len(self.states) <NEW_LINE> <DEDENT> def getContent(self) -> list: <NEW_LINE> <INDENT> return self.states <NEW_LINE> <DEDENT> def printCurrentPath(self) -> None: <NEW_LINE> <INDENT> if self.getLength() != 0: <NEW_LINE> <INDENT> self.states[0].printCurrentState() <NEW_LINE> for index in range(1, len(self.states)): <NEW_LINE> <INDENT> state = self.states[index] <NEW_LINE> prevState = self.states[index - 1] <NEW_LINE> if index % 2 == 1: <NEW_LINE> <INDENT> print(state.getAction(prevState, 1)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(state.getAction(prevState, 0)) <NEW_LINE> <DEDENT> state.printCurrentState() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print("The path is empty")
Path class holds a list of all possible paths and adds new states into path.
62599034a4f1c619b294f70d
class UsuarioSchema(ma.Schema): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> fields = ("id", "correo", "clave")
Representa el schema de un usuario
625990341f5feb6acb163d08
class Path(String): <NEW_LINE> <INDENT> def validate(self, value): <NEW_LINE> <INDENT> value = super(Path, self).validate(value) <NEW_LINE> return os.path.abspath(os.path.expanduser(value))
A string representing a filesystem path. It will expand '~' to user home directory and return an absolute path if you provide a relative path (this is useful if you change the working directory of a process after configuration parsing).
625990341d351010ab8f4c30
class LetterRedirectView(RedirectView): <NEW_LINE> <INDENT> def get_redirect_url(self, year, month, day, slug): <NEW_LINE> <INDENT> return reverse( "letter_detail", kwargs={"year": year, "month": month, "day": day, "slug": slug}, )
To help with redirecting from old /letter/1660/01/01/slug-field.php URLs to the new Letter URLs.
62599034ac7a0e7691f73600
class AddChildMemberApiView(CreateAPIView): <NEW_LINE> <INDENT> serializer_class = AddChildMemberSerializer
Add child API view
625990343eb6a72ae038b77e
class ShuffleInitBlock(HybridBlock): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, **kwargs): <NEW_LINE> <INDENT> super(ShuffleInitBlock, self).__init__(**kwargs) <NEW_LINE> with self.name_scope(): <NEW_LINE> <INDENT> self.conv = nn.Conv2D( channels=out_channels, kernel_size=3, strides=2, padding=1, use_bias=False, in_channels=in_channels) <NEW_LINE> self.bn = nn.BatchNorm(in_channels=out_channels) <NEW_LINE> self.activ = nn.Activation('relu') <NEW_LINE> self.pool = nn.MaxPool2D( pool_size=3, strides=2, padding=1) <NEW_LINE> <DEDENT> <DEDENT> def hybrid_forward(self, F, x): <NEW_LINE> <INDENT> x = self.conv(x) <NEW_LINE> x = self.bn(x) <NEW_LINE> x = self.activ(x) <NEW_LINE> x = self.pool(x) <NEW_LINE> return x
ShuffleNet specific initial block. Parameters: ---------- in_channels : int Number of input channels. out_channels : int Number of output channels.
625990345e10d32532ce418e
class ScoreModel(ndb.Model): <NEW_LINE> <INDENT> away_name = ndb.StringProperty(default='') <NEW_LINE> away_score = ndb.IntegerProperty(required=True) <NEW_LINE> game_clock = ndb.StringProperty(default='00:00') <NEW_LINE> game_day = ndb.StringProperty(default='') <NEW_LINE> game_id = ndb.IntegerProperty(required=True) <NEW_LINE> game_status = ndb.StringProperty(default='') <NEW_LINE> game_time = ndb.StringProperty(default='00:00') <NEW_LINE> home_name = ndb.StringProperty(default='') <NEW_LINE> home_score = ndb.IntegerProperty(required=True) <NEW_LINE> week = ndb.IntegerProperty(required=True) <NEW_LINE> year = ndb.IntegerProperty(required=True)
Datastore model for Spread data away_name: away team's name away_score: away team's score game_clock: the amount of time remaining in the game game_day: the day of the week that the game is played on game_id: unique identifier of the game game_status: playing status of the game (final, pregame, currently playing, etc) game_time: the time that the game starts at home_name: home team's name home_score: home team's score week: game week year: season
625990349b70327d1c57fe9c
class RequestConfiguration(Payload): <NEW_LINE> <INDENT> INTENT = "COMMAND"
Configuration request message
625990348e05c05ec3f6f6e6
class Entity(object): <NEW_LINE> <INDENT> __metaclass__ = metaEntity <NEW_LINE> def __init__(self, **kw): <NEW_LINE> <INDENT> self._pyport_new = True <NEW_LINE> self._pyport_data = {} <NEW_LINE> self._pyport_dirty = False <NEW_LINE> self._pyport_dirty_list = {} <NEW_LINE> self._pyport_session = None <NEW_LINE> for name in self._pyport_metadata.columns.keys(): <NEW_LINE> <INDENT> self._pyport_data[name] = None <NEW_LINE> <DEDENT> for name in self._pyport_metadata.relations.keys(): <NEW_LINE> <INDENT> self._pyport_data[name] = None <NEW_LINE> <DEDENT> for name, value in kw.items(): <NEW_LINE> <INDENT> if name not in self._pyport_metadata.columns.keys(): <NEW_LINE> <INDENT> raise error.NoSuchAttributeError('Unknown attribute %s'%name) <NEW_LINE> <DEDENT> setattr(self, name, value) <NEW_LINE> del kw[name] <NEW_LINE> <DEDENT> <DEDENT> def set_dirty(self, dirty=True): <NEW_LINE> <INDENT> self._pyport_dirty = dirty <NEW_LINE> if not dirty: <NEW_LINE> <INDENT> self._pyport_dirty_list = {} <NEW_LINE> <DEDENT> if dirty: <NEW_LINE> <INDENT> self.register_as_dirty() <NEW_LINE> <DEDENT> <DEDENT> def is_dirty(self): <NEW_LINE> <INDENT> return self._pyport_dirty or (self._pyport_dirty_list and 1 or 0) <NEW_LINE> <DEDENT> def register_as_dirty(self): <NEW_LINE> <INDENT> if self._pyport_session and self._pyport_session(): <NEW_LINE> <INDENT> self._pyport_session().add_update(self) <NEW_LINE> <DEDENT> <DEDENT> def _pyport_get_property(self, name): <NEW_LINE> <INDENT> return self._pyport_data[name] <NEW_LINE> <DEDENT> def _pyport_set_property(self, name, value): <NEW_LINE> <INDENT> if self._pyport_data[name] != value: <NEW_LINE> <INDENT> self._pyport_data[name] = value <NEW_LINE> self._pyport_dirty_list[name] = 1 <NEW_LINE> self.register_as_dirty() <NEW_LINE> <DEDENT> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> return ( self._pyport_data, self._pyport_dirty_list, self._pyport_dirty, self._pyport_new) <NEW_LINE> <DEDENT> def __setstate__(self, state): <NEW_LINE> <INDENT> 
self._pyport_data = state[0] <NEW_LINE> self._pyport_dirty_list = state[1] <NEW_LINE> self._pyport_dirty = state[2] <NEW_LINE> self._pyport_new = state[3] <NEW_LINE> self._pyport_session = None <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__str__() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> class_name = '%s.%s'%(self.__class__.__module__,self.__class__.__name__) <NEW_LINE> args = ['%s=%s'%(key,val) for key, val in self._pyport_data.items()] <NEW_LINE> return '%s(%s)'%(class_name,','.join(args))
Entity base class.
6259903426238365f5fadc6b
class BaseTTSTest(unittest.TestCase): <NEW_LINE> <INDENT> CLS = None <NEW_LINE> SLUG = None <NEW_LINE> INIT_ATTRS = ['enabled'] <NEW_LINE> CONF = {} <NEW_LINE> OBJ_ATTRS = [] <NEW_LINE> EVAL_PLAY = True <NEW_LINE> SKIP_IF_NOT_AVAILABLE = True <NEW_LINE> FILE_TYPE = 'WAVE audio' <NEW_LINE> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> if not cls.CLS: <NEW_LINE> <INDENT> raise unittest.SkipTest() <NEW_LINE> <DEDENT> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> global LAST_PLAY <NEW_LINE> LAST_PLAY = None <NEW_LINE> if not self.CLS: <NEW_LINE> <INDENT> raise unittest.SkipTest() <NEW_LINE> <DEDENT> <DEDENT> def skip_not_available(self): <NEW_LINE> <INDENT> if self.SKIP_IF_NOT_AVAILABLE and not self.CLS(**self.CONF).is_available(): <NEW_LINE> <INDENT> raise unittest.SkipTest() <NEW_LINE> <DEDENT> <DEDENT> def test_class_init_options(self): <NEW_LINE> <INDENT> cls = self.CLS <NEW_LINE> self.assertEqual(cls.SLUG, self.SLUG) <NEW_LINE> self.assertEqual(sorted(cls.get_init_options().keys()), sorted(self.INIT_ATTRS)) <NEW_LINE> <DEDENT> def test_configure_not_enabled(self): <NEW_LINE> <INDENT> with self.assertRaisesRegexp(TTSError, 'Not enabled'): <NEW_LINE> <INDENT> self.CLS(enabled=False).configure() <NEW_LINE> <DEDENT> <DEDENT> def test_class_instantiation(self): <NEW_LINE> <INDENT> self.skip_not_available() <NEW_LINE> obj = self.CLS(**self.CONF) <NEW_LINE> self.assertEqual(obj.SLUG, self.SLUG) <NEW_LINE> self.assertEqual(obj.is_available(), True) <NEW_LINE> self.assertEqual(sorted(obj.get_options().keys()), sorted(self.OBJ_ATTRS)) <NEW_LINE> <DEDENT> def test_class_configure(self): <NEW_LINE> <INDENT> self.skip_not_available() <NEW_LINE> obj = self.CLS(**self.CONF) <NEW_LINE> language, voice, voiceinfo, options = obj._configure() <NEW_LINE> self.assertEqual(language, 'en') <NEW_LINE> self.assertIsNotNone(voice) <NEW_LINE> self.assertEqual(voice, obj.get_languages()['en']['default']) <NEW_LINE> self.assertEqual(sorted(options.keys()), 
sorted(self.OBJ_ATTRS)) <NEW_LINE> <DEDENT> def test_class_say(self): <NEW_LINE> <INDENT> self.skip_not_available() <NEW_LINE> obj = self.CLS(**self.CONF) <NEW_LINE> obj.say('Cows go moo') <NEW_LINE> if self.EVAL_PLAY: <NEW_LINE> <INDENT> inst, filename, output = LAST_PLAY <NEW_LINE> self.assertIn(self.FILE_TYPE, output) <NEW_LINE> self.assertEqual(inst, obj) <NEW_LINE> self.assertFalse(isfile(filename), 'Tempfile not deleted')
Tests talkey basic functionality
62599034b830903b9686ed05
class SricErrorTimeout(Exception): <NEW_LINE> <INDENT> pass
Request timed out
62599034c432627299fa410f
class Component(ApplicationSession): <NEW_LINE> <INDENT> @inlineCallbacks <NEW_LINE> def onJoin(self, details): <NEW_LINE> <INDENT> for x in [2, 0, -2]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> res = yield self.call('com.myapp.sqrt', x) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print("Error: {} {}".format(e, e.args)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Result: {}".format(res)) <NEW_LINE> <DEDENT> <DEDENT> for name in ['foo', 'a', '*'*11, 'Hello']: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> res = yield self.call('com.myapp.checkname', name) <NEW_LINE> <DEDENT> except ApplicationError as e: <NEW_LINE> <INDENT> print("Error: {} {} {} {}".format(e, e.error, e.args, e.kwargs)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Result: {}".format(res)) <NEW_LINE> <DEDENT> <DEDENT> self.define(AppError1) <NEW_LINE> try: <NEW_LINE> <INDENT> yield self.call('com.myapp.compare', 3, 17) <NEW_LINE> <DEDENT> except AppError1 as e: <NEW_LINE> <INDENT> print("Compare Error: {}".format(e)) <NEW_LINE> <DEDENT> self.leave() <NEW_LINE> <DEDENT> def onDisconnect(self): <NEW_LINE> <INDENT> reactor.stop()
Example WAMP application frontend that catches exceptions.
625990343eb6a72ae038b780
class RecipeSchema(Schema): <NEW_LINE> <INDENT> title = fields.String(required=True) <NEW_LINE> ingredients = fields.String(required=True) <NEW_LINE> steps = fields.String(required=True) <NEW_LINE> category_id = fields.Integer(required=True) <NEW_LINE> @validates('title') <NEW_LINE> def validate_recipe_title(self, title): <NEW_LINE> <INDENT> if special_character(title): <NEW_LINE> <INDENT> raise ValidationError('Recipe title should not contain special characters') <NEW_LINE> <DEDENT> elif len(title) <= 3: <NEW_LINE> <INDENT> raise ValidationError('Recipe title should more than 3 characters long') <NEW_LINE> <DEDENT> <DEDENT> @validates('category_id') <NEW_LINE> def validate_category_id(self, category_id): <NEW_LINE> <INDENT> if type(category_id) != int: <NEW_LINE> <INDENT> raise ValidationError('Category id should be an interger') <NEW_LINE> <DEDENT> <DEDENT> @validates('steps') <NEW_LINE> def validate_recipe_steps(self, steps): <NEW_LINE> <INDENT> if len(steps) <= 3: <NEW_LINE> <INDENT> raise ValidationError('We need to cook something more that 3 characters') <NEW_LINE> <DEDENT> <DEDENT> @validates('ingredients') <NEW_LINE> def validate_recipe_ingredients(self, ingredients): <NEW_LINE> <INDENT> if len(ingredients) <= 3: <NEW_LINE> <INDENT> raise ValidationError('We need ingredients that are more that 3 characters')
Schema used for validating Recipes.
6259903430c21e258be99926
class Dataset(object): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> def __init__(self, name, subset): <NEW_LINE> <INDENT> assert subset in self.available_subsets(), self.available_subsets() <NEW_LINE> self.name = name <NEW_LINE> self.subset = subset <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def num_classes(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def num_examples_per_epoch(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def download_message(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def available_subsets(self): <NEW_LINE> <INDENT> return ['train', 'valid'] <NEW_LINE> <DEDENT> def data_files(self): <NEW_LINE> <INDENT> tf_record_pattern = os.path.join(FLAGS.data_dir, '%s-*' % self.subset) <NEW_LINE> print('!!!!!!!!!!!!!!') <NEW_LINE> print(tf_record_pattern) <NEW_LINE> data_files = tf.gfile.Glob(tf_record_pattern) <NEW_LINE> if not data_files: <NEW_LINE> <INDENT> print('No files found for dataset %s/%s at %s' % (self.name, self.subset, FLAGS.data_dir)) <NEW_LINE> self.download_message() <NEW_LINE> exit(-1) <NEW_LINE> <DEDENT> return data_files <NEW_LINE> <DEDENT> def reader(self): <NEW_LINE> <INDENT> return tf.TFRecordReader()
A simple class for handling data sets.
62599034e76e3b2f99fd9b25
class bottleneck_transformation(nn.Module):
    """Bottleneck Residual Block: 1x1 -> 3x3 -> 1x1 convolutions with a
    residual connection, optionally preceding each conv with deformable
    convolution offsets (ConvOffset2D) when ``deform=True``."""

    def __init__(self, inplanes, outplanes, innerplanes, stride=1, dilation=1,
                 group=1, downsample=None, deform=False):
        super().__init__()
        # cfg.RESNETS.STRIDE_1X1 chooses whether the stride is applied on the
        # first 1x1 conv (Caffe style) or on the 3x3 conv.
        (str1x1, str3x3) = (stride, 1) if cfg.RESNETS.STRIDE_1X1 else (1, stride)
        self.stride = stride
        self.deform = deform
        if not deform:
            # Plain bottleneck: conv1(1x1) -> conv2(3x3) -> conv3(1x1),
            # each followed by a frozen-BN-style affine layer.
            self.conv1 = nn.Conv2d(
                inplanes, innerplanes, kernel_size=1, stride=str1x1, bias=False)
            self.bn1 = mynn.AffineChannel2d(innerplanes)
            self.conv2 = nn.Conv2d(
                innerplanes, innerplanes, kernel_size=3, stride=str3x3,
                bias=False, padding=1 * dilation, dilation=dilation,
                groups=group)
            self.bn2 = mynn.AffineChannel2d(innerplanes)
            self.conv3 = nn.Conv2d(
                innerplanes, outplanes, kernel_size=1, stride=1, bias=False)
            self.bn3 = mynn.AffineChannel2d(outplanes)
            self.downsample = downsample
            self.relu = nn.ReLU(inplace=True)
        else:
            # Deformable path: a ConvOffset2D precedes each convolution.
            self.offsets1 = ConvOffset2D(inplanes)
            self.conv1 = nn.Conv2d(
                inplanes, innerplanes, kernel_size=1, stride=str1x1, bias=False)
            self.bn1 = mynn.AffineChannel2d(innerplanes)
            self.offsets2 = ConvOffset2D(innerplanes)
            self.conv2 = nn.Conv2d(
                innerplanes, innerplanes, kernel_size=3, stride=str3x3,
                bias=False, padding=1 * dilation, dilation=dilation,
                groups=group)
            self.bn2 = mynn.AffineChannel2d(innerplanes)
            self.offsets3 = ConvOffset2D(innerplanes)
            self.conv3 = nn.Conv2d(
                innerplanes, outplanes, kernel_size=1, stride=1, bias=False)
            self.bn3 = mynn.AffineChannel2d(outplanes)
            self.downsample = downsample
            self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        """Apply the bottleneck transform and add the (possibly downsampled)
        residual."""
        residual = x
        if not self.deform:
            out = self.conv1(x)
            out = self.bn1(out)
            out = self.relu(out)
            out = self.conv2(out)
            out = self.bn2(out)
            out = self.relu(out)
            out = self.conv3(out)
            out = self.bn3(out)
        else:
            # NOTE(review): `x` is rebound here, so when `downsample` is not
            # None it receives the offset-transformed input rather than the
            # original input; meanwhile `residual` still holds the original
            # when downsample is None -- confirm this asymmetry is intended.
            x = self.offsets1(x)
            out = self.conv1(x)
            out = self.bn1(out)
            out = self.relu(out)
            out = self.offsets2(out)
            out = self.conv2(out)
            out = self.bn2(out)
            out = self.relu(out)
            out = self.offsets3(out)
            out = self.conv3(out)
            out = self.bn3(out)
        if self.downsample is not None:
            residual = self.downsample(x)
        out += residual
        out = self.relu(out)
        return out
Bottleneck Residual Block
62599034d53ae8145f91957d
class BaseGeometry:
    """BaseGeometry contains a method area."""

    def area(self):
        """Always raise: subclasses must provide their own area()."""
        message = "area() is not implemented"
        raise Exception(message)
BaseGeometry contains a method area
62599034b830903b9686ed06
class LogMainPage(AppDashboard):
    """Class to handle requests to the /logs page."""

    # Template used to render the main logs page.
    TEMPLATE = 'logs/main.html'

    def get(self):
        """Renders the list of logged services the user is allowed to view.

        Cloud admins see every service; other users only see services that
        match apps they own. Users with no access are redirected to the
        status page.
        """
        is_cloud_admin = self.helper.is_user_cloud_admin()
        apps_user_is_admin_on = self.helper.get_owned_apps()
        if (not is_cloud_admin) and (not apps_user_is_admin_on):
            self.redirect(StatusPage.PATH, self.response)
            # Bug fix: without this return the handler kept executing and
            # rendered the logs page on top of the redirect response.
            return

        # Collect the distinct LoggedService ids.
        query = ndb.gql('SELECT * FROM LoggedService')
        all_services = []
        for entity in query:
            if entity.key.id() not in all_services:
                all_services.append(entity.key.id())

        permitted_services = [
            service for service in all_services
            if is_cloud_admin or service in apps_user_is_admin_on]

        self.render_page(page='logs', template_file=self.TEMPLATE,
                         values={'services': permitted_services})
Class to handle requests to the /logs page.
6259903426068e7796d4da64
class SuppressCrashReport:
    """Try to prevent a crash report from popping up.

    On Windows, don't display the Windows Error Reporting dialog.  On
    UNIX, disable the creation of coredump files.
    """
    # Saved error mode (Windows) or RLIMIT_CORE limits (UNIX); None means
    # __enter__ changed nothing and __exit__ is a no-op.
    old_value = None
    # Saved CRT report (mode, file) pairs; Windows debug CRT builds only.
    old_modes = None

    def __enter__(self):
        """Disable crash reporting, remembering the previous settings."""
        if sys.platform.startswith('win'):
            # msvcrt is only available on Windows builds of CPython.
            try:
                import msvcrt
            except ImportError:
                return

            # Suppress the Windows Error Reporting dialog.
            self.old_value = msvcrt.GetErrorMode()
            msvcrt.SetErrorMode(self.old_value | msvcrt.SEM_NOGPFAULTERRORBOX)

            # Debug CRT builds pop up assertion dialogs; route those
            # reports to stderr instead.
            if hasattr(msvcrt, 'CrtSetReportMode'):
                self.old_modes = {}
                for report_type in [msvcrt.CRT_WARN,
                                    msvcrt.CRT_ERROR,
                                    msvcrt.CRT_ASSERT]:
                    old_mode = msvcrt.CrtSetReportMode(report_type,
                                                       msvcrt.CRTDBG_MODE_FILE)
                    old_file = msvcrt.CrtSetReportFile(report_type,
                                                       msvcrt.CRTDBG_FILE_STDERR)
                    self.old_modes[report_type] = old_mode, old_file
        else:
            # The resource module may be missing on some UNIX-like platforms.
            try:
                import resource
                self.resource = resource
            except ImportError:
                self.resource = None

            # Disable creation of core dump files (soft limit 0; the hard
            # limit is left unchanged).
            if self.resource is not None:
                try:
                    self.old_value = self.resource.getrlimit(
                        self.resource.RLIMIT_CORE)
                    self.resource.setrlimit(self.resource.RLIMIT_CORE,
                                            (0, self.old_value[1]))
                except (ValueError, OSError):
                    pass

            if sys.platform == 'darwin':
                # On macOS, check whether the Crash Reporter is in
                # 'developer' mode; in that mode it still shows a dialog.
                import subprocess
                cmd = ['/usr/bin/defaults', 'read',
                       'com.apple.CrashReporter', 'DialogType']
                proc = subprocess.Popen(cmd,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
                with proc:
                    stdout = proc.communicate()[0]
                if stdout.strip() == b'developer':
                    print("this test triggers the Crash Reporter, "
                          "that is intentional", end='', flush=True)

        return self

    def __exit__(self, *ignore_exc):
        """Restore the settings saved by __enter__ (no-op if unchanged)."""
        if self.old_value is None:
            return

        if sys.platform.startswith('win'):
            import msvcrt
            msvcrt.SetErrorMode(self.old_value)

            if self.old_modes:
                for report_type, (old_mode, old_file) in self.old_modes.items():
                    msvcrt.CrtSetReportMode(report_type, old_mode)
                    msvcrt.CrtSetReportFile(report_type, old_file)
        else:
            # Restore the previous core-dump resource limit.
            if self.resource is not None:
                try:
                    self.resource.setrlimit(self.resource.RLIMIT_CORE,
                                            self.old_value)
                except (ValueError, OSError):
                    pass
Try to prevent a crash report from popping up. On Windows, don't display the Windows Error Reporting dialog. On UNIX, disable the creation of coredump file.
625990341d351010ab8f4c34
class Verbose(object):
    """A mix-in class that prints a message for each ping received."""

    async def pinged(self, **kw):
        """Forward the ping to the next class in the MRO, then report it."""
        await super().pinged(**kw)
        details = (kw['size'], kw['host'], kw['seqNum'], kw['ttl'],
                   kw['delay'] * 1000)
        print("%d bytes from %s: icmp_seq=%d ttl=%d time=%.2f ms" % details)
A mix-in class to print a message when each ping is received
625990344e696a045264e6af
class LoadedFiles(NamedTuple):
    """A collection of data for files loaded at runtime (or, a continuation
    of this information loaded from a cache).

    Both fields default to None, meaning "not available/not loaded".
    """

    # Names/paths of the loaded files, if any.
    files: Optional[List[str]] = None
    # Per-file data dictionaries; presumably keyed by file name/path --
    # confirm against callers.
    file_data: Optional[Dict[str, dict]] = None
A collection of data for files loaded at runtime (or, a continuation of this information loaded from a cache).
62599034c432627299fa4111
class QSVR(SVR):
    """Quantum Support Vector Regressor.

    Extends sklearn's SVR, replacing the classical kernel with the
    ``evaluate`` method of a :class:`QuantumKernel`.
    """

    def __init__(self, *args, quantum_kernel: Optional[QuantumKernel] = None, **kwargs):
        """
        Args:
            quantum_kernel: QuantumKernel used for regression; defaults to a
                newly constructed QuantumKernel.
            *args: Deprecated positional arguments, still forwarded to SVR.
            **kwargs: Keyword arguments forwarded to SVR; a ``kernel``
                argument is discarded with a warning.
        """
        # Positional arguments were deprecated in 0.3.0; warn, but keep
        # forwarding them to SVR for backward compatibility.
        if (len(args)) != 0:
            msg = (
                f"Positional arguments ({args}) are deprecated as of version 0.3.0 and "
                f"will be removed no sooner than 3 months after the release. Instead use "
                f"keyword arguments."
            )
            warnings.warn(msg, DeprecationWarning, stacklevel=2)

        # A classical sklearn 'kernel' would conflict with the quantum
        # kernel, so it is dropped rather than forwarded.
        if "kernel" in kwargs:
            msg = (
                "'kernel' argument is not supported and will be discarded, "
                "please use 'quantum_kernel' instead."
            )
            warnings.warn(msg, QiskitMachineLearningWarning, stacklevel=2)
            del kwargs["kernel"]

        self._quantum_kernel = quantum_kernel if quantum_kernel else QuantumKernel()
        # The quantum kernel's evaluate method acts as sklearn's callable kernel.
        super().__init__(kernel=self._quantum_kernel.evaluate, *args, **kwargs)

    @property
    def quantum_kernel(self) -> QuantumKernel:
        """Return the quantum kernel used by this regressor."""
        return self._quantum_kernel

    @quantum_kernel.setter
    def quantum_kernel(self, quantum_kernel: QuantumKernel):
        """Set the quantum kernel and rebind sklearn's kernel callable."""
        self._quantum_kernel = quantum_kernel
        self.kernel = self._quantum_kernel.evaluate

    @classmethod
    def _get_param_names(cls):
        # Expose 'quantum_kernel' instead of sklearn's 'kernel' in
        # get_params()/set_params().
        names = SVR._get_param_names()
        names.remove("kernel")
        return sorted(names + ["quantum_kernel"])
Quantum Support Vector Regressor. This class shows how to use a quantum kernel for regression. The class extends `sklearn.svm.SVR <https://scikit-learn.org/stable/modules/generated/sklearn.svm.SVR.html>`_, and thus inherits its methods like ``fit`` and ``predict`` used in the example below. Read more in the `sklearn user guide <https://scikit-learn.org/stable/modules/svm.html#svm-regression>`_. **Example** .. code-block:: qsvr = QSVR(quantum_kernel=qkernel) qsvr.fit(sample_train,label_train) qsvr.predict(sample_test)
62599034d10714528d69ef19
class NullableInfoNameError(BaseOverhaveSynchronizerException):
    """Raised when a feature's info block is missing its name."""
Exception for situation without feature info name.
625990348e05c05ec3f6f6e8
class WorkflowUsers(models.Model):
    """Workflow user profile wrapping Django's built-in User model."""

    # One profile row per auth user; removed together with the user.
    user = models.OneToOneField(User, on_delete=models.CASCADE)

    @property
    def fullname(self):
        """Return first and last name joined with a single space."""
        first = self.user.first_name
        last = self.user.last_name
        return f"{first} {last}"
Database WorkflowUsers model
62599034ec188e330fdf99b1
class TractionComposition(TractionField):
    """Composition of traction fields.

    Components are applied in order; each one either adds to or multiplies
    the accumulated patch tractions.
    """
    components = List.T(
        AbstractTractionField.T(),
        default=[],
        help='Ordered list of tractions.')

    def get_tractions(self, nx, ny, patches=None):
        """Accumulate tractions of all components on an nx*ny patch grid."""
        tractions = num.zeros((nx * ny, 3))

        for comp in self.components:
            operation = comp.operation
            if operation not in ('add', 'mult'):
                raise AttributeError(
                    'Component %s has an invalid operation %s.' % (
                        comp, comp.operation))

            contribution = comp.get_tractions(nx, ny, patches)
            if operation == 'add':
                tractions += contribution
            else:
                tractions *= contribution

        return tractions

    def add_component(self, field):
        """Append a traction component to the composition."""
        logger.debug('Adding traction component.')
        self.components.append(field)
Composition of traction fields. :py:class:`~pyrocko.gf.tractions.TractionField` and :py:class:`~pyrocko.gf.tractions.AbstractTractionField` can be combined to realize a combination of different fields.
6259903494891a1f408b9f87
class Partial(object):
    """Holds a pycolab "thing" plus the extra constructor arguments it
    needs, in the spirit of functools.partial."""

    def __init__(self, pycolab_thing, *args, **kwargs):
        """Validate and store the entity class and its extra arguments."""
        allowed_bases = (things.Backdrop, things.Sprite, things.Drape)
        if not issubclass(pycolab_thing, allowed_bases):
            raise TypeError('the pycolab_thing argument to ascii_art.Partial must be '
                            'a Backdrop, Sprite, or Drape subclass.')
        self.pycolab_thing = pycolab_thing
        self.args = args
        self.kwargs = kwargs
Holds a pycolab "thing" and its extra constructor arguments. In a spirit similar to `functools.partial`, a `Partial` object holds a subclass of one of the pycolab game entities described in `things.py`, along with any "extra" arguments required for its constructor (i.e. those besides the constructor arguments specified by the `things.py` base class constructors). `Partial` instances can be used to pass `Sprite`, `Drape` and `Backdrop` subclasses *and* their necessary "extra" constructor arguments to `ascii_art_to_game`.
62599034be383301e0254930
class ClixxException(Exception):
    """Exception class used for the Clixx.Py library."""

    def __init__(self, message):
        # Bug fix: initialise the base Exception so `args` is populated and
        # the exception reprs/pickles correctly; the message attribute is
        # kept for backward compatibility.
        super().__init__(message)
        self.message = message

    def __str__(self):
        # Preserve historical behaviour: show the repr of the message.
        return repr(self.message)
Exception class used for the Clixx.Py library
6259903473bcbd0ca4bcb3a4
class UDF(object):
    """Wrapper for registering metadata about UDFs.

    TODO: add setup function/dependencies so that the compiler can
    generate the appropriate import statements and definitions.
    """

    def __init__(self, name, nargs):
        self.name = name
        self.nargs = nargs
        # The actual callable; attached later by the registration machinery.
        self.f = None

    @property
    def is_agg(self):
        """Plain UDFs are never aggregates."""
        return False

    @property
    def is_incremental(self):
        """Plain UDFs do not support incremental evaluation."""
        return False
Wrapper for registering metadata about UDFs. TODO: add setup function/dependencies so that compiler can generate the appropriate import statements and definitions.
62599034b830903b9686ed07
class Reader(object):
    """Reader for output files produced by ``bcftools roh``."""

    # One record per data line: chromosome name, 1-based position, boolean
    # ROH state and quality (kept as a string).
    Variant = namedtuple('RohVariant', ('seq', 'pos', 'state', 'quality'))

    def __init__(self, filename):
        self.__filename = filename

    def variants(self):
        """Yield Variant tuples for each data line, skipping '#' headers."""
        with open(self.__filename) as roh_file:
            data_lines = (ln.rstrip() for ln in roh_file
                          if not ln.startswith('#'))
            for line in data_lines:
                fields = line.split('\t')
                fields[1] = int(fields[1])
                fields[2] = fields[2] == '1'
                yield Reader.Variant(*fields)
This class implements a reader for output files produced by bcftools roh.
62599034cad5886f8bdc5909
class Train_suits:
    """Structure to store information about train suit images."""

    def __init__(self):
        # Image data for this suit; populated after loading.
        self.img = []
        # Human-readable suit name; replaced once the suit is identified.
        self.name = "Placeholder"
Structure to store information about train suit images.
6259903450485f2cf55dc099
class ServerMessages(object):
    """Class to read from the server queue.

    Reading and processing with explicit ACK::

        c = ServerMessages()
        while True:
            msg = c.recv_message()
            if msg is None:
                break
            # prolonged processing here
            c.ack_message()
    """

    def __init__(self):
        # Connection settings come from module-level configuration.
        self.conn = amqp.Connection(host=MsgHost, userid=MsgUserid,
                                    password=MsgPassword,
                                    virtual_host=MsgVirtualHost,
                                    insist=False)
        self.chan = self.conn.channel()
        self.chan.queue_declare(queue=Queue, durable=True, exclusive=False,
                                auto_delete=False)
        self.chan.exchange_declare(exchange=Exchange, type="direct",
                                   durable=True, auto_delete=False,)
        routing_key = WorkerRouting
        self.chan.queue_bind(queue=Queue, exchange=Exchange,
                             routing_key=routing_key)

    def recv_message(self, no_ack=False):
        """Fetch one message; return None when the queue is empty.

        With no_ack=True the broker auto-acknowledges the message and no
        delivery tag is kept; otherwise ack_message() must be called.
        """
        # Bug fix: no_ack was previously hard-coded to False in basic_get,
        # so calling recv_message(no_ack=True) dropped the delivery tag
        # while the broker still awaited an ACK, leaving the message
        # unacknowledged forever.
        msg = self.chan.basic_get(queue=Queue, no_ack=no_ack)
        if msg:
            message = msg.body
            self.delivery_tag = None if no_ack else msg.delivery_tag
        else:
            message = None
            self.delivery_tag = None
        return message

    def ack_message(self):
        """Acknowledge the last message returned by recv_message()."""
        if self.delivery_tag is None:
            msg = "Can't ACK as no message read?"
            raise Exception(msg)
        self.chan.basic_ack(self.delivery_tag)

    def __del__(self):
        # NOTE(review): this can raise during interpreter shutdown or when
        # __init__ failed before self.chan was set -- confirm acceptable.
        self.chan.close()
        self.conn.close()
Class to read from the server queue. If reading and processing with ACK: c = ServerMessages() while True: msg = c.recv_message() if msg is None: break # prolonged processing here c.ack_message()
6259903423e79379d538d627
class ExecutorServer(object):
    """RPC Executor server."""

    def __init__(self, executor):
        # Executor instance that actually runs the actions.
        self._executor = executor

    def run_action(self, rpc_ctx, task_id, action_class_str, attributes,
                   params):
        """Log the incoming RPC request and delegate it to the executor."""
        LOG.info(
            "Received RPC request 'run_action'[rpc_ctx=%s,"
            " task_id=%s, action_class=%s, attributes=%s, params=%s]"
            % (rpc_ctx, task_id, action_class_str, attributes, params)
        )
        self._executor.run_action(task_id, action_class_str, attributes,
                                  params)
RPC Executor server.
625990346fece00bbacccac6
class StubConverter(object):
    """A stand-in object for a converter that does nothing."""

    def __init__(self, currentTest):
        # Test case used for assertions; convertCount tracks calls.
        self.currentTest = currentTest
        self.convertCount = 0

    def convert(self):
        """Record the call without performing any conversion."""
        self.convertCount += 1

    def assertConvertCountEquals(self, convertCount):
        """Assert (via the owning test case) the expected call count."""
        self.currentTest.assertEquals(self.convertCount, convertCount)
A stand-in object for a converter that does nothing
6259903466673b3332c31511
class RHExportSubmissionsCSV(RHExportSubmissionsBase):
    """Export submissions as CSV."""

    def _export(self, filename, headers, rows):
        # Delegate to the shared CSV responder, appending the extension.
        csv_filename = filename + '.csv'
        return send_csv(csv_filename, headers, rows)
Export submissions as CSV
62599034e76e3b2f99fd9b29
class MacOSKeyboardLayoutPlugin(PlistFileArtifactPreprocessorPlugin):
    """MacOS keyboard layout plugin."""

    ARTIFACT_DEFINITION_NAME = 'MacOSKeyboardLayoutPlistFile'

    _PLIST_KEYS = ['AppleCurrentKeyboardLayoutInputSourceID']

    def _ParsePlistKeyValue(self, mediator, name, value):
        """Extract the keyboard layout from a matching plist key.

        The layout is the final dot-separated component of the input
        source identifier.
        """
        if name not in self._PLIST_KEYS:
            return
        # Some plists wrap the value in a one-element sequence.
        if isinstance(value, (list, tuple)):
            value = value[0]
        keyboard_layout = value.rpartition('.')[-1]
        mediator.SetValue('keyboard_layout', keyboard_layout)
MacOS keyboard layout plugin.
62599034ec188e330fdf99b3