Columns: code (string, lengths 4 to 4.48k), docstring (string, lengths 1 to 6.45k), _id (string, length 24)
class TestIoK8sApiCoreV1FlexPersistentVolumeSource(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testIoK8sApiCoreV1FlexPersistentVolumeSource(self): <NEW_LINE> <INDENT> pass
IoK8sApiCoreV1FlexPersistentVolumeSource unit test stubs
6259904f7b25080760ed871c
class WebServerStack(AppServerStack): <NEW_LINE> <INDENT> HEALTH_ENDPOINT = 'HTTP:443/' <NEW_LINE> STACK_NAME_PREFIX = 'Web' <NEW_LINE> INPUTS = dict(BASE_INPUTS, **{'AppServerAMI': ['global:WebServerAMI']})
Web stack for Cac
6259904f4e696a045264e85f
class BaseMessage: <NEW_LINE> <INDENT> def __init__(self, channel: Optional[str] = None, text: Optional[str] = "", blocks: Optional[Union[List[Block], Block]] = None, attachments: Optional[List[Attachment]] = None, thread_ts: Optional[str] = None, mrkdwn: bool = True): <NEW_LINE> <INDENT> if isinstance(blocks, List): <NEW_LINE> <INDENT> self.blocks = blocks <NEW_LINE> <DEDENT> elif isinstance(blocks, Block): <NEW_LINE> <INDENT> self.blocks = [blocks, ] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.blocks = None <NEW_LINE> <DEDENT> self.channel = channel <NEW_LINE> self.text = text <NEW_LINE> self.attachments = attachments or [] <NEW_LINE> self.thread_ts = thread_ts <NEW_LINE> self.mrkdwn = mrkdwn <NEW_LINE> <DEDENT> def _resolve(self) -> Dict[str, Any]: <NEW_LINE> <INDENT> message = dict() <NEW_LINE> if self.channel: <NEW_LINE> <INDENT> message["channel"] = self.channel <NEW_LINE> <DEDENT> message["mrkdwn"] = self.mrkdwn <NEW_LINE> if self.blocks: <NEW_LINE> <INDENT> message["blocks"] = [block._resolve() for block in self.blocks] <NEW_LINE> <DEDENT> if self.attachments: <NEW_LINE> <INDENT> message["attachments"] = [attachment._resolve() for attachment in self.attachments] <NEW_LINE> <DEDENT> if self.thread_ts: <NEW_LINE> <INDENT> message["thread_ts"] = self.thread_ts <NEW_LINE> <DEDENT> if self.text or self.text == "": <NEW_LINE> <INDENT> message["text"] = self.text <NEW_LINE> <DEDENT> return message <NEW_LINE> <DEDENT> def to_dict(self) -> Dict[str, Any]: <NEW_LINE> <INDENT> return self._resolve() <NEW_LINE> <DEDENT> def json(self) -> str: <NEW_LINE> <INDENT> return dumps(self._resolve(), indent=4) <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return self.json() <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> return self._resolve()[item] <NEW_LINE> <DEDENT> def keys(self) -> Dict[str, Any]: <NEW_LINE> <INDENT> return self._resolve()
Abstract class for shared functionality between Messages and Acknowledgement responses.
6259904f8e7ae83300eea510
class Edge: <NEW_LINE> <INDENT> def __init__(self, promptID, sourceID, targetID, data=None): <NEW_LINE> <INDENT> self.promptID = promptID <NEW_LINE> self.sourceID = sourceID <NEW_LINE> self.targetID = targetID <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> for key in data.keys(): <NEW_LINE> <INDENT> setattr(self, key, data[key]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> result = "Edge with: " <NEW_LINE> attrs = self.__dict__ <NEW_LINE> for key in attrs.keys(): <NEW_LINE> <INDENT> result += str(key) + ": " + str(attrs[key]) + " " <NEW_LINE> <DEDENT> return result
Graph Edge as defined by Ideagens
6259904fdc8b845886d54a3c
class A(Element): <NEW_LINE> <INDENT> tag = 'a' <NEW_LINE> indent = " " <NEW_LINE> def __init__(self, link, content): <NEW_LINE> <INDENT> super().__init__(content, href=link)
Create the <a> tag for links, with a custom __init__.
6259904f8a43f66fc4bf3616
class DateColumn(Column): <NEW_LINE> <INDENT> def __init__(self, *args, format='%Y-%m-%d', **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.format = format <NEW_LINE> <DEDENT> def to_python(self, value): <NEW_LINE> <INDENT> if isinstance(value, datetime.date): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> return datetime.datetime.strptime(value, self.format).date() <NEW_LINE> <DEDENT> def to_string(self, value): <NEW_LINE> <INDENT> return value.strftime(self.format)
A column that contains data in the form of Python dates, represented in Python by datetime.date. The format argument is a strptime()-style format string.
6259904ff7d966606f7492f7
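A minimal round-trip sketch of the conversion DateColumn performs, written standalone (with an assumed format string) because the Column base class is not included in this row:

import datetime

# to_python: parse a string into a datetime.date using a strptime()-style format
fmt = '%Y-%m-%d'
value = datetime.datetime.strptime('2021-03-15', fmt).date()
assert value == datetime.date(2021, 3, 15)
# to_string: format the date back into the same representation
assert value.strftime(fmt) == '2021-03-15'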
class Hook: <NEW_LINE> <INDENT> def __init__(self, module): <NEW_LINE> <INDENT> self.module = module <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> if attr == 'uniout': <NEW_LINE> <INDENT> if is_py2: <NEW_LINE> <INDENT> import uniout <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return getattr(self.module, attr)
Inject the uniout module, so that it is only triggered at import time.
6259904fb57a9660fecd2efb
class AASTex(Latex): <NEW_LINE> <INDENT> _format_name = 'aastex' <NEW_LINE> _io_registry_format_aliases = ['aastex'] <NEW_LINE> _io_registry_suffix = '' <NEW_LINE> _description = 'AASTeX deluxetable used for AAS journals' <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> Latex.__init__(self, **kwargs) <NEW_LINE> self.header = AASTexHeader() <NEW_LINE> self.data = AASTexData() <NEW_LINE> self.header.comment = '%|' + '|'.join( [r'\\' + command for command in self.ignore_latex_commands]) <NEW_LINE> self.header.splitter = AASTexHeaderSplitter() <NEW_LINE> self.data.splitter = LatexSplitter() <NEW_LINE> self.data.comment = self.header.comment <NEW_LINE> self.data.header = self.header <NEW_LINE> self.header.data = self.data <NEW_LINE> self.latex['tabletype'] = 'deluxetable' <NEW_LINE> self.header.latex = self.latex <NEW_LINE> self.data.latex = self.latex
Write and read AASTeX tables. This class implements some AASTeX specific commands. AASTeX is used for the AAS (American Astronomical Society) publications like ApJ, ApJL and AJ. It derives from the ``Latex`` reader and accepts the same keywords. However, the keywords ``header_start``, ``header_end``, ``data_start`` and ``data_end`` in ``latexdict`` have no effect.
6259904fa8ecb03325872691
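A usage sketch, assuming this is the AASTex writer registered with astropy.io.ascii under the 'aastex' alias (as _io_registry_format_aliases above suggests); the table contents here are made up:

from astropy.table import Table
from astropy.io import ascii

t = Table({'name': ['obj1', 'obj2'], 'flux': [1.2, 3.4]})
# Writes a deluxetable-style AASTeX table to stdout
ascii.write(t, format='aastex')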
class BackgroundAction(Protocol): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> async def __call__(self, conn: Redis, duration: float) -> None: <NEW_LINE> <INDENT> ...
An action to perform with the connection.
6259904f0a50d4780f7067fc
class _Line(object): <NEW_LINE> <INDENT> def __init__(self, subplot, cnvTimeFunc, wdg, **kargs): <NEW_LINE> <INDENT> self.subplot = subplot <NEW_LINE> self._cnvTimeFunc = cnvTimeFunc <NEW_LINE> self._wdg = wdg <NEW_LINE> self._tList = [] <NEW_LINE> self._yList = [] <NEW_LINE> self.line2d = matplotlib.lines.Line2D([], [], animated=True, **kargs) <NEW_LINE> self.subplot.add_line(self.line2d) <NEW_LINE> self.subplot._scwLines.append(self) <NEW_LINE> <DEDENT> def addPoint(self, y, t=None): <NEW_LINE> <INDENT> if y == None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if t == None: <NEW_LINE> <INDENT> t = time.time() <NEW_LINE> <DEDENT> mplDays = self._cnvTimeFunc(t) <NEW_LINE> self._tList.append(mplDays) <NEW_LINE> self._yList.append(y) <NEW_LINE> self._redraw() <NEW_LINE> <DEDENT> def _redraw(self): <NEW_LINE> <INDENT> self.line2d.set_data(self._tList, self._yList) <NEW_LINE> if not self._wdg.winfo_ismapped(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if len(self._yList) > 0: <NEW_LINE> <INDENT> lastY = self._yList[-1] <NEW_LINE> if self.subplot.get_autoscaley_on() and numpy.isfinite(lastY): <NEW_LINE> <INDENT> yMin, yMax = self.subplot.get_ylim() <NEW_LINE> self.line2d.set_data(self._tList, self._yList) <NEW_LINE> if not (yMin <= lastY <= yMax): <NEW_LINE> <INDENT> self.subplot.relim() <NEW_LINE> self.subplot.autoscale_view(scalex=False, scaley=True) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if self.subplot._scwBackground: <NEW_LINE> <INDENT> canvas = self.subplot.figure.canvas <NEW_LINE> canvas.restore_region(self.subplot._scwBackground) <NEW_LINE> for line in self.subplot._scwLines: <NEW_LINE> <INDENT> self.subplot.draw_artist(line.line2d) <NEW_LINE> <DEDENT> canvas.blit(self.subplot.bbox) <NEW_LINE> <DEDENT> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self._tList = [] <NEW_LINE> self._yList = [] <NEW_LINE> self._redraw() <NEW_LINE> <DEDENT> def _purgeOldData(self, minMplDays): <NEW_LINE> <INDENT> if not self._tList: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> numToDitch = bisect.bisect_left(self._tList, minMplDays) - 1 <NEW_LINE> if numToDitch > 0: <NEW_LINE> <INDENT> self._tList = self._tList[numToDitch:] <NEW_LINE> self._yList = self._yList[numToDitch:] <NEW_LINE> self.line2d.set_data(self._tList, self._yList)
A line (trace) on a strip chart representing some varying quantity Attributes that might be useful: - line2d: the matplotlib.lines.Line2D associated with this line - subplot: the matplotlib Subplot instance displaying this line - cnvTimeFunc: a function that takes a POSIX timestamp (e.g. time.time()) and returns matplotlib days; typically an instance of TimeConverter; defaults to TimeConverter(useUTC=False)
6259904f8da39b475be04664
class ImageWithThumbnailsField(ImageField): <NEW_LINE> <INDENT> attr_class = ImageWithThumbnailsFieldFile <NEW_LINE> descriptor_class = FallbackFieldDescriptor <NEW_LINE> def __init__(self, thumbnails=None, fallback_path=None, *args, **kwargs): <NEW_LINE> <INDENT> super(ImageWithThumbnailsField, self).__init__(*args, **kwargs) <NEW_LINE> self.thumbnails = thumbnails or [] <NEW_LINE> self.fallback_path = fallback_path <NEW_LINE> <DEDENT> def get_thumbnail_filename(self, instance, original_file, thumbnail_name, ext): <NEW_LINE> <INDENT> path = os.path.dirname(original_file.name) <NEW_LINE> hash_value, _ = os.path.splitext(os.path.basename(original_file.name)) <NEW_LINE> filename = '%(thumbnail)s.%(hash)s%(ext)s' % { 'path': path, 'thumbnail': thumbnail_name, 'hash': hash_value, 'ext': ext} <NEW_LINE> return os.path.join(path, filename) <NEW_LINE> <DEDENT> def south_field_triple(self): <NEW_LINE> <INDENT> from south.modelsinspector import introspector <NEW_LINE> field_class = "django.db.models.fields.files.ImageField" <NEW_LINE> args, kwargs = introspector(self) <NEW_LINE> return (field_class, args, kwargs)
An ``ImageField`` subclass, extended with zero to many thumbnails.
6259904f462c4b4f79dbce7e
class MedicationProductIngredient(backboneelement.BackboneElement): <NEW_LINE> <INDENT> resource_name = "MedicationProductIngredient" <NEW_LINE> def __init__(self, jsondict=None, strict=True): <NEW_LINE> <INDENT> self.amount = None <NEW_LINE> self.item = None <NEW_LINE> super(MedicationProductIngredient, self).__init__(jsondict=jsondict, strict=strict) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> def elementProperties(self): <NEW_LINE> <INDENT> js = super(MedicationProductIngredient, self).elementProperties() <NEW_LINE> js.extend([ ("amount", "amount", ratio.Ratio, False, None, False), ("item", "item", fhirreference.FHIRReference, False, None, True), ]) <NEW_LINE> return js
Active or inactive ingredient. Identifies a particular constituent of interest in the product.
6259904f76e4537e8c3f0a05
class Routing(object): <NEW_LINE> <INDENT> def __init__(self, topo): <NEW_LINE> <INDENT> self.topo = topo <NEW_LINE> <DEDENT> def get_route(self, src, dst, pkt): <NEW_LINE> <INDENT> raise NotImplementedError
Base class for data center network routing. Routing engines must implement the get_route() method.
6259904f99cbb53fe6832364
class FeatureOfInterestList(Retriever): <NEW_LINE> <INDENT> _ID = 'foi_id' <NEW_LINE> _IDENTIFIER = 'foi_identifier' <NEW_LINE> _NAME = 'foi_name' <NEW_LINE> _DOMAIN = 'foi_domain' <NEW_LINE> _TYPE = 'foi_type' <NEW_LINE> @asyncio.coroutine <NEW_LINE> def before(self, request): <NEW_LINE> <INDENT> request['featureOfInterestList'] = []
Query an SOS to retrieve observation data structured according to the O&M specification.
6259904f24f1403a9268630d
class MatchCriteriaMailgunAllOf(object): <NEW_LINE> <INDENT> openapi_types = { 'type': 'str', 'address': 'str', 'subject': 'str' } <NEW_LINE> attribute_map = { 'type': 'type', 'address': 'address', 'subject': 'subject' } <NEW_LINE> nulls = set() <NEW_LINE> def __init__(self, type='mailgun', address=None, subject='New message from {appName}', local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._type = None <NEW_LINE> self._address = None <NEW_LINE> self._subject = None <NEW_LINE> self.discriminator = None <NEW_LINE> if type is not None: <NEW_LINE> <INDENT> self.type = type <NEW_LINE> <DEDENT> self.address = address <NEW_LINE> if subject is not None: <NEW_LINE> <INDENT> self.subject = subject <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return self._type <NEW_LINE> <DEDENT> @type.setter <NEW_LINE> def type(self, type): <NEW_LINE> <INDENT> self._type = type <NEW_LINE> <DEDENT> @property <NEW_LINE> def address(self): <NEW_LINE> <INDENT> return self._address <NEW_LINE> <DEDENT> @address.setter <NEW_LINE> def address(self, address): <NEW_LINE> <INDENT> if self.local_vars_configuration.client_side_validation and address is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `address`, must not be `None`") <NEW_LINE> <DEDENT> self._address = address <NEW_LINE> <DEDENT> @property <NEW_LINE> def subject(self): <NEW_LINE> <INDENT> return self._subject <NEW_LINE> <DEDENT> @subject.setter <NEW_LINE> def subject(self, subject): <NEW_LINE> <INDENT> self._subject = subject <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, MatchCriteriaMailgunAllOf): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, MatchCriteriaMailgunAllOf): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
6259904fe64d504609df9e0e
class Cache: <NEW_LINE> <INDENT> cache_dir = './.cache/' <NEW_LINE> date_format = '%Y-%m-%d' <NEW_LINE> today = dt.datetime.today() <NEW_LINE> def __init__(self, ticker:str, datatype:str, expiration=today, period=None, suffix='pkl'): <NEW_LINE> <INDENT> self._ticker = ticker <NEW_LINE> self._datatype = datatype <NEW_LINE> self._expiration = expiration <NEW_LINE> self._suffix = suffix <NEW_LINE> self._period = period <NEW_LINE> self._path = None <NEW_LINE> self._build_cache_path() <NEW_LINE> <DEDENT> def set_expiration(self, expiration_str:str): <NEW_LINE> <INDENT> self._expiration = dt.datetime.strptime(expiration_str, self.date_format) <NEW_LINE> self._build_cache_path() <NEW_LINE> <DEDENT> def get_path(self): <NEW_LINE> <INDENT> return self._path <NEW_LINE> <DEDENT> def _get_cache_expiration(self, path=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if path is None: <NEW_LINE> <INDENT> path = self._path <NEW_LINE> <DEDENT> return dt.datetime.strptime(Path(path).stem.split('_')[-1], self.date_format) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> print('*** Cannot extract date from filename ***') <NEW_LINE> return -1 <NEW_LINE> <DEDENT> <DEDENT> def _build_cache_basename(self): <NEW_LINE> <INDENT> basename = f'{self._ticker}_{self._datatype}' <NEW_LINE> expiration_string = self._expiration.strftime(self.date_format) <NEW_LINE> if self._period == 'annual': <NEW_LINE> <INDENT> basename = basename + '_a' <NEW_LINE> <DEDENT> elif self._period == 'quarter': <NEW_LINE> <INDENT> basename = basename + '_q' <NEW_LINE> <DEDENT> elif self._period is None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg = f"Period {self._period} should be 'annual' or 'quarter'" <NEW_LINE> raise ValueError(msg) <NEW_LINE> <DEDENT> basename = basename + f'_{expiration_string}' <NEW_LINE> return basename <NEW_LINE> <DEDENT> def _build_cache_path(self): <NEW_LINE> <INDENT> os.makedirs(self.cache_dir, exist_ok=True) <NEW_LINE> basename = f'{self._build_cache_basename()}.{self._suffix}' <NEW_LINE> self._path = os.path.join(self.cache_dir, basename) <NEW_LINE> <DEDENT> def save_to_cache(self, dataframe): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> assert self._path is not None, 'Initialize path first' <NEW_LINE> dataframe.to_pickle(self._path) <NEW_LINE> <DEDENT> except AssertionError as error: <NEW_LINE> <INDENT> print(error) <NEW_LINE> <DEDENT> <DEDENT> def load_cache(self): <NEW_LINE> <INDENT> date_pattern = '????-??-??' <NEW_LINE> if self._period == 'annual': <NEW_LINE> <INDENT> pattern = f'{self._ticker}_{self._datatype}_a_{date_pattern}.{self._suffix}' <NEW_LINE> <DEDENT> elif self._period == 'quarter': <NEW_LINE> <INDENT> pattern = f'{self._ticker}_{self._datatype}_q_{date_pattern}.{self._suffix}' <NEW_LINE> <DEDENT> elif self._period is None: <NEW_LINE> <INDENT> pattern = f'{self._ticker}_{self._datatype}_{date_pattern}.{self._suffix}' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg = 'self._period should be annual, quarter or None' <NEW_LINE> raise ValueError(msg) <NEW_LINE> <DEDENT> pattern = os.path.join(self.cache_dir, pattern) <NEW_LINE> matching_caches = glob.glob(pattern) <NEW_LINE> for matching_cache in matching_caches: <NEW_LINE> <INDENT> if self._get_cache_expiration(matching_cache) > dt.datetime.now(): <NEW_LINE> <INDENT> dataframe = pd.read_pickle(matching_cache) <NEW_LINE> return dataframe <NEW_LINE> <DEDENT> <DEDENT> return None
Implementation of a file-based cache for dataframes, keyed by ticker, data type, period, and expiration date.
6259904fb7558d5895464967
class InlineResponse2004Result(object): <NEW_LINE> <INDENT> swagger_types = { 'data': 'InlineResponse2004ResultData' } <NEW_LINE> attribute_map = { 'data': 'data' } <NEW_LINE> def __init__(self, data=None): <NEW_LINE> <INDENT> self._data = None <NEW_LINE> self.discriminator = None <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> self.data = data <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> @data.setter <NEW_LINE> def data(self, data): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, InlineResponse2004Result): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259904f3539df3088ecd721
class Application(Group, Task): <NEW_LINE> <INDENT> def __init__(self, command_line): <NEW_LINE> <INDENT> super(Application, self).__init__(parent=None, parser=None) <NEW_LINE> self._args = None <NEW_LINE> self._command_line = command_line <NEW_LINE> self._status = constant.TASK_RUNNING <NEW_LINE> self._result = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def args(self): <NEW_LINE> <INDENT> return self._args <NEW_LINE> <DEDENT> @property <NEW_LINE> def command_line(self): <NEW_LINE> <INDENT> return self._command_line <NEW_LINE> <DEDENT> @property <NEW_LINE> def status(self): <NEW_LINE> <INDENT> return self._status <NEW_LINE> <DEDENT> @property <NEW_LINE> def result(self): <NEW_LINE> <INDENT> return self._result <NEW_LINE> <DEDENT> def on_task_done(self, task, result): <NEW_LINE> <INDENT> self._result = result <NEW_LINE> self._status = constant.TASK_DONE <NEW_LINE> LOG.debug("Command %(command)s sucessfully run. (Result: %(result)s)", {"command": task.name, "result": result}) <NEW_LINE> <DEDENT> def on_task_fail(self, task, exc): <NEW_LINE> <INDENT> self._result = exc <NEW_LINE> self._status = constant.TASK_FAILED <NEW_LINE> LOG.error("Command %(command)s failed with: %(reason)s", {"command": task.name, "reason": exc}) <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def setup(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _on_task_done(self, result): <NEW_LINE> <INDENT> self.on_task_done(self, result) <NEW_LINE> <DEDENT> def _on_task_fail(self, exc): <NEW_LINE> <INDENT> self.on_task_fail(self, exc) <NEW_LINE> <DEDENT> def _prologue(self): <NEW_LINE> <INDENT> super(Application, self)._prologue() <NEW_LINE> self._args = self._parser.parse_args(self.command_line) <NEW_LINE> <DEDENT> def _work(self): <NEW_LINE> <INDENT> if not self.args: <NEW_LINE> <INDENT> LOG.error("No command line arguments was provided.") <NEW_LINE> return <NEW_LINE> <DEDENT> work_function = getattr(self.args, "work", None) <NEW_LINE> if not work_function: <NEW_LINE> <INDENT> LOG.error("No handle was provided for the required action.") <NEW_LINE> return <NEW_LINE> <DEDENT> work_function()
Contract class for all the command line applications. :ivar: commands: A list which contains (command, parser_name) tuples :: Example: :: class Example(CommandGroup): commands = [ (ExampleOne, "main_parser"), (ExampleTwo, "main_parser"), (ExampleThree, "second_parser"), ] # ...
6259904f29b78933be26ab02
class UserLoginView(APIView): <NEW_LINE> <INDENT> def post(self, request): <NEW_LINE> <INDENT> username = request.data["email"] <NEW_LINE> password = request.data["password"] <NEW_LINE> if User.objects.filter(username=username, is_active=True).exists() is False: <NEW_LINE> <INDENT> error = { "error": "Sorry, user does not exist" } <NEW_LINE> return Response(error, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> user = authenticate(username=username, password=password) <NEW_LINE> if user: <NEW_LINE> <INDENT> token = Token.objects.get(user_id=user.id) <NEW_LINE> data = { "token": token.key, "name": user.first_name } <NEW_LINE> return Response(data, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> error = { "error": "sorry, email or password is incorrect" } <NEW_LINE> return Response(error, status=status.HTTP_403_FORBIDDEN)
Return the user's authentication token on successful login.
6259904f73bcbd0ca4bcb709
class TemperatureData(models.Model): <NEW_LINE> <INDENT> temp_type = models.ForeignKey(TemperatureType, on_delete=models.CASCADE) <NEW_LINE> created_at = models.DateTimeField(default=timezone.now) <NEW_LINE> Year = models.IntegerField(null=True, blank=True, default=None) <NEW_LINE> JAN = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> FEB = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> MAR = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> APR = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> MAY = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> JUN = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> JUL = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> AUG = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> SEP = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> OCT = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> NOV = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> DEC = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> WIN = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> SPR = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> SUM = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> AUT = models.FloatField(null=True, blank=True, default=None) <NEW_LINE> ANN = models.FloatField(null=True, blank=True, default=None)
Temperature data model: monthly, seasonal, and annual values for a given temperature type and year.
6259904f23e79379d538d97c
class KNearestNeighbor(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def train(self, X, y): <NEW_LINE> <INDENT> self.X_train = X <NEW_LINE> self.y_train = y <NEW_LINE> <DEDENT> def predict(self, X, k=1, num_loops=0): <NEW_LINE> <INDENT> if num_loops == 0: <NEW_LINE> <INDENT> dists = self.compute_distances_no_loops(X) <NEW_LINE> <DEDENT> elif num_loops == 1: <NEW_LINE> <INDENT> dists = self.compute_distances_one_loop(X) <NEW_LINE> <DEDENT> elif num_loops == 2: <NEW_LINE> <INDENT> dists = self.compute_distances_two_loops(X) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Invalid value %d for num_loops' % num_loops) <NEW_LINE> <DEDENT> return self.predict_labels(dists, k=k) <NEW_LINE> <DEDENT> def compute_distances_two_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in range(num_test): <NEW_LINE> <INDENT> for j in range(num_train): <NEW_LINE> <INDENT> dists[i, j] = np.sqrt(((X[i] - self.X_train[j]) ** 2).sum()) <NEW_LINE> <DEDENT> <DEDENT> return dists <NEW_LINE> <DEDENT> def compute_distances_one_loop(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in range(num_test): <NEW_LINE> <INDENT> dists[i, :] = np.linalg.norm(X[i] - self.X_train, axis=1) <NEW_LINE> <DEDENT> return dists <NEW_LINE> <DEDENT> def compute_distances_no_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> dists = np.sqrt(np.square(X).sum(axis=1).reshape(-1, 1) + np.square(self.X_train).sum(axis=1) - 2 * X.dot(self.X_train.T)) <NEW_LINE> return dists <NEW_LINE> <DEDENT> def predict_labels(self, dists, k=1): <NEW_LINE> <INDENT> num_test = dists.shape[0] <NEW_LINE> y_pred = np.zeros(num_test) <NEW_LINE> for i in range(num_test): <NEW_LINE> <INDENT> k_args = dists[i].argsort()[:k] <NEW_LINE> closest_y = self.y_train[k_args] <NEW_LINE> values, counts = np.unique(closest_y, return_counts=True) <NEW_LINE> y_pred[i] = values[counts.argmax()] <NEW_LINE> <DEDENT> return y_pred
a kNN classifier with L2 distance
6259904fa219f33f346c7c81
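A standalone check (on made-up random arrays) of the vectorized L2 distance identity that compute_distances_no_loops relies on, ||x - y||^2 = ||x||^2 + ||y||^2 - 2*x.y:

import numpy as np

X_test = np.random.rand(3, 5)
X_train = np.random.rand(4, 5)
# Broadcast the squared-norm expansion over all test/train pairs
dists = np.sqrt(np.square(X_test).sum(axis=1).reshape(-1, 1)
                + np.square(X_train).sum(axis=1)
                - 2 * X_test.dot(X_train.T))
# Compare against the naive pairwise computation
brute = np.array([[np.linalg.norm(a - b) for b in X_train] for a in X_test])
assert np.allclose(dists, brute)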
class AntColony(object): <NEW_LINE> <INDENT> def __init__(self, strategy, hive, ant_types, create_places, food=4): <NEW_LINE> <INDENT> self.time = 0 <NEW_LINE> self.food = food <NEW_LINE> self.strategy = strategy <NEW_LINE> self.hive = hive <NEW_LINE> self.ant_types = OrderedDict((a.name, a) for a in ant_types) <NEW_LINE> self.configure(hive, create_places) <NEW_LINE> <DEDENT> def configure(self, hive, create_places): <NEW_LINE> <INDENT> self.queen = Place('AntQueen') <NEW_LINE> self.places = OrderedDict() <NEW_LINE> self.bee_entrances = [] <NEW_LINE> def register_place(place, is_bee_entrance): <NEW_LINE> <INDENT> self.places[place.name] = place <NEW_LINE> if is_bee_entrance: <NEW_LINE> <INDENT> place.entrance = hive <NEW_LINE> self.bee_entrances.append(place) <NEW_LINE> <DEDENT> <DEDENT> register_place(self.hive, False) <NEW_LINE> create_places(self.queen, register_place) <NEW_LINE> <DEDENT> def simulate(self): <NEW_LINE> <INDENT> while len(self.queen.bees) == 0 and len(self.bees) > 0: <NEW_LINE> <INDENT> self.hive.strategy(self) <NEW_LINE> self.strategy(self) <NEW_LINE> for ant in self.ants: <NEW_LINE> <INDENT> if ant.armor > 0: <NEW_LINE> <INDENT> ant.action(self) <NEW_LINE> <DEDENT> <DEDENT> for bee in self.bees: <NEW_LINE> <INDENT> if bee.armor > 0: <NEW_LINE> <INDENT> bee.action(self) <NEW_LINE> <DEDENT> <DEDENT> self.time += 1 <NEW_LINE> <DEDENT> if len(self.queen.bees) > 0: <NEW_LINE> <INDENT> print('The ant queen has perished. Please try again.') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('All bees are vanquished. You win!') <NEW_LINE> <DEDENT> <DEDENT> def deploy_ant(self, place_name, ant_type_name): <NEW_LINE> <INDENT> constructor = self.ant_types[ant_type_name] <NEW_LINE> if self.food < constructor.food_cost: <NEW_LINE> <INDENT> print('Not enough food remains to place ' + ant_type_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.places[place_name].add_insect(constructor()) <NEW_LINE> self.food -= constructor.food_cost <NEW_LINE> <DEDENT> <DEDENT> def remove_ant(self, place_name): <NEW_LINE> <INDENT> place = self.places[place_name] <NEW_LINE> if place.ant is not None: <NEW_LINE> <INDENT> place.remove_insect(place.ant) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def ants(self): <NEW_LINE> <INDENT> return [p.ant for p in self.places.values() if p.ant is not None] <NEW_LINE> <DEDENT> @property <NEW_LINE> def bees(self): <NEW_LINE> <INDENT> return [b for p in self.places.values() for b in p.bees] <NEW_LINE> <DEDENT> @property <NEW_LINE> def insects(self): <NEW_LINE> <INDENT> return self.ants + self.bees <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> status = ' (Food: {0}, Time: {1})'.format(self.food, self.time) <NEW_LINE> return str([str(i) for i in self.ants + self.bees]) + status
An ant collective that manages global game state and simulates time. Attributes: time -- elapsed time food -- the colony's available food total queen -- the place where the queen resides places -- A list of all places in the colony (including a Hive) bee_entrances -- A list of places that bees can enter
6259904f23849d37ff85253e
@dataclass <NEW_LINE> class ImplementationGuideDependsOn(BackboneElement): <NEW_LINE> <INDENT> resource_type: ClassVar[str] = "ImplementationGuideDependsOn" <NEW_LINE> uri: str = None <NEW_LINE> packageId: Optional[str] = None <NEW_LINE> version: Optional[str] = None
Another Implementation guide this depends on. Another implementation guide that this implementation depends on. Typically, an implementation guide uses value sets, profiles etc. defined in other implementation guides.
6259904f15baa7234946340d
class NewPost(Handler): <NEW_LINE> <INDENT> @login_required <NEW_LINE> def get(self): <NEW_LINE> <INDENT> self.render('newpost.html') <NEW_LINE> <DEDENT> @login_required <NEW_LINE> def post(self): <NEW_LINE> <INDENT> title = self.request.get('title') <NEW_LINE> blogpost = self.request.get('blogpost') <NEW_LINE> author = find_by_name(self.get_cookie('username')) <NEW_LINE> if title and blogpost: <NEW_LINE> <INDENT> p = Post( parent=post_key(), author=author, title=title, blogpost=blogpost ) <NEW_LINE> p.put() <NEW_LINE> return self.redirect('/blog/%s' % str(p.key().id())) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> error = "Please enter a title and post content." <NEW_LINE> self.render( 'newpost.html', title=title, blogpost=blogpost, error=error )
Handle the new post entry page.
6259904f8a43f66fc4bf3618
class TaskImagesView(generics.ListAPIView): <NEW_LINE> <INDENT> serializer_class = ImagesSerializer <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> pk = self.kwargs.get('pk') <NEW_LINE> queryset = ImageTask.objects.filter(to_task=pk) <NEW_LINE> return queryset
Return the list of images for a task.
6259904f8e71fb1e983bcf45
class ShuffleConvExpansion(nn.Module): <NEW_LINE> <INDENT> def __init__(self, d_features, n_channel, n_depth): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.d_features = d_features <NEW_LINE> self.n_channel = n_channel <NEW_LINE> self.n_depth = n_depth <NEW_LINE> self.index = feature_shuffle_index(d_features, depth=self.dim) <NEW_LINE> self.index = torch.tensor(self.index) <NEW_LINE> self.d_features = d_features <NEW_LINE> self.conv = nn.Conv1d(self.n_channel * self.n_depth, self.n_channel * self.n_depth, kernel_size=3, padding=1, groups=self.n_channel * self.n_depth) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = x[:, self.index] <NEW_LINE> x = x.view(-1, self.n_channel * self.n_depth, self.d_features) <NEW_LINE> x = self.conv(x) <NEW_LINE> return x <NEW_LINE> <DEDENT> def initialize_param(self, init, *args): <NEW_LINE> <INDENT> init(self.conv.weight, *args)
expansion 1D -> 2D
6259904f94891a1f408ba135
class TableError(PyLaTeXError): <NEW_LINE> <INDENT> pass
A Base class for all errors concerning tables.
6259904f91af0d3eaad3b2a5
class UpsampleConv(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, kernel_size, stride, padding): <NEW_LINE> <INDENT> super(UpsampleConv, self).__init__() <NEW_LINE> self.upsample = nn.Upsample(scale_factor=2, mode='bilinear') <NEW_LINE> self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = self.upsample(x) <NEW_LINE> x = self.conv(x) <NEW_LINE> return x
Upsample tensor and then conv
6259904fd53ae8145f9198e3
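A shape sketch of the upsample-then-conv pattern above (channel counts and sizes here are illustrative, not taken from the source):

import torch
import torch.nn as nn

up = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False)
conv = nn.Conv2d(16, 8, kernel_size=3, stride=1, padding=1)
x = torch.randn(1, 16, 32, 32)
y = conv(up(x))
print(y.shape)  # spatial size doubles, channels follow the conv: [1, 8, 64, 64]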
class DuckduckgoSelScrape(SelScrape): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> SelScrape.__init__(self, *args, **kwargs) <NEW_LINE> self.largest_id = 0 <NEW_LINE> <DEDENT> def _goto_next_page(self): <NEW_LINE> <INDENT> super().page_down() <NEW_LINE> return 'No more results' not in self.html <NEW_LINE> <DEDENT> def wait_until_serp_loaded(self): <NEW_LINE> <INDENT> super()._wait_until_search_input_field_contains_query()
DuckDuckGo is a little special since new results are obtained via AJAX; going to the next page therefore means scrolling down. Furthermore, duckduckgo.com doesn't seem to work with PhantomJS. Maybe they block it, but I don't know how; it cannot be the User-Agent, because I already tried that.
6259904f55399d3f0562799b
class InterfacesEdit(View): <NEW_LINE> <INDENT> def get(self, request, pk): <NEW_LINE> <INDENT> data = [i for i in models.Interfaces.objects.filter(id = pk).values()] <NEW_LINE> json_ = return_information(data, 200, "查询成功!") <NEW_LINE> return JsonResponse(json_, json_dumps_params = {'ensure_ascii': False}) <NEW_LINE> <DEDENT> def put(self, request, pk): <NEW_LINE> <INDENT> datas = request.body <NEW_LINE> datas_ = json.loads(datas, encoding = 'utf-8') <NEW_LINE> update_ = {"name": datas_["name"], "tester": datas_["tester"], "desc": datas_["desc"]} <NEW_LINE> models.Interfaces.objects.filter(id = pk).update(**update_) <NEW_LINE> update_data = [i for i in models.Interfaces.objects.filter(id = pk).values()] <NEW_LINE> json_ = return_information(update_data, 201, "修改数据成功") <NEW_LINE> return JsonResponse(json_, json_dumps_params = {'ensure_ascii': False}) <NEW_LINE> <DEDENT> def delete(self, request, pk): <NEW_LINE> <INDENT> models.Interfaces.objects.get(id = pk).delete() <NEW_LINE> json_ = return_information([], 204, "删除成功") <NEW_LINE> return JsonResponse(json_, json_dumps_params = {'ensure_ascii': False})
Edit interface information and retrieve a single record.
6259904f287bf620b627306b
class Index(webapp2.RequestHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> self.redirect('/grass')
Redirects to grass (default)
6259904f379a373c97d9a4ac
class KRRCTypeA2DualElement(KRRCNonSimplyLacedElement): <NEW_LINE> <INDENT> def epsilon(self, a): <NEW_LINE> <INDENT> if a == self.parent()._cartan_type.special_node(): <NEW_LINE> <INDENT> return self.to_tensor_product_of_kirillov_reshetikhin_tableaux().epsilon(a) <NEW_LINE> <DEDENT> a = self.parent()._rc_index_inverse[a] <NEW_LINE> if not self[a]: <NEW_LINE> <INDENT> epsilon = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> epsilon = -min(0, min(self[a].rigging)) <NEW_LINE> <DEDENT> n = len(self.parent()._rc_index) <NEW_LINE> if a == n-1: <NEW_LINE> <INDENT> epsilon *= 2 <NEW_LINE> <DEDENT> return Integer(epsilon) <NEW_LINE> <DEDENT> def phi(self, a): <NEW_LINE> <INDENT> if a == self.parent()._cartan_type.special_node(): <NEW_LINE> <INDENT> return self.to_tensor_product_of_kirillov_reshetikhin_tableaux().phi(a) <NEW_LINE> <DEDENT> a = self.parent()._rc_index_inverse[a] <NEW_LINE> p_inf = self.parent()._calc_vacancy_number(self, a, float("inf")) <NEW_LINE> if not self[a]: <NEW_LINE> <INDENT> phi = p_inf <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> phi = p_inf - min(0, min(self[a].rigging)) <NEW_LINE> <DEDENT> n = len(self.parent()._rc_index) <NEW_LINE> if a == n-1: <NEW_LINE> <INDENT> phi *= 2 <NEW_LINE> <DEDENT> return Integer(phi) <NEW_LINE> <DEDENT> @cached_method <NEW_LINE> def cocharge(self): <NEW_LINE> <INDENT> cc = ZZ.zero() <NEW_LINE> rigging_sum = ZZ.zero() <NEW_LINE> for a, p in enumerate(self): <NEW_LINE> <INDENT> t_check = 1 <NEW_LINE> for pos, i in enumerate(p._list): <NEW_LINE> <INDENT> rigging_sum += t_check * p.rigging[pos] <NEW_LINE> for dim in self.parent().dims: <NEW_LINE> <INDENT> if dim[0] == a + 1: <NEW_LINE> <INDENT> cc += t_check * min(dim[1], i) <NEW_LINE> <DEDENT> <DEDENT> cc -= t_check * p.vacancy_numbers[pos] <NEW_LINE> <DEDENT> <DEDENT> return cc / ZZ(2) + rigging_sum <NEW_LINE> <DEDENT> cc = cocharge
`U_q^{\prime}(\mathfrak{g})` rigged configurations in type `A_{2n}^{(2)\dagger}`.
6259904fac7a0e7691f7395c
class RPCCoverage(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.dir = tempfile.mkdtemp(prefix="coverage") <NEW_LINE> self.flag = '--coveragedir=%s' % self.dir <NEW_LINE> <DEDENT> def report_rpc_coverage(self): <NEW_LINE> <INDENT> uncovered = self._get_uncovered_rpc_commands() <NEW_LINE> if uncovered: <NEW_LINE> <INDENT> print("Uncovered RPC commands:") <NEW_LINE> print("".join((" - %s\n" % i) for i in sorted(uncovered))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("All RPC commands covered.") <NEW_LINE> <DEDENT> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> return shutil.rmtree(self.dir) <NEW_LINE> <DEDENT> def _get_uncovered_rpc_commands(self): <NEW_LINE> <INDENT> reference_filename = 'rpc_interface.txt' <NEW_LINE> coverage_file_prefix = 'coverage.' <NEW_LINE> coverage_ref_filename = os.path.join(self.dir, reference_filename) <NEW_LINE> coverage_filenames = set() <NEW_LINE> all_cmds = set() <NEW_LINE> covered_cmds = set() <NEW_LINE> if not os.path.isfile(coverage_ref_filename): <NEW_LINE> <INDENT> raise RuntimeError("No coverage reference found") <NEW_LINE> <DEDENT> with open(coverage_ref_filename, 'r') as f: <NEW_LINE> <INDENT> all_cmds.update([i.strip() for i in f.readlines()]) <NEW_LINE> <DEDENT> for root, dirs, files in os.walk(self.dir): <NEW_LINE> <INDENT> for filename in files: <NEW_LINE> <INDENT> if filename.startswith(coverage_file_prefix): <NEW_LINE> <INDENT> coverage_filenames.add(os.path.join(root, filename)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for filename in coverage_filenames: <NEW_LINE> <INDENT> with open(filename, 'r') as f: <NEW_LINE> <INDENT> covered_cmds.update([i.strip() for i in f.readlines()]) <NEW_LINE> <DEDENT> <DEDENT> return all_cmds - covered_cmds
Coverage reporting utilities for test_runner. Coverage calculation works by having each test script subprocess write coverage files into a particular directory. These files contain the RPC commands invoked during testing, as well as a complete listing of RPC commands per `blessing-cli help` (`rpc_interface.txt`). After all tests complete, the commands run are combined and diff'd against the complete list to calculate uncovered RPC commands. See also: test/functional/test_framework/coverage.py
6259904f4428ac0f6e6599b4
class MonoGmm(GMM): <NEW_LINE> <INDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.conf.get('mono_gmm', 'name') <NEW_LINE> <DEDENT> @property <NEW_LINE> def trainscript(self): <NEW_LINE> <INDENT> return 'steps/train_mono.sh' <NEW_LINE> <DEDENT> @property <NEW_LINE> def conf_file(self): <NEW_LINE> <INDENT> return 'mono.conf' <NEW_LINE> <DEDENT> @property <NEW_LINE> def parent_gmm_alignments(self): <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> @property <NEW_LINE> def trainops(self): <NEW_LINE> <INDENT> return '--nj %s' % self.conf.get('general', 'num_jobs') <NEW_LINE> <DEDENT> @property <NEW_LINE> def graphopts(self): <NEW_LINE> <INDENT> return '--mono'
a class for the monophone GMM
6259904f82261d6c52730907
class Age(NamedModel): <NEW_LINE> <INDENT> class Meta(NamedModel.Meta): <NEW_LINE> <INDENT> app_label = _app_label <NEW_LINE> db_table = db_table(_app_label, _age) <NEW_LINE> verbose_name = _(_age_verbose) <NEW_LINE> verbose_name_plural = _(pluralize(_age_verbose))
Age demographics model class. Sample name field values may include: - age < 12 - 50 < age < 60
6259904f45492302aabfd955
class MailRead(default_cmds.MuxCommand): <NEW_LINE> <INDENT> key = "read" <NEW_LINE> aliases = [ "+read" ] <NEW_LINE> locks = "cmd:all()" <NEW_LINE> help_category = "Mail" <NEW_LINE> def func(self): <NEW_LINE> <INDENT> if len(self.caller.db.mailsystem) == 0: <NEW_LINE> <INDENT> self.caller.msg("SYSTEM: You don't have any mail!") <NEW_LINE> <DEDENT> elif not self.args.isnumeric(): <NEW_LINE> <INDENT> self.caller.msg("SYSTEM: Index must be a number.") <NEW_LINE> <DEDENT> elif int(self.args) > len(self.caller.db.mailsystem)-1: <NEW_LINE> <INDENT> self.caller.msg("SYSTEM: You don't have that many mails.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mail = self.caller.db.mailsystem[int(self.args)] <NEW_LINE> self.caller.msg(pad(" Mail Sys ", width=80, fillchar="=")) <NEW_LINE> self.caller.msg(pad(mail[1], width=80)) <NEW_LINE> self.caller.msg("{0:39} {1:>39}".format("To: " + self.caller.key, "From: " + mail[0])) <NEW_LINE> self.caller.msg("{0:^80}".format('Sent on ' + str(mail[3].month) + '/' + str(mail[3].day) + '/' + str(mail[3].year))) <NEW_LINE> self.caller.msg(pad('=',width=80,fillchar='=')) <NEW_LINE> evmore.msg(self.caller, mail[2] + "\n")
Read a mail Usage: read #, +read # Read a mail
6259904f76d4e153a661dcb9
class IMedialogMTilesMgalleryLayer(IDefaultBrowserLayer): <NEW_LINE> <INDENT> pass
Marker interface that defines a browser layer.
6259904f29b78933be26ab03
class CheckboxTreeview(ttk.Treeview): <NEW_LINE> <INDENT> def __init__(self, master=None, **kw): <NEW_LINE> <INDENT> ttk.Treeview.__init__(self, master, **kw) <NEW_LINE> self.im_checked = tk.PhotoImage(file='resources/checked.png') <NEW_LINE> self.im_unchecked = tk.PhotoImage(file='resources/unchecked.png') <NEW_LINE> self.tag_configure('unchecked', image=self.im_unchecked) <NEW_LINE> self.tag_configure('checked', image=self.im_checked) <NEW_LINE> self.bind('<Button-1>', self._box_click, True) <NEW_LINE> <DEDENT> def _box_click(self, event): <NEW_LINE> <INDENT> x, y, widget = event.x, event.y, event.widget <NEW_LINE> elem = widget.identify("element", x, y) <NEW_LINE> if "image" in elem: <NEW_LINE> <INDENT> item = self.identify_row(y) <NEW_LINE> self._toggle_state(item) <NEW_LINE> <DEDENT> <DEDENT> def _check_item(self, item, tags): <NEW_LINE> <INDENT> new_tags = [t for t in tags if t != 'unchecked'] + ['checked'] <NEW_LINE> self.item(item, tags=tuple(new_tags)) <NEW_LINE> <DEDENT> def _uncheck_item(self, item, tags): <NEW_LINE> <INDENT> new_tags = [t for t in tags if t != 'checked'] + ['unchecked'] <NEW_LINE> self.item(item, tags=tuple(new_tags)) <NEW_LINE> <DEDENT> def _toggle_state(self, item): <NEW_LINE> <INDENT> tags = self.item(item, 'tags') <NEW_LINE> if 'checked' in tags: <NEW_LINE> <INDENT> self._uncheck_item(item, tags) <NEW_LINE> <DEDENT> elif 'unchecked' in tags: <NEW_LINE> <INDENT> self._check_item(item, tags) <NEW_LINE> <DEDENT> <DEDENT> def check_item(self, item): <NEW_LINE> <INDENT> tags = self.item(item, 'tags') <NEW_LINE> if 'unchecked' in tags: <NEW_LINE> <INDENT> self._check_item(item, tags) <NEW_LINE> <DEDENT> <DEDENT> def uncheck_item(self, item): <NEW_LINE> <INDENT> tags = self.item(item, 'tags') <NEW_LINE> if 'checked' in tags: <NEW_LINE> <INDENT> self._uncheck_item(item, tags)
`ttk.Treeview` widget with checkboxes. The checkboxes are done via the image attribute of the item, so to keep the checkbox, you cannot add an image to the item.
6259904f07d97122c4218126
class Message(collections.namedtuple('Message', ['header', 'data'])): <NEW_LINE> <INDENT> pass
Represents a message to be sent or received. Instead of building this message directly, use the new_message function. Fields: header: instance of Header, represents the header received with each message. data: on MessageType.MSG messages, is the string to be sent to the receiver. On MessageType.CLIST messages, is an array of integers, representing the ID of each client connected to the server. None for all other types of message.
6259904f6fece00bbaccce3c
class DatabaseConfiguration: <NEW_LINE> <INDENT> def __init__(self, engine: str): <NEW_LINE> <INDENT> self._engine = engine <NEW_LINE> <DEDENT> @property <NEW_LINE> def engine(self): <NEW_LINE> <INDENT> return self._engine
Configuration of the database access
6259904f4e696a045264e861
class PerLengthSequenceImpedance(PerLengthImpedance): <NEW_LINE> <INDENT> r: Optional[float] = None <NEW_LINE> x: Optional[float] = None <NEW_LINE> bch: Optional[float] = None <NEW_LINE> gch: Optional[float] = None <NEW_LINE> r0: Optional[float] = None <NEW_LINE> x0: Optional[float] = None <NEW_LINE> b0ch: Optional[float] = None <NEW_LINE> g0ch: Optional[float] = None
Sequence impedance and admittance parameters per unit length, for transposed lines of 1, 2, or 3 phases. For 1-phase lines, define x=x0=xself. For 2-phase lines, define x=xs-xm and x0=xs+xm. Typically, one PerLengthSequenceImpedance is used for many ACLineSegments.
6259904f435de62698e9d280
class Test_Instancing(unittest.TestCase): <NEW_LINE> <INDENT> def test_instancing(self): <NEW_LINE> <INDENT> for i in dir(ac): <NEW_LINE> <INDENT> with self.subTest(i): <NEW_LINE> <INDENT> j = getattr(ac,i) <NEW_LINE> if isinstance(j,type): <NEW_LINE> <INDENT> j() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def test_set_traits(self): <NEW_LINE> <INDENT> for i in dir(ac): <NEW_LINE> <INDENT> j = getattr(ac,i) <NEW_LINE> if isinstance(j,type) and issubclass(j,HasTraits) and ('digest' in j.class_traits().keys()): <NEW_LINE> <INDENT> do = j.class_traits()['digest'].depends_on <NEW_LINE> if do: <NEW_LINE> <INDENT> obj = j() <NEW_LINE> for k in do: <NEW_LINE> <INDENT> with self.subTest(i+'.'+k): <NEW_LINE> <INDENT> if k in j.class_trait_names(): <NEW_LINE> <INDENT> tr = j.class_traits()[k] <NEW_LINE> if tr.is_trait_type(Int): <NEW_LINE> <INDENT> setattr(obj,k,1) <NEW_LINE> <DEDENT> elif tr.is_trait_type(Float): <NEW_LINE> <INDENT> setattr(obj,k,0.1) <NEW_LINE> <DEDENT> elif tr.is_trait_type(Bool): <NEW_LINE> <INDENT> setattr(obj,k,False) <NEW_LINE> <DEDENT> elif tr.is_trait_type(Range): <NEW_LINE> <INDENT> low = tr.handler._low <NEW_LINE> high = tr.handler._high <NEW_LINE> setattr(obj,k,(high+low)/2) <NEW_LINE> <DEDENT> elif tr.is_trait_type(TraitEnum) or tr.is_trait_type(Enum): <NEW_LINE> <INDENT> v = tr.handler.values <NEW_LINE> setattr(obj,k,v[len(v)//2])
Test that ensures that digest of Acoular classes changes correctly on changes of CArray and List attributes.
6259904f30c21e258be99c87
class CancelTrade(Trade): <NEW_LINE> <INDENT> pass
The act of cancelling a `Sell`_, `Donate`_ or `Rent`_.
6259904f63d6d428bbee3c4e
class TagContrastTable(models.Model): <NEW_LINE> <INDENT> id = models.AutoField(primary_key=True) <NEW_LINE> tag = MyForeignKey(Tag, blank=True, null=True) <NEW_LINE> cat_name = models.CharField(max_length=32, blank=True, null=True) <NEW_LINE> is_deleted = models.BooleanField(blank=True, default=False) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.cat_name
Tag mapping table.
6259904f8da39b475be04668
class FullPathAction(argparse.Action): <NEW_LINE> <INDENT> def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace, values: Any, option_string: str = None) -> None: <NEW_LINE> <INDENT> setattr(namespace, self.dest, os.path.abspath(str(values)))
An argparse.Action to ensure provided paths are absolute.
6259904fd486a94d0ba2d447
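A usage sketch, assuming the FullPathAction class above is in scope; the flag name and path are made up:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--path', action=FullPathAction)
args = parser.parse_args(['--path', 'data/file.txt'])
print(args.path)  # absolute form of data/file.txt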
class EventCallbackLog(Resource): <NEW_LINE> <INDENT> type = 'event_callback_logs'
Represents a request and response from a single attempt to notify a callback of an event.
6259904f462c4b4f79dbce82
class geometric_mean: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.s=0. <NEW_LINE> self.N=0 <NEW_LINE> self.ret_value = -1 <NEW_LINE> <DEDENT> def step(self, value): <NEW_LINE> <INDENT> if isfloat(value): <NEW_LINE> <INDENT> v=float(value) <NEW_LINE> if not isnan(v): <NEW_LINE> <INDENT> if v<0: <NEW_LINE> <INDENT> self.ret_value=None <NEW_LINE> <DEDENT> elif v==0 and self.ret_value!=None: <NEW_LINE> <INDENT> self.ret_value=0. <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.s+=log(v) <NEW_LINE> <DEDENT> self.N+=1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def finalize(self): <NEW_LINE> <INDENT> if self.N==0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if self.ret_value != -1: <NEW_LINE> <INDENT> return self.ret_value <NEW_LINE> <DEDENT> return exp(self.s/float(self.N))
Computes the geometric mean of the values, modeled after scipy.stats.gmean. If the input contains any value < 0, the result is None; if it contains a 0 (and no negatives), the result is 0.
6259904fd7e4931a7ef3d4fb
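The step/finalize interface above appears designed for registration as an SQLite aggregate (it matches what sqlite3.Connection.create_aggregate expects); the value it computes is the geometric mean, exp(mean(log(x))) for strictly positive inputs. A standalone check of that identity:

from math import exp, log

values = [2.0, 8.0]
gmean = exp(sum(log(v) for v in values) / len(values))
assert abs(gmean - 4.0) < 1e-12  # geometric mean of 2 and 8 is sqrt(16) = 4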
@python_2_unicode_compatible <NEW_LINE> class PreStepSource(models.Model): <NEW_LINE> <INDENT> recipe = models.ForeignKey(Recipe, related_name='prestepsources', on_delete=models.CASCADE) <NEW_LINE> filename = models.CharField(max_length=120, blank=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.filename
Since we use bash to execute our steps, we can just add some files to be sourced to import variables, functions, etc, before running the step.
6259904f71ff763f4b5e8c2b
class QuitFrame(Frame): <NEW_LINE> <INDENT> def __init__(self, master=None): <NEW_LINE> <INDENT> Frame.__init__(self, master) <NEW_LINE> self.root = master
Build quit frame
6259904f507cdc57c63a6223
class TestTemplateManager(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self._manager = phishingpage.TemplateManager() <NEW_LINE> self._template_path = PHISHING_PAGES_DIR <NEW_LINE> <DEDENT> def test_get_templates(self): <NEW_LINE> <INDENT> actual = self._manager.get_templates() <NEW_LINE> if ("connection_reset" and "office365" and "firmware-upgrade") not in actual: <NEW_LINE> <INDENT> self.fail("Failed to get correct templates!") <NEW_LINE> <DEDENT> <DEDENT> def test_find_user_templates(self): <NEW_LINE> <INDENT> name = "new_template" <NEW_LINE> path = self._template_path + name <NEW_LINE> os.makedirs(path) <NEW_LINE> actual = self._manager.find_user_templates() <NEW_LINE> if name not in actual: <NEW_LINE> <INDENT> self.fail("Failed to find a new template!") <NEW_LINE> <DEDENT> os.rmdir(path) <NEW_LINE> <DEDENT> def test_add_user_templates(self): <NEW_LINE> <INDENT> name = "new_template" <NEW_LINE> path = self._template_path + name <NEW_LINE> os.makedirs(path) <NEW_LINE> self._manager.add_user_templates() <NEW_LINE> templates = self._manager.get_templates() <NEW_LINE> if name not in templates: <NEW_LINE> <INDENT> self.fail("Failed to add a new template!") <NEW_LINE> <DEDENT> os.rmdir(path)
Test TemplateManager class.
6259904f435de62698e9d281
class SerViviente: <NEW_LINE> <INDENT> def __init__(self, tipo, nombre, fuerza, arma): <NEW_LINE> <INDENT> self.tipo = tipo <NEW_LINE> self.nombre = nombre <NEW_LINE> self.fuerza = fuerza <NEW_LINE> self.arma = arma <NEW_LINE> self.describir_ser_viviente() <NEW_LINE> <DEDENT> def estado_ser_viviente(self): <NEW_LINE> <INDENT> print("El {} ahora tiene un nivel de fuerza de {}".format(self.tipo, self.fuerza)) <NEW_LINE> if self.fuerza == 0: <NEW_LINE> <INDENT> print("El {} ha muerto!!!!".format(self.tipo)) <NEW_LINE> <DEDENT> <DEDENT> def describir_ser_viviente(self): <NEW_LINE> <INDENT> print("Este es un {} llamado {}. Tiene fuerza nivel {} y posee {}.".format(self.tipo, self.nombre, self.fuerza, self.arma))
Class for defining both heroes and monsters in our game.
6259904f3617ad0b5ee075c3
class HeunDeterministic(Integrator): <NEW_LINE> <INDENT> _ui_name = "Heun" <NEW_LINE> def scheme(self, X, dfun, coupling, local_coupling, stimulus): <NEW_LINE> <INDENT> m_dx_tn = dfun(X, coupling, local_coupling) <NEW_LINE> inter = X + self.dt * (m_dx_tn + stimulus) <NEW_LINE> if self.state_variable_boundaries is not None: <NEW_LINE> <INDENT> self.bound_state(inter) <NEW_LINE> <DEDENT> if self.clamped_state_variable_values is not None: <NEW_LINE> <INDENT> self.clamp_state(inter) <NEW_LINE> <DEDENT> dX = (m_dx_tn + dfun(inter, coupling, local_coupling)) * self.dt / 2.0 <NEW_LINE> X_next = X + dX + self.dt * stimulus <NEW_LINE> if self.state_variable_boundaries is not None: <NEW_LINE> <INDENT> self.bound_state(X_next) <NEW_LINE> <DEDENT> if self.clamped_state_variable_values is not None: <NEW_LINE> <INDENT> self.clamp_state(X_next) <NEW_LINE> <DEDENT> return X_next
It is a simple example of a predictor-corrector method. It is also known as the modified trapezoidal method, which uses the Euler method as its predictor; it is an explicit counterpart of the implicit trapezoidal scheme.
6259904fe76e3b2f99fd9e83
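A standalone sketch of the Heun predictor-corrector step that the scheme above applies, stripped of boundaries, clamping, coupling and stimulus, for the toy equation dy/dt = -y:

def heun_step(y, dt, f):
    k1 = f(y)                        # slope at the current state
    y_pred = y + dt * k1             # Euler predictor
    k2 = f(y_pred)                   # slope at the predicted state
    return y + dt * (k1 + k2) / 2.0  # corrector: average of the two slopes

f = lambda y: -y
y, dt = 1.0, 0.1
for _ in range(10):
    y = heun_step(y, dt, f)
print(y)  # close to exp(-1) ~= 0.368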
class RemovedIn20Warning(SADeprecationWarning): <NEW_LINE> <INDENT> deprecated_since = "1.4" <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return ( super(RemovedIn20Warning, self).__str__() + " (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9)" )
Issued for usage of APIs specifically deprecated in SQLAlchemy 2.0. .. seealso:: :ref:`error_b8d9`. :ref:`deprecation_20_mode`
6259904f097d151d1a2c24f4
class CourseDetailView(DetailView): <NEW_LINE> <INDENT> model = Course <NEW_LINE> context_object_name = u"course" <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> kwargs = super(CourseDetailView, self).get_context_data() <NEW_LINE> kwargs['note_set'] = self.object.note_set.filter(is_hidden=False) <NEW_LINE> kwargs['file_upload_form'] = FileUploadForm() <NEW_LINE> kwargs['note_categories'] = Note.NOTE_CATEGORIES <NEW_LINE> if self.request.user.is_authenticated(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.request.user.get_profile().flagged_courses.get(pk=self.object.pk) <NEW_LINE> kwargs['already_flagged'] = True <NEW_LINE> <DEDENT> except ObjectDoesNotExist: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return kwargs
Class-based view for the course html page
6259904f4e696a045264e862
class Person: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.firstname = "John" <NEW_LINE> self.lastname = "Depp" <NEW_LINE> self.eyecolor = "grey" <NEW_LINE> self.age = 42
This class provides information related to the Person
6259904f07f4c71912bb08b9
class MyPandasStandardScaler(BaseEstimator, TransformerMixin): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.scaler = StandardScaler(**kwargs) <NEW_LINE> <DEDENT> def fit(self, X, y=None): <NEW_LINE> <INDENT> self.cont_cols = get_cont_cols(X) <NEW_LINE> self.scaler.fit(X[self.cont_cols]) <NEW_LINE> return self <NEW_LINE> <DEDENT> def transform(self, X, y=None): <NEW_LINE> <INDENT> X = X.copy() <NEW_LINE> X[self.cont_cols] = self.scaler.transform(X[self.cont_cols]) <NEW_LINE> return X
Transform all numeric, non-categorical variables according to StandardScaler. Return pandas DataFrame
6259904f435de62698e9d282
class OpenBCISample(object): <NEW_LINE> <INDENT> def __init__(self, aux_data=None, board_time=0, channel_data=None, error=None, imp_data=None, packet_type=0, protocol='wifi', sample_number=0, start_byte=0, stop_byte=0, valid=True, accel_data=None): <NEW_LINE> <INDENT> self.aux_data = aux_data if aux_data is not None else [] <NEW_LINE> self.board_time = board_time <NEW_LINE> self.channels_data = channel_data if aux_data is not None else [] <NEW_LINE> self.error = error <NEW_LINE> self.id = sample_number <NEW_LINE> self.imp_data = imp_data if aux_data is not None else [] <NEW_LINE> self.packet_type = packet_type <NEW_LINE> self.protocol = protocol <NEW_LINE> self.sample_number = sample_number <NEW_LINE> self.start_byte = start_byte <NEW_LINE> self.stop_byte = stop_byte <NEW_LINE> self.timestamp = 0 <NEW_LINE> self._timestamps = {} <NEW_LINE> self.valid = valid <NEW_LINE> self.accel_data = accel_data if accel_data is not None else []
Object encapsulating a single sample from the OpenBCI board.
6259904f21a7993f00c673ed
class OrderedDict(collections.OrderedDict): <NEW_LINE> <INDENT> def insert_before(self, existing_key, new_key, new_value): <NEW_LINE> <INDENT> if existing_key not in self or new_key in self: <NEW_LINE> <INDENT> self.update([(new_key, new_value)]) <NEW_LINE> return <NEW_LINE> <DEDENT> new_items = [] <NEW_LINE> for (k, v) in self.items(): <NEW_LINE> <INDENT> if k == existing_key: <NEW_LINE> <INDENT> new_items.append((new_key, new_value)) <NEW_LINE> <DEDENT> new_items.append((k, v)) <NEW_LINE> <DEDENT> self.clear() <NEW_LINE> self.update(new_items) <NEW_LINE> <DEDENT> def insert_after(self, existing_key, new_key, new_value): <NEW_LINE> <INDENT> if existing_key not in self or new_key in self: <NEW_LINE> <INDENT> self.update([(new_key, new_value)]) <NEW_LINE> return <NEW_LINE> <DEDENT> new_items = [] <NEW_LINE> for (k, v) in self.items(): <NEW_LINE> <INDENT> new_items.append((k, v)) <NEW_LINE> if k == existing_key: <NEW_LINE> <INDENT> new_items.append((new_key, new_value)) <NEW_LINE> <DEDENT> <DEDENT> self.clear() <NEW_LINE> self.update(new_items)
Adds more insertion functionality to OrderedDict.
6259904f7d847024c075d858
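A short usage sketch of the two insertion helpers defined above; note that when the existing key is missing (or the new key already exists) both methods fall back to a plain append.

d = OrderedDict([("a", 1), ("c", 3)])
d.insert_after("a", "b", 2)            # a, b, c
d.insert_before("a", "start", 0)       # start, a, b, c
d.insert_before("missing", "end", 9)   # existing key absent -> appended at the end
print(list(d.items()))
# [('start', 0), ('a', 1), ('b', 2), ('c', 3), ('end', 9)]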
class HDF5Reporter(_BaseReporter): <NEW_LINE> <INDENT> @property <NEW_LINE> def backend(self): <NEW_LINE> <INDENT> return HDF5TrajectoryFile <NEW_LINE> <DEDENT> def __init__(self, file, reportInterval, coordinates=True, time=True, cell=True, potentialEnergy=True, kineticEnergy=True, temperature=True, velocities=False, atomSubset=None): <NEW_LINE> <INDENT> super(HDF5Reporter, self).__init__(file, reportInterval, coordinates, time, cell, potentialEnergy, kineticEnergy, temperature, velocities, atomSubset)
HDF5Reporter stores a molecular dynamics trajectory in the HDF5 format. This object supports saving all kinds of information from the simulation -- more than any other trajectory format. In addition to all of the options, the topology of the system will also (of course) be stored in the file. All of the information is compressed, so the size of the file is not much different than DCD, despite the added flexibility. Parameters ---------- file : str, or HDF5TrajectoryFile Either an open HDF5TrajectoryFile object to write to, or a string specifying the filename of a new HDF5 file to save the trajectory to. reportInterval : int The interval (in time steps) at which to write frames. coordinates : bool Whether to write the coordinates to the file. time : bool Whether to write the current time to the file. cell : bool Whether to write the current unit cell dimensions to the file. potentialEnergy : bool Whether to write the potential energy to the file. kineticEnergy : bool Whether to write the kinetic energy to the file. temperature : bool Whether to write the instantaneous temperature to the file. velocities : bool Whether to write the velocities to the file. atomSubset : array_like, default=None Only write a subset of the atoms, with these (zero based) indices to the file. If None, *all* of the atoms will be written to disk. Notes ----- If you use the ``atomSubset`` option to write only a subset of the atoms to disk, the ``kineticEnergy``, ``potentialEnergy``, and ``temperature`` fields will not change. They will still refer to the energy and temperature of the *whole* system, and are not "subsetted" to only include the energy of your subsystem. Examples -------- >>> simulation = Simulation(topology, system, integrator) >>> h5_reporter = HDF5Reporter('traj.h5', 100) >>> simulation.reporters.append(h5_reporter) >>> simulation.step(10000) >>> traj = mdtraj.trajectory.load('traj.h5')
6259904f8da39b475be0466a
class Dataset(object): <NEW_LINE> <INDENT> def __init__(self, ds_name, *views): <NEW_LINE> <INDENT> self.ds_name = ds_name <NEW_LINE> self.views = views <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self.views[key] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_cache(cls, ds_name, views): <NEW_LINE> <INDENT> return cls(ds_name, *[View.from_cache(ds_name, view_name) for view_name in views]) <NEW_LINE> <DEDENT> def get_values(self): <NEW_LINE> <INDENT> val_list = [view.view_data for view in self.views] <NEW_LINE> return np.hstack(val_list) <NEW_LINE> <DEDENT> def get_feature_names(self): <NEW_LINE> <INDENT> return [feature_name for view in self.views for feature_name in view.feature_names]
The Dataset object represents a single named dataset with multiple views into it.
6259904f596a897236128ff1
class ScrapeTable(QTableView): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> from visualscrape.ui.viewer.support import ScrapeModel, ScrapeItemDelegate <NEW_LINE> super(ScrapeTable, self).__init__(parent) <NEW_LINE> self.setModel(ScrapeModel()) <NEW_LINE> self.setItemDelegate(ScrapeItemDelegate()) <NEW_LINE> self.resizeRowsToContents() <NEW_LINE> self.resizeColumnsToContents() <NEW_LINE> self.horizontalHeader().setMovable(True)
The table view approach to visualization
6259904fd486a94d0ba2d449
class FabrikBlock(Block): <NEW_LINE> <INDENT> def __init__(self, vertex): <NEW_LINE> <INDENT> super(FabrikBlock, self).__init__(vertex) <NEW_LINE> self._node = vertex <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> string = "<Block vertex=" + str(self.vertex.name) + ", index=" + str(self.index) + ", type=" + self.node.nodeType + ">" <NEW_LINE> return string <NEW_LINE> <DEDENT> @property <NEW_LINE> def node(self): <NEW_LINE> <INDENT> return self._node <NEW_LINE> <DEDENT> @property <NEW_LINE> def flows(self): <NEW_LINE> <INDENT> return (x.flow for x in self._node.feeds) <NEW_LINE> <DEDENT> @property <NEW_LINE> def flowsGoingOut(self): <NEW_LINE> <INDENT> return filter(lambda f: (f.origin.block == self), self.flows) <NEW_LINE> <DEDENT> @property <NEW_LINE> def flowsComingIn(self): <NEW_LINE> <INDENT> return filter(lambda f: (f.dest.block == self), self.flows) <NEW_LINE> <DEDENT> @property <NEW_LINE> def isFlowOrigin(self): <NEW_LINE> <INDENT> if self.flowsGoingOut: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def isFlowDest(self): <NEW_LINE> <INDENT> if self.flowsComingIn: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
A subclass of the Block: a visual representation of a Vertex
6259904f004d5f362081fa2b
class Game(models.Model): <NEW_LINE> <INDENT> SEMS = ( ("S", "Spring"), ("F", "Fall"), ) <NEW_LINE> semester = models.CharField(max_length=1, choices=SEMS) <NEW_LINE> year = models.PositiveIntegerField() <NEW_LINE> start_date = models.DateField() <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> if self.semester == "S": <NEW_LINE> <INDENT> return "Spring '" + str(self.year)[-2:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "Fall '" + str(self.year)[-2:]
Games are the events that tie everything together
6259904f45492302aabfd958
class ExecutionContextValueDoesNotExist(GenUtilsKeyError): <NEW_LINE> <INDENT> message = "Could not get key={key} from ExecutionContext." <NEW_LINE> key = None
Raised when attempting to get a value that does not exist in a backend
6259904f71ff763f4b5e8c2d
class TestException(Exception): <NEW_LINE> <INDENT> pass
Test exception
6259904fe5267d203ee6cd70
class ServiceHttpColonyPlugin(colony.base.system.Plugin): <NEW_LINE> <INDENT> id = "pt.hive.colony.plugins.service.http.colony" <NEW_LINE> name = "Http Service Colony" <NEW_LINE> description = "The plugin that offers the http service colony" <NEW_LINE> version = "1.0.0" <NEW_LINE> author = "Hive Solutions Lda. <[email protected]>" <NEW_LINE> platforms = [ colony.base.system.CPYTHON_ENVIRONMENT, colony.base.system.JYTHON_ENVIRONMENT, colony.base.system.IRON_PYTHON_ENVIRONMENT ] <NEW_LINE> capabilities = [ "http_service_handler" ] <NEW_LINE> capabilities_allowed = [ "http_handler" ] <NEW_LINE> main_modules = [ "service_http.colony.exceptions", "service_http.colony.system" ] <NEW_LINE> service_http_colony = None <NEW_LINE> http_handler_plugins = [] <NEW_LINE> def load_plugin(self): <NEW_LINE> <INDENT> colony.base.system.Plugin.load_plugin(self) <NEW_LINE> import service_http.colony.system <NEW_LINE> self.service_http_colony = service_http.colony.system.ServiceHttpColony(self) <NEW_LINE> <DEDENT> @colony.base.decorators.load_allowed <NEW_LINE> def load_allowed(self, plugin, capability): <NEW_LINE> <INDENT> colony.base.system.Plugin.load_allowed(self, plugin, capability) <NEW_LINE> <DEDENT> @colony.base.decorators.unload_allowed <NEW_LINE> def unload_allowed(self, plugin, capability): <NEW_LINE> <INDENT> colony.base.system.Plugin.unload_allowed(self, plugin, capability) <NEW_LINE> <DEDENT> def get_handler_name(self): <NEW_LINE> <INDENT> return self.service_http_colony.get_handler_name() <NEW_LINE> <DEDENT> def handle_request(self, request): <NEW_LINE> <INDENT> return self.service_http_colony.handle_request(request) <NEW_LINE> <DEDENT> @colony.base.decorators.load_allowed_capability("http_handler") <NEW_LINE> def http_handler_load_allowed(self, plugin, capability): <NEW_LINE> <INDENT> self.http_handler_plugins.append(plugin) <NEW_LINE> self.service_http_colony.http_handler_load(plugin) <NEW_LINE> <DEDENT> @colony.base.decorators.unload_allowed_capability("http_handler") <NEW_LINE> def http_handler_unload_allowed(self, plugin, capability): <NEW_LINE> <INDENT> self.http_handler_plugins.remove(plugin) <NEW_LINE> self.service_http_colony.http_handler_unload(plugin)
The main class for the Http Service Colony plugin.
6259904f7b25080760ed8720
class _RealFFTBasis(_AbstractMicrostructureBasis): <NEW_LINE> <INDENT> def _fftn(self, X): <NEW_LINE> <INDENT> return rfftn(X, axes=self._axes, threads=self._n_jobs) <NEW_LINE> <DEDENT> def _ifftn(self, X): <NEW_LINE> <INDENT> return irfftn(X, axes_shape=self._axes_shape, axes=self._axes, threads=self._n_jobs).real
This class makes the bases that create real-valued microstructure functions use the real rFFT/irFFT algorithms, and it selects the appropriate FFT module depending on whether or not pyfftw is installed.
6259904fbaa26c4b54d50730
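A hedged sketch of the import-fallback the docstring describes: prefer pyfftw's numpy-compatible interface when it is installed, otherwise use numpy.fft. The real pymks wiring may differ in detail.

import numpy as np

try:
    from pyfftw.interfaces import numpy_fft as fft_module  # drop-in numpy.fft-compatible API
except ImportError:
    from numpy import fft as fft_module

X = np.random.rand(4, 8, 8)
X_hat = fft_module.rfftn(X, axes=(1, 2))        # real-input FFT over the spatial axes
X_back = fft_module.irfftn(X_hat, axes=(1, 2))  # inverse transform recovers the field
print(np.allclose(X, X_back))                   # True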
class ComputeAtmosphericProperties(Group): <NEW_LINE> <INDENT> def initialize(self): <NEW_LINE> <INDENT> self.options.declare('num_nodes',default=1,desc="Number of mission analysis points to run") <NEW_LINE> self.options.declare('true_airspeed_in',default=False,desc="Number of mission analysis points to run") <NEW_LINE> <DEDENT> def setup(self): <NEW_LINE> <INDENT> nn = self.options['num_nodes'] <NEW_LINE> tas_in = self.options['true_airspeed_in'] <NEW_LINE> self.add_subsystem('temp', TemperatureComp(num_nodes=nn), promotes_inputs=['fltcond|h','fltcond|TempIncrement'], promotes_outputs=['fltcond|T']) <NEW_LINE> self.add_subsystem('pressure',PressureComp(num_nodes=nn), promotes_inputs=['fltcond|h'], promotes_outputs=['fltcond|p']) <NEW_LINE> self.add_subsystem('density',DensityComp(num_nodes=nn), promotes_inputs=['fltcond|p', 'fltcond|T'], promotes_outputs=['fltcond|rho']) <NEW_LINE> self.add_subsystem('speedofsound',SpeedOfSoundComp(num_nodes=nn), promotes_inputs=['fltcond|T'], promotes_outputs=['fltcond|a']) <NEW_LINE> if tas_in: <NEW_LINE> <INDENT> self.add_subsystem('equivair',EquivalentAirspeedComp(num_nodes=nn),promotes_inputs=['*'],promotes_outputs=['*']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.add_subsystem('trueair',TrueAirspeedComp(num_nodes=nn),promotes_inputs=['*'],promotes_outputs=['*']) <NEW_LINE> <DEDENT> self.add_subsystem('dynamicpressure',DynamicPressureComp(num_nodes=nn),promotes_inputs=["*"],promotes_outputs=["*"]) <NEW_LINE> self.add_subsystem('machnumber',MachNumberComp(num_nodes=nn),promotes_inputs=["*"],promotes_outputs=["*"])
Computes pressure, density, temperature, dyn pressure, and true airspeed Inputs ------ fltcond|h : float Altitude (vector, km) fltcond|Ueas : float Equivalent airspeed (vector, m/s) fltcond|TempIncrement : float Temperature increment for non-standard day (vector, degC) Outputs ------- fltcond|p : float Pressure (vector, Pa) fltcond|rho : float Density (vector, kg/m3) fltcond|T : float Temperature (vector, K) fltcond|Utrue : float True airspeed (vector, m/s) fltcond|q : float Dynamic pressure (vector, Pa) Options ------- num_nodes : int Number of analysis points to run (sets vec length) (default 1) true_airspeed_in : bool Flip to true if input vector is Utrue, not Ueas. If this is true, fltcond|Utrue will be an input and fltcond|Ueas will be an output.
6259904fd53ae8145f9198e8
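A hedged sketch of running this group on its own in an OpenMDAO problem; it assumes ComputeAtmosphericProperties is importable from the surrounding package and uses only standard openmdao.api calls, with placeholder flight conditions.

import numpy as np
import openmdao.api as om

nn = 5
prob = om.Problem()
prob.model.add_subsystem('atmos', ComputeAtmosphericProperties(num_nodes=nn), promotes=['*'])
prob.setup()

prob.set_val('fltcond|h', np.linspace(0, 10, nn), units='km')
prob.set_val('fltcond|Ueas', 100.0 * np.ones(nn), units='m/s')
prob.run_model()

print(prob.get_val('fltcond|Utrue', units='m/s'))  # true airspeed grows with altitude at constant Ueas
print(prob.get_val('fltcond|q', units='Pa'))       # dynamic pressure stays tied to Ueas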
class RedisManagementClient: <NEW_LINE> <INDENT> def __init__( self, credential: "AsyncTokenCredential", subscription_id: str, base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: <NEW_LINE> <INDENT> self._config = RedisManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) <NEW_LINE> self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) <NEW_LINE> client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} <NEW_LINE> self._serialize = Serializer(client_models) <NEW_LINE> self._deserialize = Deserializer(client_models) <NEW_LINE> self._serialize.client_side_validation = False <NEW_LINE> self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.redis = RedisOperations(self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.firewall_rules = FirewallRulesOperations(self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.patch_schedules = PatchSchedulesOperations(self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.linked_server = LinkedServerOperations(self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.private_endpoint_connections = PrivateEndpointConnectionsOperations(self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.private_link_resources = PrivateLinkResourcesOperations(self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> <DEDENT> def _send_request( self, request: HttpRequest, **kwargs: Any ) -> Awaitable[AsyncHttpResponse]: <NEW_LINE> <INDENT> request_copy = deepcopy(request) <NEW_LINE> request_copy.url = self._client.format_url(request_copy.url) <NEW_LINE> return self._client.send_request(request_copy, **kwargs) <NEW_LINE> <DEDENT> async def close(self) -> None: <NEW_LINE> <INDENT> await self._client.close() <NEW_LINE> <DEDENT> async def __aenter__(self) -> "RedisManagementClient": <NEW_LINE> <INDENT> await self._client.__aenter__() <NEW_LINE> return self <NEW_LINE> <DEDENT> async def __aexit__(self, *exc_details) -> None: <NEW_LINE> <INDENT> await self._client.__aexit__(*exc_details)
REST API for Azure Redis Cache Service. :ivar operations: Operations operations :vartype operations: azure.mgmt.redis.aio.operations.Operations :ivar redis: RedisOperations operations :vartype redis: azure.mgmt.redis.aio.operations.RedisOperations :ivar firewall_rules: FirewallRulesOperations operations :vartype firewall_rules: azure.mgmt.redis.aio.operations.FirewallRulesOperations :ivar patch_schedules: PatchSchedulesOperations operations :vartype patch_schedules: azure.mgmt.redis.aio.operations.PatchSchedulesOperations :ivar linked_server: LinkedServerOperations operations :vartype linked_server: azure.mgmt.redis.aio.operations.LinkedServerOperations :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations :vartype private_endpoint_connections: azure.mgmt.redis.aio.operations.PrivateEndpointConnectionsOperations :ivar private_link_resources: PrivateLinkResourcesOperations operations :vartype private_link_resources: azure.mgmt.redis.aio.operations.PrivateLinkResourcesOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: Gets subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call. :type subscription_id: str :param base_url: Service URL. Default value is 'https://management.azure.com'. :type base_url: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
6259904fdc8b845886d54a44
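A hedged usage sketch following the usual azure-identity plus azure-mgmt async pattern; the subscription, resource group, and cache names are placeholders, and the exact operation names can vary slightly between azure-mgmt-redis versions.

import asyncio
from azure.identity.aio import DefaultAzureCredential


async def main():
    credential = DefaultAzureCredential()
    async with RedisManagementClient(credential, "<subscription-id>") as client:
        # Fetch a single cache; .redis is the RedisOperations group wired up in __init__.
        cache = await client.redis.get("<resource-group>", "<cache-name>")
        print(cache.host_name)
    await credential.close()


asyncio.run(main())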
class TestPipeline(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.pipeline = MyPipeline() <NEW_LINE> self.pipeline.input_image = ["toto", "tutu"] <NEW_LINE> self.pipeline.dynamic_parameter = [3, 1] <NEW_LINE> self.pipeline.other_input = 5 <NEW_LINE> <DEDENT> def test_iterative_pipeline_connection(self): <NEW_LINE> <INDENT> self.pipeline() <NEW_LINE> if sys.version_info >= (2, 7): <NEW_LINE> <INDENT> self.assertIn("toto:5.0:3.0", self.pipeline.output_image) <NEW_LINE> self.assertIn("tutu:5.0:1.0", self.pipeline.output_image) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.assertTrue("toto:5.0:3.0" in self.pipeline.output_image) <NEW_LINE> self.assertTrue("tutu:5.0:1.0" in self.pipeline.output_image) <NEW_LINE> <DEDENT> self.assertEqual(self.pipeline.other_output, [self.pipeline.other_input, self.pipeline.other_input])
Class to test a pipeline with an iterative node
6259904f15baa72349463412
class CRUUnit(HybridBlock): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, strides, group_width, bn_use_global_stats, conv1_params=None, conv2_params=None, **kwargs): <NEW_LINE> <INDENT> super(CRUUnit, self).__init__(**kwargs) <NEW_LINE> assert (strides == 1) or ((conv1_params is None) and (conv2_params is None)) <NEW_LINE> self.resize_input = (in_channels != out_channels) <NEW_LINE> self.resize_identity = (in_channels != out_channels) or (strides != 1) <NEW_LINE> with self.name_scope(): <NEW_LINE> <INDENT> if self.resize_input: <NEW_LINE> <INDENT> self.input_conv = pre_conv1x1_block( in_channels=in_channels, out_channels=out_channels, bn_use_global_stats=bn_use_global_stats) <NEW_LINE> <DEDENT> self.body = CRUBottleneck( in_channels=out_channels, out_channels=out_channels, strides=strides, group_width=group_width, bn_use_global_stats=bn_use_global_stats, conv1_params=conv1_params, conv2_params=conv2_params) <NEW_LINE> if self.resize_identity: <NEW_LINE> <INDENT> self.identity_conv = cru_conv1x1_block( in_channels=in_channels, out_channels=out_channels, strides=strides, bn_use_global_stats=bn_use_global_stats, conv_params=self.input_conv.conv.params) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def hybrid_forward(self, F, x): <NEW_LINE> <INDENT> if self.resize_identity: <NEW_LINE> <INDENT> identity = self.identity_conv(x) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> identity = x <NEW_LINE> <DEDENT> if self.resize_input: <NEW_LINE> <INDENT> x = self.input_conv(x) <NEW_LINE> <DEDENT> x = self.body(x) <NEW_LINE> x = x + identity <NEW_LINE> return x
CRU-Net collective residual unit. Parameters: ---------- in_channels : int Number of input channels. out_channels : int Number of output channels. strides : int or tuple/list of 2 int Strides of the convolution. group_width: int Group width parameter. bn_use_global_stats : bool Whether global moving statistics is used instead of local batch-norm for BatchNorm layers. conv1_params : ParameterDict, default None Weights for the convolution layer #1. conv2_params : ParameterDict, default None Weights for the convolution layer #2.
6259904f21a7993f00c673ef
class LinkAggregationStatus_TLV(TLV): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> TLV.__init__(self) <NEW_LINE> self.name = "Port Link Aggregation Enabled" <NEW_LINE> self.field = "switch_port_link_aggregation_enabled" <NEW_LINE> self.value = "%s" % str((data[0] & 0x02) != 0)
LinkAggregationStatus_TLV class
6259904f8e7ae83300eea518
class Polyped(object): <NEW_LINE> <INDENT> def __init__(self,g,d,x,lda,eps=1e-10): <NEW_LINE> <INDENT> self.g = g <NEW_LINE> self.d = d <NEW_LINE> self.x = x <NEW_LINE> self.lda = lda <NEW_LINE> self.fon,self.foff,self.feet,self.events = footfallPattern(d,g,fullOutput=True) <NEW_LINE> <DEDENT> def footPositions(self,t): <NEW_LINE> <INDENT> i = (where(self.events>=t)[0][0]-1)%(len(self.events)) <NEW_LINE> f = array(self.feet[i],dtype=bool) <NEW_LINE> return(self.x[f] - outer( (( (t-self.fon[f])*(t>=self.fon[f]) + (2*pi-self.fon[f]+t)*(t<self.fon[f]) )*self.lda)/self.d[f],Polyped.e0) ) <NEW_LINE> <DEDENT> def support(self,t): <NEW_LINE> <INDENT> y = self.footPositions(t) <NEW_LINE> if y.shape[0]==0: <NEW_LINE> <INDENT> return(nan) <NEW_LINE> <DEDENT> return(MultiPoint(y).convex_hull) <NEW_LINE> <DEDENT> def margin(self,t): <NEW_LINE> <INDENT> h = self.support(t) <NEW_LINE> if ( not isinstance(h,Polygon) and not isinstance(h,Point) and not isinstance(h,LineString) and isnan(h) ): <NEW_LINE> <INDENT> return(-inf) <NEW_LINE> <DEDENT> l = h.boundary <NEW_LINE> return(((-1)**(h.contains(Polyped.origin)+1))*l.distance(Polyped.origin))
Represents the polyped locomotion system. Compute limb configurations, polygons of support and stability margins of a McGhee and Frank parametrized polyped. Requires the relative phases of the limbs, the duty cycle of the limbs, the touchdown positions of the feet, and the stride lengths. TYPICAL USAGE ============= >>> from numpy import array,ones,pi >>> g = 2.0*pi*ones((3))/4.0 >>> d = 2.0*pi*ones((4))*3.0/4.0 >>> x = array([[-1,1],[1,-1],[-1,-1],[1,1]]) >>> lda = 0.7 >>> p = Polyped(g,d,x,lda) >>> p.footPositions(pi) array([[-1.46666667, 1. ], [ 0.76666667, -1. ], [ 0.3 , 1. ]]) @ivar g: relative phases @type g: float, array @ivar d: duty cycle in radians @type d: float, array @ivar x: foot touchdown position @type x: float, array @ivar lda: stride length @type lda: float @ivar fon: foot touch-down phases @type fon: float, array @ivar foff: foot lift-off phases @type foff: float, array @ivar feet: foot contact patterns associated with events @type feet: float, array @ivar events: phases of different event boundaries @type events: float, array @cvar origin: The location of the centre of mass, origin by default @type origin: float, array @cvar e0: direction of movement, should be unit vector @type e0: float, array
6259904ff7d966606f7492fb
class modulation(s.str): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super().__init__(enum={"am", "fm", "lsb", "usb", "dmr", "dstar"})
Signal modulation type.
6259904fb57a9660fecd2f03
class LazyFailureArcTest(VoCRF): <NEW_LINE> <INDENT> def __init__(self, *args, **kw): <NEW_LINE> <INDENT> super(LazyFailureArcTest, self).__init__(*args, **kw) <NEW_LINE> self.H = len(self.states)*self.A <NEW_LINE> <DEDENT> def test_gradient(self, T): <NEW_LINE> <INDENT> D = 100 <NEW_LINE> sparse = LazyRegularizedAdagrad(D*self.A, C=0, L=2, eta=1.0, fudge=1) <NEW_LINE> sparse.w[:] = np.random.uniform(-1, 1, size=sparse.d) <NEW_LINE> sparse.step = 0 <NEW_LINE> groups = self.group_structure() <NEW_LINE> dense = OnlineProx(groups, self.H, C=0, L=2, eta=1.0, fudge=1) <NEW_LINE> dense.w[:] = np.random.uniform(-1, 1, size=dense.d) <NEW_LINE> sparse_W_copy = np.array(sparse.w, copy=1) <NEW_LINE> dense_W_copy = np.array(dense.w, copy=1) <NEW_LINE> x = MockInstance(T, self.A, D = D, K = 5) <NEW_LINE> S = ScoringModel(x, self.A, self.feature_backoff, sparse, dense) <NEW_LINE> self.gradient(T, x.tags, S) <NEW_LINE> S.backprop() <NEW_LINE> def func(): <NEW_LINE> <INDENT> S = ScoringModel(x, self.A, self.feature_backoff, sparse, dense) <NEW_LINE> return self.objective(T, x.tags, S) <NEW_LINE> <DEDENT> if 0: <NEW_LINE> <INDENT> g = sparse_W_copy - sparse.finalize() <NEW_LINE> sparse.w[:] = sparse_W_copy <NEW_LINE> dense.w[:] = dense_W_copy <NEW_LINE> [keys] = np.nonzero(g) <NEW_LINE> fdcheck(func, sparse.w, g, keys) <NEW_LINE> <DEDENT> g = dense_W_copy - dense.w <NEW_LINE> sparse.w[:] = sparse_W_copy <NEW_LINE> dense.w[:] = dense_W_copy <NEW_LINE> c = fdcheck(func, dense.w, g) <NEW_LINE> assert c.pearson >= 0.999999 <NEW_LINE> assert c.max_err <= 1e-8 <NEW_LINE> assert np.allclose(c.expect, c.got) <NEW_LINE> print('[test gradient]:', colors.light.green % 'pass') <NEW_LINE> <DEDENT> def test_overfitting(self, T, y=None): <NEW_LINE> <INDENT> D = 100 <NEW_LINE> groups = [] <NEW_LINE> dense = OnlineProx(groups, self.H, C=0, L=2, eta=1.0, fudge=1) <NEW_LINE> dense.w[:] = np.random.uniform(-1, 1, size=dense.d) <NEW_LINE> sparse = LazyRegularizedAdagrad(D*self.A, C=0, L=2, eta=1.0, fudge=1) <NEW_LINE> sparse.w[:] = np.random.uniform(-1, 1, size=sparse.d) <NEW_LINE> x = MockInstance(T, self.A, D=D, K=5, y=y) <NEW_LINE> print() <NEW_LINE> for _ in range(10): <NEW_LINE> <INDENT> S = ScoringModel(x, self.A, self.feature_backoff, sparse, dense) <NEW_LINE> self.gradient(T, x.tags, S) <NEW_LINE> S.backprop() <NEW_LINE> y = self.predict(x.N, S) <NEW_LINE> <DEDENT> y = self.predict(x.N, S) <NEW_LINE> assert (y==x.tags).all() <NEW_LINE> print('[test overfitting]', colors.light.green % 'pass')
Test model with the lazy weight updater.
6259904fdd821e528d6da361
class Acoustic2ndOrder1D(fld.Field1D): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if not (hasattr(kwargs['material'], 'd_rho_p') and hasattr(kwargs['material'], 'd_rho2_p')): <NEW_LINE> <INDENT> wn.warn('Material with properties d_rho_p and d_rho2_p is required for 2nd order ' 'nonlinear acoustic simulation.') <NEW_LINE> <DEDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.pressure = fld.FieldComponent(self.num_points) <NEW_LINE> self.velocity = fld.FieldComponent(self.num_points) <NEW_LINE> self.density = fld.FieldComponent(self.num_points) <NEW_LINE> self.a_d_v = None <NEW_LINE> self.a_v_p = None <NEW_LINE> self.a_v_v = None <NEW_LINE> self.a_v_v2 = None <NEW_LINE> self.stat_density = None <NEW_LINE> self.d_rho_p = None <NEW_LINE> self.d_rho2_p = None <NEW_LINE> <DEDENT> def assemble_matrices(self): <NEW_LINE> <INDENT> self.a_d_v = self.d_x(factors=(self.t.increment / self.x.increment * np.ones(self.x.samples))) <NEW_LINE> self.a_v_p = self.d_x(factors=(self.t.increment / self.x.increment) * np.ones(self.x.samples), variant='backward') <NEW_LINE> self.a_v_v = self.d_x2(factors=(self.t.increment / self.x.increment ** 2 * self.material_vector('absorption_coef'))) <NEW_LINE> self.a_v_v2 = self.d_x(factors=(self.t.increment / self.x.increment / 2) * np.ones(self.x.samples), variant='central') <NEW_LINE> self.stat_density = self.material_vector('density') <NEW_LINE> self.d_rho_p = self.material_vector('d_rho_p') <NEW_LINE> self.d_rho2_p = self.material_vector('d_rho2_p') <NEW_LINE> self.matrices_assembled = True <NEW_LINE> <DEDENT> def sim_step(self): <NEW_LINE> <INDENT> self.pressure.apply_bounds(self.step) <NEW_LINE> self.pressure.write_outputs() <NEW_LINE> self.velocity.values -= (self.a_v_p.dot(self.pressure.values) / (self.stat_density + self.density.values) + self.a_v_v2.dot(self.velocity.values) - self.a_v_v.dot(self.velocity.values) / (self.stat_density + self.density.values)) <NEW_LINE> self.velocity.apply_bounds(self.step) <NEW_LINE> self.velocity.write_outputs() <NEW_LINE> self.density.values -= self.a_d_v.dot((self.density.values + self.stat_density) * self.velocity.values) <NEW_LINE> self.density.apply_bounds(self.step) <NEW_LINE> self.density.write_outputs() <NEW_LINE> self.pressure.values = self.d_rho_p * self.density.values + self.d_rho2_p / 2 * self.density.values**2
Class for simulation of one-dimensional nonlinear acoustic fields using a second-order approximation.
6259904fa8ecb03325872699
class diskstats(File): <NEW_LINE> <INDENT> __keys = ('read__completed', 'read__merged', 'read__sectors', 'read__milliseconds', 'write__completed', 'write__merged', 'write__sectors', 'write__milliseconds', 'io__in_progress', 'io__milliseconds', 'io__weighted_milliseconds') <NEW_LINE> def _parse(self, content): <NEW_LINE> <INDENT> lines = content.splitlines() <NEW_LINE> result = Dict() <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> values = line.split() <NEW_LINE> major, minor, device = values[:3] <NEW_LINE> major, minor = int(major), int(minor) <NEW_LINE> values = map(int, values[3:]) <NEW_LINE> result[device] = Dict(read=Dict(), write=Dict(), io=Dict()) <NEW_LINE> for index, value in enumerate(values): <NEW_LINE> <INDENT> key1, key2 = self.__keys[index].split('__', 1) <NEW_LINE> result[device][key1][key2] = value <NEW_LINE> <DEDENT> <DEDENT> return result
/proc/diskstats
6259904f91af0d3eaad3b2ab
class Rule: <NEW_LINE> <INDENT> def __init__(self, target_state: str, signal: str, action: Callable, action_takes_parameter: bool): <NEW_LINE> <INDENT> self.target_state = target_state <NEW_LINE> self.signal = signal <NEW_LINE> self.action = action <NEW_LINE> self.action_takes_parameter = action_takes_parameter <NEW_LINE> <DEDENT> def is_me(self, inpt): <NEW_LINE> <INDENT> return not re.match(self.signal, inpt) is None
Rule class
6259904fd7e4931a7ef3d4ff
class TestGetBrowserRedirectUrl(unittest.TestCase): <NEW_LINE> <INDENT> def test_valid_url_single_scope(self): <NEW_LINE> <INDENT> redirect_url = pypco.get_browser_redirect_url( 'abc123', 'https://nowhere.com?someurl', ['people'] ) <NEW_LINE> self.assertEqual( "https://api.planningcenteronline.com/oauth/authorize?" "client_id=abc123&redirect_uri=https%3A%2F%2Fnowhere.com%3Fsomeurl&" "response_type=code&scope=people", redirect_url ) <NEW_LINE> <DEDENT> def test_valid_url_multiple_scopes(self): <NEW_LINE> <INDENT> redirect_url = pypco.get_browser_redirect_url( 'abc123', 'https://nowhere.com?someurl', ['people', 'giving'] ) <NEW_LINE> self.assertEqual( "https://api.planningcenteronline.com/oauth/authorize?" "client_id=abc123&redirect_uri=https%3A%2F%2Fnowhere.com%3Fsomeurl&" "response_type=code&scope=people+giving", redirect_url ) <NEW_LINE> <DEDENT> def test_bad_scope(self): <NEW_LINE> <INDENT> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> pypco.get_browser_redirect_url( 'abc123', 'https://nowhere.com?someurl', ['people', 'giving', 'bogus'] )
Test pypco functionality for getting browser redirect URL.
6259904fd53ae8145f9198e9
class SnowflakeQuery(Task): <NEW_LINE> <INDENT> def __init__( self, account: str, user: str, password: str, database: str = None, schema: str = None, role: str = None, warehouse: str = None, query: str = None, data: tuple = None, autocommit: bool = None, **kwargs ): <NEW_LINE> <INDENT> self.account = account <NEW_LINE> self.user = user <NEW_LINE> self.password = password <NEW_LINE> self.database = database <NEW_LINE> self.schema = schema <NEW_LINE> self.role = role <NEW_LINE> self.warehouse = warehouse <NEW_LINE> self.query = query <NEW_LINE> self.data = data <NEW_LINE> self.autocommit = autocommit <NEW_LINE> super().__init__(**kwargs) <NEW_LINE> <DEDENT> @defaults_from_attrs("query", "data", "autocommit") <NEW_LINE> def run(self, query: str = None, data: tuple = None, autocommit: bool = None): <NEW_LINE> <INDENT> if not query: <NEW_LINE> <INDENT> raise ValueError("A query string must be provided") <NEW_LINE> <DEDENT> connect_params = { "account": self.account, "user": self.user, "password": self.password, "database": self.database, "schema": self.schema, "role": self.role, "warehouse": self.warehouse, "autocommit": self.autocommit, } <NEW_LINE> connect_params = { param: value for (param, value) in connect_params.items() if value is not None } <NEW_LINE> conn = sf.connect(**connect_params) <NEW_LINE> try: <NEW_LINE> <INDENT> with conn: <NEW_LINE> <INDENT> with conn.cursor() as cursor: <NEW_LINE> <INDENT> executed = cursor.execute(query, params=data) <NEW_LINE> <DEDENT> <DEDENT> conn.close() <NEW_LINE> return executed <NEW_LINE> <DEDENT> except Exception as error: <NEW_LINE> <INDENT> conn.close() <NEW_LINE> raise error
Task for executing a query against a Snowflake database. Args: - account (str): Snowflake account name, see snowflake connector package documentation for details - user (str): user name used to authenticate - password (str): password used to authenticate - database (str, optional): name of the default database to use - schema (str, optional): name of the default schema to use - role (str, optional): name of the default role to use - warehouse (str, optional): name of the default warehouse to use - query (str, optional): query to execute against database - data (tuple, optional): values to use in query, must be specified using placeholders in the query string - autocommit (bool, optional): set to True to autocommit, defaults to None, which takes the snowflake AUTOCOMMIT parameter - **kwargs (dict, optional): additional keyword arguments to pass to the Task constructor
6259904f23e79379d538d983
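A hedged usage sketch in the Prefect 1.x style this task targets; the credentials, table, and query are placeholders, and %s binding follows snowflake-connector-python's pyformat parameter style.

from prefect import Flow

snowflake_query = SnowflakeQuery(
    account="my_account",
    user="my_user",
    password="***",
    database="MY_DB",
    schema="PUBLIC",
    warehouse="COMPUTE_WH",
)

with Flow("snowflake-example") as flow:
    # query/data given at runtime override the defaults set in the constructor.
    result = snowflake_query(
        query="SELECT * FROM orders WHERE region = %s AND amount > %s",
        data=("EMEA", 100),
    )

flow.run()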
class ProductSeriesVocabulary(SQLObjectVocabularyBase): <NEW_LINE> <INDENT> implements(IHugeVocabulary) <NEW_LINE> displayname = 'Select a Release Series' <NEW_LINE> step_title = 'Search' <NEW_LINE> _table = ProductSeries <NEW_LINE> _order_by = [Product.name, ProductSeries.name] <NEW_LINE> _clauseTables = ['Product'] <NEW_LINE> def toTerm(self, obj): <NEW_LINE> <INDENT> token = '%s/%s' % (obj.product.name, obj.name) <NEW_LINE> return SimpleTerm( obj, token, '%s %s' % (obj.product.name, obj.name)) <NEW_LINE> <DEDENT> def getTermByToken(self, token): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> productname, productseriesname = token.split('/', 1) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise LookupError(token) <NEW_LINE> <DEDENT> result = IStore(self._table).find( self._table, ProductSeries.product == Product.id, Product.name == productname, ProductSeries.name == productseriesname).one() <NEW_LINE> if result is not None: <NEW_LINE> <INDENT> return self.toTerm(result) <NEW_LINE> <DEDENT> raise LookupError(token) <NEW_LINE> <DEDENT> def search(self, query, vocab_filter=None): <NEW_LINE> <INDENT> if not query: <NEW_LINE> <INDENT> return self.emptySelectResults() <NEW_LINE> <DEDENT> user = getUtility(ILaunchBag).user <NEW_LINE> privacy_filter = ProductSet.getProductPrivacyFilter(user) <NEW_LINE> query = ensure_unicode(query).lower().strip('/') <NEW_LINE> if '/' in query: <NEW_LINE> <INDENT> product_query, series_query = query.split('/', 1) <NEW_LINE> substring_search = And( CONTAINSSTRING(Product.name, product_query), CONTAINSSTRING(ProductSeries.name, series_query)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> substring_search = Or( CONTAINSSTRING(Product.name, query), CONTAINSSTRING(ProductSeries.name, query)) <NEW_LINE> <DEDENT> result = IStore(self._table).find( self._table, Product.id == ProductSeries.productID, substring_search, privacy_filter) <NEW_LINE> result = result.order_by(self._order_by) <NEW_LINE> return result
All `IProductSeries` objects vocabulary.
6259904f507cdc57c63a6227
class Review(core_models.TimeStampedModel): <NEW_LINE> <INDENT> review = models.TextField() <NEW_LINE> accuracy = models.IntegerField() <NEW_LINE> cleanliness = models.IntegerField() <NEW_LINE> communication = models.IntegerField() <NEW_LINE> location = models.IntegerField() <NEW_LINE> check_in = models.IntegerField() <NEW_LINE> value = models.IntegerField() <NEW_LINE> user = models.ForeignKey("users.User", related_name="reviews", on_delete=models.CASCADE) <NEW_LINE> room = models.ForeignKey("rooms.Room", related_name="reviews", on_delete=models.CASCADE) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f'{self.review} - {self.room}' <NEW_LINE> <DEDENT> def rating_average(self): <NEW_LINE> <INDENT> avg = ( self.accuracy + self.cleanliness + self.communication + self.location + self.check_in + self.value ) / 6 <NEW_LINE> return round(avg, 2) <NEW_LINE> <DEDENT> rating_average.short_description = "AVG."
Review Model Definition
6259904f3c8af77a43b68981
class Protocol(serial.threaded.Protocol): <NEW_LINE> <INDENT> def data_received(self, data): <NEW_LINE> <INDENT> self.parser(data)
Serial protocol.
6259904f26068e7796d4ddcb
class PollingEmitter(EventEmitter): <NEW_LINE> <INDENT> def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT): <NEW_LINE> <INDENT> EventEmitter.__init__(self, event_queue, watch, timeout) <NEW_LINE> self._snapshot = DirectorySnapshot(watch.path, watch.is_recursive) <NEW_LINE> self._lock = threading.Lock() <NEW_LINE> <DEDENT> def on_thread_stop(self): <NEW_LINE> <INDENT> with self._lock: <NEW_LINE> <INDENT> self._snapshot = None <NEW_LINE> <DEDENT> <DEDENT> def queue_events(self, timeout): <NEW_LINE> <INDENT> time.sleep(timeout) <NEW_LINE> with self._lock: <NEW_LINE> <INDENT> if not self._snapshot: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> new_snapshot = DirectorySnapshot(self.watch.path, self.watch.is_recursive) <NEW_LINE> events = DirectorySnapshotDiff(self._snapshot, new_snapshot) <NEW_LINE> self._snapshot = new_snapshot <NEW_LINE> for src_path in events.files_deleted: <NEW_LINE> <INDENT> self.queue_event(FileDeletedEvent(src_path)) <NEW_LINE> <DEDENT> for src_path in events.files_modified: <NEW_LINE> <INDENT> self.queue_event(FileModifiedEvent(src_path)) <NEW_LINE> <DEDENT> for src_path in events.files_created: <NEW_LINE> <INDENT> self.queue_event(FileCreatedEvent(src_path)) <NEW_LINE> <DEDENT> for src_path, dest_path in events.files_moved: <NEW_LINE> <INDENT> self.queue_event(FileMovedEvent(src_path, dest_path)) <NEW_LINE> <DEDENT> for src_path in events.dirs_deleted: <NEW_LINE> <INDENT> self.queue_event(DirDeletedEvent(src_path)) <NEW_LINE> <DEDENT> for src_path in events.dirs_modified: <NEW_LINE> <INDENT> self.queue_event(DirModifiedEvent(src_path)) <NEW_LINE> <DEDENT> for src_path in events.dirs_created: <NEW_LINE> <INDENT> self.queue_event(DirCreatedEvent(src_path)) <NEW_LINE> <DEDENT> for src_path, dest_path in events.dirs_moved: <NEW_LINE> <INDENT> self.queue_event(DirMovedEvent(src_path, dest_path))
Platform-independent emitter that polls a directory to detect file system changes.
6259904f07f4c71912bb08bc
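In practice this emitter is driven through watchdog's PollingObserver rather than constructed directly; a hedged sketch of that usage, with a stock logging handler and the current directory as a placeholder path.

import logging
import time
from watchdog.events import LoggingEventHandler
from watchdog.observers.polling import PollingObserver

logging.basicConfig(level=logging.INFO)

observer = PollingObserver(timeout=1)  # snapshot-diff the tree roughly once per second
observer.schedule(LoggingEventHandler(), path=".", recursive=True)
observer.start()
try:
    time.sleep(5)                      # let a few polling cycles run
finally:
    observer.stop()
    observer.join()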
class StdInputHandler(EventHandler): <NEW_LINE> <INDENT> def __init__(self, worker): <NEW_LINE> <INDENT> EventHandler.__init__(self) <NEW_LINE> self.master_worker = worker <NEW_LINE> <DEDENT> def ev_msg(self, port, msg): <NEW_LINE> <INDENT> if not msg: <NEW_LINE> <INDENT> self.master_worker.set_write_eof() <NEW_LINE> return <NEW_LINE> <DEDENT> self.master_worker.write(msg)
Standard input event handler class.
6259904fb57a9660fecd2f04
class load(ao2mo.load): <NEW_LINE> <INDENT> def __init__(self, eri, dataname='j3c'): <NEW_LINE> <INDENT> ao2mo.load.__init__(self, eri, dataname)
Load 3c2e integrals from an HDF5 file. It can be used as a context manager: with load(cderifile) as eri: print(eri.shape)
6259904f3eb6a72ae038bae3
class GameForms(messages.Message): <NEW_LINE> <INDENT> items = messages.MessageField(GameForm2, 1, repeated=True)
Return multiple GameForms
6259904f7cff6e4e811b6ec3
@admin.register(models.Photo) <NEW_LINE> class PhotoAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ( "__str__", "get_thumbnail", ) <NEW_LINE> def get_thumbnail(self, obj): <NEW_LINE> <INDENT> return mark_safe(f'<img height="50px" src="{obj.file.url}"/>') <NEW_LINE> <DEDENT> get_thumbnail.short_description = "Thumbnail"
Photo Admin Definition
6259904f435de62698e9d286
class ShibbolethUserBackend(ModelBackend): <NEW_LINE> <INDENT> create_unknown_user = True <NEW_LINE> def authenticate(self, shib_meta): <NEW_LINE> <INDENT> if not shib_meta: return <NEW_LINE> user = None <NEW_LINE> username = self.clean_username(shib_meta['eppn']) <NEW_LINE> UserModel = get_user_model() <NEW_LINE> if self.create_unknown_user: <NEW_LINE> <INDENT> user, created = UserModel.objects.get_or_create(**{ UserModel.USERNAME_FIELD: username, }) <NEW_LINE> if created: <NEW_LINE> <INDENT> user = self.configure_user(user, shib_meta) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = UserModel.objects.get_by_natural_key(username) <NEW_LINE> <DEDENT> except UserModel.DoesNotExist: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return user <NEW_LINE> <DEDENT> def clean_username(self, username): <NEW_LINE> <INDENT> return username <NEW_LINE> <DEDENT> def configure_user(self, user, shib_meta): <NEW_LINE> <INDENT> user.__setattr__('first_name', shib_meta['givenName'].split(";")[0]) <NEW_LINE> user.__setattr__('last_name', shib_meta['sn'].split(";")[0]) <NEW_LINE> user.__setattr__('email', shib_meta['email'].split(";")[0]) <NEW_LINE> user.save() <NEW_LINE> return user
This backend is to be used in conjunction with the ``ShibbolethUserMiddleware`` found in the middleware module of this package, and is used when the server is handling authentication outside of Django. By default, the ``authenticate`` method creates ``User`` objects for usernames that don't already exist in the database. Subclasses can disable this behavior by setting the ``create_unknown_user`` attribute to ``False``.
6259904f8a43f66fc4bf3620
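A hedged configuration sketch showing how a backend like this is typically wired into a Django project; the dotted import paths are hypothetical and depend on where the package lives, and the subclass at the end shows the create_unknown_user switch described in the docstring.

# settings.py (illustrative paths)
AUTHENTICATION_BACKENDS = [
    "myproject.auth.backends.ShibbolethUserBackend",        # hypothetical import path
    "django.contrib.auth.backends.ModelBackend",            # keep ordinary logins working
]

MIDDLEWARE = [
    # ... Django defaults ...
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "myproject.auth.middleware.ShibbolethUserMiddleware",   # hypothetical import path
]

# To stop unknown users from being auto-created, subclass and flip the flag:
class StrictShibbolethBackend(ShibbolethUserBackend):
    create_unknown_user = False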
class AddBandwidthPackageResourcesRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ResourceIds = None <NEW_LINE> self.BandwidthPackageId = None <NEW_LINE> self.NetworkType = None <NEW_LINE> self.ResourceType = None <NEW_LINE> self.Protocol = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ResourceIds = params.get("ResourceIds") <NEW_LINE> self.BandwidthPackageId = params.get("BandwidthPackageId") <NEW_LINE> self.NetworkType = params.get("NetworkType") <NEW_LINE> self.ResourceType = params.get("ResourceType") <NEW_LINE> self.Protocol = params.get("Protocol")
AddBandwidthPackageResources request structure.
6259904f15baa72349463414
class SiteGenericMethod(BW2RegionalizationError): <NEW_LINE> <INDENT> pass
This ``Method`` doesn't have links to ``geocollections``, making it site-generic.
6259904fb7558d589546496b
class Settings(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.screen_width = 800 <NEW_LINE> self.screen_height = 600 <NEW_LINE> self.bg_color = (230, 230, 230) <NEW_LINE> """Ship settings""" <NEW_LINE> self.ship_speed_factor = 1.0 <NEW_LINE> self.ship_limit = 3 <NEW_LINE> """Bullet settings""" <NEW_LINE> self.bullet_speed_factor = 1 <NEW_LINE> self.bullet_width = 3 <NEW_LINE> self.bullet_height = 15 <NEW_LINE> self.bullet_color = 60, 60, 60 <NEW_LINE> self.bullets_allowed = 7 <NEW_LINE> """Alien settings""" <NEW_LINE> self.alien_speed_factor = 1 <NEW_LINE> self.fleet_drop_speed = 10 <NEW_LINE> self.fleet_direction = 1
"存储 所有设置信息的类
6259904f63d6d428bbee3c54
class ISL29125IROZ(PointHandler): <NEW_LINE> <INDENT> ADDRESS = 0x44 <NEW_LINE> REG_ID = 0x00 <NEW_LINE> REG_RESET = 0x00 <NEW_LINE> REG_CONFIG_1 = 0x01 <NEW_LINE> REG_CONFIG_2 = 0x02 <NEW_LINE> REG_CONFIG_3 = 0x03 <NEW_LINE> REG_LOW_THRESHOLD_LSB = 0x04 <NEW_LINE> REG_LOW_THRESHOLD_MSB = 0x05 <NEW_LINE> REG_HIGH_THRESHOLD_LSB = 0x06 <NEW_LINE> REG_HIGH_THRESHOLD_MSB = 0x07 <NEW_LINE> REG_STATUS_FLAGS = 0x08 <NEW_LINE> REG_GREEN_LSB = 0x09 <NEW_LINE> REG_GREEN_MSB = 0x0A <NEW_LINE> REG_RED_LSB = 0x0B <NEW_LINE> REG_RED_MSB = 0x0C <NEW_LINE> REG_BLUE_LSB = 0x0D <NEW_LINE> REG_BLUE_MSB = 0x0E <NEW_LINE> def __init__(self, bus) -> None: <NEW_LINE> <INDENT> self.bus = bus <NEW_LINE> self.red_light_level = None <NEW_LINE> self.green_light_level = None <NEW_LINE> self.blue_light_level = None <NEW_LINE> self.nextUpdate = datetime.datetime.now() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.bus.write_byte_data(self.ADDRESS, self.REG_RESET, 0x46) <NEW_LINE> self.setup() <NEW_LINE> <DEDENT> def setup(self): <NEW_LINE> <INDENT> self.bus.write_byte_data(self.ADDRESS, self.REG_CONFIG_1, 0x1D) <NEW_LINE> self.bus.write_byte_data(self.ADDRESS, self.REG_CONFIG_2, 0x02) <NEW_LINE> <DEDENT> def read_data(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.dev = IIC(self.ADDRESS, self.bus) <NEW_LINE> b = self.dev.i2c([0x08],7, 0.01) <NEW_LINE> self.green_light_level.value = (b[2] << 8 | b[1]) <NEW_LINE> self.red_light_level.value = (b[4] << 8 | b[3]) <NEW_LINE> self.blue_light_level.value = (b[6] << 8 | b[5]) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> self.green_light_level.quality = False <NEW_LINE> self.red_light_level.quality = False <NEW_LINE> self.blue_light_level.quality = False <NEW_LINE> <DEDENT> return -1 <NEW_LINE> <DEDENT> def get_remaining_states(self): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def assign_points(self, red_light_level, green_light_level, blue_light_level): <NEW_LINE> <INDENT> self.red_light_level = red_light_level <NEW_LINE> self.green_light_level = green_light_level <NEW_LINE> self.blue_light_level = blue_light_level
http://www.intersil.com/content/dam/Intersil/documents/isl2/isl29125.pdf
6259904f8da39b475be0466e
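A minimal sketch of how the 16-bit colour readings are assembled from the LSB/MSB register pairs listed above (green at 0x09/0x0A, red at 0x0B/0x0C, blue at 0x0D/0x0E); the raw byte values are made up for illustration.

def combine(lsb, msb):
    # Each channel is little-endian on the bus: low byte first, high byte second.
    return (msb << 8) | lsb

# Raw bytes as returned by a 7-byte read starting at the status register (0x08):
raw = [0x00, 0x34, 0x12, 0x78, 0x56, 0xBC, 0x9A]   # status, G_L, G_H, R_L, R_H, B_L, B_H
green = combine(raw[1], raw[2])   # 0x1234
red   = combine(raw[3], raw[4])   # 0x5678
blue  = combine(raw[5], raw[6])   # 0x9ABC
print(hex(green), hex(red), hex(blue))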
class MyAdminIndexView(admin.AdminIndexView): <NEW_LINE> <INDENT> @expose('/') <NEW_LINE> @login_required <NEW_LINE> def index(self): <NEW_LINE> <INDENT> return super(MyAdminIndexView, self).index()
Custom admin class to force a user to be authenticated.
6259904fdd821e528d6da363
class DailymotionExtractor(BaseFeedExtractor): <NEW_LINE> <INDENT> def flv_url_transform(self, flvurl): <NEW_LINE> <INDENT> return flvurl.replace('80x60', '320x240')
FLV extractor from a Dailymotion feed
6259904f91af0d3eaad3b2ad
class GscTests(GeneralTests): <NEW_LINE> <INDENT> def test_gsc(self): <NEW_LINE> <INDENT> err = 1e-3 <NEW_LINE> tree6_exp = {'A': 0.19025, 'B': 0.19025, 'C': 0.2717, 'D': 0.3478} <NEW_LINE> tree7_exp = {'A':.25, 'B':.25, 'C':.25, 'D':.25} <NEW_LINE> tree8_exp = dict(zip('ABCDEFGH',[.1,.1,.2,.06,.06,.16,.16,.16])) <NEW_LINE> self.assertFloatEqualAbs(GSC(self.tree6).values(), tree6_exp.values(),eps=err) <NEW_LINE> self.assertFloatEqualAbs(GSC(self.tree7).values(), tree7_exp.values(),eps=err) <NEW_LINE> self.assertFloatEqualAbs(GSC(self.tree8).values(), tree8_exp.values(),eps=err)
Tests for GSC functionality
6259904f94891a1f408ba139
class TakeOff_Task(): <NEW_LINE> <INDENT> def __init__(self, init_pose=None, init_velocities=None, init_angle_velocities=None, runtime=5., target_pos=None): <NEW_LINE> <INDENT> self.sim = PhysicsSim(init_pose, init_velocities, init_angle_velocities, runtime) <NEW_LINE> self.action_repeat = 3 <NEW_LINE> self.state_size = self.action_repeat * 6 <NEW_LINE> self.action_low = 0 <NEW_LINE> self.action_high = 900 <NEW_LINE> self.action_size = 4 <NEW_LINE> self.target_pos = target_pos if target_pos is not None else np.array([0., 0., 10.]) <NEW_LINE> <DEDENT> def get_reward(self): <NEW_LINE> <INDENT> reward = np.tanh(1 - 0.0005*(abs(self.sim.pose[:3] - self.target_pos)).sum()) <NEW_LINE> return reward <NEW_LINE> <DEDENT> def step(self, rotor_speeds): <NEW_LINE> <INDENT> reward = 0 <NEW_LINE> pose_all = [] <NEW_LINE> for _ in range(self.action_repeat): <NEW_LINE> <INDENT> done = self.sim.next_timestep(rotor_speeds) <NEW_LINE> reward += self.get_reward() <NEW_LINE> pose_all.append(self.sim.pose) <NEW_LINE> <DEDENT> next_state = np.concatenate(pose_all) <NEW_LINE> return next_state, reward, done <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.sim.reset() <NEW_LINE> state = np.concatenate([self.sim.pose] * self.action_repeat) <NEW_LINE> return state
Task (environment) that defines the goal and provides feedback to the agent.
6259904fa8ecb0332587269b
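A small numeric sketch of the reward defined in get_reward above (tanh of 1 minus a scaled L1 distance to the target), using made-up positions; with the 0.0005 scale the reward is only weakly sensitive to position error at this range.

import numpy as np

target = np.array([0.0, 0.0, 10.0])

def reward(pos):
    return np.tanh(1 - 0.0005 * np.abs(pos[:3] - target).sum())

print(reward(np.array([0.0, 0.0, 10.0])))  # on target -> tanh(1) ~= 0.762
print(reward(np.array([5.0, 5.0, 0.0])))   # 20 m of total error -> tanh(0.99) ~= 0.757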
class Hint(restful.Resource): <NEW_LINE> <INDENT> decorators = [utils.login_required, utils.team_required] <NEW_LINE> resource_fields = { 'hid': fields.Integer, 'challenge_cid': fields.Integer, 'hint': fields.String, 'cost': fields.Integer, } <NEW_LINE> @restful.marshal_with(resource_fields) <NEW_LINE> def post(self): <NEW_LINE> <INDENT> data = flask.request.get_json() <NEW_LINE> hint = controllers.unlock_hint(data['hid']) <NEW_LINE> models.commit() <NEW_LINE> return hint
Wrap hint just for unlocking.
6259904f45492302aabfd95c
class Testcase_100_80_Table(base_tests.SimpleDataPlane): <NEW_LINE> <INDENT> @wireshark_capture <NEW_LINE> def runTest(self): <NEW_LINE> <INDENT> logging.info("Running Testcase 100.80 Table") <NEW_LINE> ports = openflow_ports(4) <NEW_LINE> in_port = ports[0] <NEW_LINE> out_port = ports[1] <NEW_LINE> actions=[ofp.action.output(port = out_port, max_len = 128)] <NEW_LINE> match = ofp.match([ ofp.oxm.eth_type(0x0800) ]) <NEW_LINE> pkt = simple_tcp_packet() <NEW_LINE> delete_all_flows(self.controller) <NEW_LINE> logging.info("Inserting flow to forward packets to controller(packet_in)") <NEW_LINE> request = ofp.message.flow_add( table_id=test_param_get("table", 0), match = match, instructions=[ ofp.instruction.apply_actions(actions)], buffer_id=ofp.OFP_NO_BUFFER, priority=1000) <NEW_LINE> self.controller.message_send(request) <NEW_LINE> logging.info("Inserting a flow to forward packet to controller") <NEW_LINE> reply, _ = self.controller.poll(exp_msg=ofp.OFPT_ERROR, timeout=3) <NEW_LINE> self.assertIsNone(reply, "Switch generated an error when inserting flow") <NEW_LINE> do_barrier(self.controller) <NEW_LINE> pktstr = str(pkt) <NEW_LINE> msg = ofp.message.packet_out(buffer_id = ofp.OFP_NO_BUFFER, in_port = ofp.OFPP_CONTROLLER, actions = [ofp.action.output(port=ofp.OFPP_TABLE)], data = pktstr) <NEW_LINE> self.controller.message_send(msg) <NEW_LINE> logging.info("Sending the output message") <NEW_LINE> verify_packet(self,pktstr,out_port)
Purpose Verify that a packet_out with output:table gets submitted to the flow table. Methodology Configure and connect DUT to controller. After control channel establishment, add a flow matching on the named field (under the given Pre-requisites for the match), action is output to a specific port. Generate a matching packet and send it via packet_out message with output action to port TABLE in its action list. Verify the packet gets forwarded to the specific port.
6259904f76e4537e8c3f0a0f
class TestCompareXLSXFiles(base_test_class.XLSXBaseTest): <NEW_LINE> <INDENT> def test_date_1904_01(self): <NEW_LINE> <INDENT> self.run_lua_test('test_date_1904_01') <NEW_LINE> <DEDENT> def test_date_1904_02(self): <NEW_LINE> <INDENT> self.run_lua_test('test_date_1904_02') <NEW_LINE> <DEDENT> def test_date_1904_03(self): <NEW_LINE> <INDENT> self.run_lua_test('test_date_1904_03', 'date_1904_01.xlsx') <NEW_LINE> <DEDENT> def test_date_1904_04(self): <NEW_LINE> <INDENT> self.run_lua_test('test_date_1904_04', 'date_1904_02.xlsx')
Test file created with xlsxwriter.lua against a file created by Excel. These tests check date writing functions.
6259904fbe383301e0254ca5