Columns: code (string, 4 to 4.48k chars), docstring (string, 1 to 6.45k chars), _id (string, 24 chars)
class TestForEmployee(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.test_employee = Employee('john', 'doe', 70000) <NEW_LINE> <DEDENT> def test_give_default_raise(self): <NEW_LINE> <INDENT> self.test_employee.give_raise() <NEW_LINE> self.assertEqual(self.test_employee.salary, 75000) <NEW_LINE> <DEDENT> def test_give_custom_raise(self): <NEW_LINE> <INDENT> self.test_employee.give_raise(30000) <NEW_LINE> self.assertEqual(self.test_employee.salary, 100000)
Test for 'Employee' class.
6259905b004d5f362081faed
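A minimal sketch of the Employee class these tests appear to exercise; the class is not part of this record, and the default raise of 5000 is inferred from the assertions (70000 -> 75000):

    class Employee:
        # Hypothetical class under test, reconstructed from the assertions above.
        def __init__(self, first_name, last_name, salary):
            self.first_name = first_name
            self.last_name = last_name
            self.salary = salary

        def give_raise(self, amount=5000):
            # A default raise of 5000 makes test_give_default_raise pass (70000 -> 75000).
            self.salary += amount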
class NaN(with_metaclass(Singleton, Number)): <NEW_LINE> <INDENT> is_commutative = True <NEW_LINE> is_extended_real = None <NEW_LINE> is_real = None <NEW_LINE> is_rational = None <NEW_LINE> is_algebraic = None <NEW_LINE> is_transcendental = None <NEW_LINE> is_integer = None <NEW_LINE> is_comparable = False <NEW_LINE> is_finite = None <NEW_LINE> is_zero = None <NEW_LINE> is_prime = None <NEW_LINE> is_positive = None <NEW_LINE> is_negative = None <NEW_LINE> is_number = True <NEW_LINE> __slots__ = [] <NEW_LINE> def __new__(cls): <NEW_LINE> <INDENT> return AtomicExpr.__new__(cls) <NEW_LINE> <DEDENT> def _latex(self, printer): <NEW_LINE> <INDENT> return r"\text{NaN}" <NEW_LINE> <DEDENT> def __neg__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> @_sympifyit('other', NotImplemented) <NEW_LINE> def __add__(self, other): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> @_sympifyit('other', NotImplemented) <NEW_LINE> def __sub__(self, other): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> @_sympifyit('other', NotImplemented) <NEW_LINE> def __mul__(self, other): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> @_sympifyit('other', NotImplemented) <NEW_LINE> def __div__(self, other): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> __truediv__ = __div__ <NEW_LINE> def floor(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def ceiling(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def _as_mpf_val(self, prec): <NEW_LINE> <INDENT> return _mpf_nan <NEW_LINE> <DEDENT> def _sage_(self): <NEW_LINE> <INDENT> import sage.all as sage <NEW_LINE> return sage.NaN <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return super(NaN, self).__hash__() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return other is S.NaN <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return other is not S.NaN <NEW_LINE> <DEDENT> def _eval_Eq(self, other): <NEW_LINE> <INDENT> return S.false <NEW_LINE> <DEDENT> __gt__ = Expr.__gt__ <NEW_LINE> __ge__ = Expr.__ge__ <NEW_LINE> __lt__ = Expr.__lt__ <NEW_LINE> __le__ = Expr.__le__
Not a Number. This serves as a placeholder for numeric values that are indeterminate. Most operations on NaN produce another NaN. Most indeterminate forms, such as ``0/0`` or ``oo - oo``, produce NaN. Two exceptions are ``0**0`` and ``oo**0``, which both produce ``1`` (this is consistent with Python's float). NaN is loosely related to floating point nan, which is defined in the IEEE 754 floating point standard, and corresponds to the Python ``float('nan')``. Differences are noted below. NaN is mathematically not equal to anything else, even NaN itself. This explains the initially counter-intuitive results with ``Eq`` and ``==`` in the examples below. NaN is not comparable so inequalities raise a TypeError. This is in contrast with floating point nan where all inequalities are false. NaN is a singleton, and can be accessed by ``S.NaN``, or can be imported as ``nan``. Examples ======== >>> from sympy import nan, S, oo, Eq >>> nan is S.NaN True >>> oo - oo nan >>> nan + 1 nan >>> Eq(nan, nan) # mathematical equality False >>> nan == nan # structural equality True References ========== .. [1] https://en.wikipedia.org/wiki/NaN
6259905b442bda511e95d859
class Boss(Wasp, Hornet): <NEW_LINE> <INDENT> name = 'Boss' <NEW_LINE> damage_cap = 8 <NEW_LINE> action = Wasp.action <NEW_LINE> def reduce_armor(self, amount): <NEW_LINE> <INDENT> super().reduce_armor(self.damage_modifier(amount)) <NEW_LINE> <DEDENT> def damage_modifier(self, amount): <NEW_LINE> <INDENT> return amount * self.damage_cap/(self.damage_cap + amount)
The leader of the bees. Combines the high damage of the Wasp with the status immunity of the Hornet. Damage to the boss from any single attack is capped at 8.
6259905b21bff66bcd724263
class Zone(models.Model): <NEW_LINE> <INDENT> id = models.TextField(primary_key=True) <NEW_LINE> name = models.TextField(blank=True) <NEW_LINE> image = models.TextField() <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('zone', kwargs=self.name)
Object representing the zone that each mob resides in
6259905ba219f33f346c7e04
class ObjectViewSet(viewsets.ViewSet): <NEW_LINE> <INDENT> @cls_cached_result(prefix="times", timeout=3600) <NEW_LINE> def _times(self, model, identity): <NEW_LINE> <INDENT> query = TimesQuery(model, identity) <NEW_LINE> times = query.fetch() <NEW_LINE> return times <NEW_LINE> <DEDENT> @list_route(methods=['post']) <NEW_LINE> def times(self, request): <NEW_LINE> <INDENT> times = TimesChangedSerializer(data=request.data) <NEW_LINE> if not times.is_valid(): <NEW_LINE> <INDENT> raise ValidationError(times.errors) <NEW_LINE> <DEDENT> vd = times.validated_data <NEW_LINE> query = Query(vd.get('model')) .identity(vd.get('identity')) .time(vd.get('time')) <NEW_LINE> records = query.fetch() <NEW_LINE> if not records: <NEW_LINE> <INDENT> raise Http404() <NEW_LINE> <DEDENT> created_at = records[0][vd.get('model')]['created_at'] <NEW_LINE> logger.debug("GETTING TIMES") <NEW_LINE> times = self._times(vd['model'], vd['identity']) <NEW_LINE> if created_at not in times: <NEW_LINE> <INDENT> times.append(created_at) <NEW_LINE> <DEDENT> results = GenericSerializer({ 'data': vd, 'times': times }) <NEW_LINE> return Response(results.data) <NEW_LINE> <DEDENT> @list_route(methods=['post']) <NEW_LINE> def search(self, request): <NEW_LINE> <INDENT> search = SearchSerializer(data=request.data) <NEW_LINE> if not search.is_valid(): <NEW_LINE> <INDENT> raise ValidationError(search.errors) <NEW_LINE> <DEDENT> vd = search.validated_data <NEW_LINE> query = Query(vd.get('model')) .identity(vd.get('identity')) .time(vd.get('time')) <NEW_LINE> for f in vd.get('filters', []): <NEW_LINE> <INDENT> query.filter( f['prop'], f['operator'], f['value'], label=f['model'] ) <NEW_LINE> <DEDENT> for o in vd.get('orders', []): <NEW_LINE> <INDENT> query.orderby(o['prop'], o['direction'], label=o['model']) <NEW_LINE> <DEDENT> count = query.count() <NEW_LINE> records = query.page( page=vd['page'], pagesize=vd['pagesize'], index=vd.get('index') ) <NEW_LINE> serializer = GenericSerializer({ 'query': str(query), 'data': vd, 'params': query.params, 'count': count, 'pagesize': vd['pagesize'], 'page': vd['page'], 'records': records }) <NEW_LINE> return Response(serializer.data)
View set for searching and viewing objects.
6259905b32920d7e50bc7645
class Dimensions(HasTraits): <NEW_LINE> <INDENT> dimension_dict = Dict(Str, Float) <NEW_LINE> expansion = Property(String, depends_on='dimension_dict') <NEW_LINE> def __init__(self, dimension_dict, **kwargs): <NEW_LINE> <INDENT> dimension_dict = {k: v for k, v in dimension_dict.items() if v} <NEW_LINE> super( self.__class__, self).__init__( dimension_dict=dimension_dict, **kwargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_expansion(cls, expansion): <NEW_LINE> <INDENT> terms = expansion.split("*") <NEW_LINE> dimension_dict = {} <NEW_LINE> try: <NEW_LINE> <INDENT> while terms: <NEW_LINE> <INDENT> dim = terms.pop(0) <NEW_LINE> if terms[0] == "": <NEW_LINE> <INDENT> terms.pop(0) <NEW_LINE> power = float(terms.pop(0)) <NEW_LINE> dimension_dict[dim] = dimension_dict.get(dim, 0) + power <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> raise InvalidExpansionError(expansion) <NEW_LINE> <DEDENT> return cls(dimension_dict) <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def _get_expansion(self): <NEW_LINE> <INDENT> if self.dimension_dict: <NEW_LINE> <INDENT> return format_expansion(self.dimension_dict) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "dimensionless" <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Dimensions(%s)" % repr(self.dimension_dict) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.expansion <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.dimension_dict == other.dimension_dict <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(tuple(item for item in self.dimension_dict.items())) <NEW_LINE> <DEDENT> def __mul__(self, other): <NEW_LINE> <INDENT> if isinstance(other, Dimensions): <NEW_LINE> <INDENT> return Dimensions(dict_add(self.dimension_dict, other.dimension_dict)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> <DEDENT> def __div__(self, other): <NEW_LINE> <INDENT> return type(self).__truediv__(self, other) <NEW_LINE> <DEDENT> def __truediv__(self, other): <NEW_LINE> <INDENT> if isinstance(other, Dimensions): <NEW_LINE> <INDENT> return Dimensions(dict_sub(self.dimension_dict, other.dimension_dict)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> <DEDENT> def __pow__(self, other): <NEW_LINE> <INDENT> if isinstance(other, (float, int)): <NEW_LINE> <INDENT> return Dimensions(dict_mul(self.dimension_dict, other)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError
The dimensions of a physical quantity. This is essentially a thin wrapper around a dictionary which we perform certain operations on. Example ------- >>> m = Dimensions({'mass': 1.0}) >>> a = Dimensions({'length': 1.0, 'time': -2.0}) >>> f = Dimensions({'length': 1.0, 'mass': 1.0, 'time': -2.0}) >>> f == m*a True >>> f.expansion "length*mass*time**-2.0"
6259905b38b623060ffaa34e
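The Dimensions class relies on dict_add, dict_sub, dict_mul and format_expansion helpers that are not shown in this record. A minimal sketch of the dictionary arithmetic, assuming exponents that cancel to zero are dropped (names and zero-dropping behaviour are assumptions):

    def dict_add(a, b):
        # Sum exponents per dimension, dropping entries that cancel to zero.
        return {k: a.get(k, 0) + b.get(k, 0)
                for k in set(a) | set(b) if a.get(k, 0) + b.get(k, 0)}

    def dict_sub(a, b):
        # Subtract exponents per dimension (used when dividing quantities).
        return {k: a.get(k, 0) - b.get(k, 0)
                for k in set(a) | set(b) if a.get(k, 0) - b.get(k, 0)}

    def dict_mul(a, factor):
        # Scale every exponent (used when raising a quantity to a power).
        return {k: v * factor for k, v in a.items() if v * factor}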
class PointCloud2Publisher(ROSPublisherTF): <NEW_LINE> <INDENT> ros_class = PointCloud2 <NEW_LINE> def default(self, ci='unused'): <NEW_LINE> <INDENT> points = self.data['point_list'] <NEW_LINE> size = len(points) <NEW_LINE> pc2 = PointCloud2() <NEW_LINE> pc2.header = self.get_ros_header() <NEW_LINE> pc2.height = 1 <NEW_LINE> pc2.width = size <NEW_LINE> pc2.is_dense = False <NEW_LINE> pc2.is_bigendian = False <NEW_LINE> pc2.fields = [PointField('x', 0, PointField.FLOAT32, 1), PointField('y', 4, PointField.FLOAT32, 1), PointField('z', 8, PointField.FLOAT32, 1)] <NEW_LINE> pc2.point_step = 12 <NEW_LINE> pc2.row_step = size * 12 <NEW_LINE> pc2.data = pack_xyz_float32(points) <NEW_LINE> self.publish(pc2) <NEW_LINE> self.send_transform_robot()
Publish the ``point_list`` of the laser scanner.
6259905bd268445f2663a65c
class Period(BaseModel): <NEW_LINE> <INDENT> _schema = PeriodSchema() <NEW_LINE> period_from = None <NEW_LINE> period_to = None <NEW_LINE> delta = None <NEW_LINE> uom = None
Period object.
6259905ba17c0f6771d5d6a2
class Student: <NEW_LINE> <INDENT> def __init__(self, first_name, last_name, age): <NEW_LINE> <INDENT> self.first_name = first_name <NEW_LINE> self.last_name = last_name <NEW_LINE> self.age = age <NEW_LINE> <DEDENT> def to_json(self, attrs=None): <NEW_LINE> <INDENT> if not attrs: <NEW_LINE> <INDENT> return self.__dict__ <NEW_LINE> <DEDENT> st_attrs = {} <NEW_LINE> for key, value in self.__dict__.items(): <NEW_LINE> <INDENT> if key in attrs: <NEW_LINE> <INDENT> st_attrs[key] = value <NEW_LINE> <DEDENT> <DEDENT> return st_attrs <NEW_LINE> <DEDENT> def reload_from_json(self, json): <NEW_LINE> <INDENT> for key, value in json.items(): <NEW_LINE> <INDENT> setattr(self, key, value)
Represents a Student.
6259905b507cdc57c63a63a6
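A short usage sketch of the Student API above (assumes the class is in scope; expected output shown in comments):

    s = Student("John", "Doe", 23)
    print(s.to_json())                       # {'first_name': 'John', 'last_name': 'Doe', 'age': 23}
    print(s.to_json(attrs=["first_name"]))   # {'first_name': 'John'}
    s.reload_from_json({"age": 24})
    print(s.age)                             # 24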
class PacketBuffer: <NEW_LINE> <INDENT> def __init__(self, characteristic: Characteristic, *, buffer_size: int): <NEW_LINE> <INDENT> self._queue = queue.Queue(buffer_size) <NEW_LINE> self._characteristic = characteristic <NEW_LINE> characteristic._add_notify_callback(self._notify_callback) <NEW_LINE> <DEDENT> def _notify_callback(self, data: Buf) -> None: <NEW_LINE> <INDENT> if self._queue.full(): <NEW_LINE> <INDENT> self._queue.get_nowait() <NEW_LINE> <DEDENT> self._queue.put_nowait(data) <NEW_LINE> <DEDENT> def readinto(self, buf: Buf) -> int: <NEW_LINE> <INDENT> if self._queue.empty(): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> packet = self._queue.get_nowait() <NEW_LINE> packet_len = len(packet) <NEW_LINE> buf_len = len(buf) <NEW_LINE> if packet_len > buf_len: <NEW_LINE> <INDENT> return buf_len - packet_len <NEW_LINE> <DEDENT> buf[0:packet_len] = packet <NEW_LINE> return packet_len <NEW_LINE> <DEDENT> def write(self, data: Buf, *, header: Union[Buf, None] = None) -> int: <NEW_LINE> <INDENT> packet = header + data if header else data <NEW_LINE> self._characteristic.value = packet <NEW_LINE> return len(packet) <NEW_LINE> <DEDENT> def deinit(self) -> None: <NEW_LINE> <INDENT> self._characteristic.remove_notify_callback(self._notify_callback) <NEW_LINE> <DEDENT> @property <NEW_LINE> def packet_size(self) -> int: <NEW_LINE> <INDENT> return self.incoming_packet_length <NEW_LINE> <DEDENT> @property <NEW_LINE> def incoming_packet_length(self) -> int: <NEW_LINE> <INDENT> return 512 <NEW_LINE> <DEDENT> @property <NEW_LINE> def outgoing_packet_length(self): <NEW_LINE> <INDENT> return 512
Accumulates a Characteristic's incoming packets in a FIFO buffer and facilitates packet aware outgoing writes. A packet's size is either the characteristic length or the maximum transmission unit (MTU) minus overhead, whichever is smaller. The MTU can change so check `incoming_packet_length` and `outgoing_packet_length` before creating a buffer to store data. When we're the server, we ignore all connections besides the first to subscribe to notifications.
6259905bfff4ab517ebcee25
class ExternalDataWorkflow( Workflow[_ExternalDataWorkflowGetStageContext, _ExternalDataWorkflowSetStageContext] ): <NEW_LINE> <INDENT> def __init__(self, name: str, stages: List[WorkflowStage]) -> None: <NEW_LINE> <INDENT> manager = _ExternalDataWorkflowStageManager(name, stages) <NEW_LINE> super().__init__(name, stages, manager)
A workflow that saves state in the external data of a task. :param name: The name of the workflow. :param stages: The list of stages for the workflow.
6259905b45492302aabfdad8
class population_structure_algorithm(AutoRepr): <NEW_LINE> <INDENT> def __init__(self, population_structure_algorithm): <NEW_LINE> <INDENT> self.a = population_structure_algorithm
Population Structure Algorithm class .. _population_structure_algorithm_class: Args: population_structure_algorithm (str): *required.* human-readable name for the algorithm Returns: Population_Structure_Algorithm: instance of a population structure algorithm
6259905b99cbb53fe68324e0
class Utils(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def sparkline(cls, values): <NEW_LINE> <INDENT> bar = '▁▂▃▄▅▆▇█' <NEW_LINE> barcount = len(bar) - 1 <NEW_LINE> values = list(map(float, values)) <NEW_LINE> mn, mx = min(values), max(values) <NEW_LINE> extent = mx - mn <NEW_LINE> if extent == 0: <NEW_LINE> <INDENT> indices = [0 for n in values] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> indices = [int((n - mn) / extent * barcount) for n in values] <NEW_LINE> <DEDENT> unicode_sparkline = ''.join([bar[i] for i in indices]) <NEW_LINE> return unicode_sparkline <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def daterange(cls, start, stop, steps): <NEW_LINE> <INDENT> delta = (stop - start) / steps <NEW_LINE> current = start <NEW_LINE> while current + delta <= stop: <NEW_LINE> <INDENT> yield current, current + delta <NEW_LINE> current += delta
Some handy utils for datagrepper visualization.
6259905b30dc7b76659a0d80
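A usage sketch for the two helpers, assuming the Utils class above is in scope:

    import datetime

    print(Utils.sparkline([1, 2, 3, 8, 3, 2, 1]))    # ▁▂▃█▃▂▁

    start = datetime.datetime(2024, 1, 1)
    stop = datetime.datetime(2024, 1, 2)
    for lo, hi in Utils.daterange(start, stop, 4):
        print(lo, hi)                                # four 6-hour windows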
class TweakArea(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "wm.tweak_area" <NEW_LINE> bl_label = "Tweak Area" <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> min_x = bpy.props.IntProperty() <NEW_LINE> min_y = bpy.props.IntProperty() <NEW_LINE> def modal(self, context, event): <NEW_LINE> <INDENT> if event.type == 'LEFTMOUSE': <NEW_LINE> <INDENT> self.max_x = event.mouse_x <NEW_LINE> self.max_y = event.mouse_y <NEW_LINE> if bpy.ops.screen.area_join(min_x=self.min_x, min_y=self.min_y, max_x=self.max_x, max_y=self.max_y) == {'CANCELLED'}: <NEW_LINE> <INDENT> if (min(self.min_x,self.max_x) >= context.area.x) and (min(self.min_y,self.max_y) >= context.area.y) and (max(self.min_x,self.max_x) <= (context.area.x + context.area.width)) and (max(self.min_y,self.max_y) <= (context.area.y + context.area.height)): <NEW_LINE> <INDENT> if abs(self.min_x - self.max_x) > abs(self.min_y - self.max_y): <NEW_LINE> <INDENT> bpy.ops.screen.area_split(direction='VERTICAL', factor=0.5, mouse_x=self.min_x, mouse_y=self.min_y) <NEW_LINE> <DEDENT> elif abs(self.min_x - self.max_x) < abs(self.min_y - self.max_y): <NEW_LINE> <INDENT> bpy.ops.screen.area_split(direction='HORIZONTAL', factor=0.5, mouse_x=self.min_x, mouse_y=self.min_y) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.report({'INFO'}, "Areas selected can't be merged together") <NEW_LINE> <DEDENT> <DEDENT> bpy.ops.screen.header_toggle_menus() <NEW_LINE> bpy.ops.screen.header_toggle_menus() <NEW_LINE> return {'FINISHED'} <NEW_LINE> <DEDENT> if event.type == 'RIGHTMOUSE' or event.type == 'ESC': <NEW_LINE> <INDENT> return {'CANCELLED'} <NEW_LINE> <DEDENT> return {'RUNNING_MODAL'} <NEW_LINE> <DEDENT> def invoke(self, context, event): <NEW_LINE> <INDENT> self.min_x = event.mouse_x <NEW_LINE> self.min_y = event.mouse_y <NEW_LINE> context.window_manager.modal_handler_add(self) <NEW_LINE> return {'RUNNING_MODAL'}
Join the current area with the one selected by clicking.
6259905b097d151d1a2c266d
class FileDataFile(FileDataObject): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def from_json(cls, fdapi, json_data): <NEW_LINE> <INDENT> return cls(fdapi, json_data) <NEW_LINE> <DEDENT> def __init__(self, fdapi, json_data): <NEW_LINE> <INDENT> FileDataObject.__init__(self, fdapi, json_data) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "FileDataFile({!r})".format(self._json_data)
Provide access to a file and its metadata in the filedata store
6259905b3539df3088ecd89c
class FdnFilterTypes(enum.IntEnum): <NEW_LINE> <INDENT> disabled = 0 <NEW_LINE> lowpass = 1 <NEW_LINE> highpass = 2
Possible filter types for a feedback delay network's feedback path.
6259905b4e4d562566373a08
@enum.unique <NEW_LINE> class ObjectiveSense(enum.Enum): <NEW_LINE> <INDENT> Minimize = -1 <NEW_LINE> Maximize = 1
Enumeration of objective sense values
6259905bd7e4931a7ef3d67f
class Schema(Row): <NEW_LINE> <INDENT> __slots__ = ("catalog", "name", "default_character_set", "default_collation", "sql_path", "default_encryption")
Information Schema SCHEMA Table
6259905be64d504609df9ecf
class tdTestGuestCtrlBase(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.oTest = None; <NEW_LINE> self.oCreds = None; <NEW_LINE> self.timeoutMS = 30 * 1000; <NEW_LINE> self.oGuestSession = None; <NEW_LINE> <DEDENT> def setEnvironment(self, oSession, oTxsSession, oTestVm): <NEW_LINE> <INDENT> self.oTest = tdCtxTest(oSession, oTxsSession, oTestVm); <NEW_LINE> return self.oTest; <NEW_LINE> <DEDENT> def createSession(self, sName): <NEW_LINE> <INDENT> if self.oGuestSession is None: <NEW_LINE> <INDENT> if sName is None: <NEW_LINE> <INDENT> sName = "<untitled>"; <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> reporter.log('Creating session "%s" ...' % (sName,)); <NEW_LINE> self.oGuestSession = self.oTest.oGuest.createSession(self.oCreds.sUser, self.oCreds.sPassword, self.oCreds.sDomain, sName); <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> reporter.logXcpt('Creating a guest session "%s" failed; sUser="%s", pw="%s", sDomain="%s":' % (sName, self.oCreds.sUser, self.oCreds.sPassword, self.oCreds.sDomain)); <NEW_LINE> return (False, None); <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> reporter.log('Waiting for session "%s" to start within %dms...' % (sName, self.timeoutMS)); <NEW_LINE> fWaitFor = [ vboxcon.GuestSessionWaitForFlag_Start ]; <NEW_LINE> waitResult = self.oGuestSession.waitForArray(fWaitFor, self.timeoutMS); <NEW_LINE> if waitResult != vboxcon.GuestSessionWaitResult_Start and waitResult != vboxcon.GuestSessionWaitResult_WaitFlagNotSupported: <NEW_LINE> <INDENT> reporter.log('Session did not start successfully, returned wait result: %d' % (waitResult,)); <NEW_LINE> return (False, None); <NEW_LINE> <DEDENT> reporter.log('Session "%s" successfully started' % (sName,)); <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> reporter.logXcpt('Waiting for guest session "%s" (usr=%s;pw=%s;dom=%s) to start failed:' % (sName, self.oCreds.sUser, self.oCreds.sPassword, self.oCreds.sDomain,)); <NEW_LINE> return (False, None); <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> reporter.log('Warning: Session already set; this is probably not what you want'); <NEW_LINE> <DEDENT> return (True, self.oGuestSession); <NEW_LINE> <DEDENT> def setSession(self, oGuestSession): <NEW_LINE> <INDENT> if self.oGuestSession is not None: <NEW_LINE> <INDENT> self.closeSession(); <NEW_LINE> <DEDENT> self.oGuestSession = oGuestSession; <NEW_LINE> return self.oGuestSession; <NEW_LINE> <DEDENT> def closeSession(self): <NEW_LINE> <INDENT> if self.oGuestSession is not None: <NEW_LINE> <INDENT> sName = self.oGuestSession.name; <NEW_LINE> try: <NEW_LINE> <INDENT> reporter.log('Closing session "%s" ...' % (sName,)); <NEW_LINE> self.oGuestSession.close(); <NEW_LINE> self.oGuestSession = None; <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> reporter.logXcpt('Closing guest session "%s" failed:' % (sName,)); <NEW_LINE> return False; <NEW_LINE> <DEDENT> <DEDENT> return True;
Base class for all guest control tests. Note: This test ASSUMES that working Guest Additions were installed and running on the guest to be tested.
6259905b442bda511e95d85a
class AuthRequired(Exception): <NEW_LINE> <INDENT> def __init__(self, msg = ""): <NEW_LINE> <INDENT> self.msg = msg <NEW_LINE> super(AuthRequired, self).__init__() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Authorization Required: " + self.msg
Raised on HTTP 401 - Authentication Required error. The service requires authentication; pass user credentials in the CLAMClient constructor.
6259905b8e71fb1e983bd0ca
class OAuth2MockMixin(object): <NEW_LINE> <INDENT> oauth2_mock_scope = None <NEW_LINE> oauth_response = { 'code': 'ih2o4ibvirjxipejc19cevcp6b0939', 'scope': 'channel.subscribers', 'state': 'FakeState' } <NEW_LINE> auth_response = { 'access_token': '1e84e29qi5u2gozvoqaoebauwnd90n', 'expires_in': 13048, 'refresh_token': 'wrrjc22jp1gjm1irld1iklvaaaq1j77pemhgsi8pye6x1hlrdy', 'scope': oauth2_mock_scope, 'token_type': 'bearer' }
The OAuth2 token initiation process looks almost the same from one platform to another, so these settings serve as a template for any OAuth2 backend test case.
6259905b0fa83653e46f64e6
class Libemos(CMakePackage): <NEW_LINE> <INDENT> homepage = "https://software.ecmwf.int/wiki/display/EMOS/Emoslib" <NEW_LINE> url = "https://software.ecmwf.int/wiki/download/attachments/3473472/libemos-4.4.2-Source.tar.gz" <NEW_LINE> version('4.4.7', '395dcf21cf06872f772fb6b73d8e67b9') <NEW_LINE> version('4.4.2', 'f15a9aff0f40861f3f046c9088197376') <NEW_LINE> variant('eccodes', default=False, description="Use eccodes instead of grib-api for GRIB decoding") <NEW_LINE> variant('build_type', default='RelWithDebInfo', description='The build type to build', values=('Debug', 'Release', 'RelWithDebInfo', 'Production')) <NEW_LINE> depends_on('eccodes', when='+eccodes') <NEW_LINE> depends_on('grib-api', when='~eccodes') <NEW_LINE> depends_on('fftw+float+double') <NEW_LINE> depends_on('[email protected]:', type='build') <NEW_LINE> def cmake_args(self): <NEW_LINE> <INDENT> spec = self.spec <NEW_LINE> args = [] <NEW_LINE> if spec.satisfies('+eccodes'): <NEW_LINE> <INDENT> args.append('-DENABLE_ECCODES=ON') <NEW_LINE> args.append('-DECCODES_PATH=%s' % spec['eccodes'].prefix) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> args.append('-DENABLE_ECCODES=OFF') <NEW_LINE> args.append('-DGRIB_API_PATH=%s' % spec['grib-api'].prefix) <NEW_LINE> <DEDENT> args.append('-DCMAKE_Fortran_FLAGS=-ffree-line-length-none') <NEW_LINE> return args
The Interpolation library (EMOSLIB) includes Interpolation software and BUFR & CREX encoding/decoding routines.
6259905b21bff66bcd724265
class BaseDataset(ABC, Dataset): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def __len__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def __getitem__(self, index): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def load_batches(self, batch_size=10, **kwargs): <NEW_LINE> <INDENT> return DataLoader(self, batch_size=batch_size, **kwargs)
Base class for datasets.
6259905b8da39b475be047e7
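A minimal concrete subclass showing how load_batches wraps the dataset in a PyTorch DataLoader (the RangeDataset name and the sizes are illustrative; torch and the BaseDataset above are assumed to be importable):

    import torch

    class RangeDataset(BaseDataset):
        def __init__(self, n):
            self.data = torch.arange(n, dtype=torch.float32)

        def __len__(self):
            return len(self.data)

        def __getitem__(self, index):
            return self.data[index]

    for batch in RangeDataset(25).load_batches(batch_size=10):
        print(batch.shape)   # torch.Size([10]), torch.Size([10]), torch.Size([5])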
class LogsManager(models.Manager): <NEW_LINE> <INDENT> def add_log_bulk(self, data): <NEW_LINE> <INDENT> http_method = data["request_method"].lower() <NEW_LINE> if http_method not in dict(REQUEST_METHODS_CHOICES): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> log_entry = Logs( ip=data["ip"], tms=format_date(data["time"]), request_method=http_method, request_uri=data["path"], request_code=int(data["status"]), request_size=int(data["bytes"]), ) <NEW_LINE> return log_entry
Logs manager that adds methods to the model.
6259905badb09d7d5dc0bb6b
class LogitRelaxedBernoulli(Distribution): <NEW_LINE> <INDENT> arg_constraints = {'probs': constraints.unit_interval, 'logits': constraints.real} <NEW_LINE> support = constraints.real <NEW_LINE> def __init__(self, temperature, probs=None, logits=None, validate_args=None): <NEW_LINE> <INDENT> self.temperature = temperature <NEW_LINE> if (probs is None) == (logits is None): <NEW_LINE> <INDENT> raise ValueError("Either `probs` or `logits` must be specified, but not both.") <NEW_LINE> <DEDENT> if probs is not None: <NEW_LINE> <INDENT> is_scalar = isinstance(probs, Number) <NEW_LINE> self.probs, = broadcast_all(probs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> is_scalar = isinstance(logits, Number) <NEW_LINE> self.logits, = broadcast_all(logits) <NEW_LINE> <DEDENT> self._param = self.probs if probs is not None else self.logits <NEW_LINE> if is_scalar: <NEW_LINE> <INDENT> batch_shape = torch.Size() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> batch_shape = self._param.size() <NEW_LINE> <DEDENT> super(LogitRelaxedBernoulli, self).__init__(batch_shape, validate_args=validate_args) <NEW_LINE> <DEDENT> def _new(self, *args, **kwargs): <NEW_LINE> <INDENT> return self._param.new(*args, **kwargs) <NEW_LINE> <DEDENT> @lazy_property <NEW_LINE> def logits(self): <NEW_LINE> <INDENT> return probs_to_logits(self.probs, is_binary=True) <NEW_LINE> <DEDENT> @lazy_property <NEW_LINE> def probs(self): <NEW_LINE> <INDENT> return logits_to_probs(self.logits, is_binary=True) <NEW_LINE> <DEDENT> @property <NEW_LINE> def param_shape(self): <NEW_LINE> <INDENT> return self._param.size() <NEW_LINE> <DEDENT> def rsample(self, sample_shape=torch.Size()): <NEW_LINE> <INDENT> shape = self._extended_shape(sample_shape) <NEW_LINE> probs = clamp_probs(self.probs.expand(shape)) <NEW_LINE> uniforms = clamp_probs(self.probs.new(shape).uniform_()) <NEW_LINE> return (uniforms.log() - (-uniforms).log1p() + probs.log() - (-probs).log1p()) / self.temperature <NEW_LINE> <DEDENT> def log_prob(self, value): <NEW_LINE> <INDENT> if self._validate_args: <NEW_LINE> <INDENT> self._validate_sample(value) <NEW_LINE> <DEDENT> logits, value = broadcast_all(self.logits, value) <NEW_LINE> diff = logits - value.mul(self.temperature) <NEW_LINE> return self.temperature.log() + diff - 2 * diff.exp().log1p()
Creates a LogitRelaxedBernoulli distribution parameterized by :attr:`probs` or :attr:`logits` (but not both), which is the logit of a RelaxedBernoulli distribution. Samples are logits of values in (0, 1). See [1] for more details. Args: temperature (Tensor): relaxation temperature probs (Number, Tensor): the probability of sampling `1` logits (Number, Tensor): the log-odds of sampling `1` [1] The Concrete Distribution: A Continuous Relaxation of Discrete Random Variables (Maddison et al, 2017) [2] Categorical Reparametrization with Gumbel-Softmax (Jang et al, 2017)
6259905b99cbb53fe68324e1
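This class mirrors torch.distributions.LogitRelaxedBernoulli; a short usage sketch with the PyTorch distribution (the parameter values are illustrative):

    import torch
    from torch.distributions import LogitRelaxedBernoulli

    dist = LogitRelaxedBernoulli(torch.tensor(2.0), probs=torch.tensor([0.3, 0.7]))
    x = dist.rsample()        # reparameterized sample: logits of values in (0, 1)
    y = torch.sigmoid(x)      # map back into (0, 1), as RelaxedBernoulli does
    lp = dist.log_prob(x)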
class CoercedDict(CoercedCollectionMixin, dict): <NEW_LINE> <INDENT> def _coerce_dict(self, d): <NEW_LINE> <INDENT> res = {} <NEW_LINE> for key, element in six.iteritems(d): <NEW_LINE> <INDENT> res[key] = self._coerce_item(key, element) <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> def _coerce_item(self, key, item): <NEW_LINE> <INDENT> if not isinstance(key, six.string_types): <NEW_LINE> <INDENT> raise KeyTypeError(six.string_types[0], key) <NEW_LINE> <DEDENT> if hasattr(self, "_element_type") and self._element_type is not None: <NEW_LINE> <INDENT> att_name = "%s[%s]" % (self._field, key) <NEW_LINE> return self._element_type.coerce(self._obj, att_name, item) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return item <NEW_LINE> <DEDENT> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> super(CoercedDict, self).__setitem__(key, self._coerce_item(key, value)) <NEW_LINE> <DEDENT> def update(self, other=None, **kwargs): <NEW_LINE> <INDENT> if other is not None: <NEW_LINE> <INDENT> super(CoercedDict, self).update(self._coerce_dict(other), **self._coerce_dict(kwargs)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(CoercedDict, self).update(**self._coerce_dict(kwargs)) <NEW_LINE> <DEDENT> <DEDENT> def setdefault(self, key, default=None): <NEW_LINE> <INDENT> return super(CoercedDict, self).setdefault(key, self._coerce_item(key, default))
Dict which coerces its values. Dict implementation which overrides all element-adding methods, coercing the element(s) being added to the required element type.
6259905b24f1403a926863cf
class AddFormEditable(object): <NEW_LINE> <INDENT> implements(IAddForm) <NEW_LINE> adapts(IPage) <NEW_LINE> def __init__(self, context): <NEW_LINE> <INDENT> self.context = context <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self): <NEW_LINE> <INDENT> return getattr(self.context, 'title', u"") <NEW_LINE> <DEDENT> @title.setter <NEW_LINE> def title(self, value): <NEW_LINE> <INDENT> self.context.title = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def description(self): <NEW_LINE> <INDENT> return getattr(self.context, 'description', u"") <NEW_LINE> <DEDENT> @description.setter <NEW_LINE> def description(self, value): <NEW_LINE> <INDENT> self.context.description = value
Form adapter to make IAddForm work
6259905b3eb6a72ae038bc61
class Match(object): <NEW_LINE> <INDENT> def __init__(self, string, start, end=None): <NEW_LINE> <INDENT> self.string = string <NEW_LINE> self.start = start <NEW_LINE> self.end = end <NEW_LINE> self.captures = [] <NEW_LINE> self.context = None <NEW_LINE> <DEDENT> def _setEnd(self, end): <NEW_LINE> <INDENT> self.end = end <NEW_LINE> return self <NEW_LINE> <DEDENT> def getValue(self, default=None): <NEW_LINE> <INDENT> start, end = (self.start, self.end) <NEW_LINE> return default if self.end is None else self.string[start:end] <NEW_LINE> <DEDENT> def _addCapture(self, capture): <NEW_LINE> <INDENT> self.captures.append( capture ) <NEW_LINE> return self <NEW_LINE> <DEDENT> def _setCaptures(self, captures): <NEW_LINE> <INDENT> if isinstance(captures, list): <NEW_LINE> <INDENT> self.captures = captures <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.captures = [captures] <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def _addSubmatch(self, match): <NEW_LINE> <INDENT> self.captures.extend(match.captures) <NEW_LINE> return self <NEW_LINE> <DEDENT> def hasCaptures(self): <NEW_LINE> <INDENT> return len(self.captures) > 0 <NEW_LINE> <DEDENT> def getCapture(self, index): <NEW_LINE> <INDENT> if not self.hasCaptures() or -index < -len(self.captures) or len(self.captures) <= index: <NEW_LINE> <INDENT> raise IndexError("Invalid capture index for Match") <NEW_LINE> <DEDENT> return self.captures[index] <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> return getattr(str(self), attr) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if isinstance(key, int) and 0 <= key and key < len(self): <NEW_LINE> <INDENT> return self.getCapture(key) <NEW_LINE> <DEDENT> raise IndexError("Invalid index for Match") <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> import six <NEW_LINE> if isinstance(other, six.string_types): <NEW_LINE> <INDENT> return str(self) == other <NEW_LINE> <DEDENT> if isinstance(other, Match): <NEW_LINE> <INDENT> return str(self) == str(other) <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.captures) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.getValue()
The ``Match`` object tracks the text that was matched by a PEG pattern, and provides access to any match captures. :ivar string: The original string that was matched. :ivar start: The location in the string where the pattern match starts. :ivar end: The location in the string where the pattern match ends. :ivar captures: The Pattern captures. Note that the ``match.getCapture(n)`` method is available to access captures, or simply ``match[n]``. ``len(match)`` indicates the number of captures.
6259905b56ac1b37e63037e8
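A usage sketch of the Match API above (assumes the class is in scope; the six package is needed for the string comparison in __eq__):

    m = Match("hello world", 0)._setEnd(5)
    m._setCaptures(["hello"])
    print(m.getValue())       # hello
    print(m.getCapture(0))    # hello
    print(m == "hello")       # True: compares against the matched text
    print(len(m))             # 1 (number of captures)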
class ScheduleModuleTest(ModuleCase): <NEW_LINE> <INDENT> def test_schedule_list(self): <NEW_LINE> <INDENT> expected = {'schedule': {}} <NEW_LINE> ret = self.run_function('schedule.list') <NEW_LINE> self.assertEqual(ret, expected) <NEW_LINE> <DEDENT> def test_schedule_reload(self): <NEW_LINE> <INDENT> expected = {'comment': [], 'result': True} <NEW_LINE> ret = self.run_function('schedule.reload') <NEW_LINE> self.assertEqual(ret, expected)
Test the schedule module
6259905bf548e778e596cb94
class BusinessError(SystemError): <NEW_LINE> <INDENT> pass
Error raised during normal business operations.
6259905ba79ad1619776b5be
class TagViewSet(BaseRecipeAttrViewSet): <NEW_LINE> <INDENT> queryset = Tag.objects.all() <NEW_LINE> serializer_class = serializers.TagSerializer
Manages tags in the database
6259905b30dc7b76659a0d81
class Object(Schema): <NEW_LINE> <INDENT> def __init__(self, properties: Dict[str, BaseType]=None, additional_properties: Union[bool, BaseType]=True, required: List[str]=None, min_properties: int=None, max_properties: int=None, dependencies: Dict[str, Union[List[str], Schema]]=None, pattern_properties: Dict[str, BaseType]=None, enum: List[any]=None, title: str=None, description: str=None, default: any=None): <NEW_LINE> <INDENT> super(Object, self).__init__(properties, additional_properties, required, min_properties, max_properties, dependencies, pattern_properties, enum, title, description, default) <NEW_LINE> self.type = Type.Object
Objects are the mapping type in JSON. They map “keys” to “values”. In JSON, the “keys” must always be strings. Each of these pairs is conventionally referred to as a “property”.
6259905bac7a0e7691f73ae4
class RegexTree: <NEW_LINE> <INDENT> def __init__(self: 'RegexTree', symbol: str, children: list) -> None: <NEW_LINE> <INDENT> self.symbol, self.children = symbol, children[:] <NEW_LINE> <DEDENT> def __repr__(self: 'RegexTree') -> str: <NEW_LINE> <INDENT> return 'RegexTree({}, {})'.format( repr(self.symbol), repr(self.children)) <NEW_LINE> <DEDENT> def __eq__(self: 'RegexTree', other: object) -> bool: <NEW_LINE> <INDENT> return (isinstance(other, RegexTree) and self.symbol == other.symbol and self.children == other.children)
Root of a regular expression tree
6259905b91f36d47f2231990
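A usage sketch of the RegexTree API above (assumes the class is in scope):

    zero = RegexTree("0", [])
    one = RegexTree("1", [])
    alt = RegexTree("|", [zero, one])   # tree for the regex 0|1
    print(alt)                          # RegexTree('|', [RegexTree('0', []), RegexTree('1', [])])
    print(alt == RegexTree("|", [RegexTree("0", []), RegexTree("1", [])]))   # True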
class MinCommonNNNeighborsBR(BondRule): <NEW_LINE> <INDENT> def __init__(self, num_neighbors, invert=False): <NEW_LINE> <INDENT> super(MinCommonNNNeighborsBR, self).__init__(invert=invert) <NEW_LINE> self.num_neighbors = num_neighbors <NEW_LINE> <DEDENT> def _check_percolating(self, percolator, site1, site2): <NEW_LINE> <INDENT> common_nb = percolator.get_common_neighbors(site1, site2) <NEW_LINE> occupied = [nb for nb in common_nb if percolator.cluster[nb] >= 0] <NEW_LINE> max_nn = 0 <NEW_LINE> for i, nb1 in enumerate(occupied): <NEW_LINE> <INDENT> nn = 1 <NEW_LINE> for nb2 in occupied[i+1:]: <NEW_LINE> <INDENT> if nb2 in percolator.neighbors[nb1]: <NEW_LINE> <INDENT> nn += 1 <NEW_LINE> <DEDENT> <DEDENT> max_nn = max(max_nn, nn) <NEW_LINE> <DEDENT> return (max_nn >= self.num_neighbors)
This rule is True when two sites have at least a specified number of common neighbors that are percolating, and those common neighbors are themselves nearest neighbors.
6259905bcb5e8a47e493cc87
class Frame_extractor(): <NEW_LINE> <INDENT> def __init__( self, pickle_output = None): <NEW_LINE> <INDENT> self.appropriate_udeprels = [ "nsubj", "csubj", "obj", "iobj", "ccomp", "xcomp", "expl" ] <NEW_LINE> self.appropriate_deprels = [ "obl:arg", "obl:agent" ] <NEW_LINE> self.verb_record_class = Verb_record <NEW_LINE> self.frame_type_class = Frame_type <NEW_LINE> self.frame_inst_class = Frame_inst <NEW_LINE> self.frame_type_arg_class = Frame_type_arg <NEW_LINE> self.frame_inst_arg_class = Frame_inst_arg <NEW_LINE> self.pickle_output = pickle_output <NEW_LINE> self.dict_of_verbs = {} <NEW_LINE> <DEDENT> def process_tree( self, tree): <NEW_LINE> <INDENT> frame_insts = [] <NEW_LINE> for node in tree.descendants: <NEW_LINE> <INDENT> frame_inst = self._process_node( node) <NEW_LINE> if frame_inst is not None: <NEW_LINE> <INDENT> frame_insts.append( frame_inst) <NEW_LINE> <DEDENT> <DEDENT> return frame_insts <NEW_LINE> <DEDENT> def _process_node( self, node): <NEW_LINE> <INDENT> if node.upos == "VERB": <NEW_LINE> <INDENT> if node.lemma in self.dict_of_verbs: <NEW_LINE> <INDENT> verb_record = self.dict_of_verbs[ node.lemma ] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> verb_record = self.verb_record_class( node.lemma) <NEW_LINE> self.dict_of_verbs[ node.lemma ] = verb_record <NEW_LINE> <DEDENT> frame_inst = self._process_frame( verb_record, node) <NEW_LINE> return frame_inst <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def _process_frame( self, verb_record, verb_node): <NEW_LINE> <INDENT> frame_type = self.frame_type_class() <NEW_LINE> frame_type.set_verb_features( verb_node) <NEW_LINE> frame_inst = self.frame_inst_class() <NEW_LINE> self._process_args( frame_type, frame_inst, verb_node) <NEW_LINE> frame_type.sort_args() <NEW_LINE> frame_inst.process_sentence( verb_node) <NEW_LINE> frame_type.add_inst( frame_inst) <NEW_LINE> verb_record.consider_new_frame_type( frame_type, frame_inst) <NEW_LINE> return frame_inst <NEW_LINE> <DEDENT> def _process_args( self, frame_type, frame_inst, verb_node): <NEW_LINE> <INDENT> for child_node in verb_node.children: <NEW_LINE> <INDENT> if child_node.udeprel in self.appropriate_udeprels or child_node.deprel in self.appropriate_deprels: <NEW_LINE> <INDENT> frame_type_arg = self.frame_type_arg_class( child_node) <NEW_LINE> frame_type.add_arg( frame_type_arg) <NEW_LINE> frame_inst_arg = self.frame_inst_arg_class( child_node) <NEW_LINE> frame_inst.add_arg( frame_inst_arg) <NEW_LINE> frame_type_arg.add_inst( frame_inst_arg) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_dict_of_verbs( self): <NEW_LINE> <INDENT> return self.dict_of_verbs
Tool used by frame_aligner to extract frames from each verb node. It may be overloaded with language-specific extractors that redefine the appropriate (u)deprels and the general classes in __init__; the rest of the extractor is designed to be language-independent and remain untouched.
6259905bb7558d5895464a2d
class ApplicationWithConfig(Application): <NEW_LINE> <INDENT> def __init__(self, description): <NEW_LINE> <INDENT> self.config = None <NEW_LINE> super().__init__(description) <NEW_LINE> <DEDENT> def _add_arguments(self, arg_parser): <NEW_LINE> <INDENT> super()._add_arguments(arg_parser) <NEW_LINE> arg_parser.add_argument( "config_path", help="path to yarely configuration file" ) <NEW_LINE> <DEDENT> def _handle_arguments(self, args): <NEW_LINE> <INDENT> super()._handle_arguments(args) <NEW_LINE> self._read_config(args.config_path) <NEW_LINE> <DEDENT> def _read_config(self, path): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.config = YarelyConfig(path) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> msg = "Error initialising configuration from '{path}'" <NEW_LINE> msg = msg.format(path=path) <NEW_LINE> log.exception(msg) <NEW_LINE> raise ApplicationConfigurationError(msg) from e <NEW_LINE> <DEDENT> logging.captureWarnings(True) <NEW_LINE> version_template = "Python version: {version!r}" <NEW_LINE> uname_template = "Host details: {uname!r}" <NEW_LINE> argv_template = "Command line: {argv!r}" <NEW_LINE> log.info(version_template.format(version=sys.version)) <NEW_LINE> log.info(uname_template.format(uname=platform.uname())) <NEW_LINE> log.info(argv_template.format(argv=sys.argv))
A base class for applications expecting the path to the configuration file as a command line argument.
6259905b3cc13d1c6d466d43
class Registry(Components, dict): <NEW_LINE> <INDENT> has_listeners = False <NEW_LINE> _settings = None <NEW_LINE> def registerSubscriptionAdapter(self, *arg, **kw): <NEW_LINE> <INDENT> result = Components.registerSubscriptionAdapter(self, *arg, **kw) <NEW_LINE> self.has_listeners = True <NEW_LINE> return result <NEW_LINE> <DEDENT> def registerSelfAdapter(self, required=None, provided=None, name=u'', info=u'', event=True): <NEW_LINE> <INDENT> return self.registerAdapter(lambda x: x, required=required, provided=provided, name=name, info=info, event=event) <NEW_LINE> <DEDENT> def queryAdapterOrSelf(self, object, interface, default=None): <NEW_LINE> <INDENT> if not interface.providedBy(object): <NEW_LINE> <INDENT> return self.queryAdapter(object, interface, default=default) <NEW_LINE> <DEDENT> return object <NEW_LINE> <DEDENT> def registerHandler(self, *arg, **kw): <NEW_LINE> <INDENT> result = Components.registerHandler(self, *arg, **kw) <NEW_LINE> self.has_listeners = True <NEW_LINE> return result <NEW_LINE> <DEDENT> def notify(self, *events): <NEW_LINE> <INDENT> if self.has_listeners: <NEW_LINE> <INDENT> [ _ for _ in self.subscribers(events, None) ] <NEW_LINE> <DEDENT> <DEDENT> def _get_settings(self): <NEW_LINE> <INDENT> return self._settings <NEW_LINE> <DEDENT> def _set_settings(self, settings): <NEW_LINE> <INDENT> self.registerUtility(settings, ISettings) <NEW_LINE> self._settings = settings <NEW_LINE> <DEDENT> settings = property(_get_settings, _set_settings)
A registry object is an :term:`application registry`. The existence of a registry is an implementation detail of :app:`pyramid`. It is used by the framework itself to perform mappings of URLs to view callables, as well as servicing other various duties. Despite being an implementation detail of the framework, it has a number of attributes that may be useful within application code. For information about the purpose and usage of the application registry, see :ref:`zca_chapter`. The application registry is usually accessed as ``request.registry`` in application code.
6259905b097d151d1a2c2670
class DeleteJobOp(ResourceOp): <NEW_LINE> <INDENT> def __init__(self, name: str = None, k8s_name: str = None, job_name: str = None): <NEW_LINE> <INDENT> k8s_name = k8s_name or job_name <NEW_LINE> if not k8s_name: <NEW_LINE> <INDENT> raise ValueError("You need to provide a k8s_name or a job_name.") <NEW_LINE> <DEDENT> super().__init__( k8s_resource={ "apiVersion": "databricks.microsoft.com/v1alpha1", "kind": "Djob", "metadata": { "name": k8s_name } }, action="delete", name=name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def resource(self): <NEW_LINE> <INDENT> return self._resource
Represents an Op which will be translated into a Databricks Spark Job deletion resource template. Example: import databricks databricks.DeleteJobOp( name = "deletejob", job_name = "test-job" )
6259905b498bea3a75a590fe
class Player(Deck): <NEW_LINE> <INDENT> def __init__(self, name=''): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.cards = [] <NEW_LINE> <DEDENT> def get_winining_value(self): <NEW_LINE> <INDENT> winining_value = 0 <NEW_LINE> suite_number = [4, 3, 2, 1] <NEW_LINE> for i in range(len(self.cards)): <NEW_LINE> <INDENT> winining_value = winining_value + suite_number[self.cards[i].suit] * self.cards[i].rank <NEW_LINE> <DEDENT> return winining_value
represents a hand of playing cards
6259905b3539df3088ecd89f
class FakeUcsDriver(base.BaseDriver): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> if not importutils.try_import('UcsSdk'): <NEW_LINE> <INDENT> raise exception.DriverLoadError( driver=self.__class__.__name__, reason=_("Unable to import UcsSdk library")) <NEW_LINE> <DEDENT> self.power = ucs_power.Power() <NEW_LINE> self.deploy = fake.FakeDeploy() <NEW_LINE> self.management = ucs_mgmt.UcsManagement()
Fake UCS driver.
6259905b7b25080760ed87e1
class Embedding(Layer): <NEW_LINE> <INDENT> def __init__(self, input_dim, output_dim, init='uniform', weights=None, normalize=False): <NEW_LINE> <INDENT> self.init = initializations.get(init) <NEW_LINE> self.input_dim = input_dim <NEW_LINE> self.output_dim = output_dim <NEW_LINE> self.normalize = normalize <NEW_LINE> self.input = T.imatrix() <NEW_LINE> self.W = self.init((self.input_dim, self.output_dim)) <NEW_LINE> self.params = [self.W] <NEW_LINE> if weights is not None: <NEW_LINE> <INDENT> self.set_weights(weights) <NEW_LINE> <DEDENT> <DEDENT> def output(self, train=False): <NEW_LINE> <INDENT> X = self.get_input(train) <NEW_LINE> out = self.W[X] <NEW_LINE> return out
Turn a list of integers >=0 into dense vectors of fixed size, e.g. each integer in [4, 50, 123, 26] maps to a vector such as [0.25, 0.1]. @input_dim: size of vocabulary (highest input integer + 1) @output_dim: size of dense representation
6259905b1b99ca4002290039
class Location(db.Model): <NEW_LINE> <INDENT> __tablename__ = "gb_map_locations" <NEW_LINE> marker_id = db.Column(db.Integer, autoincrement=True, primary_key=True) <NEW_LINE> gb_lat = db.Column(db.String(20), nullable=True) <NEW_LINE> gb_long = db.Column(db.String(20), nullable=True) <NEW_LINE> gb_id = db.Column(db.Integer, db.ForeignKey('ghostbikes.ghostbike_id')) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return f"<Location gb_id{self.gb_id} gb_lat={self.gb_lat} gb_long={self.gb_long}>"
ghost bike location model
6259905b16aa5153ce401ae7
class TestDbQuotaDriver(base.BaseTestCase): <NEW_LINE> <INDENT> def test_get_tenant_quotas_arg(self): <NEW_LINE> <INDENT> driver = quota_db.DbQuotaDriver() <NEW_LINE> ctx = context.Context('', 'bar') <NEW_LINE> foo_quotas = {'network': 5} <NEW_LINE> default_quotas = {'network': 10} <NEW_LINE> target_tenant = 'foo' <NEW_LINE> with mock.patch.object(quota_db.DbQuotaDriver, 'get_tenant_quotas', return_value=foo_quotas) as get_tenant_quotas: <NEW_LINE> <INDENT> quotas = driver._get_quotas(ctx, target_tenant, default_quotas) <NEW_LINE> self.assertEqual(quotas, foo_quotas) <NEW_LINE> get_tenant_quotas.assert_called_once_with(ctx, default_quotas, target_tenant)
Test for neutron.db.quota_db.DbQuotaDriver.
6259905b3617ad0b5ee0774e
class IrmaError(Exception): <NEW_LINE> <INDENT> pass
Error on cli script
6259905b8e7ae83300eea691
class GLView(ViewBase, Component, GLContext): <NEW_LINE> <INDENT> _default_size = (100, 100) <NEW_LINE> def __init__(self, **kwds): <NEW_LINE> <INDENT> Component.__init__(self, **kwds) <NEW_LINE> ViewBase.__init__(self) <NEW_LINE> <DEDENT> def destroy(self): <NEW_LINE> <INDENT> ViewBase.destroy(self) <NEW_LINE> Component.destroy(self) <NEW_LINE> <DEDENT> def _render(self): <NEW_LINE> <INDENT> glMatrixMode(GL_MODELVIEW) <NEW_LINE> glLoadIdentity() <NEW_LINE> self.render() <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def viewport_changed(self): <NEW_LINE> <INDENT> glMatrixMode(GL_PROJECTION) <NEW_LINE> glLoadIdentity() <NEW_LINE> self.init_projection() <NEW_LINE> <DEDENT> def init_projection(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.with_context(self.render, flush = True) <NEW_LINE> <DEDENT> def _init_context(self): <NEW_LINE> <INDENT> self.init_context() <NEW_LINE> self._update_viewport() <NEW_LINE> <DEDENT> def _update_viewport(self): <NEW_LINE> <INDENT> width, height = self.size <NEW_LINE> glViewport(0, 0, int(width), int(height)) <NEW_LINE> self.viewport_changed()
A GLView is a Component providing an OpenGL 3D display area. Constructors: GLView(config_attr = value..., share = None) GLView(config, share = None)
6259905b55399d3f05627b23
class Input_fun(threading.Thread): <NEW_LINE> <INDENT> message = '' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(Input_fun, self).__init__() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> event.clear() <NEW_LINE> info = input("input something:").strip() <NEW_LINE> if not info: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> Input_fun.message = info <NEW_LINE> event.set()
This thread class, written in the subclassing style, is used to interact with the first class: it exists solely to collect the managing user's input commands. A separate thread is used to get the user's commands and interact with the host.
6259905b7047854f463409c3
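Input_fun relies on a module-level threading.Event named event; a usage sketch of how the main thread can wait for the reader thread (the surrounding setup is an assumption):

    import threading

    event = threading.Event()      # must exist at module level before run() executes

    reader = Input_fun()
    reader.daemon = True
    reader.start()

    event.wait()                   # blocks until the user typed a non-empty line
    print("received:", Input_fun.message)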
class VirtualHostLocationBase(AppConfigBase): <NEW_LINE> <INDENT> MODES = [ 'exact', 'regex', ] <NEW_LINE> uri = '' <NEW_LINE> mode = ''
Virtual host location object.
6259905b1f037a2d8b9e536d
class BastionHostListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[BastionHost]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["BastionHost"]] = None, next_link: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(BastionHostListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = next_link
Response for ListBastionHosts API service call. :param value: List of Bastion Hosts in a resource group. :type value: list[~azure.mgmt.network.v2020_06_01.models.BastionHost] :param next_link: URL to get the next set of results. :type next_link: str
6259905b99cbb53fe68324e4
class _IteratorInitHook(tf.estimator.SessionRunHook): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(_IteratorInitHook, self).__init__() <NEW_LINE> self.iterator_initializer_fn = None <NEW_LINE> <DEDENT> def after_create_session(self, session, coord): <NEW_LINE> <INDENT> del coord <NEW_LINE> self.iterator_initializer_fn(session)
Hook to initialize data iterator after session is created.
6259905bb5575c28eb7137ce
class AccountSer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Account <NEW_LINE> fields = '__all__'
Serializer for the Account model.
6259905bfff4ab517ebcee29
class Config: <NEW_LINE> <INDENT> APP_TITLE = os.environ.get("APP_TITLE", "FastAPI template") <NEW_LINE> APP_DESCRIPTION = os.environ.get("APP_DESCRIPTION", "A template for FastAPI.") <NEW_LINE> VERSION = os.environ.get("VERSION", "0.0.0") <NEW_LINE> OPENAPI_URL = os.environ.get("OPENAPI_URL", "/openapi.json") <NEW_LINE> REDIS_URL = os.environ.get("REDIS_URL", "redis://:password@localhost:6379/0") <NEW_LINE> DB_URL = os.environ.get( "DB_URL", "postgresql://postgres:password@localhost:5432/database" )
Service configurations
6259905b7cff6e4e811b7048
class _PortMapping(CFObject): <NEW_LINE> <INDENT> external = CFField('external').kind(int).name_field() <NEW_LINE> container = CFField('container').kind(int).value_field() <NEW_LINE> kind = CFField('kind').default('tcp') <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(_PortMapping, self).__init__('Port Mapping')
A port mapping of an internal container port to the outside world.
6259905bbe8e80087fbc0688
class Menu(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=100) <NEW_LINE> location = models.ForeignKey(MenuLocation, related_name='menus') <NEW_LINE> active = models.BooleanField('Always Use For Location') <NEW_LINE> url = models.CharField(max_length=100, null=True, blank=True) <NEW_LINE> content_type = models.ForeignKey(ContentType, null=True, blank=True) <NEW_LINE> object_id = models.PositiveIntegerField(null=True, blank=True) <NEW_LINE> content_object = generic.GenericForeignKey('content_type', 'object_id') <NEW_LINE> created_at = CreationDateTimeField() <NEW_LINE> modified_at = ModificationDateTimeField() <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def active_url(self): <NEW_LINE> <INDENT> if self.url: <NEW_LINE> <INDENT> return self.url <NEW_LINE> <DEDENT> elif self.content_object and hasattr(self.content_object,'get_absolute_url'): <NEW_LINE> <INDENT> return self.content_object.get_absolute_url() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> <DEDENT> def item_for_page(self, path): <NEW_LINE> <INDENT> return obj_for_page(MenuItem.objects.filter(menu=self), path)
A collection of MenuItems that can be rendered in a given MenuLocation based on a certain criteria and the page being rendered.
6259905b91f36d47f2231991
class ComputeTargetTcpProxiesDeleteRequest(_messages.Message): <NEW_LINE> <INDENT> project = _messages.StringField(1, required=True) <NEW_LINE> requestId = _messages.StringField(2) <NEW_LINE> targetTcpProxy = _messages.StringField(3, required=True)
A ComputeTargetTcpProxiesDeleteRequest object. Fields: project: Project ID for this request. requestId: An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and then the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. targetTcpProxy: Name of the TargetTcpProxy resource to delete.
6259905b004d5f362081faf0
class ApplyAnnotation(base.Command): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def Args(parser): <NEW_LINE> <INDENT> concept_parsers.ConceptParser( [flags.CreateAssetResourceArg(positional=True), flags.CreateAnnotationResourceArg()]).AddToParser(parser) <NEW_LINE> flags.AddSubAssetFlag(parser) <NEW_LINE> <DEDENT> def Run(self, args): <NEW_LINE> <INDENT> asset_resource = args.CONCEPTS.asset.Parse() <NEW_LINE> annotation_resource = args.CONCEPTS.annotation.Parse() <NEW_LINE> sub_asset = args.sub_asset <NEW_LINE> return assets.ApplyAnnotationTag(asset_resource, annotation_resource, sub_asset)
Apply an annotation to a given asset.
6259905b8a43f66fc4bf3792
@modify_settings(MIDDLEWARE_CLASSES={ 'append': 'django.middleware.cache.FetchFromCacheMiddleware', 'prepend': 'django.middleware.cache.UpdateCacheMiddleware', }) <NEW_LINE> class MiddlewareTestCase(TestCase): <NEW_LINE> <INDENT> @modify_settings(MIDDLEWARE_CLASSES={ 'append': 'django.middleware.cache.FetchFromCacheMiddleware', 'prepend': 'django.middleware.cache.UpdateCacheMiddleware', }) <NEW_LINE> def test_cache_middleware(self): <NEW_LINE> <INDENT> with self.modify_settings(MIDDLEWARE_CLASSES={ 'append': 'django.middleware.cache.FetchFromCacheMiddleware', 'prepend': 'django.middleware.cache.UpdateCacheMiddleware', 'remove': [ 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ], }): <NEW_LINE> <INDENT> response = self.client.get('/') <NEW_LINE> self.assertIn("http://127.0.0.1:8000/", response)
Performing middleware load tests
6259905b38b623060ffaa351
@dataclass <NEW_LINE> class DeviceRequestParameter(BackboneElement): <NEW_LINE> <INDENT> resource_type: ClassVar[str] = "DeviceRequestParameter" <NEW_LINE> code: Optional[CodeableConcept] = None <NEW_LINE> valueCodeableConcept: Optional[CodeableConcept] = field(default=None, metadata=dict(one_of_many='value',)) <NEW_LINE> valueQuantity: Optional[Quantity] = field(default=None, metadata=dict(one_of_many='value',)) <NEW_LINE> valueRange: Optional[Range] = field(default=None, metadata=dict(one_of_many='value',)) <NEW_LINE> valueBoolean: Optional[bool] = field(default=None, metadata=dict(one_of_many='value',))
Device details. Specific parameters for the ordered item. For example, the prism value for lenses.
6259905b16aa5153ce401ae8
class HasExpectedFormat(object): <NEW_LINE> <INDENT> def __init__(self, load_type): <NEW_LINE> <INDENT> results = sfv_util.get_source_target_column_values_from_ref_column_mapping( Constants.UDL2_JSON_LZ_TABLE, load_type) if None is None else [] <NEW_LINE> self.mapping = dict([(row[0], row[1].split('.')) for row in results]) <NEW_LINE> <DEDENT> def execute(self, dir_path, file_name, batch_sid): <NEW_LINE> <INDENT> complete_path = os.path.join(dir_path, file_name) <NEW_LINE> with open_udl_file(complete_path) as f: <NEW_LINE> <INDENT> json_object = json.load(f) <NEW_LINE> mapping = self.mapping <NEW_LINE> for field in mapping.keys(): <NEW_LINE> <INDENT> path = mapping[field] <NEW_LINE> if not self.does_json_path_exist(json_object, path): <NEW_LINE> <INDENT> return (ErrorCode.SRC_JSON_INVALID_FORMAT, dir_path, file_name, batch_sid, field) <NEW_LINE> <DEDENT> <DEDENT> return (ErrorCode.STATUS_OK, dir_path, file_name, batch_sid) <NEW_LINE> <DEDENT> <DEDENT> def does_json_path_exist(self, json_object, path): <NEW_LINE> <INDENT> current_position = json_object <NEW_LINE> for component in path: <NEW_LINE> <INDENT> for key in current_position.keys(): <NEW_LINE> <INDENT> if component.lower() == key.lower(): <NEW_LINE> <INDENT> current_position = current_position[key] <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True
Make sure the JSON file is formatted to our standards
6259905b99cbb53fe68324e5
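A minimal sketch of the case-insensitive path walk done by does_json_path_exist; the instance is built with __new__ purely so the sketch avoids the database-backed mapping lookup in __init__:

    # Bypass __init__ (and its DB lookup) just for this illustration.
    checker = HasExpectedFormat.__new__(HasExpectedFormat)
    document = {"Student": {"Guid": "42"}}
    print(checker.does_json_path_exist(document, ["student", "guid"]))   # True: keys match case-insensitively
    print(checker.does_json_path_exist(document, ["student", "name"]))   # False: 'name' is missing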
class OutStream(object): <NEW_LINE> <INDENT> flush_interval = 0.05 <NEW_LINE> topic=None <NEW_LINE> def __init__(self, session, pub_socket, name): <NEW_LINE> <INDENT> self.session = session <NEW_LINE> self.pub_socket = pub_socket <NEW_LINE> self.name = name <NEW_LINE> self.parent_header = {} <NEW_LINE> self._new_buffer() <NEW_LINE> <DEDENT> def set_parent(self, parent): <NEW_LINE> <INDENT> self.parent_header = extract_header(parent) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.pub_socket = None <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> if self.pub_socket is None: <NEW_LINE> <INDENT> raise ValueError(u'I/O operation on closed file') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = self._buffer.getvalue() <NEW_LINE> if data: <NEW_LINE> <INDENT> content = {u'name':self.name, u'data':data} <NEW_LINE> msg = self.session.send(self.pub_socket, u'stream', content=content, parent=self.parent_header, ident=self.topic) <NEW_LINE> if hasattr(self.pub_socket, 'flush'): <NEW_LINE> <INDENT> self.pub_socket.flush() <NEW_LINE> <DEDENT> self._buffer.close() <NEW_LINE> self._new_buffer() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def isatty(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> raise IOError('Read not supported on a write only stream.') <NEW_LINE> <DEDENT> def read(self, size=-1): <NEW_LINE> <INDENT> raise IOError('Read not supported on a write only stream.') <NEW_LINE> <DEDENT> def readline(self, size=-1): <NEW_LINE> <INDENT> raise IOError('Read not supported on a write only stream.') <NEW_LINE> <DEDENT> def write(self, string): <NEW_LINE> <INDENT> if self.pub_socket is None: <NEW_LINE> <INDENT> raise ValueError('I/O operation on closed file') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not isinstance(string, unicode): <NEW_LINE> <INDENT> enc = sys.stdin.encoding or sys.getdefaultencoding() <NEW_LINE> string = string.decode(enc, 'replace') <NEW_LINE> <DEDENT> self._buffer.write(string) <NEW_LINE> current_time = time.time() <NEW_LINE> if self._start <= 0: <NEW_LINE> <INDENT> self._start = current_time <NEW_LINE> <DEDENT> elif current_time - self._start > self.flush_interval: <NEW_LINE> <INDENT> self.flush() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def writelines(self, sequence): <NEW_LINE> <INDENT> if self.pub_socket is None: <NEW_LINE> <INDENT> raise ValueError('I/O operation on closed file') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for string in sequence: <NEW_LINE> <INDENT> self.write(string) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _new_buffer(self): <NEW_LINE> <INDENT> self._buffer = StringIO() <NEW_LINE> self._start = -1
A file like object that publishes the stream to a 0MQ PUB socket.
6259905b0a50d4780f7068c1
class UserSchema(Schema): <NEW_LINE> <INDENT> id = fields.Integer() <NEW_LINE> username = fields.String( required=True, error_messages={'required': 'Username is required.'}, validate=UserError.validate_username) <NEW_LINE> email = fields.Email( required=True, error_messages={'required': 'Email is required.'}, validate=UserError.validate_email) <NEW_LINE> name = fields.String( required=True, error_messages={'required': 'Name is required.'}, validate=UserError.validate_name) <NEW_LINE> password = fields.String( required=True, error_messages={'required': 'Password is required.'}, validate=UserError.validate_password, load_only=True) <NEW_LINE> created = fields.DateTime() <NEW_LINE> updated = fields.DateTime()
User Schema
6259905b76e4537e8c3f0b92
class RollingEarning: <NEW_LINE> <INDENT> def __init__(self,df:pd.DataFrame,interval:int): <NEW_LINE> <INDENT> self.df=df <NEW_LINE> self.interval=interval <NEW_LINE> <DEDENT> def getData(self): <NEW_LINE> <INDENT> data=self.df.copy()[['net_value']] <NEW_LINE> data['interavalReturn']=(data['net_value'].shift(-1*self.interval)-data['net_value'])/data['net_value'] <NEW_LINE> data=data.dropna() <NEW_LINE> return data <NEW_LINE> <DEDENT> def drawPicture(self): <NEW_LINE> <INDENT> data=RollingEarning.getData(self) <NEW_LINE> plt.figure(dpi=200, figsize=(12, 8)) <NEW_LINE> plt.hist(data['interavalReturn'],bins=100) <NEW_LINE> plt.title("定投"+str(self.interval)+"天收益曲线分布") <NEW_LINE> plt.savefig(r"./定投"+str(self.interval)+"天收益曲线分布.png")
RollingEarning: rolling calculation of the distribution of returns for a fixed holding period
6259905ba17c0f6771d5d6a5
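A small usage sketch, assuming pandas is available and the DataFrame carries the 'net_value' column the class expects:

    import pandas as pd

    nav = pd.DataFrame({"net_value": [1.00, 1.02, 1.05, 1.01, 1.08]})
    earnings = RollingEarning(nav, interval=2)
    # interavalReturn = (net_value.shift(-2) - net_value) / net_value, trailing NaNs dropped
    print(earnings.getData())
    # drawPicture() additionally needs matplotlib.pyplot as plt and writes a PNG to the working directory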
class GroupedRowTransform(Factor): <NEW_LINE> <INDENT> window_length = 0 <NEW_LINE> def __new__(cls, transform, transform_args, factor, groupby, dtype, missing_value, mask, **kwargs): <NEW_LINE> <INDENT> if mask is NotSpecified: <NEW_LINE> <INDENT> mask = factor.mask <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mask = mask & factor.mask <NEW_LINE> <DEDENT> if groupby is NotSpecified: <NEW_LINE> <INDENT> groupby = Everything(mask=mask) <NEW_LINE> <DEDENT> return super(GroupedRowTransform, cls).__new__( GroupedRowTransform, transform=transform, transform_args=transform_args, inputs=(factor, groupby), missing_value=missing_value, mask=mask, dtype=dtype, **kwargs ) <NEW_LINE> <DEDENT> def _init(self, transform, transform_args, *args, **kwargs): <NEW_LINE> <INDENT> self._transform = transform <NEW_LINE> self._transform_args = transform_args <NEW_LINE> return super(GroupedRowTransform, self)._init(*args, **kwargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _static_identity(cls, transform, transform_args, *args, **kwargs): <NEW_LINE> <INDENT> return ( super(GroupedRowTransform, cls)._static_identity(*args, **kwargs), transform, transform_args, ) <NEW_LINE> <DEDENT> def _compute(self, arrays, dates, assets, mask): <NEW_LINE> <INDENT> data = arrays[0] <NEW_LINE> group_labels, null_label = self.inputs[1]._to_integral(arrays[1]) <NEW_LINE> group_labels = where(mask, group_labels, null_label) <NEW_LINE> return where( group_labels != null_label, naive_grouped_rowwise_apply( data=data, group_labels=group_labels, func=self._transform, func_args=self._transform_args, out=empty_like(data, dtype=self.dtype), ), self.missing_value, ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def transform_name(self): <NEW_LINE> <INDENT> return self._transform.__name__ <NEW_LINE> <DEDENT> def graph_repr(self): <NEW_LINE> <INDENT> return type(self).__name__ + '(%r)' % self.transform_name
A Factor that transforms an input factor by applying a row-wise shape-preserving transformation on classifier-defined groups of that Factor. This is most often useful for normalization operators like ``zscore`` or ``demean`` or for performing ranking using ``rank``. Parameters ---------- transform : function[ndarray[ndim=1] -> ndarray[ndim=1]] Function to apply over each row group. factor : zipline.pipeline.Factor The factor providing baseline data to transform. mask : zipline.pipeline.Filter Mask of entries to ignore when calculating transforms. groupby : zipline.pipeline.Classifier Classifier partitioning ``factor`` into groups to use when calculating means. transform_args : tuple[hashable] Additional positional arguments to forward to ``transform``. Notes ----- Users should rarely construct instances of this factor directly. Instead, they should construct instances via factor normalization methods like ``zscore`` and ``demean`` or using ``rank`` with ``groupby``. See Also -------- zipline.pipeline.Factor.zscore zipline.pipeline.Factor.demean zipline.pipeline.Factor.rank
6259905b507cdc57c63a63ab
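As the notes say, this factor is normally reached through the normalization methods rather than constructed directly; a rough sketch of that path (the zipline import paths and the classifier mentioned in the comment are assumptions, not taken from this record):

    from zipline.pipeline import Pipeline
    from zipline.pipeline.factors import Returns

    returns = Returns(window_length=2)
    # With no groupby, demeaning runs over one all-inclusive group (Everything);
    # passing groupby=<some Classifier> would instead demean within each classifier bucket.
    demeaned = returns.demean()
    pipe = Pipeline(columns={"demeaned_returns": demeaned})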
class BLFParseError(Exception): <NEW_LINE> <INDENT> pass
BLF file could not be parsed correctly.
6259905bd53ae8145f919a68
class TldClient(base.DnsClientV2Base): <NEW_LINE> <INDENT> @base.handle_errors <NEW_LINE> def create_tld(self, tld_name=None, description=None, params=None): <NEW_LINE> <INDENT> tld = { "name": tld_name or data_utils.rand_name(name="tld"), "description": description or data_utils.rand_name( name="description") } <NEW_LINE> resp, body = self._create_request('tlds', data=tld, params=params) <NEW_LINE> self.expected_success(201, resp.status) <NEW_LINE> return resp, body <NEW_LINE> <DEDENT> @base.handle_errors <NEW_LINE> def show_tld(self, uuid, params=None): <NEW_LINE> <INDENT> return self._show_request('tlds', uuid, params=params) <NEW_LINE> <DEDENT> @base.handle_errors <NEW_LINE> def list_tlds(self, params=None): <NEW_LINE> <INDENT> return self._list_request('tlds', params=params) <NEW_LINE> <DEDENT> @base.handle_errors <NEW_LINE> def delete_tld(self, uuid, params=None): <NEW_LINE> <INDENT> resp, body = self._delete_request('tlds', uuid, params=params) <NEW_LINE> self.expected_success(204, resp.status) <NEW_LINE> return resp, body <NEW_LINE> <DEDENT> @base.handle_errors <NEW_LINE> def update_tld(self, uuid, tld_name=None, description=None, params=None): <NEW_LINE> <INDENT> tld = { "name": tld_name or data_utils.rand_name(name="tld"), "description": description or data_utils.rand_name( name="description") } <NEW_LINE> resp, body = self._update_request('tlds', uuid, tld, params=params) <NEW_LINE> self.expected_success(200, resp.status) <NEW_LINE> return resp, body
API V2 Tempest REST client for Designate Tld API
6259905b2ae34c7f260ac6ed
class BlogPostAdmin(DisplayableAdmin, OwnableAdmin): <NEW_LINE> <INDENT> fieldsets = blogpost_fieldsets <NEW_LINE> list_display = ("title", "user", "status", "admin_link") <NEW_LINE> radio_fields = blogpost_radio_fields <NEW_LINE> def save_form(self, request, form, change): <NEW_LINE> <INDENT> OwnableAdmin.save_form(self, request, form, change) <NEW_LINE> return DisplayableAdmin.save_form(self, request, form, change)
Admin class for blog posts.
6259905bb5575c28eb7137cf
class PluginGithubRepos(GithubRepos): <NEW_LINE> <INDENT> _TABLE_NAME = 'plugin_github_repos' <NEW_LINE> _ROW_SCHEMA = dict(GithubRepos._ROW_SCHEMA, **{ 'is_fork': False, 'from_vim_scripts': [], 'from_submission': None, }) <NEW_LINE> _BLACKLISTED_GITHUB_REPOS = set([ 'github/gitignore', 'kablamo/dotfiles', 'aemoncannon/ensime', 'experiment/vim', 'ggreer/the_silver_searcher', 'pry/pry', 'sitaramc/gitolite', 'sstephenson/bats', 'git.wincent.com/command-t', 'contrib/mpvim', 'svn/trunk', 'mozilla/rust', ])
GitHub repositories of Vim plugins.
6259905b097d151d1a2c2673
class ReactPageSerializer(PageSerializer): <NEW_LINE> <INDENT> excerpt = ExcerptField(read_only=True) <NEW_LINE> published_at = PublishedAtField(read_only=True) <NEW_LINE> parent = PageParentIdField(read_only=True) <NEW_LINE> url_path = PagePathField(read_only=True) <NEW_LINE> image = ImageField(read_only=True)
Serializes a Page.
6259905b0c0af96317c57862
class Tip(Node): <NEW_LINE> <INDENT> pass
A member of the syntax tree which doesn't have any children.
6259905b4a966d76dd5f04f8
class Guard: <NEW_LINE> <INDENT> def __call__(self, func): <NEW_LINE> <INDENT> @functools.wraps(func) <NEW_LINE> def guarded(*args, **kwargs): <NEW_LINE> <INDENT> res = self.allowed(*args, **kwargs) <NEW_LINE> if res: <NEW_LINE> <INDENT> return func(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> return guarded <NEW_LINE> <DEDENT> def allowed(self, *args, **kwargs): <NEW_LINE> <INDENT> return True
Prevent execution of a function unless arguments pass self.allowed() >>> class OnlyInts(Guard): ... def allowed(self, *args, **kwargs): ... return all(isinstance(arg, int) for arg in args) >>> @OnlyInts() ... def the_func(val): ... print(val) >>> the_func(1) 1 >>> the_func('1') >>> the_func(1, '1') is None True
6259905bdd821e528d6da483
class DescribeProxyStatisticsRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ProxyId = None <NEW_LINE> self.StartTime = None <NEW_LINE> self.EndTime = None <NEW_LINE> self.MetricNames = None <NEW_LINE> self.Granularity = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ProxyId = params.get("ProxyId") <NEW_LINE> self.StartTime = params.get("StartTime") <NEW_LINE> self.EndTime = params.get("EndTime") <NEW_LINE> self.MetricNames = params.get("MetricNames") <NEW_LINE> self.Granularity = params.get("Granularity")
DescribeProxyStatistics request structure.
6259905b29b78933be26abc7
class MaxSizeDeferredQueue(MaxSizeQueue): <NEW_LINE> <INDENT> def __init__(self, maxSize, backlog = 0): <NEW_LINE> <INDENT> MaxSizeQueue.__init__(self, maxSize) <NEW_LINE> self.__backlogSize = backlog <NEW_LINE> self.__backlog = deque() <NEW_LINE> <DEDENT> def push(self, *items): <NEW_LINE> <INDENT> result = MaxSizeQueue.push(self, *items) <NEW_LINE> while not self.isEmpty() and self.__backlog: <NEW_LINE> <INDENT> deferred = self.__backlog.popleft() <NEW_LINE> deferred.callback(self.shift()) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def shift(self): <NEW_LINE> <INDENT> if self.isEmpty(): <NEW_LINE> <INDENT> if len(self.__backlog) == self.__backlogSize: <NEW_LINE> <INDENT> raise defer.QueueUnderflow() <NEW_LINE> <DEDENT> deferred = defer.Deferred() <NEW_LINE> self.__backlog.append(deferred) <NEW_LINE> return deferred <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return MaxSizeQueue.shift(self)
A queue with a maximum size and the ability to wait for items.
6259905be5267d203ee6cec3
class OrderItemForm(Form): <NEW_LINE> <INDENT> dish_id = SelectField("Dish") <NEW_LINE> comment = StringField("Comment") <NEW_LINE> submit_button = SubmitField("Submit") <NEW_LINE> def populate(self, location: Location) -> None: <NEW_LINE> <INDENT> self.dish_id.choices = [(dish.id, dish.name) for dish in location.dishes] <NEW_LINE> if not self.is_submitted() and self.comment.data is None: <NEW_LINE> <INDENT> self.comment.data = request.args.get("comment")
New Item in an Order
6259905b91af0d3eaad3b42f
class DownloadDataException(Exception): <NEW_LINE> <INDENT> pass
Raised when data cannot be downloaded.
6259905b8da39b475be047ed
class Message(models.Model): <NEW_LINE> <INDENT> created = models.DateTimeField(auto_now_add=True) <NEW_LINE> text = HTMLField() <NEW_LINE> sender = models.ForeignKey(User, verbose_name="sending django authentication user", related_name='sent_messages') <NEW_LINE> receiver = models.ForeignKey(User, verbose_name="receiving django authentication user", related_name='received_messages') <NEW_LINE> sent = models.BooleanField(default=False) <NEW_LINE> thread = models.ForeignKey(MessageThread, default=DEFAULT_THREAD_ID, related_name='messages') <NEW_LINE> key = UUIDField(auto=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ('created',) <NEW_LINE> unique_together = ('sender', 'created') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u'%s %s' % (self.text, self.sent)
Message between two users
6259905b097d151d1a2c2674
class Detect(Function): <NEW_LINE> <INDENT> def __init__(self, cfg): <NEW_LINE> <INDENT> self.num_classes = cfg.NUM_CLASSES <NEW_LINE> self.top_k = cfg.TOP_K <NEW_LINE> self.nms_thresh = cfg.NMS_THRESH <NEW_LINE> self.conf_thresh = cfg.CONF_THRESH <NEW_LINE> self.variance = cfg.VARIANCE <NEW_LINE> <DEDENT> def forward(self, loc_data, conf_data, prior_data): <NEW_LINE> <INDENT> num = loc_data.size(0) <NEW_LINE> num_priors = prior_data.size(0) <NEW_LINE> conf_preds = conf_data.view( num, num_priors, self.num_classes).transpose(2, 1) <NEW_LINE> batch_priors = prior_data.view(-1, num_priors, 4).expand(num, num_priors, 4) <NEW_LINE> batch_priors = batch_priors.contiguous().view(-1, 4) <NEW_LINE> decoded_boxes = decode(loc_data.view(-1, 4), batch_priors, self.variance) <NEW_LINE> decoded_boxes = decoded_boxes.view(num, num_priors, 4) <NEW_LINE> output = torch.zeros(num, self.num_classes, self.top_k, 5) <NEW_LINE> for i in range(num): <NEW_LINE> <INDENT> boxes = decoded_boxes[i].clone() <NEW_LINE> conf_scores = conf_preds[i].clone() <NEW_LINE> for cl in range(1, self.num_classes): <NEW_LINE> <INDENT> c_mask = conf_scores[cl].gt(self.conf_thresh) <NEW_LINE> scores = conf_scores[cl][c_mask] <NEW_LINE> if scores.dim() == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> l_mask = c_mask.unsqueeze(1).expand_as(boxes) <NEW_LINE> boxes_ = boxes[l_mask].view(-1, 4) <NEW_LINE> ids, count = nms( boxes_, scores, self.nms_thresh, self.top_k) <NEW_LINE> output[i, cl, :count] = torch.cat((scores[ids[:count]].unsqueeze(1), boxes_[ids[:count]]), 1) <NEW_LINE> <DEDENT> <DEDENT> return output
At test time, Detect is the final layer of SSD. Decode location preds, apply non-maximum suppression to location predictions based on conf scores and threshold to a top_k number of output predictions for both confidence score and locations.
6259905b462c4b4f79dbd00d
class Group(MultiCommand): <NEW_LINE> <INDENT> def __init__(self, name=None, commands=None, **attrs): <NEW_LINE> <INDENT> MultiCommand.__init__(self, name, **attrs) <NEW_LINE> self.commands = commands or {} <NEW_LINE> <DEDENT> def add_command(self, cmd, name=None): <NEW_LINE> <INDENT> name = name or cmd.name <NEW_LINE> if name is None: <NEW_LINE> <INDENT> raise TypeError('Command has no name.') <NEW_LINE> <DEDENT> _check_multicommand(self, name, cmd, register=True) <NEW_LINE> self.commands[name] = cmd <NEW_LINE> <DEDENT> def command(self, *args, **kwargs): <NEW_LINE> <INDENT> def decorator(f): <NEW_LINE> <INDENT> cmd = command(*args, **kwargs)(f) <NEW_LINE> self.add_command(cmd) <NEW_LINE> return cmd <NEW_LINE> <DEDENT> return decorator <NEW_LINE> <DEDENT> def group(self, *args, **kwargs): <NEW_LINE> <INDENT> def decorator(f): <NEW_LINE> <INDENT> cmd = group(*args, **kwargs)(f) <NEW_LINE> self.add_command(cmd) <NEW_LINE> return cmd <NEW_LINE> <DEDENT> return decorator <NEW_LINE> <DEDENT> def get_command(self, ctx, cmd_name): <NEW_LINE> <INDENT> return self.commands.get(cmd_name) <NEW_LINE> <DEDENT> def list_commands(self, ctx): <NEW_LINE> <INDENT> return sorted(self.commands)
A group allows a command to have subcommands attached. This is the most common way to implement nesting in Click. :param commands: a dictionary of commands.
6259905b7d43ff2487427f13
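A short sketch of the nesting pattern the docstring describes, using Click's public decorators (the command names are illustrative):

    import click

    @click.group()
    def cli():
        """Top-level group; subcommands attach to it."""

    @cli.command()          # Group.command() wraps command() and then add_command()
    def build():
        click.echo("building")

    @cli.group()            # nested group via Group.group()
    def db():
        """Database subcommands."""

    @db.command()
    def migrate():
        click.echo("migrating")

    if __name__ == "__main__":
        cli()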
class ModuleLoader(object): <NEW_LINE> <INDENT> def _findPluginFilePaths(self, namespace): <NEW_LINE> <INDENT> already_seen = set() <NEW_LINE> for path in sys.path: <NEW_LINE> <INDENT> namespace_rel_path = namespace.replace(".", os.path.sep) <NEW_LINE> namespace_path = os.path.join(path, namespace_rel_path) <NEW_LINE> if os.path.exists(namespace_path): <NEW_LINE> <INDENT> for possible in os.listdir(namespace_path): <NEW_LINE> <INDENT> if os.path.isdir(os.path.join(namespace_path, possible)): <NEW_LINE> <INDENT> pkg_init = os.path.join(namespace_path, possible, '__init__.py') <NEW_LINE> if not os.path.exists(pkg_init): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> base = possible <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> base, ext = os.path.splitext(possible) <NEW_LINE> if base == '__init__' or ext != '.py': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> if base not in already_seen: <NEW_LINE> <INDENT> already_seen.add(base) <NEW_LINE> yield os.path.join(namespace, possible) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def _findPluginModules(self, namespace): <NEW_LINE> <INDENT> for filepath in self._findPluginFilePaths(namespace): <NEW_LINE> <INDENT> path_segments = list(filepath.split(os.path.sep)) <NEW_LINE> path_segments = [p for p in path_segments if p] <NEW_LINE> path_segments[-1] = os.path.splitext(path_segments[-1])[0] <NEW_LINE> import_path = '.'.join(path_segments) <NEW_LINE> try: <NEW_LINE> <INDENT> module = import_module(import_path) <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> raise Exception(import_path) <NEW_LINE> module = None <NEW_LINE> <DEDENT> if module is not None: <NEW_LINE> <INDENT> yield module <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def load(self, namespace): <NEW_LINE> <INDENT> modules = self._findPluginModules(namespace) <NEW_LINE> return list(modules)
Performs the work of locating and loading straight plugins. This looks for plugins in every location in the import path.
6259905bd6c5a102081e3728
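A hedged usage sketch; 'myapp.plugins' is a hypothetical namespace that would have to exist somewhere on sys.path:

    loader = ModuleLoader()
    plugins = loader.load("myapp.plugins")   # imports every module/package found under the namespace
    for module in plugins:
        print(module.__name__)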
class RunwithNoData(RenderRunbooksIntegrationTest): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> self.runbooks = "" <NEW_LINE> self.facts = "" <NEW_LINE> self.assertIsNone(render_runbooks(self.runbooks, self.facts))
Test when given no data
6259905b3c8af77a43b68a44
class AccessKey(db.Model): <NEW_LINE> <INDENT> id = db.Column( UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, unique=True, nullable=False, ) <NEW_LINE> access_key = db.Column(db.String(128)) <NEW_LINE> secret_key = db.Column(db.String(128)) <NEW_LINE> customer_id = db.Column(UUID(as_uuid=True), db.ForeignKey("customer.id")) <NEW_LINE> created_at = db.Column(db.DateTime) <NEW_LINE> last_used = db.Column(db.DateTime)
Access credentials Access and secret key pairs for using the API
6259905b10dbd63aa1c7217e
class TrapezoidalChannelTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_practice_problem_9(self): <NEW_LINE> <INDENT> s = sections.Trapezoid(l_slope=1/3, b_width=6.0, r_slope=1/3, n=0.013) <NEW_LINE> channel = links.Reach(section=s, slope=0.002) <NEW_LINE> produced = channel.normal_flow(depth=2.0) <NEW_LINE> expected = 150.0 <NEW_LINE> self.assertAlmostEqual(produced, expected, -1)
From Practice Problems for the Civil Engineering PE Exam by Michael R. Lindeburg, PE: Chapter 19
6259905bcc0a2c111447c5d2
class Worm: <NEW_LINE> <INDENT> def __init__(self, start): <NEW_LINE> <INDENT> self.nodes = [start] <NEW_LINE> self.head = start <NEW_LINE> start.owner = self <NEW_LINE> self.edges = [] <NEW_LINE> self.tabu = [] <NEW_LINE> self.length = 0 <NEW_LINE> self.dead = False <NEW_LINE> self.dirs = np.zeros(len(Direction)) <NEW_LINE> <DEDENT> def swallowEdge(self, edge): <NEW_LINE> <INDENT> if edge.owner != None: raise Exception("This edge is owned by some other worm") <NEW_LINE> if edge in self.tabu: raise Exception("Can't take this edge!") <NEW_LINE> self.tabu.append(edge) <NEW_LINE> self.length += len(edge.points) <NEW_LINE> self.dirs += edge.getDirs(self.head) <NEW_LINE> self.edges.append(edge) <NEW_LINE> self.head = edge.getOtherNode(self.head) <NEW_LINE> self.nodes.append(self.head) <NEW_LINE> if not edge.betweenBifs: edge.owner = self <NEW_LINE> else: edge.used += 1 <NEW_LINE> <DEDENT> def loseEdge(self, edge): <NEW_LINE> <INDENT> if edge not in self.edges: raise Exception("I don't own this edge") <NEW_LINE> self.length = 0 <NEW_LINE> self.head = self.nodes[0] <NEW_LINE> self.nodes=[self.head] <NEW_LINE> self.dirs = np.zeros(len(Direction)) <NEW_LINE> remEdges = [] <NEW_LINE> i = 0 <NEW_LINE> while i < len(self.edges) and self.edges[i] != edge: <NEW_LINE> <INDENT> self.dirs += edge.getDirs(self.head) <NEW_LINE> self.head = self.edges[i].getOtherNode(self.head) <NEW_LINE> self.nodes.append(self.head) <NEW_LINE> remEdges.append(self.edges[i]) <NEW_LINE> self.length += len(self.edges[i].points) <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> while i < len(self.edges): <NEW_LINE> <INDENT> self.edges[i].owner = None <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> self.edges = remEdges <NEW_LINE> if len(self.edges) == 0: self.dead = True <NEW_LINE> <DEDENT> def checkAgreement(self, edge): <NEW_LINE> <INDENT> if edge in self.edges: raise Exception("Already own this edge!") <NEW_LINE> elif len(edge.points) + self.length > maxWormLength: return False <NEW_LINE> elif self.length == 0: return 4 <NEW_LINE> wormFracs = self.dirs / self.length <NEW_LINE> edgeFracs = edge.getDirs(self.head)/len(edge.points) <NEW_LINE> return dirCorr(wormFracs, edgeFracs) <NEW_LINE> <DEDENT> def checkSelfAgreement(self, edge): <NEW_LINE> <INDENT> if edge not in self.edges: <NEW_LINE> <INDENT> raise Exception("I don't own this edge!") <NEW_LINE> <DEDENT> consideringLen = len(edge.points) <NEW_LINE> consideringDirs = None <NEW_LINE> totalDirs = np.zeros(len(Direction)) <NEW_LINE> for n in range(len(self.nodes)-1): <NEW_LINE> <INDENT> thisNode = self.nodes[n] <NEW_LINE> if self.edges[n] == edge: <NEW_LINE> <INDENT> consideringDirs = edge.getDirs(thisNode) / consideringLen <NEW_LINE> <DEDENT> else: totalDirs += self.edges[n].getDirs(thisNode) <NEW_LINE> <DEDENT> totalDirs /= self.length - consideringLen <NEW_LINE> return dirCorr(consideringDirs, totalDirs)
Manages adding/removing segments to a worm Also generates likelihood scores for potential segments
6259905b2ae34c7f260ac6ee
class ElementMaker(object): <NEW_LINE> <INDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __getattribute__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __getattr__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self, namespace=None, nsmap=None, annotate=True, makeelement=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __new__(*args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> __pyx_vtable__ = None
ElementMaker(self, namespace=None, nsmap=None, annotate=True, makeelement=None) An ElementMaker that can be used for constructing trees. Example:: >>> M = ElementMaker(annotate=False) >>> attributes = {'class': 'par'} >>> html = M.html( M.body( M.p('hello', attributes, M.br, 'objectify', style="font-weight: bold") ) ) >>> from lxml.etree import tostring >>> print(tostring(html, method='html').decode('ascii')) <html><body><p style="font-weight: bold" class="par">hello<br>objectify</p></body></html> To create tags that are not valid Python identifiers, call the factory directly and pass the tag name as first argument:: >>> root = M('tricky-tag', 'some text') >>> print(root.tag) tricky-tag >>> print(root.text) some text Note that this module has a predefined ElementMaker instance called ``E``.
6259905b21a7993f00c67574
class LockingError(ConnectorError): <NEW_LINE> <INDENT> pass
Raised when trying to lock an already locked resource.
6259905b8e7ae83300eea695
class RedactionEngine(object): <NEW_LINE> <INDENT> def __init__(self, rules=None): <NEW_LINE> <INDENT> if rules is None: <NEW_LINE> <INDENT> rules = [] <NEW_LINE> <DEDENT> self.rules = rules <NEW_LINE> <DEDENT> def add_rule(self, rule): <NEW_LINE> <INDENT> self.rules.append(rule) <NEW_LINE> <DEDENT> def add_rules(self, rules): <NEW_LINE> <INDENT> self.rules.extend(rules) <NEW_LINE> <DEDENT> def add_rules_from_string(self, string): <NEW_LINE> <INDENT> self.rules.extend(parse_redaction_rules_from_string(string)) <NEW_LINE> <DEDENT> def add_rules_from_file(self, filename): <NEW_LINE> <INDENT> self.rules.extend(parse_redaction_rules_from_filename(filename)) <NEW_LINE> <DEDENT> def redact(self, message): <NEW_LINE> <INDENT> for rule in self.rules: <NEW_LINE> <INDENT> message = rule.redact(message) <NEW_LINE> <DEDENT> return message <NEW_LINE> <DEDENT> def is_enabled(self): <NEW_LINE> <INDENT> return bool(self.rules) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'RedactionEngine(%r)' % self.rules <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.rules == other.rules <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
`RedactionEngine` applies a list of `RedactionRule`s to redact a string.
6259905b7047854f463409c7
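A minimal sketch assuming only what the engine requires of a rule, i.e. a redact(message) -> message method (the real RedactionRule class is not part of this record):

    import re

    class MaskDigits:
        """Stand-in rule: replace every digit with '*'."""
        def redact(self, message):
            return re.sub(r"\d", "*", message)

    engine = RedactionEngine()
    engine.add_rule(MaskDigits())
    print(engine.is_enabled())          # True once a rule is registered
    print(engine.redact("card 1234"))   # card ****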
class SubscriptionsWidget(AddBreadcrumbOnShowMixin, QWidget): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> QWidget.__init__(self, parent) <NEW_LINE> self.subscribe_button = None <NEW_LINE> self.initialized = False <NEW_LINE> self.contents_widget = None <NEW_LINE> self.channel_rating_label = None <NEW_LINE> <DEDENT> def initialize(self, contents_widget): <NEW_LINE> <INDENT> if not self.initialized: <NEW_LINE> <INDENT> self.contents_widget = contents_widget <NEW_LINE> self.subscribe_button = self.findChild(QWidget, "subscribe_button") <NEW_LINE> self.channel_rating_label = self.findChild(QLabel, "channel_rating_label") <NEW_LINE> self.channel_rating_label.setTextFormat(Qt.RichText) <NEW_LINE> connect(self.subscribe_button.clicked, self.on_subscribe_button_click) <NEW_LINE> self.subscribe_button.setToolTip('Click to subscribe/unsubscribe') <NEW_LINE> connect(self.subscribe_button.toggled, self._adjust_tooltip) <NEW_LINE> self.initialized = True <NEW_LINE> <DEDENT> <DEDENT> def _adjust_tooltip(self, toggled): <NEW_LINE> <INDENT> tooltip = ("Subscribed." if toggled else "Not subscribed.") + "\n(Click to unsubscribe)" <NEW_LINE> self.subscribe_button.setToolTip(tooltip) <NEW_LINE> <DEDENT> def update_subscribe_button_if_channel_matches(self, changed_channels_list): <NEW_LINE> <INDENT> if not (self.contents_widget.model and self.contents_widget.model.channel_info.get("public_key")): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for channel_info in changed_channels_list: <NEW_LINE> <INDENT> if ( self.contents_widget.model.channel_info["public_key"] == channel_info["public_key"] and self.contents_widget.model.channel_info["id"] == channel_info["id"] ): <NEW_LINE> <INDENT> self.update_subscribe_button(remote_response=channel_info) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def update_subscribe_button(self, remote_response=None): <NEW_LINE> <INDENT> if self.isHidden(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if remote_response and "subscribed" in remote_response: <NEW_LINE> <INDENT> self.contents_widget.model.channel_info["subscribed"] = remote_response["subscribed"] <NEW_LINE> <DEDENT> self.subscribe_button.setChecked(bool(remote_response["subscribed"])) <NEW_LINE> self._adjust_tooltip(bool(remote_response["subscribed"])) <NEW_LINE> votes = remote_response['votes'] <NEW_LINE> self.channel_rating_label.setText(format_votes_rich_text(votes)) <NEW_LINE> if DARWIN or WINDOWS: <NEW_LINE> <INDENT> font = QFont() <NEW_LINE> font.setLetterSpacing(QFont.PercentageSpacing, 60.0) <NEW_LINE> self.channel_rating_label.setFont(font) <NEW_LINE> <DEDENT> self.channel_rating_label.setToolTip(get_votes_rating_description(votes)) <NEW_LINE> <DEDENT> def on_subscribe_button_click(self, checked): <NEW_LINE> <INDENT> self.subscribe_button.setCheckedInstant(bool(self.contents_widget.model.channel_info["subscribed"])) <NEW_LINE> channel_info = self.contents_widget.model.channel_info <NEW_LINE> if channel_info["subscribed"]: <NEW_LINE> <INDENT> self.window().on_channel_unsubscribe(channel_info) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.window().on_channel_subscribe(channel_info)
This widget shows a favorite button and the number of subscriptions that a specific channel has.
6259905b24f1403a926863d2
class PrivateIngredientApiTest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = APIClient() <NEW_LINE> self.user = get_user_model().objects.create_user( '[email protected]', 'testpass' ) <NEW_LINE> self.client.force_authenticate(self.user) <NEW_LINE> <DEDENT> def test_retrieve_ingredient_list(self): <NEW_LINE> <INDENT> Ingredient.objects.create(user=self.user, name='Kai') <NEW_LINE> Ingredient.objects.create(user=self.user, name='Jazz') <NEW_LINE> res = self.client.get(INGREDIENTS_URL) <NEW_LINE> ingredients = Ingredient.objects.all().order_by('-name') <NEW_LINE> serializer = IngredientSerializer(ingredients,many=True) <NEW_LINE> self.assertEqual(res.status_code,status.HTTP_200_OK) <NEW_LINE> self.assertEqual(res.data, serializer.data) <NEW_LINE> <DEDENT> def test_ingredients_limited_to_user(self): <NEW_LINE> <INDENT> user2 = get_user_model().objects.create_user( '[email protected]', 'testpass' ) <NEW_LINE> Ingredient.objects.create(user=user2, name='Vinegar') <NEW_LINE> ingredient = Ingredient.objects.create(user=self.user, name='tumeric') <NEW_LINE> res = self.client.get(INGREDIENTS_URL) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> self.assertEqual(len(res.data), 1) <NEW_LINE> self.assertEqual(res.data[0]['name'], ingredient.name) <NEW_LINE> <DEDENT> def test_create_ingredient_successful(self): <NEW_LINE> <INDENT> payload = {'name': 'Cabbage'} <NEW_LINE> self.client.post(INGREDIENTS_URL, payload) <NEW_LINE> exists = Ingredient.objects.filter( user=self.user, name=payload['name'] ).exists() <NEW_LINE> self.assertTrue(exists) <NEW_LINE> <DEDENT> def test_create_ingredient_invalid(self): <NEW_LINE> <INDENT> payload = {'name': ''} <NEW_LINE> res = self.client.post(INGREDIENTS_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
Test the private ingredients API
6259905b3eb6a72ae038bc67
class SpeechRecognitionAlternative(_messages.Message): <NEW_LINE> <INDENT> confidence = _messages.FloatField(1, variant=_messages.Variant.FLOAT) <NEW_LINE> transcript = _messages.StringField(2) <NEW_LINE> words = _messages.MessageField('WordInfo', 3, repeated=True)
Alternative hypotheses (a.k.a. n-best list). Fields: confidence: Output only. The confidence estimate between 0.0 and 1.0. A higher number indicates an estimated greater likelihood that the recognized words are correct. This field is set only for the top alternative of a non-streaming result or, of a streaming result where `is_final=true`. This field is not guaranteed to be accurate and users should not rely on it to be always provided. The default of 0.0 is a sentinel value indicating `confidence` was not set. transcript: Output only. Transcript text representing the words that the user spoke. words: Output only. A list of word-specific information for each recognized word. Note: When `enable_speaker_diarization` is true, you will see all the words from the beginning of the audio.
6259905b67a9b606de5475a5
class MigrationDataNormalPersistenceTestCase(TestCase): <NEW_LINE> <INDENT> def test_persistence(self): <NEW_LINE> <INDENT> self.assertEqual( Book.objects.count(), 1, )
Data loaded in migrations is available on TestCase
6259905b9c8ee82313040c8e
class SshPublicKey(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'certificate_data': {'key': 'certificateData', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(SshPublicKey, self).__init__(**kwargs) <NEW_LINE> self.certificate_data = kwargs.get('certificate_data', None)
The SSH public key for the cluster nodes. :param certificate_data: The certificate for SSH. :type certificate_data: str
6259905b8a43f66fc4bf3796
class Event(object): <NEW_LINE> <INDENT> def __init__(self, elapsed_time, name, total_hot_dogs_eaten): <NEW_LINE> <INDENT> self.elapsed_time = elapsed_time <NEW_LINE> self.name = name <NEW_LINE> self.total_hot_dogs_eaten = total_hot_dogs_eaten <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Event(elapsed_time=%s, name=%s, total_hot_dogs_eaten=%s)' % ( repr(self.elapsed_time), repr(self.name), repr(self.total_hot_dogs_eaten)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return ( self.elapsed_time == other.elapsed_time and self.name == other.name and self.total_hot_dogs_eaten == other.total_hot_dogs_eaten ) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> if self.elapsed_time == other.elapsed_time: <NEW_LINE> <INDENT> return self.name < other.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.elapsed_time < other.elapsed_time <NEW_LINE> <DEDENT> <DEDENT> def rounded(self, precision=Decimal('1.000')): <NEW_LINE> <INDENT> return Event(round_value(self.elapsed_time, precision), self.name, round_value(self.total_hot_dogs_eaten, precision))
An Event should be generated whenever a hot dog is eaten by a competitor, and at the end of the competition for each competitor.
6259905b379a373c97d9a62d
class TableDefWeightsPage(CoClass): <NEW_LINE> <INDENT> _reg_clsid_ = GUID('{F64D9CE1-1F15-11D3-9C05-00C04F5B951E}') <NEW_LINE> _idlflags_ = [] <NEW_LINE> _typelib_path_ = typelib_path <NEW_LINE> _reg_typelib_ = ('{C0FC1503-7E6F-11D2-AABF-00C04FA375F1}', 10, 2)
Esri feature class weights association page.
6259905b8e71fb1e983bd0d2
class UptimeSensor(Entity): <NEW_LINE> <INDENT> def __init__(self, name, unit): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._unit = unit <NEW_LINE> self.initial = dt_util.now() <NEW_LINE> self._state = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> return ICON <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return self._unit <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> async def async_update(self): <NEW_LINE> <INDENT> delta = dt_util.now() - self.initial <NEW_LINE> div_factor = 3600 <NEW_LINE> if self.unit_of_measurement == "days": <NEW_LINE> <INDENT> div_factor *= 24 <NEW_LINE> <DEDENT> elif self.unit_of_measurement == "minutes": <NEW_LINE> <INDENT> div_factor /= 60 <NEW_LINE> <DEDENT> elif self.unit_of_measurement == "seconds": <NEW_LINE> <INDENT> div_factor /= 3600 <NEW_LINE> <DEDENT> delta = delta.total_seconds() / div_factor <NEW_LINE> self._state = round(delta, 2) <NEW_LINE> _LOGGER.debug("New value: %s", delta)
Representation of an uptime sensor.
6259905b8da39b475be047ef
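The unit handling in async_update boils down to one divisor; worked out for 9000 elapsed seconds:

    # div_factor starts at 3600 (hours) and is adjusted by the configured unit:
    #   hours:   9000 / 3600            = 2.5
    #   days:    9000 / (3600 * 24)     = 0.1    (after round(..., 2))
    #   minutes: 9000 / (3600 / 60)     = 150.0
    #   seconds: 9000 / (3600 / 3600)   = 9000.0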
class QuerySet(ControllerView, query.QuerySet): <NEW_LINE> <INDENT> pass
View- and Controller-aware QuerySet
6259905b442bda511e95d85e
class TimeSetter: <NEW_LINE> <INDENT> def __init__(self, utcDelay): <NEW_LINE> <INDENT> self.utcDelay=utcDelay <NEW_LINE> self.verbose = False <NEW_LINE> <DEDENT> def set_verbose(self, verbose): <NEW_LINE> <INDENT> self.verbose = verbose <NEW_LINE> <DEDENT> def process(self): <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> print("getting ntptime") <NEW_LINE> <DEDENT> is_time_set = False <NEW_LINE> while not is_time_set: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ntptime.settime() <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> print("ntptime was set") <NEW_LINE> <DEDENT> is_time_set = True <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> time.sleep(1) <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> print(".", end="") <NEW_LINE> <DEDENT> <DEDENT> tm = time.localtime() <NEW_LINE> tm_sec = time.mktime((tm[0], tm[1], tm[2], tm[3] + self.utcDelay, tm[4], tm[5], tm[6], tm[7])) <NEW_LINE> tm = time.localtime(tm_sec) <NEW_LINE> tm = tm[0:3] + (0,) + tm[3:6] + (0,) <NEW_LINE> machine.RTC().datetime(tm)
class to reset time

Args:
    utc_delay: delay in hours from utc timezone

Attributes:
    verbose (bool): activate verbose output
    utc_delay (int): time delay from utc (for Berlin: +1)
6259905b01c39578d7f1423b
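A usage sketch for a MicroPython board whose network link is already up (ntptime and machine come from the MicroPython firmware):

    setter = TimeSetter(utcDelay=1)   # e.g. Berlin: UTC+1
    setter.set_verbose(True)
    setter.process()                  # retries NTP until it succeeds, then writes the shifted time to the RTC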
class DogForm(ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Dog <NEW_LINE> exclude = ('id', 'owner',) <NEW_LINE> widgets = { 'id' : forms.HiddenInput(), 'owner' : forms.HiddenInput() } <NEW_LINE> help_texts = { 'birthday': _('Format YYYY-MM-DD') }
Form for adding/editing a dog profile.
6259905b16aa5153ce401aec
class DiagnosticDetectorResponse(ProxyOnlyResource): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'}, 'end_time': {'key': 'properties.endTime', 'type': 'iso-8601'}, 'issue_detected': {'key': 'properties.issueDetected', 'type': 'bool'}, 'detector_definition': {'key': 'properties.detectorDefinition', 'type': 'DetectorDefinition'}, 'metrics': {'key': 'properties.metrics', 'type': '[DiagnosticMetricSet]'}, 'abnormal_time_periods': {'key': 'properties.abnormalTimePeriods', 'type': '[DetectorAbnormalTimePeriod]'}, 'data': {'key': 'properties.data', 'type': '[[NameValuePair]]'}, 'response_meta_data': {'key': 'properties.responseMetaData', 'type': 'ResponseMetaData'}, } <NEW_LINE> def __init__( self, *, kind: Optional[str] = None, start_time: Optional[datetime.datetime] = None, end_time: Optional[datetime.datetime] = None, issue_detected: Optional[bool] = None, detector_definition: Optional["DetectorDefinition"] = None, metrics: Optional[List["DiagnosticMetricSet"]] = None, abnormal_time_periods: Optional[List["DetectorAbnormalTimePeriod"]] = None, data: Optional[List[List["NameValuePair"]]] = None, response_meta_data: Optional["ResponseMetaData"] = None, **kwargs ): <NEW_LINE> <INDENT> super(DiagnosticDetectorResponse, self).__init__(kind=kind, **kwargs) <NEW_LINE> self.start_time = start_time <NEW_LINE> self.end_time = end_time <NEW_LINE> self.issue_detected = issue_detected <NEW_LINE> self.detector_definition = detector_definition <NEW_LINE> self.metrics = metrics <NEW_LINE> self.abnormal_time_periods = abnormal_time_periods <NEW_LINE> self.data = data <NEW_LINE> self.response_meta_data = response_meta_data
Class representing Response from Diagnostic Detectors. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource Id. :vartype id: str :ivar name: Resource Name. :vartype name: str :ivar kind: Kind of resource. :vartype kind: str :ivar type: Resource type. :vartype type: str :ivar start_time: Start time of the period. :vartype start_time: ~datetime.datetime :ivar end_time: End time of the period. :vartype end_time: ~datetime.datetime :ivar issue_detected: Flag representing Issue was detected. :vartype issue_detected: bool :ivar detector_definition: Detector's definition. :vartype detector_definition: ~azure.mgmt.web.v2020_12_01.models.DetectorDefinition :ivar metrics: Metrics provided by the detector. :vartype metrics: list[~azure.mgmt.web.v2020_12_01.models.DiagnosticMetricSet] :ivar abnormal_time_periods: List of Correlated events found by the detector. :vartype abnormal_time_periods: list[~azure.mgmt.web.v2020_12_01.models.DetectorAbnormalTimePeriod] :ivar data: Additional Data that detector wants to send. :vartype data: list[list[~azure.mgmt.web.v2020_12_01.models.NameValuePair]] :ivar response_meta_data: Meta Data. :vartype response_meta_data: ~azure.mgmt.web.v2020_12_01.models.ResponseMetaData
6259905b2c8b7c6e89bd4df7
class iPerfUDPReverseTestWLAN(iPerfUDPReverseTest): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> if not wlan: <NEW_LINE> <INDENT> self.skipTest("skipping test no wlan") <NEW_LINE> <DEDENT> wlan.sendline('iwconfig') <NEW_LINE> wlan.expect(prompt) <NEW_LINE> super(iPerfUDPReverseTestWLAN, self).runTest(client=wan, server=wlan) <NEW_LINE> <DEDENT> def recover(self): <NEW_LINE> <INDENT> super(iPerfUDPReverseTestWLAN, self).recover(client=wan, server=wlan)
iPerf from WAN to LAN over Wifi
6259905b4428ac0f6e659b45
class Checker(ACVE): <NEW_LINE> <INDENT> def __init__(self, request, logger): <NEW_LINE> <INDENT> self.logger = logger <NEW_LINE> <DEDENT> def run(self, version): <NEW_LINE> <INDENT> self.logger.handle("\n[*] Checker not available for this CVE", None)
This class checks whether the given target is vulnerable to CVE-2018-7600.
6259905b460517430c432b57
class Bricks(Enum): <NEW_LINE> <INDENT> def repeat(pattern, capture=True): <NEW_LINE> <INDENT> return (r"(" if capture else r"(?:") + pattern + r")+" <NEW_LINE> <DEDENT> def getItem(enum, index): <NEW_LINE> <INDENT> return enum[list(enum.__members__)[index]] <NEW_LINE> <DEDENT> def flatten(parsed): <NEW_LINE> <INDENT> parsedList = [parsed] if isinstance(parsed, dict) else parsed <NEW_LINE> result = [] <NEW_LINE> for dico in parsedList: <NEW_LINE> <INDENT> listKeys = [] <NEW_LINE> standardKeys = [] <NEW_LINE> for key in dico: <NEW_LINE> <INDENT> if isinstance(dico[key], list): <NEW_LINE> <INDENT> listKeys.append(key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> standardKeys.append(key) <NEW_LINE> <DEDENT> <DEDENT> if not listKeys: <NEW_LINE> <INDENT> result.append(dico) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> partialResult = [{x:dico[x] for x in standardKeys}] <NEW_LINE> for key in listKeys: <NEW_LINE> <INDENT> recurs = Bricks.flatten(dico[key]) <NEW_LINE> partialResult = [{**x, **y} for x in partialResult for y in recurs] <NEW_LINE> <DEDENT> result.extend(partialResult) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> __baseNewline = r"(?:<p(?: align='\w*')?>|<br/>|</p>| )" <NEW_LINE> _newline = __baseNewline + "+" <NEW_LINE> _optionalNewline = __baseNewline + "*" <NEW_LINE> _upperString = r"[A-ZÀÉÈÔ' \-]+" <NEW_LINE> _romanNumber = r"(?:[IVX]+\. (?:\-|―) )" <NEW_LINE> _ANString = r"[\w' \-/]+" <NEW_LINE> _XString = r"[\w' \-,/\.]+"
Base building blocks and utility methods used by the other classes.
6259905b07f4c71912bb0a45
class SelectionItem(MenuItem): <NEW_LINE> <INDENT> def __init__(self, text, index, menu=None): <NEW_LINE> <INDENT> super(SelectionItem, self).__init__(text=text, menu=menu, should_exit=True) <NEW_LINE> self.index = index <NEW_LINE> <DEDENT> def get_return(self): <NEW_LINE> <INDENT> return self.index
The item type used in :class:`consolemenu.SelectionMenu`
6259905b3539df3088ecd8a5