code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class Regex(object): <NEW_LINE> <INDENT> _type_marker = 11 <NEW_LINE> @classmethod <NEW_LINE> def from_native(cls, regex): <NEW_LINE> <INDENT> if not isinstance(regex, RE_TYPE): <NEW_LINE> <INDENT> raise TypeError( "regex must be a compiled regular expression, not %s" % type(regex)) <NEW_LINE> <DEDENT> return Regex(regex.pattern, regex.flags) <NEW_LINE> <DEDENT> def __init__(self, pattern, flags=0): <NEW_LINE> <INDENT> if not isinstance(pattern, (text_type, bytes)): <NEW_LINE> <INDENT> raise TypeError("pattern must be a string, not %s" % type(pattern)) <NEW_LINE> <DEDENT> self.pattern = pattern <NEW_LINE> if isinstance(flags, string_type): <NEW_LINE> <INDENT> self.flags = str_flags_to_int(flags) <NEW_LINE> <DEDENT> elif isinstance(flags, int): <NEW_LINE> <INDENT> self.flags = flags <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError( "flags must be a string or int, not %s" % type(flags)) <NEW_LINE> <DEDENT> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, Regex): <NEW_LINE> <INDENT> return self.pattern == self.pattern and self.flags == other.flags <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> <DEDENT> __hash__ = None <NEW_LINE> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Regex(%r, %r)" % (self.pattern, self.flags) <NEW_LINE> <DEDENT> def try_compile(self): <NEW_LINE> <INDENT> return re.compile(self.pattern, self.flags)
BSON regular expression data.
62599074bf627c535bcb2ded
class BitfieldParser(tpg.Parser, dict): <NEW_LINE> <INDENT> pass
set lexer = ContextSensitiveLexer separator space '\s+'; token TOKEN_CONSTANT '[0-1]+\??' $ ConstantField token TOKEN_VARIABLE '[A-Za-z_]+' $ VariableField token TOKEN_ITERATION '{\d+}' $ str token TOKEN_POSITION '\.\d+' $ str START/f -> BitFields/f ; BitFields/f -> BitField/f (BitField/n $ f.append(n) )* ; BitField/f -> ConstantBitField/f | VariableBitField/f ; ConstantBitField/f -> TOKEN_CONSTANT/f ; VariableBitField/f -> TOKEN_VARIABLE/f (TOKEN_ITERATION/i $ f.setiteration(i) )? (TOKEN_POSITION/p $ f.setposition(p) )? ;
6259907467a9b606de547734
class YumCleanAll(ActionProvider): <NEW_LINE> <INDENT> action_key = 'yum.clean_all' <NEW_LINE> def __init__(self, action_element, path_vars=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_commands(self): <NEW_LINE> <INDENT> if not FileUtilities.exe_exists('yum'): <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> yield Command.Function( None, Unix.yum_clean, 'yum clean all')
Action to run 'yum clean all'
625990744e4d562566373d27
class PendingSponsorListView( LoginRequiredMixin, SponsorMixin, PaginationMixin, ListView): <NEW_LINE> <INDENT> context_object_name = 'sponsors' <NEW_LINE> template_name = 'sponsor/pending-list.html' <NEW_LINE> paginate_by = 10 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(PendingSponsorListView, self).__init__() <NEW_LINE> self.project = None <NEW_LINE> self.project_slug = None <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(PendingSponsorListView, self) .get_context_data(**kwargs) <NEW_LINE> context['num_sponsors'] = self.get_queryset().count() <NEW_LINE> context['unapproved'] = True <NEW_LINE> context['project_slug'] = self.project_slug <NEW_LINE> context['project'] = self.project <NEW_LINE> return context <NEW_LINE> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> if self.queryset is None: <NEW_LINE> <INDENT> self.project_slug = self.kwargs.get('project_slug', None) <NEW_LINE> if self.project_slug: <NEW_LINE> <INDENT> self.project = Project.objects.get(slug=self.project_slug) <NEW_LINE> queryset = Sponsor.unapproved_objects.filter( project=self.project) <NEW_LINE> return queryset <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Http404('Sorry! We could not find your sponsor!') <NEW_LINE> <DEDENT> <DEDENT> return self.queryset
List view for pending Sponsor.
625990748a43f66fc4bf3ab5
class FieldEndpoint(ListAPIMixin, CreateAPIMixin, BaseAPIView): <NEW_LINE> <INDENT> permission = 'orgs.org_surveyor' <NEW_LINE> model = ContactField <NEW_LINE> serializer_class = ContactFieldReadSerializer <NEW_LINE> write_serializer_class = ContactFieldWriteSerializer <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> queryset = self.model.objects.filter(org=self.request.user.get_org(), is_active=True) <NEW_LINE> key = self.request.query_params.get('key', None) <NEW_LINE> if key: <NEW_LINE> <INDENT> queryset = queryset.filter(key__icontains=key) <NEW_LINE> <DEDENT> return queryset
## Listing Fields A **GET** returns the list of fields for your organization. * **key** - the unique key of this field (string) (filterable: ```key```) * **label** - the display label of this field (string) * **value_type** - one of the following strings: T - Text N - Decimal Number D - Datetime S - State I - District Example: GET /api/v1/fields.json Response containing the groups for your organization: { "count": 1, "next": null, "previous": null, "results": [ { "key": "nick_name", "label": "Nick name", "value_type": "T" }, ... ] } ## Adding a Field A **POST** can be used to create a new contact field. Don't specify a key as this will be generated for you. * **label** - the display label (string) * **value_type** - one of the value type codes (string) Example: POST /api/v1/fields.json { "label": "Nick name", "value_type": "T" } You will receive a field object (with the new field key) as a response if successful: { "key": "nick_name", "label": "Nick name", "value_type": "T" } ## Updating a Field A **POST** can also be used to update an existing field if you do specify it's key. * **key** - the unique field key * **label** - the display label (string) * **value_type** - one of the value type codes (string) Example: POST /api/v1/fields.json { "key": "nick_name", "label": "New label", "value_type": "T" } You will receive the updated field object as a response if successful: { "key": "nick_name", "label": "New label", "value_type": "T" }
6259907460cbc95b063659fd
@python_2_unicode_compatible <NEW_LINE> class Map(models.Model): <NEW_LINE> <INDENT> mapname = models.CharField(max_length=64, unique=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'mapnames' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '{0}'.format(self.mapname)
Map Attributes: mapname (str): Name of the map Reverse lookup attributes: matches (QuerySet): MatchResult objects for every match on the map
6259907426068e7796d4e25d
class GroupType(bb.Union): <NEW_LINE> <INDENT> _catch_all = 'other' <NEW_LINE> team = None <NEW_LINE> user_managed = None <NEW_LINE> other = None <NEW_LINE> def is_team(self): <NEW_LINE> <INDENT> return self._tag == 'team' <NEW_LINE> <DEDENT> def is_user_managed(self): <NEW_LINE> <INDENT> return self._tag == 'user_managed' <NEW_LINE> <DEDENT> def is_other(self): <NEW_LINE> <INDENT> return self._tag == 'other' <NEW_LINE> <DEDENT> def _process_custom_annotations(self, annotation_type, processor): <NEW_LINE> <INDENT> super(GroupType, self)._process_custom_annotations(annotation_type, processor) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'GroupType(%r, %r)' % (self._tag, self._value)
The group type determines how a group is created and managed. This class acts as a tagged union. Only one of the ``is_*`` methods will return true. To get the associated value of a tag (if one exists), use the corresponding ``get_*`` method. :ivar team: A group to which team members are automatically added. Applicable to `team folders <https://www.dropbox.com/help/986>`_ only. :ivar user_managed: A group is created and managed by a user.
6259907499fddb7c1ca63a64
class LinearOperatorWithDetOne(tf.linalg.LinearOperatorFullMatrix): <NEW_LINE> <INDENT> def determinant(self): <NEW_LINE> <INDENT> return tf.convert_to_tensor(1, DTYPE) <NEW_LINE> <DEDENT> def _determinant(self): <NEW_LINE> <INDENT> return tf.convert_to_tensor(1, DTYPE) <NEW_LINE> <DEDENT> def log_abs_determinant(self): <NEW_LINE> <INDENT> return tf.convert_to_tensor(0, DTYPE) <NEW_LINE> <DEDENT> def _log_abs_determinant(self): <NEW_LINE> <INDENT> return tf.convert_to_tensor(0, DTYPE) <NEW_LINE> <DEDENT> def inverse(self): <NEW_LINE> <INDENT> return self.adjoint()
tf LinearOperator for rotations (U such that U U^T = U^T U = Id)
625990743317a56b869bf1d5
class SyncListPermissionPage(Page): <NEW_LINE> <INDENT> def __init__(self, version, response, solution): <NEW_LINE> <INDENT> super(SyncListPermissionPage, self).__init__(version, response) <NEW_LINE> self._solution = solution <NEW_LINE> <DEDENT> def get_instance(self, payload): <NEW_LINE> <INDENT> return SyncListPermissionInstance( self._version, payload, service_sid=self._solution['service_sid'], list_sid=self._solution['list_sid'], ) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Twilio.Sync.V1.SyncListPermissionPage>'
PLEASE NOTE that this class contains beta products that are subject to change. Use them with caution.
62599074fff4ab517ebcf13b
class HolidayUpdateView(BSModalUpdateView): <NEW_LINE> <INDENT> model = Holiday <NEW_LINE> template_name = 'holiday/update.html' <NEW_LINE> form_class = HolidayModelForm <NEW_LINE> success_message = 'Success: Holiday was deleted.' <NEW_LINE> def get_success_url(self): <NEW_LINE> <INDENT> return reverse_lazy('school:manage_semester', kwargs={'school_id':self.request.user.school.pk})
module doc
62599074adb09d7d5dc0be8b
class affair_state(models.Model): <NEW_LINE> <INDENT> _name = 'affair.state' <NEW_LINE> _description = 'States for affair' <NEW_LINE> name = fields.Char('Name', required=True, translate=True) <NEW_LINE> sequence = fields.Integer(string='Sequence', default=0, required=False) <NEW_LINE> description = fields.Text(string='Description') <NEW_LINE> fold = fields.Boolean(string='Folded in kanban view', default=False, help='This stage is folded in the kanban view when' 'there are no records in that stage to display.') <NEW_LINE> is_end = fields.Boolean(string='Is affair end', default=False, help='This stage is the end of the affair process. For example state "Done"') <NEW_LINE> _sql_constraints = [ ('unique_sequence', 'unique(sequence)', 'Error: There is already an other state with this sequence.'), ]
States for affair
62599074ec188e330fdfa1c5
class MASKSET(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.empty() <NEW_LINE> self.name = name <NEW_LINE> <DEDENT> def empty(self): <NEW_LINE> <INDENT> self.mask = bytearray() <NEW_LINE> self.smask = bytearray() <NEW_LINE> self.tdi = bytearray() <NEW_LINE> self.tdo = bytearray() <NEW_LINE> self.size = 0 <NEW_LINE> <DEDENT> def syncLengths( self, sawTDI, sawTDO, sawMASK, sawSMASK, newSize ): <NEW_LINE> <INDENT> if self.size == newSize: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if newSize == 0: <NEW_LINE> <INDENT> self.empty() <NEW_LINE> return <NEW_LINE> <DEDENT> if self.name == 'SIR' and not sawMASK: <NEW_LINE> <INDENT> self.mask = bytearray( newSize ) <NEW_LINE> <DEDENT> if newSize != len(self.mask): <NEW_LINE> <INDENT> self.mask = bytearray( newSize ) <NEW_LINE> if self.name == 'SDR': <NEW_LINE> <INDENT> for i in range( newSize ): <NEW_LINE> <INDENT> self.mask[i] = 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if newSize != len(self.tdo): <NEW_LINE> <INDENT> self.tdo = bytearray( newSize ) <NEW_LINE> <DEDENT> if newSize != len(self.tdi): <NEW_LINE> <INDENT> self.tdi = bytearray( newSize ) <NEW_LINE> <DEDENT> if newSize != len(self.smask): <NEW_LINE> <INDENT> self.smask = bytearray( newSize ) <NEW_LINE> <DEDENT> self.size = newSize
Class MASKSET holds a set of bit vectors, all of which are related, will all have the same length, and are associated with one of the seven shiftOps: HIR, HDR, TIR, TDR, SIR, SDR, LSDR. One of these holds a mask, smask, tdi, tdo, and a size.
6259907438b623060ffaa4e5
class PublicMessage(UserWarning): <NEW_LINE> <INDENT> def __init__(self, format=None, message=None, **kw): <NEW_LINE> <INDENT> process_message_arguments(self, format, message, **kw) <NEW_LINE> super(PublicMessage, self).__init__(self.msg) <NEW_LINE> <DEDENT> errno = 10000 <NEW_LINE> format = None <NEW_LINE> def to_dict(self): <NEW_LINE> <INDENT> return dict( type=unicode(self.type), name=unicode(type(self).__name__), message=self.strerror, code=self.errno, data=self.kw, )
**10000** Base class for messages that can be forwarded in an RPC response.
6259907463b5f9789fe86a85
class IHighlightBlock(Interface): <NEW_LINE> <INDENT> pass
Description of the Example Type
6259907416aa5153ce401dfa
class GameForm(messages.Message): <NEW_LINE> <INDENT> urlsafe_key = messages.StringField(1, required=True) <NEW_LINE> game_over = messages.BooleanField(2, required=True) <NEW_LINE> game_end_date = messages.StringField(3) <NEW_LINE> message = messages.StringField(4, required=True) <NEW_LINE> x_user_name = messages.StringField(5, required=True) <NEW_LINE> o_user_name = messages.StringField(6, required=True) <NEW_LINE> moves_count = messages.IntegerField(7) <NEW_LINE> game_moves = messages.StringField(8, repeated=True)
GameForm for outbound game state information
62599074435de62698e9d728
class Monster(GameObject): <NEW_LINE> <INDENT> def __init__(self, position, character, color): <NEW_LINE> <INDENT> GameObject.__init__(self, position, character, color, True) <NEW_LINE> self.position = position <NEW_LINE> self.movement_direction = [] <NEW_LINE> for x in range(-1, 2, 1): <NEW_LINE> <INDENT> for y in range(-1, 2, 1): <NEW_LINE> <INDENT> self.movement_direction.append((x, y)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_new_position(self, direction): <NEW_LINE> <INDENT> current_x, current_y = self.position <NEW_LINE> dx, dy = direction <NEW_LINE> return current_x + dx, current_y + dy <NEW_LINE> <DEDENT> def update(self, the_game): <NEW_LINE> <INDENT> free_space_available = False <NEW_LINE> while not free_space_available: <NEW_LINE> <INDENT> index = tcod.random_get_int(0, 0, len(self.movement_direction) - 1) <NEW_LINE> x, y = self.get_new_position(self.movement_direction[index]) <NEW_LINE> if not the_game.is_wall_in_way(x, y) and not the_game.is_player_in_way(x, y): <NEW_LINE> <INDENT> old_x, old_y = self.position <NEW_LINE> tcod.console_put_char(the_game.console_map, old_x, old_y, ' ', tcod.BKGND_NONE) <NEW_LINE> self.position = (x, y) <NEW_LINE> free_space_available = True
Monster class.
62599074796e427e5385009a
class GameOverScreen(Screen): <NEW_LINE> <INDENT> pass
Mapping to the GameOverScreen declared in kv file
625990744e4d562566373d28
class Edit(dexterity.EditForm): <NEW_LINE> <INDENT> grok.context(IActa)
A standard edit form.
625990742ae34c7f260aca05
class Optimizer(BaseOptimizer): <NEW_LINE> <INDENT> def optimize_process(self): <NEW_LINE> <INDENT> for stage in self.ordered_stages: <NEW_LINE> <INDENT> if not self.process.predecessors(stage): <NEW_LINE> <INDENT> self.run_pso(stage) <NEW_LINE> <DEDENT> elif len(self.process.predecessors(stage)) == 1: <NEW_LINE> <INDENT> predecessor = self.process.predecessors(stage)[0] <NEW_LINE> stage.input_vector = predecessor.get_output_of_stage( predecessor.input_vector, predecessor.control_params) <NEW_LINE> self.run_pso(stage) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError('Incorrect process structure.') <NEW_LINE> <DEDENT> if stage.optimization_status != OptimizationStatus.success: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def run_pso(self, stage): <NEW_LINE> <INDENT> pso(stage, swarmsize=self.swarm_size)
Optimizer is object which optimize process.
625990744a966d76dd5f080b
class SQLDateCompiler(compiler.SQLDateCompiler, GeoSQLCompiler): <NEW_LINE> <INDENT> def results_iter(self): <NEW_LINE> <INDENT> if self.connection.ops.oracle: <NEW_LINE> <INDENT> from django.db.models.fields import DateTimeField <NEW_LINE> fields = [DateTimeField()] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> needs_string_cast = self.connection.features.needs_datetime_string_cast <NEW_LINE> <DEDENT> offset = len(self.query.extra_select) <NEW_LINE> for rows in self.execute_sql(MULTI): <NEW_LINE> <INDENT> for row in rows: <NEW_LINE> <INDENT> date = row[offset] <NEW_LINE> if self.connection.ops.oracle: <NEW_LINE> <INDENT> date = self.resolve_columns(row, fields)[offset] <NEW_LINE> <DEDENT> elif needs_string_cast: <NEW_LINE> <INDENT> date = typecast_date(str(date)) <NEW_LINE> <DEDENT> if isinstance(date, datetime.datetime): <NEW_LINE> <INDENT> date = date.date() <NEW_LINE> <DEDENT> yield date
This is overridden for GeoDjango to properly cast date columns, since `GeoQuery.resolve_columns` is used for spatial values. See #14648, #16757.
6259907497e22403b383c824
@register <NEW_LINE> class StepOutResponse(BaseSchema): <NEW_LINE> <INDENT> __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Contains request result if success is true and optional error details if success is false." } } <NEW_LINE> __refs__ = set() <NEW_LINE> __slots__ = list(__props__.keys()) + ['kwargs'] <NEW_LINE> def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs): <NEW_LINE> <INDENT> self.type = 'response' <NEW_LINE> self.request_seq = request_seq <NEW_LINE> self.success = success <NEW_LINE> self.command = command <NEW_LINE> self.seq = seq <NEW_LINE> self.message = message <NEW_LINE> self.body = body <NEW_LINE> self.kwargs = kwargs <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'seq': self.seq, } <NEW_LINE> if self.message is not None: <NEW_LINE> <INDENT> dct['message'] = self.message <NEW_LINE> <DEDENT> if self.body is not None: <NEW_LINE> <INDENT> dct['body'] = self.body <NEW_LINE> <DEDENT> dct.update(self.kwargs) <NEW_LINE> return dct
Response to 'stepOut' request. This is just an acknowledgement, so no body field is required. Note: automatically generated code. Do not edit manually.
62599074be7bc26dc9252ae6
class IWSGIMiddlewareFactory(Protocol): <NEW_LINE> <INDENT> def __call__(self, app: IWSGIApp, config: Mapping = {}) -> IWSGIMiddleware: <NEW_LINE> <INDENT> ...
Defines a minimal WSGI middleware factory.
6259907432920d7e50bc7969
class EmrTerminateJobFlowOperator(BaseOperator): <NEW_LINE> <INDENT> template_fields = ['job_flow_id'] <NEW_LINE> template_ext = () <NEW_LINE> ui_color = '#f9c915' <NEW_LINE> @apply_defaults <NEW_LINE> def __init__( self, job_flow_id, aws_conn_id='s3_default', *args, **kwargs): <NEW_LINE> <INDENT> super(EmrTerminateJobFlowOperator, self).__init__(*args, **kwargs) <NEW_LINE> self.job_flow_id = job_flow_id <NEW_LINE> self.aws_conn_id = aws_conn_id <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> emr = EmrHook(aws_conn_id=self.aws_conn_id).get_conn() <NEW_LINE> self.log.info('Terminating JobFlow %s', self.job_flow_id) <NEW_LINE> response = emr.terminate_job_flows(JobFlowIds=[self.job_flow_id]) <NEW_LINE> if not response['ResponseMetadata']['HTTPStatusCode'] == 200: <NEW_LINE> <INDENT> raise AirflowException('JobFlow termination failed: %s' % response) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.log.info('JobFlow with id %s terminated', self.job_flow_id)
Operator to terminate EMR JobFlows. :param job_flow_id: id of the JobFlow to terminate. (templated) :type job_flow_id: str :param aws_conn_id: aws connection to uses :type aws_conn_id: str
625990741f5feb6acb164515
class Notification(SoftDeletionModel): <NEW_LINE> <INDENT> __tablename__ = 'notifications' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> user_id = db.Column(db.Integer, db.ForeignKey('users.id', ondelete='CASCADE')) <NEW_LINE> user = db.relationship('User', backref='notifications', foreign_keys=[user_id]) <NEW_LINE> title = db.Column(db.String) <NEW_LINE> message = db.Column(db.Text) <NEW_LINE> received_at = db.Column(db.DateTime(timezone=True)) <NEW_LINE> is_read = db.Column(db.Boolean) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return '<Notif %s:%s>' % (self.user, self.title)
Model for storing user notifications.
62599074b7558d5895464bc4
class FirstHeightGreaterThan: <NEW_LINE> <INDENT> def __init__(self, h): <NEW_LINE> <INDENT> self.h = h <NEW_LINE> <DEDENT> def __call__(self, p): <NEW_LINE> <INDENT> return p.height > self.h
Search criterium to a person with a height greater than h meters as a functor.
625990745fc7496912d48efa
class ResultSet(object): <NEW_LINE> <INDENT> def __init__(self, model, url, json): <NEW_LINE> <INDENT> self.model = model <NEW_LINE> self.url = url <NEW_LINE> self.json = json <NEW_LINE> self.index = -1 <NEW_LINE> self.total_count = self.json['meta']['total_count'] <NEW_LINE> self.limit = self.json['meta']['limit'] <NEW_LINE> self.offset = self.json['meta']['offset'] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.json['objects']) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if 'include_metadata' in self.model.__init__.__code__.co_varnames: <NEW_LINE> <INDENT> return self.model(self.json['objects'][key], include_metadata=False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.model(self.json['objects'][key]) <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> self.index += 1 <NEW_LINE> if self.index >= len(self): <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> if 'include_metadata' in self.model.__init__.__code__.co_varnames: <NEW_LINE> <INDENT> return self.model(self.json['objects'][self.index], include_metadata=False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.model(self.json['objects'][self.index])
Abstraction to represent JSON returned by MyTardis API which includes a list of records and some meta information e.g. whether there are additional pages of records to retrieve.
62599074a8370b77170f1ced
class ResyncState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): <NEW_LINE> <INDENT> NONE = "None" <NEW_LINE> PREPARED_FOR_RESYNCHRONIZATION = "PreparedForResynchronization" <NEW_LINE> STARTED_RESYNCHRONIZATION = "StartedResynchronization"
The resync state.
6259907499cbb53fe683280c
class Selection(object): <NEW_LINE> <INDENT> def __init__(self, table, where=None, start=None, stop=None, **kwargs): <NEW_LINE> <INDENT> self.table = table <NEW_LINE> self.where = where <NEW_LINE> self.start = start <NEW_LINE> self.stop = stop <NEW_LINE> self.condition = None <NEW_LINE> self.filter = None <NEW_LINE> self.terms = None <NEW_LINE> self.coordinates = None <NEW_LINE> if com.is_list_like(where): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> inferred = lib.infer_dtype(where) <NEW_LINE> if inferred == 'integer' or inferred == 'boolean': <NEW_LINE> <INDENT> where = np.asarray(where) <NEW_LINE> if where.dtype == np.bool_: <NEW_LINE> <INDENT> start, stop = self.start, self.stop <NEW_LINE> if start is None: <NEW_LINE> <INDENT> start = 0 <NEW_LINE> <DEDENT> if stop is None: <NEW_LINE> <INDENT> stop = self.table.nrows <NEW_LINE> <DEDENT> self.coordinates = np.arange(start, stop)[where] <NEW_LINE> <DEDENT> elif issubclass(where.dtype.type, np.integer): <NEW_LINE> <INDENT> if ((self.start is not None and (where < self.start).any()) or (self.stop is not None and (where >= self.stop).any())): <NEW_LINE> <INDENT> raise ValueError( "where must have index locations >= start and " "< stop" ) <NEW_LINE> <DEDENT> self.coordinates = where <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> if self.coordinates is None: <NEW_LINE> <INDENT> self.terms = self.generate(where) <NEW_LINE> if self.terms is not None: <NEW_LINE> <INDENT> self.condition, self.filter = self.terms.evaluate() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def generate(self, where): <NEW_LINE> <INDENT> if where is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> q = self.table.queryables() <NEW_LINE> try: <NEW_LINE> <INDENT> return Expr(where, queryables=q, encoding=self.table.encoding) <NEW_LINE> <DEDENT> except NameError as detail: <NEW_LINE> <INDENT> raise ValueError( "The passed where expression: {0}\n" " contains an invalid variable reference\n" " all 
of the variable refrences must be a " "reference to\n" " an axis (e.g. 'index' or 'columns'), or a " "data_column\n" " The currently defined references are: {1}\n" .format(where, ','.join(q.keys())) ) <NEW_LINE> <DEDENT> <DEDENT> def select(self): <NEW_LINE> <INDENT> if self.condition is not None: <NEW_LINE> <INDENT> return self.table.table.read_where(self.condition.format(), start=self.start, stop=self.stop) <NEW_LINE> <DEDENT> elif self.coordinates is not None: <NEW_LINE> <INDENT> return self.table.table.read_coordinates(self.coordinates) <NEW_LINE> <DEDENT> return self.table.table.read(start=self.start, stop=self.stop) <NEW_LINE> <DEDENT> def select_coords(self): <NEW_LINE> <INDENT> start, stop = self.start, self.stop <NEW_LINE> nrows = self.table.nrows <NEW_LINE> if start is None: <NEW_LINE> <INDENT> start = 0 <NEW_LINE> <DEDENT> elif start < 0: <NEW_LINE> <INDENT> start += nrows <NEW_LINE> <DEDENT> if self.stop is None: <NEW_LINE> <INDENT> stop = nrows <NEW_LINE> <DEDENT> elif stop < 0: <NEW_LINE> <INDENT> stop += nrows <NEW_LINE> <DEDENT> if self.condition is not None: <NEW_LINE> <INDENT> return self.table.table.get_where_list(self.condition.format(), start=start, stop=stop, sort=True) <NEW_LINE> <DEDENT> elif self.coordinates is not None: <NEW_LINE> <INDENT> return self.coordinates <NEW_LINE> <DEDENT> return np.arange(start, stop)
Carries out a selection operation on a tables.Table object. Parameters ---------- table : a Table object where : list of Terms (or convertable to) start, stop: indicies to start and/or stop selection
625990748a43f66fc4bf3ab7
class Cloudpipe(extensions.ExtensionDescriptor): <NEW_LINE> <INDENT> name = "Cloudpipe" <NEW_LINE> alias = "os-cloudpipe" <NEW_LINE> namespace = "http://docs.openstack.org/compute/ext/cloudpipe/api/v1.1" <NEW_LINE> updated = "2011-12-16T00:00:00+00:00" <NEW_LINE> admin_only = True <NEW_LINE> def get_resources(self): <NEW_LINE> <INDENT> resources = [] <NEW_LINE> res = extensions.ResourceExtension('os-cloudpipe', CloudpipeController()) <NEW_LINE> resources.append(res) <NEW_LINE> return resources
Adds actions to create cloudpipe instances. When running with the Vlan network mode, you need a mechanism to route from the public Internet to your vlans. This mechanism is known as a cloudpipe. At the time of creating this class, only OpenVPN is supported. Support for a SSH Bastion host is forthcoming.
62599074f9cc0f698b1c5f5c
class Response: <NEW_LINE> <INDENT> def __init__(self, response, request): <NEW_LINE> <INDENT> self.request = request <NEW_LINE> self.status = response.status <NEW_LINE> http_body = response.read() <NEW_LINE> self.body = None <NEW_LINE> self.__headers = response.headers <NEW_LINE> self.request_id = self.__headers.get("X-Request-Id") <NEW_LINE> self.date = self.__headers.get("Date") <NEW_LINE> self.rate_limit = parse_int_header(self.__headers, "X-RateLimit-Limit") <NEW_LINE> self.rate_limit_remaining = parse_int_header( self.__headers, "X-RateLimit-Remaining" ) <NEW_LINE> self.rate_limit_reset = parse_datetime_header( self.__headers, "X-RateLimit-Reset" ) <NEW_LINE> self.content_type = self.__headers.get("Content-Type", "").split(";")[0] <NEW_LINE> self.proxy_metadata = { "server": self.__headers.get("Server"), "cf-ray": self.__headers.get("CF-RAY"), } <NEW_LINE> self.total_records = parse_int_header(self.__headers, "Recurly-Total-Records") <NEW_LINE> if http_body and len(http_body) > 0: <NEW_LINE> <INDENT> self.body = http_body
Class representing a response from Recurly
625990745fdd1c0f98e5f8a0
class token_types(lexer_token_types): <NEW_LINE> <INDENT> pass
Token Types which are being used during the parsing process.
62599074460517430c432ce9
class ViewBuilder(object): <NEW_LINE> <INDENT> def __init__(self, base_url, project_id=""): <NEW_LINE> <INDENT> self.base_url = base_url <NEW_LINE> self.project_id = project_id <NEW_LINE> <DEDENT> def _format_dates(self, image): <NEW_LINE> <INDENT> for attr in ['created_at', 'updated_at', 'deleted_at']: <NEW_LINE> <INDENT> if image.get(attr) is not None: <NEW_LINE> <INDENT> image[attr] = image[attr].strftime('%Y-%m-%dT%H:%M:%SZ') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _format_status(self, image): <NEW_LINE> <INDENT> status_mapping = { 'active': 'ACTIVE', 'queued': 'SAVING', 'saving': 'SAVING', 'deleted': 'DELETED', 'pending_delete': 'DELETED', 'killed': 'ERROR', } <NEW_LINE> try: <NEW_LINE> <INDENT> image['status'] = status_mapping[image['status']] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> image['status'] = 'UNKNOWN' <NEW_LINE> <DEDENT> <DEDENT> def _build_server(self, image, image_obj): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def generate_href(self, image_id): <NEW_LINE> <INDENT> return os.path.join(self.base_url, "images", str(image_id)) <NEW_LINE> <DEDENT> def build_list(self, image_objs, detail=False, **kwargs): <NEW_LINE> <INDENT> images = [] <NEW_LINE> for image_obj in image_objs: <NEW_LINE> <INDENT> image = self.build(image_obj, detail=detail) <NEW_LINE> images.append(image) <NEW_LINE> <DEDENT> return dict(images=images) <NEW_LINE> <DEDENT> def build(self, image_obj, detail=False): <NEW_LINE> <INDENT> self._format_dates(image_obj) <NEW_LINE> if "status" in image_obj: <NEW_LINE> <INDENT> self._format_status(image_obj) <NEW_LINE> <DEDENT> image = { "id": image_obj.get("id"), "name": image_obj.get("name"), } <NEW_LINE> self._build_server(image, image_obj) <NEW_LINE> self._build_image_id(image, image_obj) <NEW_LINE> if detail: <NEW_LINE> <INDENT> image.update({ "created": image_obj.get("created_at"), "updated": image_obj.get("updated_at"), "status": image_obj.get("status"), }) <NEW_LINE> if image["status"].upper() == 
"ACTIVE": <NEW_LINE> <INDENT> image["progress"] = 100 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> image["progress"] = 0 <NEW_LINE> <DEDENT> <DEDENT> return image
Base class for generating responses to OpenStack API image requests.
6259907499cbb53fe683280d
class Bundle: <NEW_LINE> <INDENT> def __init__(self, n, rmax, m): <NEW_LINE> <INDENT> self.n = n <NEW_LINE> self.rmax = float(rmax) <NEW_LINE> self.m = m <NEW_LINE> self.rays = [] <NEW_LINE> <DEDENT> def rtuniform(self): <NEW_LINE> <INDENT> for i in range(0, self.n+1): <NEW_LINE> <INDENT> radius = (i * self.rmax)/self.n <NEW_LINE> angle = 0.0 <NEW_LINE> if i == 0: <NEW_LINE> <INDENT> yield radius, angle <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for x in range(0, self.m*i): <NEW_LINE> <INDENT> angle = angle + (2.0*np.pi)/(self.m*i) <NEW_LINE> yield radius, angle <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def getBundle(self, ray): <NEW_LINE> <INDENT> rayPosition = ray.p() <NEW_LINE> rayDirection = ray.k() <NEW_LINE> for r, t in self.rtuniform(): <NEW_LINE> <INDENT> point = [(r * np.cos(t)) + rayPosition[0], (r * np.sin(t)) + rayPosition[1], rayPosition[2]] <NEW_LINE> ray = Ray(point, rayDirection) <NEW_LINE> self.rays.append(ray) <NEW_LINE> <DEDENT> return self.rays
A class which allows a collection of ray objects to be created with position and direction vectors
625990744f6381625f19a13b
class Schedule(object): <NEW_LINE> <INDENT> EMPTY_SCHEDULE = [] <NEW_LINE> def __init__(self, tasks): <NEW_LINE> <INDENT> self.tasks = sorted(tasks, key=lambda task: task.getScheduledTime()) <NEW_LINE> <DEDENT> def getScheduledTasksForInterval(self, intervalStart, intervalEnd): <NEW_LINE> <INDENT> return [task for task in self.tasks if intervalStart <= task.getScheduledTime() < intervalEnd] <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return reprHelper(self) .add("tasks", self.tasks) .build() <NEW_LINE> <DEDENT> def approxEquals(self, other, epsilon): <NEW_LINE> <INDENT> foundTasks = 0 <NEW_LINE> for task in self.tasks: <NEW_LINE> <INDENT> time = task.getScheduledTime() <NEW_LINE> tasksAtSameTime = other.getScheduledTasksForInterval(time - epsilon, time + epsilon) <NEW_LINE> foundMatch = False <NEW_LINE> for otherTask in tasksAtSameTime: <NEW_LINE> <INDENT> if task.getTask() == otherTask.getTask(): <NEW_LINE> <INDENT> if task.getMachine() != otherTask.getMachine(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> foundMatch = True <NEW_LINE> foundTasks += 1 <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if not foundMatch: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> if foundTasks < len(other.tasks): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True
A schedule of tasks produced by a scheduler. Users can ask for all the tasks that are scheduled in any interval.
62599074796e427e5385009c
class BaseDiscriminator(BaseModel): <NEW_LINE> <INDENT> def __init__(self, ndf, loss_type, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.ndf = ndf <NEW_LINE> self.loss_type = loss_type <NEW_LINE> <DEDENT> def _train_step_implementation( self, real_batch, netG=None, optD=None): <NEW_LINE> <INDENT> output_real = self._infer_step_implementation(real_batch) <NEW_LINE> fake_images = netG._infer_step_implementation(real_batch) <NEW_LINE> fake_images = F.zero_grad(fake_images) <NEW_LINE> output_fake = self._infer_step_implementation(fake_images) <NEW_LINE> errD = self.compute_gan_loss(output_real=output_real, output_fake=output_fake) <NEW_LINE> D_x, D_Gz = self.compute_probs(output_real=output_real, output_fake=output_fake) <NEW_LINE> optD.zero_grad() <NEW_LINE> optD.backward(errD) <NEW_LINE> optD.step() <NEW_LINE> return errD, D_x, D_Gz <NEW_LINE> <DEDENT> def _infer_step_implementation(self, batch): <NEW_LINE> <INDENT> return self.forward(batch) <NEW_LINE> <DEDENT> def compute_gan_loss(self, output_real, output_fake): <NEW_LINE> <INDENT> if self.loss_type == "gan" or self.loss_type == "ns": <NEW_LINE> <INDENT> errD = losses.minimax_loss_dis(output_fake=output_fake, output_real=output_real) <NEW_LINE> <DEDENT> elif self.loss_type == "wasserstein": <NEW_LINE> <INDENT> errD = losses.wasserstein_loss_dis(output_fake=output_fake, output_real=output_real) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Invalid loss_type selected.") <NEW_LINE> <DEDENT> return errD <NEW_LINE> <DEDENT> def compute_probs(self, output_real, output_fake): <NEW_LINE> <INDENT> D_x = F.sigmoid(output_real).mean() <NEW_LINE> D_Gz = F.sigmoid(output_fake).mean() <NEW_LINE> return D_x, D_Gz
Base class for a generic unconditional discriminator model. Attributes: ndf (int): Variable controlling discriminator feature map sizes. loss_type (str): Name of loss to use for GAN loss.
6259907491f36d47f2231b20
class DependencyMapper(CSECachingMapperMixin, Collector): <NEW_LINE> <INDENT> def __init__(self, include_subscripts=True, include_lookups=True, include_calls=True, include_cses=False, composite_leaves=None): <NEW_LINE> <INDENT> if composite_leaves is False: <NEW_LINE> <INDENT> include_subscripts = False <NEW_LINE> include_lookups = False <NEW_LINE> include_calls = False <NEW_LINE> <DEDENT> if composite_leaves is True: <NEW_LINE> <INDENT> include_subscripts = True <NEW_LINE> include_lookups = True <NEW_LINE> include_calls = True <NEW_LINE> <DEDENT> assert include_calls in [True, False, "descend_args"] <NEW_LINE> self.include_subscripts = include_subscripts <NEW_LINE> self.include_lookups = include_lookups <NEW_LINE> self.include_calls = include_calls <NEW_LINE> self.include_cses = include_cses <NEW_LINE> <DEDENT> def map_variable(self, expr): <NEW_LINE> <INDENT> return {expr} <NEW_LINE> <DEDENT> def map_call(self, expr): <NEW_LINE> <INDENT> if self.include_calls == "descend_args": <NEW_LINE> <INDENT> return self.combine( [self.rec(child) for child in expr.parameters]) <NEW_LINE> <DEDENT> elif self.include_calls: <NEW_LINE> <INDENT> return {expr} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super().map_call(expr) <NEW_LINE> <DEDENT> <DEDENT> def map_call_with_kwargs(self, expr): <NEW_LINE> <INDENT> if self.include_calls == "descend_args": <NEW_LINE> <INDENT> return self.combine( [self.rec(child) for child in expr.parameters] + [self.rec(val) for name, val in expr.kw_parameters.items()] ) <NEW_LINE> <DEDENT> elif self.include_calls: <NEW_LINE> <INDENT> return {expr} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super().map_call_with_kwargs(expr) <NEW_LINE> <DEDENT> <DEDENT> def map_lookup(self, expr): <NEW_LINE> <INDENT> if self.include_lookups: <NEW_LINE> <INDENT> return {expr} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super().map_lookup(expr) <NEW_LINE> <DEDENT> <DEDENT> def map_subscript(self, expr): <NEW_LINE> <INDENT> if 
self.include_subscripts: <NEW_LINE> <INDENT> return {expr} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super().map_subscript(expr) <NEW_LINE> <DEDENT> <DEDENT> def map_common_subexpression_uncached(self, expr): <NEW_LINE> <INDENT> if self.include_cses: <NEW_LINE> <INDENT> return {expr} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Collector.map_common_subexpression(self, expr) <NEW_LINE> <DEDENT> <DEDENT> def map_slice(self, expr): <NEW_LINE> <INDENT> return self.combine( [self.rec(child) for child in expr.children if child is not None])
Maps an expression to the :class:`set` of expressions it is based on. The ``include_*`` arguments to the constructor determine which types of objects occur in this output set. If all are *False*, only :class:`pymbolic.primitives.Variable` instances are included.
62599074e5267d203ee6d04f
class Output: <NEW_LINE> <INDENT> def __init__(self, output): <NEW_LINE> <INDENT> self.beta = output[0] <NEW_LINE> self.sd_beta = output[1] <NEW_LINE> self.cov_beta = output[2] <NEW_LINE> if len(output) == 4: <NEW_LINE> <INDENT> self.__dict__.update(output[3]) <NEW_LINE> self.stopreason = _report_error(self.info) <NEW_LINE> <DEDENT> <DEDENT> def pprint(self): <NEW_LINE> <INDENT> print('Beta:', self.beta) <NEW_LINE> print('Beta Std Error:', self.sd_beta) <NEW_LINE> print('Beta Covariance:', self.cov_beta) <NEW_LINE> if hasattr(self, 'info'): <NEW_LINE> <INDENT> print('Residual Variance:',self.res_var) <NEW_LINE> print('Inverse Condition #:', self.inv_condnum) <NEW_LINE> print('Reason(s) for Halting:') <NEW_LINE> for r in self.stopreason: <NEW_LINE> <INDENT> print(' %s' % r)
The Output class stores the output of an ODR run. Attributes ---------- beta : ndarray Estimated parameter values, of shape (q,). sd_beta : ndarray Standard deviations of the estimated parameters, of shape (p,). cov_beta : ndarray Covariance matrix of the estimated parameters, of shape (p,p). delta : ndarray, optional Array of estimated errors in input variables, of same shape as `x`. eps : ndarray, optional Array of estimated errors in response variables, of same shape as `y`. xplus : ndarray, optional Array of ``x + delta``. y : ndarray, optional Array ``y = fcn(x + delta)``. res_var : float, optional Residual variance. sum_square : float, optional Sum of squares error. sum_square_delta : float, optional Sum of squares of delta error. sum_square_eps : float, optional Sum of squares of eps error. inv_condnum : float, optional Inverse condition number (cf. ODRPACK UG p. 77). rel_error : float, optional Relative error in function values computed within fcn. work : ndarray, optional Final work array. work_ind : dict, optional Indices into work for drawing out values (cf. ODRPACK UG p. 83). info : int, optional Reason for returning, as output by ODRPACK (cf. ODRPACK UG p. 38). stopreason : list of str, optional `info` interpreted into English. Notes ----- Takes one argument for initialization, the return value from the function `~scipy.odr.odr`. The attributes listed as "optional" above are only present if `~scipy.odr.odr` was run with ``full_output=1``.
6259907471ff763f4b5e90cd
class Resource(mongoengine.EmbeddedDocument): <NEW_LINE> <INDENT> meta = dict(allow_inheritance=True) <NEW_LINE> name = fields.StringField(required=True)
The image store file access abstraction.
6259907466673b3332c31d22
class EditVehicleSubclass(View, LoginRequiredMixin): <NEW_LINE> <INDENT> template = 'vehicle/edit_vehicle_subclass.html' <NEW_LINE> context = {} <NEW_LINE> form_class = None <NEW_LINE> @staticmethod <NEW_LINE> def determine_subclass(listing, vehicle_type, new): <NEW_LINE> <INDENT> if new: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if vehicle_type == veh_const.SEDAN: <NEW_LINE> <INDENT> return listing.vehicle.sedan <NEW_LINE> <DEDENT> elif vehicle_type == veh_const.TRUCK: <NEW_LINE> <INDENT> return listing.vehicle.truck <NEW_LINE> <DEDENT> elif vehicle_type == veh_const.COUPE: <NEW_LINE> <INDENT> return listing.vehicle.coupe <NEW_LINE> <DEDENT> return listing.vehicle.suv <NEW_LINE> <DEDENT> @transaction.atomic <NEW_LINE> def get(self, request, **kwargs): <NEW_LINE> <INDENT> listing = acc_models.List.objects.get(pk=kwargs['listing']) <NEW_LINE> subclass = self.determine_subclass(listing, kwargs['vehicle_type'], kwargs['new']) <NEW_LINE> form = self.form_class(instance=subclass) <NEW_LINE> self.context['listing'] = kwargs['listing'] <NEW_LINE> self.context['form'] = form <NEW_LINE> return render(request, self.template, self.context) <NEW_LINE> <DEDENT> @transaction.atomic <NEW_LINE> def post(self, request, **kwargs): <NEW_LINE> <INDENT> listing = acc_models.List.objects.get(pk=kwargs['listing']) <NEW_LINE> subclass = self.determine_subclass(listing, kwargs['vehicle_type'], kwargs['new']) <NEW_LINE> form = self.form_class(request.POST, instance=subclass) <NEW_LINE> self.context['listing'] = kwargs['listing'] <NEW_LINE> self.context['form'] = form <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> sub_vehicle = form.save(commit=False) <NEW_LINE> sub_vehicle.vehicle = listing.vehicle <NEW_LINE> sub_vehicle.save() <NEW_LINE> return redirect(reverse('account:edit_listing', kwargs={'listing': kwargs['listing']})) <NEW_LINE> <DEDENT> messages.error(request, main_const.ERROR_MESSAGE) <NEW_LINE> return render(request, self.template, self.context)
This class based view will handle updating vehicle subclasses
62599074d268445f2663a7ef
class Probe(object): <NEW_LINE> <INDENT> level = None <NEW_LINE> name = None <NEW_LINE> last_measure = {} <NEW_LINE> last_measure_time = {} <NEW_LINE> home = None <NEW_LINE> def __init__(self, options): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def check(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def set_home(self, home): <NEW_LINE> <INDENT> self.home = home <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> if self.name is not None: <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> m = re.search(r'^probe_(\w+)$', self.__class__.__name__.lower()) <NEW_LINE> if m is not None: <NEW_LINE> <INDENT> return m.group(1) <NEW_LINE> <DEDENT> logger.error("Could not get the name of the probe") <NEW_LINE> return None <NEW_LINE> <DEDENT> def get_last_measure(self, key): <NEW_LINE> <INDENT> row = db.get_last_measure( self.home, 'monitoring.db', key ) <NEW_LINE> if row: <NEW_LINE> <INDENT> return dict(time=row[0], data=json.loads(row[1])) <NEW_LINE> <DEDENT> <DEDENT> def upsert_last_measure(self, time, key, data): <NEW_LINE> <INDENT> db.upsert_last_measure( self.home, 'monitoring.db', time, key, data ) <NEW_LINE> <DEDENT> def delta(self, key, current_values): <NEW_LINE> <INDENT> current_time = time.time() <NEW_LINE> store_key = self.get_name() + key <NEW_LINE> last_measure = self.get_last_measure(store_key) <NEW_LINE> delta = (None, None) <NEW_LINE> delta_value = None <NEW_LINE> try: <NEW_LINE> <INDENT> if last_measure: <NEW_LINE> <INDENT> delta_time = current_time - last_measure['time'] <NEW_LINE> delta_values = {} <NEW_LINE> for k in current_values.keys(): <NEW_LINE> <INDENT> delta_value = current_values[k] - last_measure['data'][k] <NEW_LINE> if delta_value < 0: <NEW_LINE> <INDENT> raise Exception('Negative delta value.') <NEW_LINE> <DEDENT> delta_values[k] = delta_value <NEW_LINE> <DEDENT> delta = (delta_time, delta_values) <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> 
delta = (None, None) <NEW_LINE> <DEDENT> self.upsert_last_measure( current_time, store_key, current_values ) <NEW_LINE> return delta <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.get_name()
Base class for all plugins.
62599074627d3e7fe0e087ab
class Error(Exception): <NEW_LINE> <INDENT> pass
general error exception
625990747047854f46340cdc
class IPQueue(object): <NEW_LINE> <INDENT> def __init__(self, maxlen=200, ttl=360): <NEW_LINE> <INDENT> self._ips = deque() <NEW_LINE> self._counter = dict() <NEW_LINE> self._last_update = dict() <NEW_LINE> self._maxlen = maxlen <NEW_LINE> self._ttl = float(ttl) <NEW_LINE> self._lock = threading.RLock() <NEW_LINE> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> odict = self.__dict__.copy() <NEW_LINE> del odict['_lock'] <NEW_LINE> return odict <NEW_LINE> <DEDENT> def __setstate__(self, state): <NEW_LINE> <INDENT> self.__dict__.update(state) <NEW_LINE> <DEDENT> def append(self, ip): <NEW_LINE> <INDENT> self._lock.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> if ip not in self._ips: <NEW_LINE> <INDENT> self._ips.appendleft(ip) <NEW_LINE> self._counter[ip] = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._ips.remove(ip) <NEW_LINE> self._ips.appendleft(ip) <NEW_LINE> self._counter[ip] += 1 <NEW_LINE> <DEDENT> self._last_update[ip] = time.time() <NEW_LINE> if len(self._ips) > self._maxlen: <NEW_LINE> <INDENT> ip = self._ips.pop() <NEW_LINE> del self._counter[ip] <NEW_LINE> del self._last_update[ip] <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> self._lock.release() <NEW_LINE> <DEDENT> <DEDENT> def _discard_if_old(self, ip): <NEW_LINE> <INDENT> updated = self._last_update.get(ip) <NEW_LINE> if updated is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if time.time() - updated > self._ttl: <NEW_LINE> <INDENT> self.remove(ip) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def _discard_old_ips(self): <NEW_LINE> <INDENT> index = len(self._ips) - 1 <NEW_LINE> while index >= 0: <NEW_LINE> <INDENT> ip = self._ips[index] <NEW_LINE> if not self._discard_if_old(ip): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> index -= 1 <NEW_LINE> <DEDENT> <DEDENT> def count(self, ip): <NEW_LINE> <INDENT> self._discard_if_old(ip) <NEW_LINE> return self._counter.get(ip, 0) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> 
self._discard_old_ips() <NEW_LINE> return len(self._ips) <NEW_LINE> <DEDENT> def __contains__(self, ip): <NEW_LINE> <INDENT> self._discard_if_old(ip) <NEW_LINE> return ip in self._ips <NEW_LINE> <DEDENT> def remove(self, ip): <NEW_LINE> <INDENT> self._lock.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> self._ips.remove(ip) <NEW_LINE> del self._counter[ip] <NEW_LINE> del self._last_update[ip] <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self._lock.release()
IP Queue that keeps a counter for each IP. When an IP comes in, it's append in the left and the counter initialized to 1. If the IP is already in the queue, its counter is incremented, and it's moved back to the left. When the queue is full, the right element is discarded. Elements that are too old gets discarded, so this works also for low traffic applications.
625990747b25080760ed8976
class CategoryView(GenericAPIView): <NEW_LINE> <INDENT> queryset = GoodsCategory.objects.all() <NEW_LINE> def get(self, request, pk=None): <NEW_LINE> <INDENT> ret = { 'cat1': '', 'cat2': '', 'cat3': '', } <NEW_LINE> category = self.get_object() <NEW_LINE> if category.parent is None: <NEW_LINE> <INDENT> ret['cat1'] = ChannelSerializer(category.goodschannel_set.all()[0]).data <NEW_LINE> <DEDENT> elif category.goodscategory_set.count() == 0: <NEW_LINE> <INDENT> ret['cat3'] = CategorySerializer(category).data <NEW_LINE> cat2 = category.parent <NEW_LINE> ret['cat2'] = CategorySerializer(cat2).data <NEW_LINE> ret['cat1'] = ChannelSerializer( cat2.parent.goodschannel_set.all()[0]).data <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret['cat2'] = CategorySerializer(category).data <NEW_LINE> ret['cat1'] = ChannelSerializer( category.parent.goodschannel_set.all()[0]).data <NEW_LINE> <DEDENT> return Response(ret)
商品列表页面包屑导航
62599074167d2b6e312b8222
class EnergyBand(): <NEW_LINE> <INDENT> def __init__(self, lo, hi, token): <NEW_LINE> <INDENT> self._lo = lo <NEW_LINE> self._hi = hi <NEW_LINE> self._token = token <NEW_LINE> <DEDENT> @property <NEW_LINE> def lo(self): <NEW_LINE> <INDENT> return self._lo <NEW_LINE> <DEDENT> @property <NEW_LINE> def hi(self): <NEW_LINE> <INDENT> return self._hi <NEW_LINE> <DEDENT> @property <NEW_LINE> def token(self): <NEW_LINE> <INDENT> return self._token
Something to hold all the energy band specific stuff The energy band is defined by the low energy cutoff, high energy cutoff, and a label to be use eg when plotting. >>> broad = EnergyBand(0.5, 7.0, "B")
625990744e4d562566373d2b
@attr.s <NEW_LINE> class InstanceLocationConfig: <NEW_LINE> <INDENT> host = attr.ib(type=str) <NEW_LINE> port = attr.ib(type=int)
The host and port to talk to an instance via HTTP replication.
6259907423849d37ff8529db
class ConfigHelper: <NEW_LINE> <INDENT> def __init__(self, context): <NEW_LINE> <INDENT> self.context = context <NEW_LINE> <DEDENT> def load(self, config_variable, config_file, suite): <NEW_LINE> <INDENT> config_data = os.environ.get(config_variable, None) <NEW_LINE> if not config_data: <NEW_LINE> <INDENT> log.info("Loading %s config from %s", suite, config_file) <NEW_LINE> with open(config_file, "rb") as file_: <NEW_LINE> <INDENT> config_data = file_.read() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> log.info("Loading %s config from %s", suite, config_variable) <NEW_LINE> <DEDENT> self.context.suite = suite <NEW_LINE> config = self._variable_substitution( yaml.load( os.path.expandvars(config_data), Loader=yaml.FullLoader ) ) <NEW_LINE> if not self._validate_config_base(config): <NEW_LINE> <INDENT> raise ValueError("Invalid config") <NEW_LINE> <DEDENT> self.context.config = config["suites"].get(suite) <NEW_LINE> <DEDENT> def _variable_substitution(self, obj): <NEW_LINE> <INDENT> if isinstance(obj, dict): <NEW_LINE> <INDENT> for key in list(obj.keys()): <NEW_LINE> <INDENT> obj[self._variable_substitution(key)] = self._variable_substitution(obj.pop(key)) <NEW_LINE> <DEDENT> <DEDENT> if isinstance(obj, list): <NEW_LINE> <INDENT> for index, item in enumerate(obj): <NEW_LINE> <INDENT> obj[index] = self._variable_substitution(item) <NEW_LINE> <DEDENT> <DEDENT> if isinstance(obj, str) and re.match(r"^\@[a-zA-Z_][a-zA-Z0-9_]*$", obj) and obj[1:] in os.environ: <NEW_LINE> <INDENT> return os.environ[obj[1:]] <NEW_LINE> <DEDENT> return obj <NEW_LINE> <DEDENT> def _validate_config_base(self, config): <NEW_LINE> <INDENT> if config.get(constants.CONFIG_VERSION_KEY, 0) != constants.CURRENT_CONFIG_VERSION: <NEW_LINE> <INDENT> log.error("Invalid config version") <NEW_LINE> return False <NEW_LINE> <DEDENT> if "suites" not in config: <NEW_LINE> <INDENT> log.error("Suites are not defined") <NEW_LINE> return False <NEW_LINE> <DEDENT> if not 
config["suites"].get(self.context.suite, None): <NEW_LINE> <INDENT> log.error("Suite is not defined: %s", self.context.suite) <NEW_LINE> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def fill_config(data_obj): <NEW_LINE> <INDENT> data_obj.insert( len(data_obj), constants.CONFIG_VERSION_KEY, constants.CURRENT_CONFIG_VERSION ) <NEW_LINE> data_obj.insert(len(data_obj), "suites", CommentedMap(), comment="Test suites")
Parses config
625990744f88993c371f11b3
class RemoveGroupTask(MinionCmdTask): <NEW_LINE> <INDENT> PARAMS = MinionCmdTask.PARAMS <NEW_LINE> def __init__(self, job): <NEW_LINE> <INDENT> super(RemoveGroupTask, self).__init__(job) <NEW_LINE> self.cmd = TaskTypes.TYPE_REMOVE_GROUP <NEW_LINE> self.type = TaskTypes.TYPE_REMOVE_GROUP <NEW_LINE> <DEDENT> def execute(self, processor): <NEW_LINE> <INDENT> if self.group not in storage.groups: <NEW_LINE> <INDENT> raise JobBrokenError( 'Group {group_id} is not found in storage'.format( group_id=self.group, ) ) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> minion_response = processor.minions.remove_group( self.host, self.params ) <NEW_LINE> <DEDENT> except RuntimeError as e: <NEW_LINE> <INDENT> raise RetryError(self.attempts, e) <NEW_LINE> <DEDENT> self._set_minion_task_parameters(minion_response)
Minion task to remove storage group Current implementation just renames the backend base path so that automatic configuration could skip backend when the node is being started.
6259907497e22403b383c828
class City(Base): <NEW_LINE> <INDENT> __tablename__ = 'cities' <NEW_LINE> id = Column(Integer, primary_key=True, autoincrement=True) <NEW_LINE> name = Column(String(128), nullable=False) <NEW_LINE> state_id = Column(Integer, ForeignKey('states.id'), nullable=False)
Represents a city
62599074cc0a2c111447c763
class BreweryApiMixin(APIView): <NEW_LINE> <INDENT> serializer_class = serializers.BrewerySerializer <NEW_LINE> permission_classes = (IsAuthenticated, permissions.IsMemberOfBrewingCompany) <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return models.Brewery.objects.filter( company__group__user=self.request.user)
Common REST API view information for ``Brewery`` model.
625990743346ee7daa3382f2
class Pwd(Command): <NEW_LINE> <INDENT> def run(self, input_stream, env): <NEW_LINE> <INDENT> return CommandResult(env.get_cur_dir())
команда pwd отображает текущую рабочую директорию например, pwd
625990744f6381625f19a13c
class I18nBuilder(Builder): <NEW_LINE> <INDENT> name = 'i18n' <NEW_LINE> versioning_method = 'text' <NEW_LINE> versioning_compare = None <NEW_LINE> use_message_catalog = False <NEW_LINE> def init(self): <NEW_LINE> <INDENT> super().init() <NEW_LINE> self.env.set_versioning_method(self.versioning_method, self.env.config.gettext_uuid) <NEW_LINE> self.tags = I18nTags() <NEW_LINE> self.catalogs = defaultdict(Catalog) <NEW_LINE> <DEDENT> def get_target_uri(self, docname, typ=None): <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> def get_outdated_docs(self): <NEW_LINE> <INDENT> return self.env.found_docs <NEW_LINE> <DEDENT> def prepare_writing(self, docnames): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def compile_catalogs(self, catalogs, message): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def write_doc(self, docname, doctree): <NEW_LINE> <INDENT> catalog = self.catalogs[docname_to_domain(docname, self.config.gettext_compact)] <NEW_LINE> for toctree in self.env.tocs[docname].traverse(addnodes.toctree): <NEW_LINE> <INDENT> for node, msg in extract_messages(toctree): <NEW_LINE> <INDENT> node.uid = '' <NEW_LINE> catalog.add(msg, node) <NEW_LINE> <DEDENT> <DEDENT> for node, msg in extract_messages(doctree): <NEW_LINE> <INDENT> catalog.add(msg, node) <NEW_LINE> <DEDENT> if 'index' in self.env.config.gettext_additional_targets: <NEW_LINE> <INDENT> for node, entries in traverse_translatable_index(doctree): <NEW_LINE> <INDENT> for typ, msg, tid, main, key_ in entries: <NEW_LINE> <INDENT> for m in split_index_msg(typ, msg): <NEW_LINE> <INDENT> if typ == 'pair' and m in pairindextypes.values(): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> catalog.add(m, node)
General i18n builder.
62599074435de62698e9d72c
class TestStore(object): <NEW_LINE> <INDENT> def test_build_is_not_persistent(self, alchemy_store, alchemy_category_factory): <NEW_LINE> <INDENT> assert alchemy_store.session.query(AlchemyCategory).count() == 0 <NEW_LINE> alchemy_category_factory.build() <NEW_LINE> assert alchemy_store.session.query(AlchemyCategory).count() == 0 <NEW_LINE> <DEDENT> def test_factory_call_persistent(self, alchemy_store, alchemy_category_factory): <NEW_LINE> <INDENT> assert alchemy_store.session.query(AlchemyCategory).count() == 0 <NEW_LINE> alchemy_category_factory() <NEW_LINE> assert alchemy_store.session.query(AlchemyCategory).count() == 1 <NEW_LINE> <DEDENT> def test_create_is_persistent(self, alchemy_store, alchemy_category_factory): <NEW_LINE> <INDENT> assert alchemy_store.session.query(AlchemyCategory).count() == 0 <NEW_LINE> alchemy_category_factory() <NEW_LINE> assert alchemy_store.session.query(AlchemyCategory).count() == 1 <NEW_LINE> <DEDENT> def test_build_pk(self, alchemy_store, alchemy_category_factory): <NEW_LINE> <INDENT> instance = alchemy_category_factory.build() <NEW_LINE> assert instance.pk <NEW_LINE> <DEDENT> def test_create_pk(self, alchemy_store, alchemy_category_factory): <NEW_LINE> <INDENT> instance = alchemy_category_factory.create() <NEW_LINE> assert instance.pk <NEW_LINE> <DEDENT> def test_instance_fixture(self, alchemy_store, alchemy_category): <NEW_LINE> <INDENT> assert alchemy_store.session.query(AlchemyCategory).count() == 1 <NEW_LINE> assert alchemy_category.pk <NEW_LINE> assert alchemy_category.name <NEW_LINE> <DEDENT> def test_get_db_url(self, alchemy_config_parametrized, alchemy_store): <NEW_LINE> <INDENT> config, expectation = alchemy_config_parametrized <NEW_LINE> alchemy_store.config = config <NEW_LINE> assert alchemy_store._get_db_url() == expectation <NEW_LINE> <DEDENT> def test_get_db_url_missing_keys(self, alchemy_config_missing_store_config_parametrized, alchemy_store): <NEW_LINE> <INDENT> alchemy_store.config = 
alchemy_config_missing_store_config_parametrized <NEW_LINE> with pytest.raises(ValueError): <NEW_LINE> <INDENT> alchemy_store._get_db_url() <NEW_LINE> <DEDENT> <DEDENT> def test_init_with_unicode_path(self, alchemy_config, db_path_parametrized): <NEW_LINE> <INDENT> alchemy_config['db_path'] = db_path_parametrized <NEW_LINE> assert SQLAlchemyStore(alchemy_config)
Tests to make sure our store/test setup behaves as expected.
62599074d486a94d0ba2d8dd
class CommandOutput(object): <NEW_LINE> <INDENT> openapi_types = { 'status': 'ProcessingStatus', 'errors': 'list[Error]' } <NEW_LINE> attribute_map = { 'status': 'status', 'errors': 'errors' } <NEW_LINE> def __init__(self, status=None, errors=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._status = None <NEW_LINE> self._errors = None <NEW_LINE> self.discriminator = None <NEW_LINE> if status is not None: <NEW_LINE> <INDENT> self.status = status <NEW_LINE> <DEDENT> if errors is not None: <NEW_LINE> <INDENT> self.errors = errors <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def status(self): <NEW_LINE> <INDENT> return self._status <NEW_LINE> <DEDENT> @status.setter <NEW_LINE> def status(self, status): <NEW_LINE> <INDENT> self._status = status <NEW_LINE> <DEDENT> @property <NEW_LINE> def errors(self): <NEW_LINE> <INDENT> return self._errors <NEW_LINE> <DEDENT> @errors.setter <NEW_LINE> def errors(self, errors): <NEW_LINE> <INDENT> self._errors = errors <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return 
pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, CommandOutput): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, CommandOutput): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
6259907471ff763f4b5e90cf
class HstackDiag(nn.Module): <NEW_LINE> <INDENT> def __init__(self, size, deg=0, diag1=None, diag2=None): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.size = size <NEW_LINE> self.diag1 = diag1 or nn.Parameter(torch.randn(size, 2, 2, deg + 1)) <NEW_LINE> self.diag2 = diag2 or nn.Parameter(torch.randn(size, 2, 2, deg + 1)) <NEW_LINE> assert self.diag1.shape == self.diag2.shape, 'The two diagonals must have the same shape' <NEW_LINE> self.deg = self.diag1.shape[-1] - 1 <NEW_LINE> <DEDENT> def forward(self, input_): <NEW_LINE> <INDENT> output = polymatmul(input_[:, :self.size], self.diag1) + polymatmul(input_[:, self.size:], self.diag2) <NEW_LINE> return output
Horizontally stacked diagonal matrices of size n x 2n. Each entry in a 2x2 matrix of polynomials.
625990742c8b7c6e89bd510d
class GenreListView(ListView): <NEW_LINE> <INDENT> paginate_by = 5 <NEW_LINE> template_name = "refferences_db/genre_list_view.html" <NEW_LINE> model = Genre
displays a list of all Genre objects
6259907492d797404e3897ee
class MF_moMF_MB_dec(RL_agent): <NEW_LINE> <INDENT> def __init__(self, kernels = ['bs', 'ck', 'rb']): <NEW_LINE> <INDENT> self.name = 'MF_moMF_MB_dec' <NEW_LINE> self.param_names = ['alpQ', 'decQ','alpP', 'decP', 'lbd' , 'alpT', 'decT', 'G_td', 'G_tdm', 'G_mb'] <NEW_LINE> self.param_ranges = ['unit']*7 + ['pos']*3 <NEW_LINE> RL_agent.__init__(self, kernels) <NEW_LINE> <DEDENT> @jit <NEW_LINE> def session_likelihood(self, session, params_T, get_DVs = False): <NEW_LINE> <INDENT> choices, second_steps, outcomes = session.unpack_trial_data('CSO') <NEW_LINE> prev_sec_steps = np.hstack((0,second_steps[:-1])) <NEW_LINE> alpQ, decQ, alpP, decP, lbd, alpT, decT, G_td, G_tdm, G_mb = params_T[:10] <NEW_LINE> Q = np.zeros([2,session.n_trials]) <NEW_LINE> P = np.zeros([2,2,session.n_trials]) <NEW_LINE> V = np.zeros([2,session.n_trials]) <NEW_LINE> T = np.zeros([2,session.n_trials]) <NEW_LINE> T[:,0] = 0.5 <NEW_LINE> for i, (c, s, o, ps) in enumerate(zip(choices[:-1], second_steps, outcomes, prev_sec_steps)): <NEW_LINE> <INDENT> n = 1 - c <NEW_LINE> r = 1 - s <NEW_LINE> Q[n,i+1] = Q[n,i] * (1.-decQ) <NEW_LINE> P[:,:,i+1] = P[:,:,i] * (1.-decP) <NEW_LINE> V[r,i+1] = V[r,i] * (1.-decQ) <NEW_LINE> T[n,i+1] = T[n,i] - decT*(T[n,i]-0.5) <NEW_LINE> Q[c,i+1] = (1.-alpQ)*Q[c,i] + alpQ*((1.-lbd)*V[s,i] + lbd*o) <NEW_LINE> P[c,ps,i+1] = (1.-alpP)*P[c,ps,i] + alpP*((1.-lbd)*V[s,i] + lbd*o) <NEW_LINE> V[s,i+1] = (1.-alpQ)*V[s,i] + alpQ*o <NEW_LINE> T[c,i+1] = (1.-alpT)*T[c,i] + alpT*s <NEW_LINE> <DEDENT> P = P[:,prev_sec_steps,np.arange(session.n_trials)] <NEW_LINE> M = T*V[1,:] + (1.-T)*V[0,:] <NEW_LINE> Q_net = G_td*Q + G_tdm*P + G_mb*M <NEW_LINE> Q_net = self.apply_kernels(Q_net, choices, second_steps, params_T) <NEW_LINE> if get_DVs: return self.get_DVs(session, params_T, Q_net, Q, M, P) <NEW_LINE> else: return session_log_likelihood(choices, Q_net)
Mixture agent with forgetting and motor level model free, seperate learning rates for motor and choice level model free.
6259907421bff66bcd72458d
class ShiftDetails(ModelSQL, ModelView): <NEW_LINE> <INDENT> __name__ = 'attendance.shiftdetails' <NEW_LINE> slot = fields.Char('Slot') <NEW_LINE> in_time = fields.Time('In Time') <NEW_LINE> out_time = fields.Time('Out Time') <NEW_LINE> monday = fields.Boolean('Monday') <NEW_LINE> tuesday = fields.Boolean('Tuesday') <NEW_LINE> wednesday = fields.Boolean('Wednesday') <NEW_LINE> thursday = fields.Boolean('Thursday') <NEW_LINE> friday = fields.Boolean('Friday') <NEW_LINE> saturday = fields.Boolean('Saturday') <NEW_LINE> sunday = fields.Boolean('Sunday') <NEW_LINE> no_days = fields.Function(fields.Integer('Number of days per week'), 'get_no_of_days') <NEW_LINE> def get_no_of_days(self, name): <NEW_LINE> <INDENT> return (self.monday + self.tuesday + self.wednesday + self.thursday + self.friday + self.saturday + self.sunday)
Shift Details
625990747d43ff24874280a6
class ProcessedDataSave: <NEW_LINE> <INDENT> def __init__(self, universe_vigeo_df, saving_path): <NEW_LINE> <INDENT> self.universe_vigeo_df = universe_vigeo_df <NEW_LINE> self.saving_path = saving_path <NEW_LINE> <DEDENT> def save_file(self): <NEW_LINE> <INDENT> self._drop_column() <NEW_LINE> self.universe_vigeo_df.to_csv(self.saving_path) <NEW_LINE> <DEDENT> def _drop_column(self): <NEW_LINE> <INDENT> self.universe_vigeo_df.drop(columns=[base.DATE], inplace=True)
Save a DataFrame with vigeo keys merged values. Attributes ---------- universe_vigeo_df: pandas.DataFrame saving_path: str Methods ------- __init__ save_file
625990744e4d562566373d2d
class StaleBeamSearch(ExplorationPolicy): <NEW_LINE> <INDENT> def __init__(self, decoder, config, normalization, train): <NEW_LINE> <INDENT> if not train: <NEW_LINE> <INDENT> raise ValueError( "Stale Beam Search should only be used at train time") <NEW_LINE> <DEDENT> super(StaleBeamSearch, self).__init__( decoder, config, normalization, train) <NEW_LINE> self._fresh_policy = get_exploration_policy( decoder, config.fresh_policy, normalization, train) <NEW_LINE> self._max_age = self._config.max_age <NEW_LINE> self._beam_map = BeamMap() <NEW_LINE> <DEDENT> def get_beams(self, examples, verbose=False): <NEW_LINE> <INDENT> expired_examples = [] <NEW_LINE> fresh_beams = [] <NEW_LINE> fresh_indices = [] <NEW_LINE> for example in examples: <NEW_LINE> <INDENT> if not self._beam_map.contains(example) or self._beam_map.get_beam_age(example) >= self._max_age: <NEW_LINE> <INDENT> fresh_indices.append(False) <NEW_LINE> expired_examples.append(example) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._beam_map.increment_age(example) <NEW_LINE> fresh_indices.append(True) <NEW_LINE> fresh_beams.append(self._beam_map.get_beam(example)) <NEW_LINE> <DEDENT> <DEDENT> if len(expired_examples) > 0: <NEW_LINE> <INDENT> recalculated_beams = self._fresh_policy.get_beams( expired_examples, verbose) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> recalculated_beams = [] <NEW_LINE> <DEDENT> for expired_example, recalculated_beam in zip( expired_examples, recalculated_beams): <NEW_LINE> <INDENT> self._beam_map.set_beam(expired_example, recalculated_beam) <NEW_LINE> <DEDENT> beams = [] <NEW_LINE> for fresh in fresh_indices: <NEW_LINE> <INDENT> if fresh: <NEW_LINE> <INDENT> beams.append(fresh_beams.pop(0)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> beams.append(recalculated_beams.pop(0)) <NEW_LINE> <DEDENT> <DEDENT> return beams <NEW_LINE> <DEDENT> def get_intermediate_beams(self, examples, verbose=False): <NEW_LINE> <INDENT> return self._fresh_policy.get_intermediate_beams( examples, 
verbose=verbose)
Performs beam search every max_age iterations. On the other iterations, returns the stale beams. NOTE: Does not recalculate scores Args: decoder (Decoder) config (Config) normalization (NormalizationOptions) fresh_policy (ExplorationPolicy): the policy that runs to obtain fresh beams train (bool): train or test policy
62599074f548e778e596ceb5
class ObjectManager(AbstractManager): <NEW_LINE> <INDENT> name = 'object_manager' <NEW_LINE> type = 'common' <NEW_LINE> types = ('common',) <NEW_LINE> def __init__(self, object_configs=None, **kwargs): <NEW_LINE> <INDENT> super(ObjectManager, self).__init__(**kwargs) <NEW_LINE> self.config = context.app_config['containers'].get(self.type) or {} <NEW_LINE> self.config_intervals = self.config.get('poll_intervals') or {} <NEW_LINE> self.object_configs = object_configs if object_configs else {} <NEW_LINE> self.objects = context.objects <NEW_LINE> self.last_discover = 0 <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def _discover_objects(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _discover(self): <NEW_LINE> <INDENT> if time.time() > self.last_discover + (self.config_intervals.get('discover') or self.interval): <NEW_LINE> <INDENT> self._discover_objects() <NEW_LINE> <DEDENT> context.log.debug('%s objects: %s' % ( self.type, [obj.definition_hash for obj in self.objects.find_all(types=self.types)] )) <NEW_LINE> <DEDENT> def _start_objects(self): <NEW_LINE> <INDENT> for managed_obj in self.objects.find_all(types=self.types): <NEW_LINE> <INDENT> managed_obj.start() <NEW_LINE> for child_obj in self.objects.find_all(obj_id=managed_obj.id, children=True, include_self=False): <NEW_LINE> <INDENT> child_obj.start() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _schedule_cloud_commands(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _run(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._discover() <NEW_LINE> self._start_objects() <NEW_LINE> self._schedule_cloud_commands() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> context.default_log.error('run failed', exc_info=True) <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> self._run() <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> super(ObjectManager, self).stop() <NEW_LINE> self._stop_objects() <NEW_LINE> <DEDENT> def _stop_objects(self): <NEW_LINE> <INDENT> for managed_obj in 
self.objects.find_all(types=self.types): <NEW_LINE> <INDENT> for child_obj in self.objects.find_all(obj_id=managed_obj.id, children=True, include_self=False): <NEW_LINE> <INDENT> child_obj.stop() <NEW_LINE> self.objects.unregister(obj=child_obj) <NEW_LINE> <DEDENT> managed_obj.stop() <NEW_LINE> self.objects.unregister(obj=managed_obj)
Common Object manager. Object managers manage objects of a specific type. There should a be a different object manager for each type ('system' and 'nginx' for now). Object managers should have a run action that follows the following run pattern: discover, start objects, schedule cloud commands.
625990747b25080760ed8977
class LockedMachine(Machine): <NEW_LINE> <INDENT> event_cls = LockedEvent <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._ident = IdentManager() <NEW_LINE> try: <NEW_LINE> <INDENT> self.machine_context = listify(kwargs.pop('machine_context')) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.machine_context = [PicklableLock()] <NEW_LINE> <DEDENT> self.machine_context.append(self._ident) <NEW_LINE> self.model_context_map = defaultdict(list) <NEW_LINE> _super(LockedMachine, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def add_model(self, model, initial=None, model_context=None): <NEW_LINE> <INDENT> models = listify(model) <NEW_LINE> model_context = listify(model_context) if model_context is not None else [] <NEW_LINE> output = _super(LockedMachine, self).add_model(models, initial) <NEW_LINE> for mod in models: <NEW_LINE> <INDENT> mod = self if mod == 'self' else mod <NEW_LINE> self.model_context_map[mod].extend(self.machine_context) <NEW_LINE> self.model_context_map[mod].extend(model_context) <NEW_LINE> <DEDENT> return output <NEW_LINE> <DEDENT> def remove_model(self, model): <NEW_LINE> <INDENT> models = listify(model) <NEW_LINE> for mod in models: <NEW_LINE> <INDENT> del self.model_context_map[mod] <NEW_LINE> <DEDENT> return _super(LockedMachine, self).remove_model(models) <NEW_LINE> <DEDENT> def __getattribute__(self, item): <NEW_LINE> <INDENT> get_attr = _super(LockedMachine, self).__getattribute__ <NEW_LINE> tmp = get_attr(item) <NEW_LINE> if not item.startswith('_') and inspect.ismethod(tmp): <NEW_LINE> <INDENT> return partial(get_attr('_locked_method'), tmp) <NEW_LINE> <DEDENT> return tmp <NEW_LINE> <DEDENT> def __getattr__(self, item): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return _super(LockedMachine, self).__getattribute__(item) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return _super(LockedMachine, self).__getattr__(item) <NEW_LINE> <DEDENT> <DEDENT> def _add_model_to_state(self, state, 
model): <NEW_LINE> <INDENT> _super(LockedMachine, self)._add_model_to_state(state, model) <NEW_LINE> for prefix in ['enter', 'exit']: <NEW_LINE> <INDENT> callback = "on_{0}_".format(prefix) + state.name <NEW_LINE> func = getattr(model, callback, None) <NEW_LINE> if isinstance(func, partial) and func.func != state.add_callback: <NEW_LINE> <INDENT> state.add_callback(prefix, callback) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _locked_method(self, func, *args, **kwargs): <NEW_LINE> <INDENT> if self._ident.current != get_ident(): <NEW_LINE> <INDENT> with nested(*self.machine_context): <NEW_LINE> <INDENT> return func(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return func(*args, **kwargs)
Machine class which manages contexts. In it's default version the machine uses a `threading.Lock` context to lock access to its methods and event triggers bound to model objects. Attributes: machine_context (dict): A dict of context managers to be entered whenever a machine method is called or an event is triggered. Contexts are managed for each model individually.
625990742c8b7c6e89bd510e
class BoutonDessin(Bouton): <NEW_LINE> <INDENT> def __init__(self, parent=None, height=None, signal=None, dessin=None): <NEW_LINE> <INDENT> Bouton.__init__(self, parent=parent, height=height, signal=signal) <NEW_LINE> Bouton.config(image=dessin)
Classe dérivée de la classe Bouton, à laquelle on ajoute un dessin
62599074ad47b63b2c5a9174
class ExceptionHandler(object): <NEW_LINE> <INDENT> def render(self, request_error): <NEW_LINE> <INDENT> raise NotImplementedError()
Base class for exception handler.
625990744f6381625f19a13d
class DeletionProxy(CustomProxy): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(DeletionProxy, self).__init__(*args, **kwargs) <NEW_LINE> self._options["KO phenotype"] = self._filter_ko_phenotype <NEW_LINE> self._options["Partial phenotype"] = self._filter_partial_phenotype <NEW_LINE> self._options["No phenotype"] = self._filter_no_phenotype <NEW_LINE> <DEDENT> def _filter_ko_phenotype(self, row, _): <NEW_LINE> <INDENT> return self.sourceModel().objective(row) <= 10**-6 <NEW_LINE> <DEDENT> def _filter_partial_phenotype(self, row, _): <NEW_LINE> <INDENT> table = self.sourceModel() <NEW_LINE> return 10**-6 < table.objective(row) < 0.999 * table.max_flux <NEW_LINE> <DEDENT> def _filter_no_phenotype(self, row, _): <NEW_LINE> <INDENT> table = self.sourceModel() <NEW_LINE> return table.objective(row) >= 0.999 * table.max_flux
QSortFilterProxyModel to be used in deletion solution dialogs This proxy model allows the user to filter for categories relevant to deletion solutions.
62599074097d151d1a2c299a
class Router: <NEW_LINE> <INDENT> def __init__(self, table=None, policy=None, status=Status.uninitialized): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.table = None <NEW_LINE> self.policy = None <NEW_LINE> self.install_table(table) <NEW_LINE> self.install_policy(policy) <NEW_LINE> self.status = status <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.policy: <NEW_LINE> <INDENT> return ' '.join(['status:', str(self.status), 'table: {', str(self.table) + '}', 'policy:', str(signature(self.policy))]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ' '.join(['status:', str(self.status), 'table: {', str(self.table) + '}', 'policy:', str(self.policy)]) <NEW_LINE> <DEDENT> <DEDENT> def route(self, packet): <NEW_LINE> <INDENT> if not self.status == Status.routing: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if not self.table: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if self.policy and not self.policy(packet): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.table.match(packet) <NEW_LINE> <DEDENT> def install_table(self, table): <NEW_LINE> <INDENT> self.table = table <NEW_LINE> <DEDENT> def install_policy(self, policy): <NEW_LINE> <INDENT> self.policy = policy <NEW_LINE> <DEDENT> def start_routing(self): <NEW_LINE> <INDENT> self.status = Status.routing <NEW_LINE> <DEDENT> def block(self): <NEW_LINE> <INDENT> self.status = Status.blocking <NEW_LINE> <DEDENT> def mobility(self, rule=None, address=None, interface=None): <NEW_LINE> <INDENT> if rule: <NEW_LINE> <INDENT> self.table.modify(rule) <NEW_LINE> <DEDENT> elif address and interface: <NEW_LINE> <INDENT> self.table.modify(Rule(address, interface)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Rule or (address,interface) must not be None') <NEW_LINE> <DEDENT> <DEDENT> def drop(self, rule=None, address=None): <NEW_LINE> <INDENT> if rule: <NEW_LINE> <INDENT> self.table.drop(rule) <NEW_LINE> <DEDENT> elif address: <NEW_LINE> <INDENT> 
self.table.drop(Rule(address)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Rule or address must not be None') <NEW_LINE> <DEDENT> <DEDENT> def delete(self, rule=None, address=None): <NEW_LINE> <INDENT> if rule: <NEW_LINE> <INDENT> self.table.modify(rule) <NEW_LINE> <DEDENT> elif address: <NEW_LINE> <INDENT> self.table.delete(Rule(address)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Rule or address must not be None')
This class model a generic router of the network
625990748e7ae83300eea9b8
class ArgPlugin(InterfaceActionBase): <NEW_LINE> <INDENT> name = 'ARG Plugin' <NEW_LINE> description = 'A plugin to interact with ARG collections' <NEW_LINE> supported_platforms = ['windows', 'osx', 'linux'] <NEW_LINE> author = 'Alex Kosloff' <NEW_LINE> version = (1, 0, 0) <NEW_LINE> minimum_calibre_version = (0, 7, 53) <NEW_LINE> actual_plugin = 'calibre_plugins.arg_plugin.ui:ArgUI' <NEW_LINE> def is_customizable(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def config_widget(self): <NEW_LINE> <INDENT> from calibre_plugins.casanova_plugin.config import ConfigWidget <NEW_LINE> return ConfigWidget() <NEW_LINE> <DEDENT> def save_settings(self, config_widget): <NEW_LINE> <INDENT> config_widget.save_settings() <NEW_LINE> ac = self.actual_plugin_ <NEW_LINE> if ac is not None: <NEW_LINE> <INDENT> ac.apply_settings()
This class is a simple wrapper that provides information about the actual plugin class. The actual interface plugin class is called InterfacePlugin and is defined in the ui.py file, as specified in the actual_plugin field below. The reason for having two classes is that it allows the command line calibre utilities to run without needing to load the GUI libraries.
625990747047854f46340cdf
class Money: <NEW_LINE> <INDENT> def __init__(self, amount, currency): <NEW_LINE> <INDENT> self.currency = currency <NEW_LINE> self.amount = amount <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.currency.symbol: <NEW_LINE> <INDENT> return f"{self.currency.symbol}{self.amount:.{self.currency.digits}f}" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return f"{self.currency.code} {self.amount:.{self.currency.digits}f}" <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"<Money {str(self)}>" <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return (type(self) == type(other) and self.amount == other.amount and self.currency == other.currency) <NEW_LINE> <DEDENT> def add(self, other): <NEW_LINE> <INDENT> if self.currency.code == other.currency.code: <NEW_LINE> <INDENT> self.amount += other.amount <NEW_LINE> return self <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise DifferentCurrencyError <NEW_LINE> <DEDENT> <DEDENT> def sub(self, other): <NEW_LINE> <INDENT> if self.currency.code == other.currency.code: <NEW_LINE> <INDENT> self.amount -= other.amount <NEW_LINE> return self <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise DifferentCurrencyError <NEW_LINE> <DEDENT> <DEDENT> def mul(self, multiplier): <NEW_LINE> <INDENT> self.amount *= multiplier <NEW_LINE> return self <NEW_LINE> <DEDENT> def div(self, divisor): <NEW_LINE> <INDENT> self.amount /= divisor <NEW_LINE> return self
Represents an amount of money. Requires an amount and a currency.
62599074283ffb24f3cf51d1
class FREQuency(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "FREQuency" <NEW_LINE> args = ["1"] <NEW_LINE> class STEP(SCPINode): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "STEP" <NEW_LINE> args = [] <NEW_LINE> class INCRement(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "INCRement" <NEW_LINE> args = ["1"] <NEW_LINE> <DEDENT> INCRement = INCRement() <NEW_LINE> <DEDENT> STEP = STEP()
SOURce:MARKer:FREQuency Arguments: 1
62599074e5267d203ee6d051
class _Mouse(threading.Thread): <NEW_LINE> <INDENT> BUTTON_1 = 1 << 1 <NEW_LINE> BUTTON_2 = 1 << 2 <NEW_LINE> BUTTONS = BUTTON_1 & BUTTON_2 <NEW_LINE> HEADER = 1 << 3 <NEW_LINE> XSIGN = 1 << 4 <NEW_LINE> YSIGN = 1 << 5 <NEW_LINE> INSTANCE = None <NEW_LINE> def __init__(self, mouse='mice', restrict=True, width=1920, height=1200): <NEW_LINE> <INDENT> super(_Mouse, self).__init__() <NEW_LINE> self.fd = open('/dev/input/' + mouse, 'rb') <NEW_LINE> self.running = False <NEW_LINE> self.buffr = '' if six.PY3 else b'' <NEW_LINE> self.lock = threading.RLock() <NEW_LINE> self.width = width <NEW_LINE> self.height = height <NEW_LINE> self.restrict = restrict <NEW_LINE> from pi3d.Display import Display <NEW_LINE> Display.INSTANCE.external_mouse = self <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> self._x = self._y = self._dx = self._dy = 0 <NEW_LINE> <DEDENT> self.button = False <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> if not self.running: <NEW_LINE> <INDENT> self.running = True <NEW_LINE> super(_Mouse, self).start() <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> while self.running: <NEW_LINE> <INDENT> self._check_event() <NEW_LINE> <DEDENT> self.fd.close() <NEW_LINE> <DEDENT> def position(self): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> return self._x, self._y <NEW_LINE> <DEDENT> <DEDENT> def velocity(self): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> return self._dx, self._dy <NEW_LINE> <DEDENT> <DEDENT> def _check_event(self): <NEW_LINE> <INDENT> if len(self.buffr) >= 3: <NEW_LINE> <INDENT> buttons = ord(self.buffr[0]) <NEW_LINE> self.buffr = self.buffr[1:] <NEW_LINE> if buttons & _Mouse.HEADER: <NEW_LINE> <INDENT> dx, dy = map(ord, self.buffr[0:2]) <NEW_LINE> self.buffr = self.buffr[2:] <NEW_LINE> self.button = buttons & _Mouse.BUTTONS <NEW_LINE> if buttons & _Mouse.XSIGN: <NEW_LINE> <INDENT> dx -= 256 <NEW_LINE> <DEDENT> if buttons & 
_Mouse.YSIGN: <NEW_LINE> <INDENT> dy -= 256 <NEW_LINE> <DEDENT> x = self._x + dx <NEW_LINE> y = self._y + dy <NEW_LINE> if self.restrict: <NEW_LINE> <INDENT> x = min(max(x, 0), self.width - 1) <NEW_LINE> y = min(max(y, 0), self.height - 1) <NEW_LINE> <DEDENT> with self.lock: <NEW_LINE> <INDENT> self._x, self._y, self._dx, self._dy = x, y, dx, dy <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> strn = self.fd.read(3).decode("latin-1") <NEW_LINE> self.buffr += strn <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print("exception is: {}".format(e)) <NEW_LINE> self.stop() <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.running = False
holds Mouse object, see also (the preferred) events methods
62599074091ae35668706560
class GazeboGroundTruth: <NEW_LINE> <INDENT> def __init__(self, model_name, model_new_name, publish_pose=True): <NEW_LINE> <INDENT> self.model_name = model_name <NEW_LINE> self.model_new_name = model_new_name <NEW_LINE> self.model_pose = PoseStamped() <NEW_LINE> self.br = tf2_ros.TransformBroadcaster() <NEW_LINE> if not self.model_name: <NEW_LINE> <INDENT> raise ValueError("'model_name' is an empty string") <NEW_LINE> <DEDENT> self.states_sub = rospy.Subscriber("/gazebo/model_states", ModelStates, self.callback) <NEW_LINE> self.pose_pub = rospy.Publisher("/gazebo/" + self.model_new_name + "/pose", PoseStamped, queue_size=10) <NEW_LINE> <DEDENT> def callback(self, data): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ind = data.name.index(self.model_name) <NEW_LINE> self.model_pose.pose = data.pose[ind] <NEW_LINE> self.model_pose.header.frame_id = "map" <NEW_LINE> self.model_pose.header.stamp = rospy.Time.now() <NEW_LINE> t = TransformStamped() <NEW_LINE> t.header.stamp = rospy.Time.now() <NEW_LINE> t.header.frame_id = "map" <NEW_LINE> t.child_frame_id = self.model_new_name <NEW_LINE> t.transform.translation = self.model_pose.pose.position <NEW_LINE> t.transform.rotation = self.model_pose.pose.orientation <NEW_LINE> self.br.sendTransform(t) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass
This code publishes a transform from the model to map/world frame. Use this tf to validate other sensor errors. link_name: finds
625990745fcc89381b266dec
class TabuSearch(Optimizer): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> return None
Optimization Algorithms: Tabu Search
625990745166f23b2e244cfc
class ReportOrderableReferenceField(ExtensionField, ExtensionFieldMixin, ReferenceField): <NEW_LINE> <INDENT> pass
Archetypes SchemaExtender aware reference field
6259907460cbc95b06365a01
class Field(object): <NEW_LINE> <INDENT> def __init__(self, name=None, value=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> if isinstance(value, str): <NEW_LINE> <INDENT> self.original_value = value <NEW_LINE> <DEDENT> elif value: <NEW_LINE> <INDENT> self.original_value = repr(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.original_value = value <NEW_LINE> <DEDENT> self.value = value or self.default_value() <NEW_LINE> self.errors = [] <NEW_LINE> <DEDENT> def default_value(self): <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> @property <NEW_LINE> def has_content(self): <NEW_LINE> <INDENT> return self.original_value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> name = self.name <NEW_LINE> value = self.value <NEW_LINE> r = ('Field(name=%(name)r, value=%(value)r)') <NEW_LINE> return r % locals()
An ABOUT file field. The initial value is a string. Subclasses can and will alter the value type as needed.
625990744e4d562566373d2f
class MCommand(click.MultiCommand): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.commands = commands(*args, **kwargs) <NEW_LINE> if kwargs and click_args: <NEW_LINE> <INDENT> kwargs.update(click_args) <NEW_LINE> <DEDENT> click.MultiCommand.__init__(self, *args, **kwargs) <NEW_LINE> <DEDENT> def list_commands(self, ctx): <NEW_LINE> <INDENT> return sorted(set(self.commands.list_commands(ctx))) <NEW_LINE> <DEDENT> def invoke(self, ctx): <NEW_LINE> <INDENT> name = ctx.protected_args[0] <NEW_LINE> try: <NEW_LINE> <INDENT> module = plugin_manager.load(module_name, name) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return super(MCommand, self).invoke(ctx) <NEW_LINE> <DEDENT> module_path = module.__file__ <NEW_LINE> if module_path.endswith('pyc'): <NEW_LINE> <INDENT> module_path = module_path[:-1] <NEW_LINE> <DEDENT> if os.name == 'nt': <NEW_LINE> <INDENT> nt_path = module_path[:-2] + 'cmd' <NEW_LINE> if os.path.exists(nt_path): <NEW_LINE> <INDENT> os.execvp(nt_path, [nt_path] + ctx.args) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _LOGGER.critical( "%s cli is not supported on windows", name) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> is_exec = os.access(module_path, os.X_OK) <NEW_LINE> if not is_exec: <NEW_LINE> <INDENT> return super(MCommand, self).invoke(ctx) <NEW_LINE> <DEDENT> utils.sane_execvp(module_path, [os.path.basename(module_path)] + ctx.args) <NEW_LINE> <DEDENT> <DEDENT> def get_command(self, ctx, cmd_name): <NEW_LINE> <INDENT> return self.commands.get_command(ctx, cmd_name) <NEW_LINE> <DEDENT> def format_commands(self, ctx, formatter): <NEW_LINE> <INDENT> rows = [] <NEW_LINE> for subcommand in self.list_commands(ctx): <NEW_LINE> <INDENT> entry_points = list(pkg_resources.iter_entry_points( module_name, subcommand)) <NEW_LINE> dist = entry_points[0].dist <NEW_LINE> if dist.has_metadata('cli_help'): <NEW_LINE> <INDENT> help_text = dist.get_metadata('cli_help') <NEW_LINE> <DEDENT> else: <NEW_LINE> 
<INDENT> help_text = '' <NEW_LINE> <DEDENT> rows.append((subcommand, help_text)) <NEW_LINE> <DEDENT> if rows: <NEW_LINE> <INDENT> with formatter.section('Commands'): <NEW_LINE> <INDENT> formatter.write_dl(rows)
Treadmill CLI driver.
6259907466673b3332c31d27
class ClientFuncsDict(MutableMapping): <NEW_LINE> <INDENT> def __init__(self, client): <NEW_LINE> <INDENT> self.client = client <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> return getattr(self.client.functions, attr) <NEW_LINE> <DEDENT> def __setitem__(self, key, val): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def __delitem__(self, key): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if key not in self.client.functions: <NEW_LINE> <INDENT> raise KeyError <NEW_LINE> <DEDENT> def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> low = { "fun": key, "args": args, "kwargs": kwargs, } <NEW_LINE> pub_data = {} <NEW_LINE> kwargs_keys = list(kwargs) <NEW_LINE> for kwargs_key in kwargs_keys: <NEW_LINE> <INDENT> if kwargs_key.startswith("__pub_"): <NEW_LINE> <INDENT> pub_data[kwargs_key] = kwargs.pop(kwargs_key) <NEW_LINE> <DEDENT> <DEDENT> async_pub = self.client._gen_async_pub(pub_data.get("__pub_jid")) <NEW_LINE> user = salt.utils.user.get_specific_user() <NEW_LINE> return self.client._proc_function( key, low, user, async_pub["tag"], async_pub["jid"], False, ) <NEW_LINE> <DEDENT> return wrapper <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.client.functions) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.client.functions)
Class to make a read-only dict for accessing runner funcs "directly"
625990744527f215b58eb634
class TestPublicTrade(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testPublicTrade(self): <NEW_LINE> <INDENT> pass
PublicTrade unit test stubs
6259907463b5f9789fe86a8d
class SubmitFile(models.Model): <NEW_LINE> <INDENT> project_name = models.CharField(u'项目名称', max_length=32) <NEW_LINE> profile = models.FileField(u'配置文件', upload_to='uploads/profile') <NEW_LINE> tech_template = models.ForeignKey(TechTemplate, verbose_name=u'技术方案模版',) <NEW_LINE> submit_time = models.DateTimeField(u'提交时间', auto_now=True) <NEW_LINE> download_file = models.FilePathField(u'文件下载') <NEW_LINE> user = models.ForeignKey(UserProfile, verbose_name=u'属主', ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return '<%s-%s>' % (self.submit_time, self.project_name)
提交表
625990743346ee7daa3382f4
class mydnnnetwork(): <NEW_LINE> <INDENT> def __init__(self,dataname,modeldir,flag): <NEW_LINE> <INDENT> self.trainsize=250000 <NEW_LINE> self.batchsize=10000 <NEW_LINE> self.validatesize=40000 <NEW_LINE> self.datadim=[9,1] <NEW_LINE> self.startlearningrate= 0.01 <NEW_LINE> self.classificationthreshold = 999 <NEW_LINE> self.Nbofsaveriteractions = 50000 <NEW_LINE> self.printiteractions = 500 <NEW_LINE> self.Nboflearn= 3 <NEW_LINE> if flag<7: <NEW_LINE> <INDENT> self.dnnlayer=[30,24,17,13,self.datadim[-1]] <NEW_LINE> self.dnnact=[tf.nn.tanh,tf.nn.tanh,tf.nn.tanh,tf.nn.tanh,None] <NEW_LINE> <DEDENT> elif flag==8: <NEW_LINE> <INDENT> self.dnnlayer=[9,10,14,20,5,self.datadim[-1]] <NEW_LINE> self.dnnact=[tf.nn.tanh,tf.nn.tanh,tf.nn.tanh,tf.nn.tanh,tf.nn.tanh,tf.nn.relu] <NEW_LINE> <DEDENT> self.lstmlayer=[] <NEW_LINE> self.cnnlayer=[] <NEW_LINE> self.cnnact=[]
define every specific network
62599074baa26c4b54d50bd6
class BaseException(Exception): <NEW_LINE> <INDENT> pass
Base exception for steps.
6259907492d797404e3897ef
class BatchSequence(Sequence): <NEW_LINE> <INDENT> def __init__(self, input_dir, y, batch_size, session, desired_size=DESIRED_IMAGE_SIZE): <NEW_LINE> <INDENT> self.input_dir = input_dir <NEW_LINE> self.desired_size = desired_size <NEW_LINE> self.session = session <NEW_LINE> self.x = ['{}.jpg'.format(i+1) for i in range(y.shape[0])] <NEW_LINE> self.y = y <NEW_LINE> self.batch_size = batch_size <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return int(np.ceil(len(self.x) / float(self.batch_size))) <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> idx_min = idx * self.batch_size <NEW_LINE> idx_max = np.min([idx_min + self.batch_size, len(self.x)]) <NEW_LINE> idxs = np.arange(idx_min, idx_max) <NEW_LINE> batch_x = [self.x[i] for i in idxs] <NEW_LINE> batch_y = [self.y[i][1:] for i in idxs] <NEW_LINE> return np.array([ image_to_ndarray(join(self.input_dir, x), self.session, desired_size=self.desired_size) for x in batch_x]), np.array(batch_y)
This class generates batches that can be provided to a neural network. It can be used for validation only. For training use the BatchGenerator class. Arguments: Sequence {class} -- a sequence never repeats items.
6259907455399d3f05627e41
@unittest.skipUnless(settings.RUN_BLOCKSTORE_TESTS, "Requires a running Blockstore server") <NEW_LINE> class BundleCacheClearTest(TestWithBundleMixin, unittest.TestCase): <NEW_LINE> <INDENT> def test_bundle_cache_clear(self): <NEW_LINE> <INDENT> cache = BundleCache(self.bundle.uuid) <NEW_LINE> key1 = ("some", "key", "1") <NEW_LINE> value1 = "value1" <NEW_LINE> cache.set(key1, value1) <NEW_LINE> self.assertEqual(cache.get(key1), value1) <NEW_LINE> api.write_draft_file(self.draft.uuid, "test.txt", "we need a changed file in order to publish a new version") <NEW_LINE> api.commit_draft(self.draft.uuid) <NEW_LINE> self.assertEqual(cache.get(key1), value1) <NEW_LINE> cache.clear() <NEW_LINE> self.assertEqual(cache.get(key1), None)
Tests for BundleCache's clear() method. Requires MAX_BLOCKSTORE_CACHE_DELAY to be non-zero. This clear() method does not actually clear the cache but rather just means "a new bundle/draft version has been created, so immediately start reading/writing cache keys using the new version number.
6259907444b2445a339b75f2
class Seed(Command): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def register_arguments(parser): <NEW_LINE> <INDENT> parser.add_argument('users', type=int) <NEW_LINE> parser.add_argument('--max-bytes', type=int, default=DEFAULT_MAX_BYTES) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> Log.info('Seeding %d user(s)' % self.app.args.users) <NEW_LINE> for i in range(self.app.args.users): <NEW_LINE> <INDENT> user = self.app.models.User.create( username=self.random_string(8), password=self.random_string(8), max_bytes=self.app.args.max_bytes ) <NEW_LINE> note = 'Created user with username="%s" password="%s" max_bytes=%d' % (user.username, user.password, user.max_bytes) <NEW_LINE> print(note) <NEW_LINE> Log.info(note) <NEW_LINE> <DEDENT> <DEDENT> def random_string(self, length=8): <NEW_LINE> <INDENT> letters = string.ascii_lowercase <NEW_LINE> return ''.join(random.choice(letters) for i in range(length))
Seed the database
62599074cc0a2c111447c765
class DoraemonListAPI(Resource): <NEW_LINE> <INDENT> __abstract__ = True <NEW_LINE> def __init__(self, obj, ignore=None, **kw): <NEW_LINE> <INDENT> super(DoraemonListAPI, self).__init__() <NEW_LINE> self.parser = reqparse.RequestParser() <NEW_LINE> self.parser.add_argument( 'page', type=inputs.positive, help='Page must be a positive integer') <NEW_LINE> self.parser.add_argument( 'pp', type=inputs.positive, help='PerPage must be a positive integer', dest='per_page') <NEW_LINE> self.parser.add_argument( 'extend', type=inputs.boolean, help='extend must be boolean') <NEW_LINE> self.parser.add_argument( 'opt', type=str, help='options must be splited by %%') <NEW_LINE> setattr(self, 'params', []) <NEW_LINE> type_dict = {'str_params': str, 'int_params': inputs.positive} <NEW_LINE> for k, w in kw.items(): <NEW_LINE> <INDENT> if k in type_dict.keys(): <NEW_LINE> <INDENT> self.params.extend(w) <NEW_LINE> for x in w: <NEW_LINE> <INDENT> self.parser.add_argument(x, type=type_dict[k]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> setattr(self, 'obj', obj) <NEW_LINE> setattr(self, 'ignore', ignore) <NEW_LINE> <DEDENT> @auth.PrivilegeAuth(privilegeRequired="inventoryAdmin") <NEW_LINE> def get(self): <NEW_LINE> <INDENT> pages, data, kw = False, [], {} <NEW_LINE> args = self.parser.parse_args() <NEW_LINE> for x in self.params: <NEW_LINE> <INDENT> if args[x]: <NEW_LINE> <INDENT> kw[x] = args[x] <NEW_LINE> <DEDENT> <DEDENT> option = args['opt'].split('%%') if args['opt'] else None <NEW_LINE> depth = 1 if args['extend'] else 0 <NEW_LINE> page = args['page'] <NEW_LINE> if kw or not page: <NEW_LINE> <INDENT> data = self.obj.get( depth=depth, option=option, ignore=self.ignore, **kw) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> query = [] <NEW_LINE> per_page = args['per_page'] <NEW_LINE> if per_page: <NEW_LINE> <INDENT> query = self.obj.get( page=page, per_page=per_page, depth=depth, option=option, ignore=self.ignore, **kw) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> query = self.obj.get( 
page=page, depth=depth, option=option, ignore=self.ignore, **kw) <NEW_LINE> <DEDENT> if query: <NEW_LINE> <INDENT> data, pages = query[0], query[1] <NEW_LINE> <DEDENT> <DEDENT> return {'totalpage': pages, 'data': data}, 200
Super DataList Restful API. Supported By Eater. Methods: GET (Readonly) Please note: Attributes 'params' and 'obj' are required during implementation. 'params': a list of retrievable arguments of 'obj', can be [] or (). 'obj': an instance of one of the models belonging to Eater.
6259907499cbb53fe6832813
class CookiesTransport(xmlrpc.client.Transport): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._cookies = [] <NEW_LINE> <DEDENT> def send_headers(self, connection, headers): <NEW_LINE> <INDENT> if self._cookies: <NEW_LINE> <INDENT> connection.putheader("Cookie", "; ".join(self._cookies)) <NEW_LINE> <DEDENT> super().send_headers(connection, headers) <NEW_LINE> <DEDENT> def parse_response(self, response): <NEW_LINE> <INDENT> for header in response.msg.get_all("Set-Cookie"): <NEW_LINE> <INDENT> cookie = header.split(";", 1)[0] <NEW_LINE> self._cookies.append(cookie) <NEW_LINE> <DEDENT> return super().parse_response(response)
Only used for the class tapatalk itself. http://stackoverflow.com/a/25876504
6259907401c39578d7f143c9
class EndpointParseException(Ice.LocalException): <NEW_LINE> <INDENT> def __init__(self, str=''): <NEW_LINE> <INDENT> self.str = str <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return IcePy.stringifyException(self) <NEW_LINE> <DEDENT> __repr__ = __str__ <NEW_LINE> _ice_name = 'Ice::EndpointParseException'
This exception is raised if there was an error while parsing an endpoint.
6259907497e22403b383c82c
class FpTree(Tree): <NEW_LINE> <INDENT> def __init__(self, transactions, supportThreshold): <NEW_LINE> <INDENT> super(FpTree, self).__init__(root=FpTreeNode(name='root', count=0, parent=None)) <NEW_LINE> self.supportThreshold = supportThreshold <NEW_LINE> self.items = [] <NEW_LINE> self.itemCounts = {} <NEW_LINE> self.headerTable = {} <NEW_LINE> self.createTree(transactions) <NEW_LINE> <DEDENT> def createTree(self, transactions): <NEW_LINE> <INDENT> self.items, self.itemCounts, transactions = self.filterItemsBySupport(transactions) <NEW_LINE> self.sortTransactions(transactions) <NEW_LINE> for transaction in transactions: <NEW_LINE> <INDENT> self.updateTree(transaction, self.root) <NEW_LINE> <DEDENT> <DEDENT> def filterItemsBySupport(self, transactions): <NEW_LINE> <INDENT> transactionSets = [set(transaction) for transaction in transactions] <NEW_LINE> itemCounts = {} <NEW_LINE> for transaction in transactionSets: <NEW_LINE> <INDENT> for item in transaction: <NEW_LINE> <INDENT> itemCount = itemCounts.get(item, 0) <NEW_LINE> itemCounts[item] = itemCount + 1 <NEW_LINE> <DEDENT> <DEDENT> filteredItems = [] <NEW_LINE> transactionCount = len(transactions) <NEW_LINE> for item, itemCount in itemCounts.items(): <NEW_LINE> <INDENT> support = itemCount / transactionCount <NEW_LINE> if support >= self.supportThreshold: <NEW_LINE> <INDENT> filteredItems.append({ 'name': item, 'count': itemCount }) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for transaction in transactionSets: <NEW_LINE> <INDENT> if item in transaction: <NEW_LINE> <INDENT> transaction.remove(item) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> filteredTransactions = [list(transaction) for transaction in transactionSets] <NEW_LINE> return filteredItems, itemCounts, filteredTransactions <NEW_LINE> <DEDENT> def sortTransactions(self, transactions): <NEW_LINE> <INDENT> for transaction in transactions: <NEW_LINE> <INDENT> transaction.sort(key=lambda itemName: self.itemCounts[itemName], reverse=True) <NEW_LINE> 
<DEDENT> <DEDENT> def updateTree(self, items, treeNode): <NEW_LINE> <INDENT> if len(items) == 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> item = items[0] <NEW_LINE> if item not in treeNode.children: <NEW_LINE> <INDENT> childNode = FpTreeNode(name=item, count=0, parent=treeNode) <NEW_LINE> treeNode.children[item] = childNode <NEW_LINE> self.updateHeaderTable(item, childNode) <NEW_LINE> <DEDENT> childNode = treeNode.children[item] <NEW_LINE> childNode.addCount(1) <NEW_LINE> self.updateTree(items[1:], childNode) <NEW_LINE> <DEDENT> def updateHeaderTable(self, item, childNode): <NEW_LINE> <INDENT> if item not in self.headerTable: <NEW_LINE> <INDENT> self.headerTable[item] = childNode <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> currentNode = self.headerTable[item] <NEW_LINE> while currentNode.nextNode is not None: <NEW_LINE> <INDENT> currentNode = currentNode.nextNode <NEW_LINE> <DEDENT> currentNode.nextNode = childNode
FPTree树实现,继承自Tree
625990745fc7496912d48efe
class KwargsOrDoubleStarred: <NEW_LINE> <INDENT> pass
kwarg_or_double_starred: | NAME '=' expression | '**' expression
6259907416aa5153ce401e03
class Employee: <NEW_LINE> <INDENT> empCount = 0 <NEW_LINE> def __init__(self, name, salary): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.salary = salary <NEW_LINE> Employee.empCount += 1 <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def display_count(): <NEW_LINE> <INDENT> print("Total Employee %d" % Employee.empCount) <NEW_LINE> <DEDENT> def display_employee(self): <NEW_LINE> <INDENT> print("Name : ", self.name, ", Salary: ", self.salary)
Common base class for all employees
6259907426068e7796d4e266
class ReleasingByMerge(ReleasingMixin): <NEW_LINE> <INDENT> def release_can_be_skipped(self, ticket): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def release_ticket(self, ticket): <NEW_LINE> <INDENT> from .q import Q <NEW_LINE> if not self.settings.RELEASE_BRANCH: <NEW_LINE> <INDENT> raise QError("Must set RELEASE_BRANCH in order to use ReleasingByMerge.") <NEW_LINE> <DEDENT> self.Q('my','revert') <NEW_LINE> Git(self.settings)('checkout "'+ self.settings.RELEASE_BRANCH + '"') <NEW_LINE> Git(self.settings)('pull') <NEW_LINE> Git(self.settings)('merge "'+ ticket.branch_name() + '"') <NEW_LINE> self.Q('my','apply') <NEW_LINE> Git(self.settings)('push "' + self.settings.GIT_REMOTE + '" "' + self.settings.RELEASE_BRANCH + '"') <NEW_LINE> Git(self.settings)('checkout "'+ ticket.branch_name() + '"') <NEW_LINE> self.Q('done') <NEW_LINE> Git(self.settings)('checkout "'+ self.settings.RELEASE_BRANCH + '"') <NEW_LINE> return True
Simply merge the ticket to the master.
62599074097d151d1a2c299d
class NlpApiCaller(ApiCaller): <NEW_LINE> <INDENT> update_state = pyqtSignal(str, int, dict) <NEW_LINE> signal_indicator = pyqtSignal(str, str) <NEW_LINE> def __init__(self, text): <NEW_LINE> <INDENT> ApiCaller.__init__(self, text) <NEW_LINE> self.logger = logging.getLogger(type(self).__name__) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.stop() <NEW_LINE> <DEDENT> @ehpyqtSlot(str, str, int) <NEW_LINE> def memory(self, field, value, user_id): <NEW_LINE> <INDENT> data = { 'field': field, 'value': value, 'user_id': 'SURI{}'.format(user_id) } <NEW_LINE> data = json.dumps(data) <NEW_LINE> r = requests.post(self.url + '/nlp/memory', data=data, headers={'Content-Type': 'application/json'}) <NEW_LINE> if r.status_code != 200: <NEW_LINE> <INDENT> self.logger.error('HTTP {} error occurred while updating memory.'.format(r.status_code)) <NEW_LINE> self.logger.error(r.content) <NEW_LINE> self.signal_indicator.emit("converse", "orange") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.info('Memory updated successfully.') <NEW_LINE> <DEDENT> <DEDENT> @ehpyqtSlot(str, int) <NEW_LINE> @ehpyqtSlot(str) <NEW_LINE> def answer(self, text, user_id=None): <NEW_LINE> <INDENT> data = { 'text': text, 'language': self.DEFAULT_LANGUAGE, } <NEW_LINE> if user_id: <NEW_LINE> <INDENT> data['user_id'] = user_id <NEW_LINE> <DEDENT> r = requests.post(self.url + '/nlp/answer', data=data) <NEW_LINE> if r.status_code != 200: <NEW_LINE> <INDENT> self.logger.error('HTTP {} error occurred while retrieving nlp answer.'.format(r.status_code)) <NEW_LINE> self.logger.error(r.content) <NEW_LINE> self.signal_indicator.emit("converse", "orange") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> json_object = r.json() <NEW_LINE> message = json_object["messages"][0]["content"] <NEW_LINE> intent = json_object["nlp"]["intents"][0]["slug"] <NEW_LINE> self.update_state.emit("converse", State.CONVERSE_NEW, {"intent": intent, "reply": message}) <NEW_LINE> <DEDENT> <DEDENT> @ehpyqtSlot(str, int) 
<NEW_LINE> @ehpyqtSlot(str) <NEW_LINE> def intent(self, text, user_id=None): <NEW_LINE> <INDENT> data = { 'text': text, 'language': self.DEFAULT_LANGUAGE, } <NEW_LINE> if id: <NEW_LINE> <INDENT> data['user_id'] = user_id <NEW_LINE> <DEDENT> r = requests.post(self.url + '/nlp/intent', data=data) <NEW_LINE> if r.status_code != 200: <NEW_LINE> <INDENT> self.logger.error('HTTP {} error occurred while retrieving intent answer.'.format(r.status_code)) <NEW_LINE> self.logger.error(r.content) <NEW_LINE> self.signal_indicator.emit("converse", "orange") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> json_object = r.json() <NEW_LINE> intent = json_object["intents"][0]["slug"] <NEW_LINE> self.update_state.emit("converse", State.CONVERSE_NEW, {"intent": intent, "reply": "?"})
API class for NLP API https://github.com/suricats/surirobot-api-converse
62599074dd821e528d6da617
class DPEncoder(FairseqEncoder): <NEW_LINE> <INDENT> def __init__(self, dictionary, embed_dim=256, max_positions=1024, pos="learned", num_layers=2, num_heads=8, filter_size=256, hidden_size=256, dropout=0.1, attention_dropout=0.1, relu_dropout=0.1, convolutions=4): <NEW_LINE> <INDENT> super().__init__(dictionary) <NEW_LINE> assert pos == "learned" or pos == "timing" or pos == "nopos" <NEW_LINE> self.dropout = dropout <NEW_LINE> self.attention_dropout = attention_dropout <NEW_LINE> self.relu_dropout = relu_dropout <NEW_LINE> self.pos = pos <NEW_LINE> num_embeddings = len(dictionary) <NEW_LINE> padding_idx = dictionary.pad() <NEW_LINE> self.embed_tokens = Embedding(num_embeddings, embed_dim, padding_idx) <NEW_LINE> if self.pos == "learned": <NEW_LINE> <INDENT> self.embed_positions = PositionalEmbedding(max_positions, embed_dim, padding_idx, left_pad=LanguagePairDataset.LEFT_PAD_SOURCE) <NEW_LINE> <DEDENT> if self.pos == "timing": <NEW_LINE> <INDENT> self.embed_positions = SinusoidalPositionalEmbedding(embed_dim, padding_idx, left_pad=LanguagePairDataset.LEFT_PAD_SOURCE) <NEW_LINE> <DEDENT> self.layers = num_layers <NEW_LINE> self.attnpath = AttnPathEncoder(self.layers, num_heads=num_heads, filter_size=filter_size, hidden_size=hidden_size, dropout=dropout, attention_dropout=attention_dropout, relu_dropout=relu_dropout) <NEW_LINE> self.cnnpath = CNNPathEncoder(self.layers, hidden_size=hidden_size, dropout=dropout, in_embed=hidden_size, out_embed=hidden_size) <NEW_LINE> <DEDENT> def forward(self, src_tokens, src_lengths): <NEW_LINE> <INDENT> input_to_padding = attention_bias_ignore_padding(src_tokens, self.dictionary.pad()) <NEW_LINE> encoder_self_attention_bias = encoder_attention_bias(input_to_padding) <NEW_LINE> encoder_input = self.embed_tokens(src_tokens) <NEW_LINE> if self.pos != "nopos": <NEW_LINE> <INDENT> encoder_input += self.embed_positions(src_tokens) <NEW_LINE> <DEDENT> x = F.dropout(encoder_input, p=self.dropout, training=self.training) <NEW_LINE> attn_x = 
self.attnpath(x) <NEW_LINE> cnn_x = self.cnnpath(x) <NEW_LINE> return (attn_x, cnn_x) <NEW_LINE> <DEDENT> def max_positions(self): <NEW_LINE> <INDENT> if self.pos == "learned": <NEW_LINE> <INDENT> return self.embed_positions.max_positions() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 1024
Transformer encoder.
625990742ae34c7f260aca0f
class ChanceEventSampler(object): <NEW_LINE> <INDENT> def __init__(self, seed=None): <NEW_LINE> <INDENT> self.seed(seed) <NEW_LINE> <DEDENT> def seed(self, seed=None): <NEW_LINE> <INDENT> self._rng = np.random.RandomState(seed) <NEW_LINE> <DEDENT> def __call__(self, state): <NEW_LINE> <INDENT> actions, probs = zip(*state.chance_outcomes()) <NEW_LINE> return self._rng.choice(actions, p=probs)
Default sampler for external chance events.
62599074adb09d7d5dc0be95
class LoteImportacion12(RN3811): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(LoteImportacion12, self).__init__() <NEW_LINE> self.cuit_contribuyente = None <NEW_LINE> self.fecha_percepcion = None <NEW_LINE> self.tipo_comprobante = None <NEW_LINE> self.letra_comprobante = None <NEW_LINE> self.nro_sucursal = None <NEW_LINE> self.nro_emision = None <NEW_LINE> self.monto_imponible = None <NEW_LINE> self.importe_percepcion = None <NEW_LINE> self.fecha_emision = None <NEW_LINE> self.tipo_operacion = None <NEW_LINE> <DEDENT> def ordered_fields(self): <NEW_LINE> <INDENT> return [ self.cuit_contribuyente, self.fecha_percepcion, self.tipo_comprobante, self.letra_comprobante, self.nro_sucursal, self.nro_emision, self.monto_imponible, self.importe_percepcion, self.fecha_emision, self.tipo_operacion, ]
Registro de campos que conforman una alícuota de un comprobante. Resolución Normativa Nº 038/11 1.2. Percepciones Act. 7 método Percibido (quincenal)
6259907432920d7e50bc7972
class Muparser(Package): <NEW_LINE> <INDENT> homepage = "http://muparser.beltoforion.de/" <NEW_LINE> url = "https://github.com/beltoforion/muparser/archive/v2.2.5.tar.gz" <NEW_LINE> version('2.2.6.1', '410d29b4c58d1cdc2fc9ed1c1c7f67fe') <NEW_LINE> version('2.2.5', '02dae671aa5ad955fdcbcd3fee313fb7') <NEW_LINE> patch('auto_ptr.patch', when='@2.2.5') <NEW_LINE> depends_on('[email protected]:', when='@2.2.6:', type='build') <NEW_LINE> @when('@2.2.6:') <NEW_LINE> def install(self, spec, prefix): <NEW_LINE> <INDENT> cmake_args = [ '-DENABLE_SAMPLES=OFF', '-DENABLE_OPENMP=OFF', '-DBUILD_SHARED_LIBS=ON' ] <NEW_LINE> cmake_args.extend(std_cmake_args) <NEW_LINE> with working_dir('spack-build', create=True): <NEW_LINE> <INDENT> cmake('..', *cmake_args) <NEW_LINE> make() <NEW_LINE> make('install') <NEW_LINE> <DEDENT> <DEDENT> @when('@2.2.5') <NEW_LINE> def install(self, spec, prefix): <NEW_LINE> <INDENT> options = ['--disable-debug', '--disable-samples', '--disable-dependency-tracking', 'CXXFLAGS={0}'.format(self.compiler.cxx11_flag), '--prefix=%s' % prefix] <NEW_LINE> configure(*options) <NEW_LINE> make(parallel=False) <NEW_LINE> make("install")
C++ math expression parser library.
6259907491f36d47f2231b24
class SystemMode(enum.IntEnum): <NEW_LINE> <INDENT> OFF = 0x00 <NEW_LINE> HEAT_COOL = 0x01 <NEW_LINE> COOL = 0x03 <NEW_LINE> HEAT = 0x04 <NEW_LINE> AUX_HEAT = 0x05 <NEW_LINE> PRE_COOL = 0x06 <NEW_LINE> FAN_ONLY = 0x07 <NEW_LINE> DRY = 0x08 <NEW_LINE> SLEEP = 0x09
ZCL System Mode attribute enum.
6259907455399d3f05627e44
class _TimelimitThread(threading.Thread): <NEW_LINE> <INDENT> def __init__( self, cgroups, hardtimelimit, softtimelimit, walltimelimit, pid_to_kill, cores, callbackFn=lambda reason: None, ): <NEW_LINE> <INDENT> super(_TimelimitThread, self).__init__() <NEW_LINE> self.name = "TimelimitThread-" + self.name <NEW_LINE> self.finished = threading.Event() <NEW_LINE> if hardtimelimit or softtimelimit: <NEW_LINE> <INDENT> assert CPUACCT in cgroups <NEW_LINE> <DEDENT> assert walltimelimit is not None <NEW_LINE> if cores: <NEW_LINE> <INDENT> self.cpuCount = len(cores) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.cpuCount = multiprocessing.cpu_count() <NEW_LINE> <DEDENT> except NotImplementedError: <NEW_LINE> <INDENT> self.cpuCount = 1 <NEW_LINE> <DEDENT> <DEDENT> self.cgroups = cgroups <NEW_LINE> self.timelimit = hardtimelimit or (60 * 60 * 24 * 365 * 100) <NEW_LINE> self.softtimelimit = softtimelimit or (60 * 60 * 24 * 365 * 100) <NEW_LINE> self.latestKillTime = time.monotonic() + walltimelimit <NEW_LINE> self.pid_to_kill = pid_to_kill <NEW_LINE> self.callback = callbackFn <NEW_LINE> <DEDENT> def read_cputime(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.cgroups.read_cputime() <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> time.sleep(1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> while not self.finished.is_set(): <NEW_LINE> <INDENT> usedCpuTime = self.read_cputime() if CPUACCT in self.cgroups else 0 <NEW_LINE> remainingCpuTime = self.timelimit - usedCpuTime <NEW_LINE> remainingSoftCpuTime = self.softtimelimit - usedCpuTime <NEW_LINE> remainingWallTime = self.latestKillTime - time.monotonic() <NEW_LINE> logging.debug( "TimelimitThread for process %s: used CPU time: %s, remaining CPU time: %s, " "remaining soft CPU time: %s, remaining wall time: %s.", self.pid_to_kill, usedCpuTime, remainingCpuTime, remainingSoftCpuTime, remainingWallTime, ) <NEW_LINE> if 
remainingCpuTime <= 0: <NEW_LINE> <INDENT> self.callback("cputime") <NEW_LINE> logging.debug( "Killing process %s due to CPU time timeout.", self.pid_to_kill ) <NEW_LINE> util.kill_process(self.pid_to_kill) <NEW_LINE> self.finished.set() <NEW_LINE> return <NEW_LINE> <DEDENT> if remainingWallTime <= 0: <NEW_LINE> <INDENT> self.callback("walltime") <NEW_LINE> logging.warning( "Killing process %s due to wall time timeout.", self.pid_to_kill ) <NEW_LINE> util.kill_process(self.pid_to_kill) <NEW_LINE> self.finished.set() <NEW_LINE> return <NEW_LINE> <DEDENT> if remainingSoftCpuTime <= 0: <NEW_LINE> <INDENT> self.callback("cputime-soft") <NEW_LINE> util.kill_process(self.pid_to_kill, signal.SIGTERM) <NEW_LINE> self.softtimelimit = self.timelimit <NEW_LINE> <DEDENT> remainingTime = min( remainingCpuTime / self.cpuCount, remainingSoftCpuTime / self.cpuCount, remainingWallTime, ) <NEW_LINE> self.finished.wait(remainingTime + 1) <NEW_LINE> <DEDENT> <DEDENT> def cancel(self): <NEW_LINE> <INDENT> self.finished.set()
Thread that periodically checks whether the given process has already reached its timelimit. After this happens, the process is terminated.
625990742c8b7c6e89bd5113
class Config(dict): <NEW_LINE> <INDENT> _defaults = {} <NEW_LINE> _aliases = {} <NEW_LINE> def __init__( self, path: str | Path | None = None, _loaded_from_file: bool = False, *args, **kwargs, ): <NEW_LINE> <INDENT> self.path = Path(path) if path is not None else None <NEW_LINE> self._loaded_from_file = _loaded_from_file <NEW_LINE> if self._loaded_from_file and (not self.path or not self.path.exists()): <NEW_LINE> <INDENT> raise ValueError("cannot have been loaded from file as it doesn't exist.") <NEW_LINE> <DEDENT> super().__init__(*args, **kwargs) <NEW_LINE> self._migrate() <NEW_LINE> <DEDENT> def _migrate(self): <NEW_LINE> <INDENT> def check(k, v, selfdict): <NEW_LINE> <INDENT> if k in selfdict: <NEW_LINE> <INDENT> updated = False <NEW_LINE> if isinstance(v, dict): <NEW_LINE> <INDENT> for kk, vv in v.items(): <NEW_LINE> <INDENT> updated |= check(kk, vv, selfdict[k]) <NEW_LINE> <DEDENT> <DEDENT> return updated <NEW_LINE> <DEDENT> if k in self._aliases: <NEW_LINE> <INDENT> for alias in self._aliases[k]: <NEW_LINE> <INDENT> if alias in selfdict: <NEW_LINE> <INDENT> warnings.warn( f"Your configuration spec has old key '{alias}' which has " f"been re-named '{k}'." 
) <NEW_LINE> selfdict[k] = selfdict[alias] <NEW_LINE> del selfdict[alias] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if k not in selfdict: <NEW_LINE> <INDENT> selfdict[k] = v <NEW_LINE> <DEDENT> if isinstance(v, dict): <NEW_LINE> <INDENT> for kk, vv in v.items(): <NEW_LINE> <INDENT> check(kk, vv, selfdict[k]) <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> updated = False <NEW_LINE> for k, v in self._defaults.items(): <NEW_LINE> <INDENT> updated |= check(k, v, self) <NEW_LINE> <DEDENT> if updated and self.path: <NEW_LINE> <INDENT> self.write() <NEW_LINE> <DEDENT> <DEDENT> def _add_to_schema(self, new: dict): <NEW_LINE> <INDENT> self._defaults.update(new) <NEW_LINE> self._migrate() <NEW_LINE> <DEDENT> @contextlib.contextmanager <NEW_LINE> def use(self, **kwargs): <NEW_LINE> <INDENT> for k in kwargs: <NEW_LINE> <INDENT> if k not in self: <NEW_LINE> <INDENT> raise KeyError( f"Cannot use {k} in config, as it doesn't exist. " f"Available keys: {list(self.keys())}." ) <NEW_LINE> <DEDENT> <DEDENT> backup = copy.deepcopy(self) <NEW_LINE> for k, v in kwargs.items(): <NEW_LINE> <INDENT> if isinstance(self[k], dict): <NEW_LINE> <INDENT> self[k].update(v) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self[k] = v <NEW_LINE> <DEDENT> <DEDENT> yield self <NEW_LINE> for k in kwargs: <NEW_LINE> <INDENT> self[k] = backup[k] <NEW_LINE> <DEDENT> <DEDENT> def write(self, fname=None): <NEW_LINE> <INDENT> fname = fname or self.path <NEW_LINE> with open(fname, "w") as fl: <NEW_LINE> <INDENT> yaml.dump(self._as_dict(), fl) <NEW_LINE> <DEDENT> self.path = Path(fname) <NEW_LINE> <DEDENT> def _as_dict(self): <NEW_LINE> <INDENT> return {k: v for k, v in self.items()} <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def load(cls, file_name): <NEW_LINE> <INDENT> with open(file_name) as fl: <NEW_LINE> <INDENT> config = yaml.load(fl, Loader=yaml.FullLoader) <NEW_LINE> <DEDENT> return cls(file_name, _loaded_from_file=True, **config)
Simple override of dict that adds a context manager. Allows specifying extra config options, but ensures that all specified options are defined.
625990744f88993c371f11b6
class SplitFailed(Exception): <NEW_LINE> <INDENT> pass
One of the following situations may raise this error: 1.
625990747cff6e4e811b736c
class MGMSG_MOT_REQ_PMDMOTOROUTPUTPARAMS(MessageWithoutData): <NEW_LINE> <INDENT> message_id = 0x04DB <NEW_LINE> _params_names = ['message_id'] + ['chan_ident', None] + ['dest', 'source']
See :class:`MGMSG_MOT_SET_PMDMOTOROUTPUTPARAMS`. :param chan_ident: channel number (0x01, 0x02) :type chan_ident: int
62599074a8370b77170f1cf7
class Device(base.Device): <NEW_LINE> <INDENT> COMMAND_OFF = 0x00 <NEW_LINE> COMMAND_ON = 0x01 <NEW_LINE> def on(self, addr, src_ep, seq, disable_default_rsp): <NEW_LINE> <INDENT> from ....protocol.zigbee import command <NEW_LINE> self.zcl_command(addr, src_ep, CLUSTER_ID, self.COMMAND_ON, 1, command.Packet.ZCL_FRAME_CLIENT_SERVER_DIR, 0, seq, disable_default_rsp, []) <NEW_LINE> <DEDENT> def off(self, addr, src_ep, seq, disable_default_rsp): <NEW_LINE> <INDENT> from ....protocol.zigbee import command <NEW_LINE> self.zcl_command(addr, src_ep, CLUSTER_ID, self.COMMAND_OFF, 1, command.Packet.ZCL_FRAME_CLIENT_SERVER_DIR, 0, seq, disable_default_rsp, [])
ZCL device class which supports on/off cluster
62599074460517430c432cee