code | docstring | _id
---|---|---|
class SliceTiming(SPMCommand): <NEW_LINE> <INDENT> input_spec = SliceTimingInputSpec <NEW_LINE> output_spec = SliceTimingOutputSpec <NEW_LINE> _jobtype = 'temporal' <NEW_LINE> _jobname = 'st' <NEW_LINE> def _format_arg(self, opt, spec, val): <NEW_LINE> <INDENT> if opt == 'in_files': <NEW_LINE> <INDENT> return scans_for_fnames(filename_to_list(val), keep4d=False, separate_sessions=True) <NEW_LINE> <DEDENT> return super(SliceTiming, self)._format_arg(opt, spec, val) <NEW_LINE> <DEDENT> def _list_outputs(self): <NEW_LINE> <INDENT> outputs = self._outputs().get() <NEW_LINE> outputs['timecorrected_files'] = [] <NEW_LINE> filelist = filename_to_list(self.inputs.in_files) <NEW_LINE> for f in filelist: <NEW_LINE> <INDENT> if isinstance(f, list): <NEW_LINE> <INDENT> run = [fname_presuffix(in_f, prefix=self.inputs.out_prefix) for in_f in f] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> run = fname_presuffix(f, prefix=self.inputs.out_prefix) <NEW_LINE> <DEDENT> outputs['timecorrected_files'].append(run) <NEW_LINE> <DEDENT> return outputs | Use spm to perform slice timing correction.
http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=19
Examples
--------
>>> from nipype.interfaces.spm import SliceTiming
>>> st = SliceTiming()
>>> st.inputs.in_files = 'functional.nii'
>>> st.inputs.num_slices = 32
>>> st.inputs.time_repetition = 6.0
>>> st.inputs.time_acquisition = 6. - 6./32.
>>> st.inputs.slice_order = list(range(32,0,-1))
>>> st.inputs.ref_slice = 1
>>> st.run() # doctest: +SKIP | 6259905494891a1f408ba182 |
class TreillisBar: <NEW_LINE> <INDENT> def __init__(self, model, nodes, lenght, alpha=0): <NEW_LINE> <INDENT> self.material = model.material <NEW_LINE> self.lenght = lenght <NEW_LINE> self.nodes = nodes <NEW_LINE> self.alpha = alpha <NEW_LINE> self.A = np.matrix([[np.cos(self.alpha)**2, np.cos(self.alpha) * np.sin(self.alpha)], [np.cos(self.alpha) * np.sin(self.alpha), np.sin(self.alpha)**2]]) <NEW_LINE> self.k = Matrix(4, 4) <NEW_LINE> self.k.compose(self.A, 0, 0) <NEW_LINE> self.k.compose(self.A, 2, 2) <NEW_LINE> self.k.compose(-1 * self.A, 2, 0) <NEW_LINE> self.k.compose(-1 * self.A, 0, 2) <NEW_LINE> self.k *= model.material.E * model.section.S / self.lenght <NEW_LINE> <DEDENT> def deformationsTensor(self, u, v): <NEW_LINE> <INDENT> d = DeformationTensor(self) <NEW_LINE> d.vector[0] = (u**2 + v**2)**(1/2) / self.lenght <NEW_LINE> return d | Bar element for trusses. | 625990543cc13d1c6d466c56
class TailViewStartCommand(sublime_plugin.TextCommand): <NEW_LINE> <INDENT> def is_enabled(self): <NEW_LINE> <INDENT> if not plugin.is_enabled(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> tail_view = get_tail_view(sublime_view=self.view) <NEW_LINE> if not tail_view: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return tail_view.is_stopped() <NEW_LINE> <DEDENT> def run(self, edit, **kwargs): <NEW_LINE> <INDENT> tail_view = get_tail_view(sublime_view=self.view) <NEW_LINE> if not tail_view: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return tail_view.start() | Sublime Text text command ``tail_view_start``.
Starts tailing in a Tail view. Buffer contents will be preserved and
configured sources will be tailed from their current position. | 625990548da39b475be04703 |
class Factory(object): <NEW_LINE> <INDENT> def __init__(self, channel_name, api_data_source_factory, api_list_data_source_factory, intro_data_source_factory, samples_data_source_factory, sidenav_data_source_factory, compiled_fs_factory, ref_resolver_factory, public_template_path, private_template_path): <NEW_LINE> <INDENT> self._branch_info = _MakeChannelDict(channel_name) <NEW_LINE> self._api_data_source_factory = api_data_source_factory <NEW_LINE> self._api_list_data_source_factory = api_list_data_source_factory <NEW_LINE> self._intro_data_source_factory = intro_data_source_factory <NEW_LINE> self._samples_data_source_factory = samples_data_source_factory <NEW_LINE> self._sidenav_data_source_factory = sidenav_data_source_factory <NEW_LINE> self._cache = compiled_fs_factory.Create(self._CreateTemplate, TemplateDataSource) <NEW_LINE> self._ref_resolver = ref_resolver_factory.Create() <NEW_LINE> self._public_template_path = public_template_path <NEW_LINE> self._private_template_path = private_template_path <NEW_LINE> self._static_resources = '/%s/static' % channel_name <NEW_LINE> <DEDENT> def _CreateTemplate(self, template_name, text): <NEW_LINE> <INDENT> return Handlebar(self._ref_resolver.ResolveAllLinks(text)) <NEW_LINE> <DEDENT> def Create(self, request, path): <NEW_LINE> <INDENT> return TemplateDataSource( self._branch_info, self._api_data_source_factory.Create(request), self._api_list_data_source_factory.Create(), self._intro_data_source_factory.Create(), self._samples_data_source_factory.Create(request), self._sidenav_data_source_factory.Create(path), self._cache, self._public_template_path, self._private_template_path, self._static_resources) | A factory to create lightweight TemplateDataSource instances bound to
individual Requests. | 6259905438b623060ffaa2db |
class Address(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = "Address" <NEW_LINE> verbose_name_plural = "Addresses" <NEW_LINE> <DEDENT> address_line = models.CharField(max_length=256) <NEW_LINE> postcode = models.CharField(max_length=10) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.address_line | Address model. | 6259905499cbb53fe6832403 |
class QueryJobForm(Form): <NEW_LINE> <INDENT> name = StringField('Name', render_kw={ 'placeholder': 'Your job name' }, validators=[ validators.DataRequired() ]) <NEW_LINE> annotation = TextAreaField('Annotation', render_kw={ 'placeholder': 'Job\'s annotation' }) <NEW_LINE> connection_id = SelectField('Connection', coerce=int) <NEW_LINE> query_string = TextAreaField('Query', validators=[validators.DataRequired()] ) <NEW_LINE> query_time_out = IntegerField('Query Time Out', validators=[ validators.DataRequired(), validators.NumberRange(min=0) ], default=config.get('DB_TIMEOUT', 0)) <NEW_LINE> emails = FieldList(StringField('Email', render_kw={ 'placeholder': '[email protected]' }, validators=[ validators.Optional(), validators.Email() ]), 'Send Result To', min_entries=1) <NEW_LINE> schedules = FieldList(FormField(ScheduleForm), 'Job\'s schedules:') <NEW_LINE> def populate_obj(self, obj): <NEW_LINE> <INDENT> for name, field in iteritems(self._fields): <NEW_LINE> <INDENT> if name not in ['query_time_out', 'emails', 'schedules']: <NEW_LINE> <INDENT> field.populate_obj(obj, name) | Used to create/edit data getting jobs. | 625990544e4d56256637391f |
class MedicalPathology(models.Model): <NEW_LINE> <INDENT> _inherit = 'medical.pathology' <NEW_LINE> carepoint_bind_ids = fields.One2many( comodel_name='carepoint.medical.pathology', inverse_name='odoo_id', string='Carepoint Bindings', ) | Adds the ``one2many`` relation to the Carepoint bindings
(``carepoint_bind_ids``) | 625990542ae34c7f260ac5ff |
class Dummy(object): <NEW_LINE> <INDENT> def __init__(self, write_function): <NEW_LINE> <INDENT> self.write = write_function | a dummy class that has the required fields to be used in
the write method call below | 625990540fa83653e46f63fd |
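The `Dummy` row above is a duck-typing trick: any object exposing a `.write()` attribute can stand in for a file-like object. A minimal sketch, using `sys.stdout.write` as an illustrative write function:

```python
import sys

# Wrap any write function; the instance then quacks like a writable file.
out = Dummy(sys.stdout.write)
out.write("hello from a fake file\n")
```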
class BrokerConnection(object): <NEW_LINE> <INDENT> def __init__(self, host, port, buffer_size=1024 * 1024, source_host='', source_port=0): <NEW_LINE> <INDENT> self._buff = bytearray(buffer_size) <NEW_LINE> self.host = host <NEW_LINE> self.port = port <NEW_LINE> self._socket = None <NEW_LINE> self.source_host = source_host <NEW_LINE> self.source_port = source_port <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.disconnect() <NEW_LINE> <DEDENT> @property <NEW_LINE> def connected(self): <NEW_LINE> <INDENT> return self._socket is not None <NEW_LINE> <DEDENT> def connect(self, timeout): <NEW_LINE> <INDENT> log.debug("Connecting to %s:%s", self.host, self.port) <NEW_LINE> self._socket = socket.create_connection( (self.host, self.port), timeout / 1000, (self.source_host, self.source_port) ) <NEW_LINE> if self._socket is not None: <NEW_LINE> <INDENT> log.debug("Successfully connected to %s:%s", self.host, self.port) <NEW_LINE> <DEDENT> <DEDENT> def disconnect(self): <NEW_LINE> <INDENT> if self._socket is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self._socket.close() <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self._socket = None <NEW_LINE> <DEDENT> <DEDENT> def reconnect(self): <NEW_LINE> <INDENT> self.disconnect() <NEW_LINE> self.connect() <NEW_LINE> <DEDENT> def request(self, request): <NEW_LINE> <INDENT> bytes = request.get_bytes() <NEW_LINE> if not self._socket: <NEW_LINE> <INDENT> raise SocketDisconnectedError <NEW_LINE> <DEDENT> self._socket.sendall(bytes) <NEW_LINE> <DEDENT> def response(self): <NEW_LINE> <INDENT> size = self._socket.recv(4) <NEW_LINE> if len(size) == 0: <NEW_LINE> <INDENT> self.disconnect() <NEW_LINE> raise SocketDisconnectedError <NEW_LINE> <DEDENT> size = struct.unpack('!i', size)[0] <NEW_LINE> recvall_into(self._socket, self._buff, size) <NEW_LINE> return buffer(self._buff[4:4 + size]) | BrokerConnection thinly wraps a `socket.create_connection` call
and handles the sending and receiving of data that conform to the
Kafka binary protocol over that socket. | 6259905491af0d3eaad3b341
class InventoryEntry(Base): <NEW_LINE> <INDENT> __tablename__ = 'inventory_items' <NEW_LINE> entry_id = Column(INTEGER, autoincrement=True, primary_key=True) <NEW_LINE> owner = Column(BigInteger, ForeignKey('accounts.id')) <NEW_LINE> item_type = Column(types.Enum(ItemType)) <NEW_LINE> item_info = Column(MutableList.as_mutable(JSON), server_default='[]') <NEW_LINE> owner_acc = relationship("Account") <NEW_LINE> def _create_item(self, args): <NEW_LINE> <INDENT> return item_class_map[self.item_type](**args) <NEW_LINE> <DEDENT> def get_items(self): <NEW_LINE> <INDENT> return [self._create_item(i) for i in self.item_info] <NEW_LINE> <DEDENT> def add_item(self, item): <NEW_LINE> <INDENT> logger.info(f"added {item} to <@{self.owner}>'s inventory") <NEW_LINE> self.item_info.append(item.to_json()) <NEW_LINE> <DEDENT> def remove_item(self, index=0): <NEW_LINE> <INDENT> logger.info(f"Removed {self._create_item(**self.item_info[index])} from <@{self.owner}>'s inventory") <NEW_LINE> self.item_info.pop(index) <NEW_LINE> <DEDENT> def equip_item(self, index=0): <NEW_LINE> <INDENT> logger.info(f"Equipped {self.item_info[index]} to <@{self.owner}>") <NEW_LINE> self.owner_acc.equip(self._create_item(self.item_info[index])) <NEW_LINE> <DEDENT> def update_item(self, item): <NEW_LINE> <INDENT> assert reversed_class_map[type(item)] == item_class_map[self.item_type.value] <NEW_LINE> self.item_info = item.to_json() | An inventory class | 6259905407f4c71912bb0953 |
class Accumulator: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.count = 0 <NEW_LINE> self.accum = None <NEW_LINE> <DEDENT> def __call__(self, x): <NEW_LINE> <INDENT> self.count += 1 <NEW_LINE> if self.accum is None: <NEW_LINE> <INDENT> self.accum = np.array(x) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.accum += x | This is for testing callbacks. | 62599054097d151d1a2c2589 |
class RandomRIR(object): <NEW_LINE> <INDENT> def __init__(self, sampling_rate, max_rt_60=0.5, min_room_dim=3, max_room_dim=5): <NEW_LINE> <INDENT> self.sampling_rate = sampling_rate <NEW_LINE> self.max_rt_60 = max_rt_60 <NEW_LINE> self.max_room_dim = max_room_dim <NEW_LINE> self.min_room_dim = min_room_dim <NEW_LINE> <DEDENT> def create_rir(self, src_cnt, mic_cnt=1): <NEW_LINE> <INDENT> room_dim = utils.create_new_room(self.min_room_dim, self.max_room_dim) <NEW_LINE> room = Room(room_dim.dim) <NEW_LINE> rt60 = utils.generate_rt60(0.1, self.max_rt_60) <NEW_LINE> all_ele = [] <NEW_LINE> all_mics = [] <NEW_LINE> for mic_id in np.arange(mic_cnt): <NEW_LINE> <INDENT> mic_pos = utils.new_element_pos(room_dim, all_ele) <NEW_LINE> mic = Microphone(mic_pos.dim, 2, orientation=[0.0, 0.0, 0.0], direction='cardioid') <NEW_LINE> all_mics.append(mic) <NEW_LINE> all_ele.append(mic_pos) <NEW_LINE> <DEDENT> all_srcs = [] <NEW_LINE> for mic_id in np.arange(src_cnt): <NEW_LINE> <INDENT> src_pos = utils.new_element_pos(room_dim, all_ele) <NEW_LINE> all_srcs.append(src_pos) <NEW_LINE> all_ele.append(src_pos) <NEW_LINE> <DEDENT> all_rir = [] <NEW_LINE> sim_rir = RoomSim(self.sampling_rate, room, all_mics, RT60=rt60) <NEW_LINE> for src in all_srcs: <NEW_LINE> <INDENT> rir = sim_rir.create_rir(src.dim) <NEW_LINE> all_rir.append(rir) <NEW_LINE> <DEDENT> return all_rir <NEW_LINE> <DEDENT> def reverberate(self, src_list, mic_cnt=1): <NEW_LINE> <INDENT> src_cnt = len(src_list) <NEW_LINE> rirs = self.create_rir(src_cnt, mic_cnt=mic_cnt) <NEW_LINE> rev_sig = [] <NEW_LINE> for src_idx, src_rir in enumerate(rirs): <NEW_LINE> <INDENT> src_ch = [] <NEW_LINE> for mic_src_rir in src_rir.T: <NEW_LINE> <INDENT> data_rev = olafilt.olafilt(mic_src_rir, src_list[src_idx]) <NEW_LINE> src_ch.append(data_rev) <NEW_LINE> <DEDENT> src_ch = np.stack(src_ch, 1) <NEW_LINE> rev_sig.append(src_ch) <NEW_LINE> <DEDENT> return rev_sig | Generate a random room, microphone and source position and generate the corresponding RIR.
# Arguments
sampling_rate: Sampling rate of the RIR
max_rt_60: Maximum value of RT60 in seconds. Actual RT60 is random between [0.1, max_rt_60]
min_room_dim, max_room_dim: Minimum and maximum values of the room dimensions.
Room dimensions are random picks between [min_room_dim, max_room_dim]
# Usage
rir_if = RandomRIR(sampling_rate=16000)
src = [np.random.rand(10000), np.random.rand(10000)]
rev_sig = rir_if.reverberate(src) | 62599054b57a9660fecd2f94 |
class LinkAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ('name', 'link_url', ) | How to display the Link object in the Admin UI | 62599054a79ad1619776b54a |
class ServiceEndpointPolicyDefinitionListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[ServiceEndpointPolicyDefinition]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["ServiceEndpointPolicyDefinition"]] = None, next_link: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(ServiceEndpointPolicyDefinitionListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = next_link | Response for ListServiceEndpointPolicyDefinition API service call. Retrieves all service endpoint policy definition that belongs to a service endpoint policy.
:param value: The service endpoint policy definition in a service endpoint policy.
:type value: list[~azure.mgmt.network.v2020_07_01.models.ServiceEndpointPolicyDefinition]
:param next_link: The URL to get the next set of results.
:type next_link: str | 625990547d847024c075d8f5 |
class LiveUpdate(FullnameMixin, RedditBase): <NEW_LINE> <INDENT> STR_FIELD = "id" <NEW_LINE> _kind = "LiveUpdate" <NEW_LINE> @cachedproperty <NEW_LINE> def contrib(self) -> _LiveUpdateContribution: <NEW_LINE> <INDENT> return LiveUpdateContribution(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def thread(self) -> LiveThread: <NEW_LINE> <INDENT> return self._thread <NEW_LINE> <DEDENT> def __init__( self, reddit: Reddit, thread_id: Optional[str] = None, update_id: Optional[str] = None, _data: Optional[Dict[str, Any]] = None, ): <NEW_LINE> <INDENT> if _data is not None: <NEW_LINE> <INDENT> super().__init__(reddit, _data=_data) <NEW_LINE> self._fetched = True <NEW_LINE> <DEDENT> elif thread_id and update_id: <NEW_LINE> <INDENT> super().__init__(reddit, _data=None) <NEW_LINE> self._thread = LiveThread(self._reddit, thread_id) <NEW_LINE> self.id = update_id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError( "Either `thread_id` and `update_id`, or " "`_data` must be provided." ) <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self, attribute: str, value: Any): <NEW_LINE> <INDENT> if attribute == "author": <NEW_LINE> <INDENT> value = Redditor(self._reddit, name=value) <NEW_LINE> <DEDENT> super().__setattr__(attribute, value) <NEW_LINE> <DEDENT> def _fetch(self): <NEW_LINE> <INDENT> url = API_PATH["live_focus"].format( thread_id=self.thread.id, update_id=self.id ) <NEW_LINE> other = self._reddit.get(url)[0] <NEW_LINE> self.__dict__.update(other.__dict__) <NEW_LINE> self._fetched = True | An individual :class:`.LiveUpdate` object.
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
necessarily comprehensive.
======================= ===================================================
Attribute Description
======================= ===================================================
``author`` The :class:`.Redditor` who made the update.
``body`` Body of the update, as Markdown.
``body_html`` Body of the update, as HTML.
``created_utc`` The time the update was created, as `Unix Time`_.
``stricken`` A ``bool`` representing whether or not the update
was stricken (see :meth:`.strike`).
======================= ===================================================
.. _Unix Time: https://en.wikipedia.org/wiki/Unix_time | 625990543539df3088ecd7c0 |
class Layout(object): <NEW_LINE> <INDENT> def __init__(self, *fields): <NEW_LINE> <INDENT> self.fields = list(fields) <NEW_LINE> <DEDENT> def render(self, form, form_style, context): <NEW_LINE> <INDENT> form.rendered_fields = [] <NEW_LINE> html = "" <NEW_LINE> for field in self.fields: <NEW_LINE> <INDENT> html += render_field(field, form, form_style, context) <NEW_LINE> <DEDENT> for field in form.fields.keys(): <NEW_LINE> <INDENT> if not field in form.rendered_fields: <NEW_LINE> <INDENT> html += render_field(field, form, form_style, context) <NEW_LINE> <DEDENT> <DEDENT> return html | Form Layout. It is composed of Layout objects: `Fieldset`, `Row`, `Column`, `MultiField`,
`HTML`, `ButtonHolder`, `Button`, `Hidden`, `Reset`, `Submit` and fields. Form fields
have to be strings.
Layout objects `Fieldset`, `Row`, `Column`, `MultiField` and `ButtonHolder` can hold other
Layout objects within. Though `ButtonHolder` should only hold `HTML` and BaseInput
inherited classes: `Button`, `Hidden`, `Reset` and `Submit`.
You need to add your `Layout` to the `FormHelper` using its method `add_layout`.
Example::
layout = Layout(
Fieldset('Company data',
'is_company'
),
Fieldset(_('Contact details'),
'email',
Row('password1', 'password2'),
'first_name',
'last_name',
HTML('<img src="/media/somepicture.jpg"/>'),
'company'
),
ButtonHolder(
Submit('Save', 'Save', css_class='button white'),
),
)
helper.add_layout(layout) | 6259905499cbb53fe6832405 |
class FileMemberActionIndividualResult(bb.Union): <NEW_LINE> <INDENT> _catch_all = None <NEW_LINE> @classmethod <NEW_LINE> def success(cls, val): <NEW_LINE> <INDENT> return cls('success', val) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def member_error(cls, val): <NEW_LINE> <INDENT> return cls('member_error', val) <NEW_LINE> <DEDENT> def is_success(self): <NEW_LINE> <INDENT> return self._tag == 'success' <NEW_LINE> <DEDENT> def is_member_error(self): <NEW_LINE> <INDENT> return self._tag == 'member_error' <NEW_LINE> <DEDENT> def get_success(self): <NEW_LINE> <INDENT> if not self.is_success(): <NEW_LINE> <INDENT> raise AttributeError("tag 'success' not set") <NEW_LINE> <DEDENT> return self._value <NEW_LINE> <DEDENT> def get_member_error(self): <NEW_LINE> <INDENT> if not self.is_member_error(): <NEW_LINE> <INDENT> raise AttributeError("tag 'member_error' not set") <NEW_LINE> <DEDENT> return self._value <NEW_LINE> <DEDENT> def _process_custom_annotations(self, annotation_type, processor): <NEW_LINE> <INDENT> super(FileMemberActionIndividualResult, self)._process_custom_annotations(annotation_type, processor) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'FileMemberActionIndividualResult(%r, %r)' % (self._tag, self._value) | This class acts as a tagged union. Only one of the ``is_*`` methods will
return true. To get the associated value of a tag (if one exists), use the
corresponding ``get_*`` method.
:ivar Optional[sharing.AccessLevel]
sharing.FileMemberActionIndividualResult.success: Member was
successfully removed from this file. If AccessLevel is given, the member
still has access via a parent shared folder.
:ivar FileMemberActionError
sharing.FileMemberActionIndividualResult.member_error: User was not able
to perform this action. | 6259905407d97122c42181c4 |
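A minimal handling sketch for the tagged union above; `result` is assumed to be a `FileMemberActionIndividualResult` returned by a Dropbox sharing call. The guard-then-get pattern matters because each ``get_*`` method raises ``AttributeError`` unless its tag is set:

```python
def describe(result):
    # Always check the tag before extracting its value; get_*() raises otherwise.
    if result.is_success():
        access = result.get_success()  # Optional AccessLevel
        if access is None:
            return "member removed; no remaining access"
        return "member removed; access kept via a parent shared folder: %r" % access
    if result.is_member_error():
        return "action failed: %r" % result.get_member_error()
    return "unrecognized tag"
```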
class WarningValue(_messages.Message): <NEW_LINE> <INDENT> class CodeValueValuesEnum(_messages.Enum): <NEW_LINE> <INDENT> CLEANUP_FAILED = 0 <NEW_LINE> DEPRECATED_RESOURCE_USED = 1 <NEW_LINE> DISK_SIZE_LARGER_THAN_IMAGE_SIZE = 2 <NEW_LINE> FIELD_VALUE_OVERRIDEN = 3 <NEW_LINE> INJECTED_KERNELS_DEPRECATED = 4 <NEW_LINE> NEXT_HOP_ADDRESS_NOT_ASSIGNED = 5 <NEW_LINE> NEXT_HOP_CANNOT_IP_FORWARD = 6 <NEW_LINE> NEXT_HOP_INSTANCE_NOT_FOUND = 7 <NEW_LINE> NEXT_HOP_INSTANCE_NOT_ON_NETWORK = 8 <NEW_LINE> NEXT_HOP_NOT_RUNNING = 9 <NEW_LINE> NOT_CRITICAL_ERROR = 10 <NEW_LINE> NO_RESULTS_ON_PAGE = 11 <NEW_LINE> REQUIRED_TOS_AGREEMENT = 12 <NEW_LINE> RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING = 13 <NEW_LINE> RESOURCE_NOT_DELETED = 14 <NEW_LINE> SINGLE_INSTANCE_PROPERTY_TEMPLATE = 15 <NEW_LINE> UNREACHABLE = 16 <NEW_LINE> <DEDENT> class DataValueListEntry(_messages.Message): <NEW_LINE> <INDENT> key = _messages.StringField(1) <NEW_LINE> value = _messages.StringField(2) <NEW_LINE> <DEDENT> code = _messages.EnumField('CodeValueValuesEnum', 1) <NEW_LINE> data = _messages.MessageField('DataValueListEntry', 2, repeated=True) <NEW_LINE> message = _messages.StringField(3) | [Output Only] Informational warning which replaces the list of
operations when the list is empty.
Enums:
CodeValueValuesEnum: [Output Only] A warning code, if applicable. For
example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no
results in the response.
Messages:
DataValueListEntry: A DataValueListEntry object.
Fields:
code: [Output Only] A warning code, if applicable. For example, Compute
Engine returns NO_RESULTS_ON_PAGE if there are no results in the
response.
data: [Output Only] Metadata about this warning in key: value format.
For example: "data": [ { "key": "scope", "value": "zones/us-east1-d" } ]
message: [Output Only] A human-readable description of the warning code. | 62599054462c4b4f79dbcf1f |
class CrosstalkConfig(Config): <NEW_LINE> <INDENT> minPixelToMask = Field( dtype=float, doc="Set crosstalk mask plane for pixels over this value.", default=45000 ) <NEW_LINE> crosstalkMaskPlane = Field( dtype=str, doc="Name for crosstalk mask plane.", default="CROSSTALK" ) <NEW_LINE> crosstalkBackgroundMethod = ChoiceField( dtype=str, doc="Type of background subtraction to use when applying correction.", default="None", allowed={ "None": "Do no background subtraction.", "AMP": "Subtract amplifier-by-amplifier background levels.", "DETECTOR": "Subtract detector level background." }, ) <NEW_LINE> useConfigCoefficients = Field( dtype=bool, doc="Ignore the detector crosstalk information in favor of CrosstalkConfig values?", default=False, ) <NEW_LINE> crosstalkValues = ListField( dtype=float, doc=("Amplifier-indexed crosstalk coefficients to use. This should be arranged as a 1 x nAmp**2 " "list of coefficients, such that when reshaped by crosstalkShape, the result is nAmp x nAmp. " "This matrix should be structured so CT * [amp0 amp1 amp2 ...]^T returns the column " "vector [corr0 corr1 corr2 ...]^T."), default=[0.0], ) <NEW_LINE> crosstalkShape = ListField( dtype=int, doc="Shape of the coefficient array. This should be equal to [nAmp, nAmp].", default=[1], ) <NEW_LINE> def getCrosstalk(self, detector=None): <NEW_LINE> <INDENT> if self.useConfigCoefficients is True: <NEW_LINE> <INDENT> coeffs = np.array(self.crosstalkValues).reshape(self.crosstalkShape) <NEW_LINE> if detector is not None: <NEW_LINE> <INDENT> nAmp = len(detector) <NEW_LINE> if coeffs.shape != (nAmp, nAmp): <NEW_LINE> <INDENT> raise RuntimeError("Constructed crosstalk coeffients do not match detector shape. " f"{coeffs.shape} {nAmp}") <NEW_LINE> <DEDENT> <DEDENT> return coeffs <NEW_LINE> <DEDENT> elif detector is not None and detector.hasCrosstalk() is True: <NEW_LINE> <INDENT> return detector.getCrosstalk() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeError("Attempted to correct crosstalk without crosstalk coefficients") <NEW_LINE> <DEDENT> <DEDENT> def hasCrosstalk(self, detector=None): <NEW_LINE> <INDENT> if self.useConfigCoefficients is True and self.crosstalkValues is not None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif detector is not None and detector.hasCrosstalk() is True: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | Configuration for intra-detector crosstalk removal. | 62599054d53ae8145f91997c |
class TextCoderView(LoginRequiredMixin, generic.DetailView): <NEW_LINE> <INDENT> template_name = 'textcoder.html' <NEW_LINE> pk_url_kwarg = 'dictionary_pk' <NEW_LINE> default_dictionary_pk = 1 <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return enhance_models.Dictionary.objects.all() <NEW_LINE> <DEDENT> def get_object(self, queryset=None): <NEW_LINE> <INDENT> if queryset is None: <NEW_LINE> <INDENT> queryset = self.get_queryset() <NEW_LINE> <DEDENT> pk = self.kwargs.get(self.pk_url_kwarg, None) <NEW_LINE> if pk is None: <NEW_LINE> <INDENT> pk = self.default_dictionary_pk <NEW_LINE> <DEDENT> queryset = queryset.filter(pk=pk) <NEW_LINE> try: <NEW_LINE> <INDENT> obj = queryset.get() <NEW_LINE> <DEDENT> except queryset.model.DoesNotExist: <NEW_LINE> <INDENT> raise Http404(_("No %(verbose_name)s found matching the query") % {'verbose_name': queryset.model._meta.verbose_name}) <NEW_LINE> <DEDENT> return obj | The view for the visualization tool. | 6259905424f1403a9268635c |
class LogoutView(authviews.LogoutView): <NEW_LINE> <INDENT> url = reverse_lazy('home') <NEW_LINE> def get(self, *args, **kwargs): <NEW_LINE> <INDENT> auth.logout(self.request) <NEW_LINE> messages.success(self.request, "You successfully logged out") <NEW_LINE> return super(LogoutView, self).get(*args, **kwargs) | User logout view | 62599054f7d966606f749345
class Cookies(object): <NEW_LINE> <INDENT> def __init__(self, template, output_factory): <NEW_LINE> <INDENT> self._template = template <NEW_LINE> self._output_factory = output_factory <NEW_LINE> self._counter = 0 <NEW_LINE> <DEDENT> def _new_output_dir(self): <NEW_LINE> <INDENT> dirname = 'bake{:02d}'.format(self._counter) <NEW_LINE> output_dir = self._output_factory(dirname) <NEW_LINE> self._counter += 1 <NEW_LINE> return str(output_dir) <NEW_LINE> <DEDENT> def bake(self, extra_context=None): <NEW_LINE> <INDENT> exception = None <NEW_LINE> exit_code = 0 <NEW_LINE> project_dir = None <NEW_LINE> try: <NEW_LINE> <INDENT> project_dir = cookiecutter( self._template, no_input=True, extra_context=extra_context, output_dir=self._new_output_dir() ) <NEW_LINE> <DEDENT> except SystemExit as e: <NEW_LINE> <INDENT> if e.code != 0: <NEW_LINE> <INDENT> exception = e <NEW_LINE> <DEDENT> exit_code = e.code <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> exception = e <NEW_LINE> exit_code = -1 <NEW_LINE> <DEDENT> return Result(exception, exit_code, project_dir) | Class to provide convenient access to the cookiecutter API. | 625990540fa83653e46f63ff |
class ApplicationGatewayProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): <NEW_LINE> <INDENT> HTTP = "Http" <NEW_LINE> HTTPS = "Https" | Protocol.
| 6259905463b5f9789fe8668c |
class Decoder(nn.Module): <NEW_LINE> <INDENT> def __init__(self, vocab_size, inputsize=opt_model_options.rnn_size, hidden_dim=opt_model_options.rnn_size, embedding_dim=300, vectors=None): <NEW_LINE> <INDENT> super(Decoder, self).__init__() <NEW_LINE> self.hidden_dim, self.embedding_dim, self.vocab_size = int(hidden_dim), int(embedding_dim), int(vocab_size) <NEW_LINE> self.inputsize = inputsize <NEW_LINE> self.bidirectional = False <NEW_LINE> self.mode = 'LSTM' <NEW_LINE> if vectors is not None: <NEW_LINE> <INDENT> self.embedding_layer = nn.Embedding.from_pretrained(torch.FloatTensor(vectors)) <NEW_LINE> self.embedding_layer.weight.requires_grad = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.embedding_layer = nn.Embedding(self.vocab_size, self.embedding_dim) <NEW_LINE> <DEDENT> self.rnn = nn.LSTM(self.inputsize + self.embedding_dim, self.hidden_dim) <NEW_LINE> <DEDENT> def forward(self, y_prev, h_prev): <NEW_LINE> <INDENT> y_emb = self.embedding_layer(y_prev).transpose(0, 1) <NEW_LINE> o, h = self.rnn.forward(torch.cat([y_emb, h_prev[0]], dim=-1), h_prev) <NEW_LINE> return o, h | TODO: Base paper (https://arxiv.org/pdf/1704.04368.pdf) might want us to use same embedding in enc, dec.
Tie them up in that case. | 62599054be8e80087fbc059c |
class TransactionPolicy_two: <NEW_LINE> <INDENT> def __init__(self, policy_data, **extra_data): <NEW_LINE> <INDENT> self._data = {**policy_data} <NEW_LINE> self._data.update(**extra_data) <NEW_LINE> <DEDENT> def change_in_policy(self, customer_id, **new_policy_data): <NEW_LINE> <INDENT> self._data[customer_id].update(**new_policy_data) <NEW_LINE> <DEDENT> def __getitem__(self, customer_id): <NEW_LINE> <INDENT> return self._data[customer_id] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._data) | A solution based on composition
( TransactionPolicy itself does not become a dictionary; it uses one. )
1. Store the dictionary in a private attribute ( _data )
2. Create a proxy to that dictionary with __getitem__()
3. Implement the additional public methods that are needed | 62599054b5575c28eb713759
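A short usage sketch of the composition-based class above; the customer id and policy fields are invented illustration data:

```python
# Invented example data: one customer with two policy fields.
policy = TransactionPolicy_two(
    {"client001": {"fee": 1000.0, "expiration_date": "2021-01-03"}}
)
policy.change_in_policy("client001", fee=1200.0)  # mutate via the public method
print(policy["client001"])  # __getitem__ proxies the private _data dict
print(len(policy))          # __len__ proxies as well -> 1
```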
class CreatingMetricSourceException(Exception): <NEW_LINE> <INDENT> pass | Metric creation task hasn't completed yet | 6259905476e4537e8c3f0aa6 |
class ZCheckerTools(CMakePackage): <NEW_LINE> <INDENT> homepage = "https://github.com/CODARcode/z-checker-installer" <NEW_LINE> url = "https://github.com/CODARcode/z-checker-installer/archive/refs/tags/0.7.0.tar.gz" <NEW_LINE> git = "https://github.com/CODARcode/z-checker-installer" <NEW_LINE> maintainers = ['disheng222', 'robertu94'] <NEW_LINE> version('develop', branch='develop') <NEW_LINE> version('develop-robertu94', branch='develop', git="https://github.com/robertu94/z-checker-installer") <NEW_LINE> depends_on('[email protected]+sz+zfp+fpzip+json') <NEW_LINE> depends_on('z-checker') <NEW_LINE> depends_on('perl') <NEW_LINE> depends_on('git') <NEW_LINE> depends_on('[email protected]:') <NEW_LINE> depends_on('[email protected]:') <NEW_LINE> depends_on('tif22pnm') <NEW_LINE> depends_on('sam2p') <NEW_LINE> depends_on('texlive') <NEW_LINE> depends_on('ghostscript') <NEW_LINE> def cmake_args(self): <NEW_LINE> <INDENT> args = [] <NEW_LINE> return args | A collection of analysis tools that build upon z-checker | 62599054dd821e528d6da3fa |
class MovieAPI(API): <NEW_LINE> <INDENT> @login_required <NEW_LINE> def _put(self, movie_id: int) -> dict: <NEW_LINE> <INDENT> logger.debug("Updating Movie.") <NEW_LINE> title = request.json.get(constants.Movie.TITLE) <NEW_LINE> released_at = request.json.get(constants.Movie.RELEASED_AT) <NEW_LINE> is_active = request.json.get(constants.Movie.ACTIVE, True) <NEW_LINE> try: <NEW_LINE> <INDENT> movie = movies.MoviesController.update(title=title, released_at=released_at, movie_id=movie_id, is_active=is_active) <NEW_LINE> logger.debug("Movie Updated!") <NEW_LINE> return { constants.Movie.SINGULAR: MovieSerializer.serialize(movie), } <NEW_LINE> <DEDENT> except movies.TitleException: <NEW_LINE> <INDENT> raise errors.TitleFormException() <NEW_LINE> <DEDENT> except movies.ReleaseDateException: <NEW_LINE> <INDENT> raise errors.ReleaseDateFormException() <NEW_LINE> <DEDENT> except movies.MovieIDException: <NEW_LINE> <INDENT> raise errors.MovieNotFoundException() <NEW_LINE> <DEDENT> except movies.MovieNotFoundException: <NEW_LINE> <INDENT> raise errors.MovieNotFoundException() <NEW_LINE> <DEDENT> <DEDENT> def _get(self, movie_id: int) -> dict: <NEW_LINE> <INDENT> logger.debug("Getting Movie.") <NEW_LINE> try: <NEW_LINE> <INDENT> movie = movies.MoviesController.get_by_id(movie_id=movie_id) <NEW_LINE> logger.debug("Movie loaded!") <NEW_LINE> return { constants.Movie.SINGULAR: MovieSerializer.serialize(movie), } <NEW_LINE> <DEDENT> except movies.MovieIDException: <NEW_LINE> <INDENT> raise errors.MovieNotFoundException() <NEW_LINE> <DEDENT> except movies.MovieNotFoundException: <NEW_LINE> <INDENT> raise errors.MovieNotFoundException() <NEW_LINE> <DEDENT> <DEDENT> @login_required <NEW_LINE> def _delete(self, movie_id: int) -> dict: <NEW_LINE> <INDENT> logger.debug("Deleting Movie.") <NEW_LINE> try: <NEW_LINE> <INDENT> movie = movies.MoviesController.delete(movie_id=movie_id) <NEW_LINE> logger.debug("Movie deleted!") <NEW_LINE> return { constants.Movie.SINGULAR: MovieSerializer.serialize(movie), } <NEW_LINE> <DEDENT> except movies.MovieIDException: <NEW_LINE> <INDENT> raise errors.MovieNotFoundException() <NEW_LINE> <DEDENT> except movies.MovieNotFoundException: <NEW_LINE> <INDENT> raise errors.MovieNotFoundException() | Movie API. | 625990540c0af96317c577ed |
class PlaceholderNode(template.Node): <NEW_LINE> <INDENT> def __init__(self, name, plugins=None): <NEW_LINE> <INDENT> self.name = name.lower() <NEW_LINE> if plugins: <NEW_LINE> <INDENT> self.plugins = plugins <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.plugins = [] <NEW_LINE> <DEDENT> <DEDENT> def render(self, context): <NEW_LINE> <INDENT> if not 'request' in context: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> l = get_language_from_request(context['request']) <NEW_LINE> request = context['request'] <NEW_LINE> CMSPluginModel = get_cmsplugin_model(request) <NEW_LINE> page = request.current_page <NEW_LINE> if page == "dummy": <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> plugins = CMSPluginModel.objects.filter(page=page, language=l, placeholder__iexact=self.name, parent__isnull=True).order_by('position').select_related() <NEW_LINE> if settings.CMS_PLACEHOLDER_CONF and self.name in settings.CMS_PLACEHOLDER_CONF: <NEW_LINE> <INDENT> if "extra_context" in settings.CMS_PLACEHOLDER_CONF[self.name]: <NEW_LINE> <INDENT> context.update(settings.CMS_PLACEHOLDER_CONF[self.name]["extra_context"]) <NEW_LINE> <DEDENT> <DEDENT> c = "" <NEW_LINE> for plugin in plugins: <NEW_LINE> <INDENT> c += plugin.render_plugin(context, self.name) <NEW_LINE> <DEDENT> return c <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Placeholder Node: %s>" % self.name | This template node is used to output page content and
is also used in the admin to dynamically generate input fields.
eg: {% placeholder content-type-name page-object widget-name %}
Keyword arguments:
content-type-name -- the content type you want to show/create
page-object -- the page object
widget-name -- the widget name you want into the admin interface. Take
a look into pages.admin.widgets to see which widgets are available. | 62599054e5267d203ee6ce0a |
class NannyState(object): <NEW_LINE> <INDENT> DEFAULT_TIMEOUT = 10 <NEW_LINE> ZK_TREE_ROOT = '/monitor/hosts' <NEW_LINE> def __init__(self, zk_hosts='localhost:2181', suffix=None): <NEW_LINE> <INDENT> self._suffix = suffix <NEW_LINE> self._zk = kazoo.client.KazooClient(hosts=zk_hosts, timeout=NannyState.DEFAULT_TIMEOUT) <NEW_LINE> try: <NEW_LINE> <INDENT> self._zk.start(timeout=NannyState.DEFAULT_TIMEOUT) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error('Timeout trying to connect to a ZK ensemble [{}]'.format(e)) <NEW_LINE> raise RuntimeError('No Zookeeper ensemble available at {}'.format(zk_hosts)) <NEW_LINE> <DEDENT> <DEDENT> def _build_fullpath(self, server): <NEW_LINE> <INDENT> node_name = '_'.join([server.hostname, self._suffix]) if self._suffix is not None else server.hostname <NEW_LINE> full_path = '/'.join([NannyState.ZK_TREE_ROOT, node_name]) <NEW_LINE> return full_path <NEW_LINE> <DEDENT> def register(self, server): <NEW_LINE> <INDENT> path = self._build_fullpath(server) <NEW_LINE> value = json.dumps(server.info) <NEW_LINE> try: <NEW_LINE> <INDENT> real_path = self._zk.create(path, value=value, ephemeral=True, makepath=True) <NEW_LINE> logging.info('Server {} registered with ZK at {}'.format(server.hostname, real_path)) <NEW_LINE> return real_path <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error('Could not create node {}: {}'.format(path, e)) <NEW_LINE> raise e <NEW_LINE> <DEDENT> <DEDENT> def update(self, server): <NEW_LINE> <INDENT> node = self._build_fullpath(server) <NEW_LINE> stat = self._zk.exists(node) <NEW_LINE> if stat: <NEW_LINE> <INDENT> new_version = stat.version <NEW_LINE> self._zk.set(node, value=json.dumps(server.info), version=new_version) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise kazoo.exceptions.NoNodeError( 'Server {} should be registered first'.format(node)) | Base abstract class to attach a server process to a monitoring ZooKeeper instance
| 6259905416aa5153ce401a00 |
class NotifyEditForm(EditForm): <NEW_LINE> <INDENT> form_fields = form.FormFields(INotifyAction) <NEW_LINE> label = _(u"Edit Notify Action") <NEW_LINE> description = _(u"A notify action can show a message to the user.") <NEW_LINE> form_name = _(u"Configure element") | An edit form for notify rule actions.
Formlib does all the magic here. | 625990548e7ae83300eea5b3 |
class ImageCSSPlugin(Plugin): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ImageCSSPlugin, self).__init__(*args, **kwargs) <NEW_LINE> random.seed(12001) <NEW_LINE> self._random_state = random.getstate() <NEW_LINE> <DEDENT> def text_resource_complete(self, resource, text): <NEW_LINE> <INDENT> if resource.source_file.kind != 'html': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> d = pq(text, parser='html') <NEW_LINE> images = d.items('article p > img') <NEW_LINE> nb = 0 <NEW_LINE> for img in images: <NEW_LINE> <INDENT> el = img.parent() <NEW_LINE> el.addClass("oasis-image") <NEW_LINE> random.setstate(self._random_state) <NEW_LINE> r = random.uniform(-4, 4) <NEW_LINE> self._random_state = random.getstate() <NEW_LINE> el.css.transform = f"rotate({r:.3f}deg)" <NEW_LINE> nb += 1 <NEW_LINE> if nb % 2 == 0: <NEW_LINE> <INDENT> el.addClass("oasis-image-alternate") <NEW_LINE> <DEDENT> img.attr.loading = "lazy" <NEW_LINE> <DEDENT> return u'<!DOCTYPE html>\n' + d.outer_html() | Add some CSS class to images and rotate them. | 62599054fff4ab517ebced3e |
@Predictor.register('textual-entailment') <NEW_LINE> class DecomposableAttentionPredictor(Predictor): <NEW_LINE> <INDENT> def predict(self, premise: str, hypothesis: str) -> JsonDict: <NEW_LINE> <INDENT> return self.predict_json({"premise" : premise, "hypothesis": hypothesis}) <NEW_LINE> <DEDENT> @overrides <NEW_LINE> def _json_to_instance(self, json_dict: JsonDict) -> Tuple[Instance, JsonDict]: <NEW_LINE> <INDENT> premise_text = json_dict["premise"] <NEW_LINE> hypothesis_text = json_dict["hypothesis"] <NEW_LINE> snli_reader: SnliReader = self._dataset_reader <NEW_LINE> tokenizer = snli_reader._tokenizer <NEW_LINE> return self._dataset_reader.text_to_instance(premise_text, hypothesis_text), { 'premise_tokens': [token.text for token in tokenizer.tokenize(premise_text)], 'hypothesis_tokens': [token.text for token in tokenizer.tokenize(hypothesis_text)] } | Predictor for the :class:`~allennlp.models.bidaf.DecomposableAttention` model. | 6259905482261d6c52730957 |
class SecurityDoor(Door): <NEW_LINE> <INDENT> color = 'gray' <NEW_LINE> locked = True <NEW_LINE> def open(self): <NEW_LINE> <INDENT> if self.locked: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> super().open() | Inherit from Door class. | 625990544a966d76dd5f040b |
class DeclineParticipantSerializer(serializers.Serializer): <NEW_LINE> <INDENT> def update(self, instance, data): <NEW_LINE> <INDENT> now = timezone.now().date() <NEW_LINE> if instance.fair.date_init >= now: <NEW_LINE> <INDENT> raise serializers.ValidationError('Ongoing fair cannot be modified.') <NEW_LINE> <DEDENT> user = self.context['request_user'] <NEW_LINE> instance.state = 'DD' <NEW_LINE> instance.updated_by = user.pk <NEW_LINE> instance.save() <NEW_LINE> return instance | decline participant user_description serializer.
Handles declining a join request to a fair.
The Fair object must be provided in the context. | 6259905416aa5153ce401a01
class ShowNotify(View): <NEW_LINE> <INDENT> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> user = request.user <NEW_LINE> notification = Notification.objects.filter(user=user).order_by('-date') <NEW_LINE> Notification.objects.filter(user=user, is_seen=False).update(is_seen=True) <NEW_LINE> return render(request, 'notification/notification.html', {'notify': notification}) | Display notification | 625990544e696a045264e8b0 |
class ABC(object): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta | Base class for abstract base classes.
Completely unnecessary, but makes ABCs slightly more convenient. | 6259905423e79379d538da17
class UnrolledGradientDescent(UnrolledOptimizer): <NEW_LINE> <INDENT> def __init__(self, lr): <NEW_LINE> <INDENT> self._lr = lr <NEW_LINE> <DEDENT> def _apply_gradients(self, grads, x, optim_state): <NEW_LINE> <INDENT> new_x = [None] * len(x) <NEW_LINE> for i in xrange(len(x)): <NEW_LINE> <INDENT> new_x[i] = x[i] - self._lr * grads[i] <NEW_LINE> <DEDENT> return new_x, optim_state | Vanilla Gradient Descent UnrolledOptimizer. | 62599054498bea3a75a59042 |
class IDManyRelatedField(relations.ManyRelatedField): <NEW_LINE> <INDENT> field_name_suffix = '_ids' <NEW_LINE> def bind(self, field_name, parent): <NEW_LINE> <INDENT> self.source = field_name[:-len(self.field_name_suffix)] <NEW_LINE> super().bind(field_name, parent) | Many-to-many field that appends an '_ids' suffix to the field name. | 6259905471ff763f4b5e8ccb |
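A hypothetical serializer sketch showing the suffix convention in action: a field declared as ``authors_ids`` is bound by ``bind()`` to ``instance.authors`` once the ``_ids`` suffix is stripped. ``Book`` and ``Author`` are assumed models, and ``child_relation`` follows DRF's ``ManyRelatedField`` constructor:

```python
from rest_framework import serializers

class BookSerializer(serializers.ModelSerializer):
    # Exposed as "authors_ids" in the payload, but reads/writes instance.authors.
    authors_ids = IDManyRelatedField(
        child_relation=serializers.PrimaryKeyRelatedField(queryset=Author.objects.all())
    )

    class Meta:
        model = Book
        fields = ("id", "title", "authors_ids")
```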
class quaternion: <NEW_LINE> <INDENT> def __init__(self, real=None, img=None, R=None): <NEW_LINE> <INDENT> self.real = 0. <NEW_LINE> self.img = zeros(3) <NEW_LINE> if R is not None: <NEW_LINE> <INDENT> self.real, self.img = rot2quat(R) <NEW_LINE> <DEDENT> if real is not None: <NEW_LINE> <INDENT> self.real = real <NEW_LINE> <DEDENT> if img is not None: <NEW_LINE> <INDENT> assert(len(img) == 3) <NEW_LINE> self.img[:] = img <NEW_LINE> <DEDENT> <DEDENT> def normalize(self): <NEW_LINE> <INDENT> n = self.norm() <NEW_LINE> return quaternion(real=self.real/n, img=self.img/n) <NEW_LINE> <DEDENT> def norm(self): <NEW_LINE> <INDENT> return sqrt(self.real**2 + dot(self.img, self.img)) <NEW_LINE> <DEDENT> def to_rot(self): <NEW_LINE> <INDENT> n = self.norm() <NEW_LINE> return quat2rot(self.real/n, self.img/n) <NEW_LINE> <DEDENT> def to_array(self): <NEW_LINE> <INDENT> return array([self.real, self.img[0], self.img[1], self.img[2]]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def C(self): <NEW_LINE> <INDENT> return quaternion(real=self.real, img=-self.img) <NEW_LINE> <DEDENT> @property <NEW_LINE> def inv(self): <NEW_LINE> <INDENT> n = self.real**2 + dot(self.img, self.img) <NEW_LINE> return quaternion(real=self.real/n, img=-self.img/n) <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return quaternion(real=self.real, img=self.img) <NEW_LINE> <DEDENT> def __add__(self, Q): <NEW_LINE> <INDENT> assert (isinstance(Q, quaternion)) <NEW_LINE> real = self.real + Q.real <NEW_LINE> img = self.img + Q.img <NEW_LINE> return quaternion(real=real, img=img) <NEW_LINE> <DEDENT> def __sub__(self, Q): <NEW_LINE> <INDENT> assert (isinstance(Q, quaternion)) <NEW_LINE> real = self.real - Q.real <NEW_LINE> img = self.img - Q.img <NEW_LINE> return quaternion(real=real, img=img) <NEW_LINE> <DEDENT> def __neg__(self): <NEW_LINE> <INDENT> return quaternion(real=-self.real, img=-self.img) <NEW_LINE> <DEDENT> def __mul__(self, Q): <NEW_LINE> <INDENT> real = self.real*Q.real - dot(self.img, Q.img) <NEW_LINE> img = self.real*Q.img + Q.real*self.img + cross(self.img, Q.img) <NEW_LINE> return quaternion(real=real, img=img) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '[{0},({1}, {2}, {3})]'.format( self.real, self.img[0], self.img[1], self.img[2]) | The class for the quaternion
functions:
normalize: normalize the quaternion
norm: give the norm of the quaternion
to_rot: convert the quaternion into a 3x3 rotation matrix
C: give the conjugate of the instance quaternion: q(real, -img)
inv: give the inverse of the instance quaternion: q.C/q.norm() | 62599054435de62698e9d31f |
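A brief usage sketch, assuming numpy plus the module's `rot2quat`/`quat2rot` helpers are importable alongside the class:

```python
import numpy as np

q = quaternion(R=np.eye(3))    # build from a 3x3 rotation matrix
q = q.normalize()              # force unit norm
print(q.to_rot())              # back to a 3x3 rotation matrix
print((q * q.inv).to_array())  # q * q^-1 is the identity quaternion [1, 0, 0, 0]
```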
class Tool(object): <NEW_LINE> <INDENT> def __init__(self, command_executor, filenames=set()): <NEW_LINE> <INDENT> self.executor = command_executor <NEW_LINE> self.filenames = filenames <NEW_LINE> <DEDENT> def get_configs(self): <NEW_LINE> <INDENT> return list() <NEW_LINE> <DEDENT> def invoke(self, dirname, filenames=set(), linter_configs=set()): <NEW_LINE> <INDENT> retval = defaultdict(lambda: defaultdict(list)) <NEW_LINE> extensions = ' -o '.join(['-name "*%s"' % ext for ext in self.get_file_extensions()]) <NEW_LINE> cmd = 'find %s %s | xargs %s' % ( dirname, extensions, self.get_command( dirname, linter_configs=linter_configs)) <NEW_LINE> result = self.executor(cmd) <NEW_LINE> for line in result.split('\n'): <NEW_LINE> <INDENT> output = self.process_line(dirname, line) <NEW_LINE> if output is not None: <NEW_LINE> <INDENT> filename, lineno, messages = output <NEW_LINE> if filename.startswith(dirname): <NEW_LINE> <INDENT> filename = filename[len(dirname) + 1:] <NEW_LINE> <DEDENT> retval[filename][lineno].append(messages) <NEW_LINE> <DEDENT> <DEDENT> return retval <NEW_LINE> <DEDENT> def process_line(self, dirname, line): <NEW_LINE> <INDENT> if not hasattr(self, 'response_format'): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> match = self.response_format.search(line) <NEW_LINE> if match is not None: <NEW_LINE> <INDENT> if len(self.filenames) != 0: <NEW_LINE> <INDENT> if match.group('filename') not in self.filenames: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> filename, line, messages = match.groups() <NEW_LINE> return filename, line, messages <NEW_LINE> <DEDENT> <DEDENT> def get_file_extensions(self): <NEW_LINE> <INDENT> if not self.file_extensions: <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> return self.file_extensions <NEW_LINE> <DEDENT> def get_command(self, dirname, linter_configs=set()): <NEW_LINE> <INDENT> raise NotImplementedError() | Tool represents a program that runs over source code. It returns a nested
dictionary structure like:
{'relative_filename': {'line_number': [error1, error2]}}
eg: {'imhotep/app.py': {'103': ['line too long']}} | 625990544e4d562566373923 |
class ButtonMenu(tk.Menubutton): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> l_args = {'text': u"\u25be" + self.get_text()} <NEW_LINE> super().__init__(parent, **l_args) <NEW_LINE> self.menu = tk.Menu(self, tearoff=False) <NEW_LINE> self['menu'] = self.menu <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def get_text(self): <NEW_LINE> <INDENT> return '' | Base class for a top menu button (with a dropdown) | 625990548a43f66fc4bf36a9 |
class Icmp: <NEW_LINE> <INDENT> ICMP_Data = namedtuple('ICMP_Data', 'type code checksum id seq payload') <NEW_LINE> @staticmethod <NEW_LINE> def pack(*, id_, seq, size): <NEW_LINE> <INDENT> _type, _code = 8, 0 <NEW_LINE> v_id = int(id_) & 0xffff <NEW_LINE> v_seq = int(seq) & 0xffff <NEW_LINE> v_data = ('x' * size).encode('utf8') <NEW_LINE> pack_format = f'!BBHHH{size}s' <NEW_LINE> temp_cksum = 0 <NEW_LINE> temp_packet = struct.pack(pack_format, _type, _code, temp_cksum, v_id, v_seq, v_data) <NEW_LINE> icmp_cksum = checksum(temp_packet) <NEW_LINE> icmp_packet = struct.pack(pack_format, _type, _code, icmp_cksum, v_id, v_seq, v_data) <NEW_LINE> return icmp_packet <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def unpack(icmp_packet): <NEW_LINE> <INDENT> icmp_header = icmp_packet[:8] <NEW_LINE> icmp_payload = icmp_packet[8:] <NEW_LINE> unpack_format = '!BBHHH' <NEW_LINE> unpacked_data = struct.unpack(unpack_format, icmp_header) <NEW_LINE> return Icmp.ICMP_Data(*unpacked_data, icmp_payload) | The ICMP header structure:
ICMP Header (8 bytes) & ICMP Payload
+--------+--------+----------------+
| type | code | checksum |
+--------+--------+----------------+
| identifier | sequence |
+-----------------+----------------+
| payload |
+----------------------------------+ | 6259905424f1403a9268635d |
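A quick pack/unpack round trip for the header layout above. Actually sending the packet needs a raw socket (usually root privileges), so only the byte-level handling is exercised; `checksum` is the module-level helper the class already calls:

```python
packet = Icmp.pack(id_=0x1234, seq=1, size=8)  # echo request, 8-byte 'x' payload
fields = Icmp.unpack(packet)
print(fields.type, fields.code)    # 8 0
print(hex(fields.id), fields.seq)  # 0x1234 1
print(fields.payload)              # b'xxxxxxxx'
```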
class SeekAheadThread(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, seek_ahead_iterator, cancel_event, status_queue): <NEW_LINE> <INDENT> super(SeekAheadThread, self).__init__() <NEW_LINE> self.status_queue = status_queue <NEW_LINE> self.seek_ahead_iterator = seek_ahead_iterator <NEW_LINE> self.cancel_event = cancel_event <NEW_LINE> self.terminate = False <NEW_LINE> self.start() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> num_objects = 0 <NEW_LINE> num_data_bytes = 0 <NEW_LINE> for seek_ahead_result in self.seek_ahead_iterator: <NEW_LINE> <INDENT> if self.terminate: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if (num_objects % constants.NUM_OBJECTS_PER_LIST_PAGE) == 0: <NEW_LINE> <INDENT> if self.cancel_event.isSet(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> num_objects += seek_ahead_result.est_num_ops <NEW_LINE> num_data_bytes += seek_ahead_result.data_bytes <NEW_LINE> <DEDENT> if self.cancel_event.isSet(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> _PutToQueueWithTimeout(self.status_queue, thread_message.SeekAheadMessage( num_objects, num_data_bytes, time.time())) | Thread to estimate total work to be performed by all processes and threads.
Because the ProducerThread can only buffer a certain number of tasks on the
global task queue, it cannot reliably provide the total count or size of
iterated results for operations involving many iterated arguments until it
nears the end of iteration.
This thread consumes an iterator that should be functionally identical
to the ProducerThread, but iterates to the end without adding tasks to the
global task queue in an effort to estimate the amount of total work that the
call to Apply will perform. It should be used only for large operations, and
thus it is created by the main ProducerThread only when the number of
iterated arguments exceeds a threshold.
This thread may produce an inaccurate estimate if its iterator produces
different results than the iterator used by the ProducerThread. This can
happen due to eventual listing consistency or due to the source being
modified as iteration occurs.
This thread estimates operations for top-level objects only;
sub-operations (such as a parallel composite upload) should be reported via
the iterator as a single object including the total number of bytes affected. | 62599054f7d966606f749346 |
class FourierExpansionWrapper : <NEW_LINE> <INDENT> def fourier_expansion(self, cache = True) : <NEW_LINE> <INDENT> try : <NEW_LINE> <INDENT> return self.__fourier_expansion <NEW_LINE> <DEDENT> except AttributeError : <NEW_LINE> <INDENT> if cache : <NEW_LINE> <INDENT> self.__fourier_expansion = self.parent()._fourier_expansion_of_element(self) <NEW_LINE> return self.__fourier_expansion <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return self.parent()._fourier_expansion_of_element(self) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _set_fourier_expansion(self, expansion ) : <NEW_LINE> <INDENT> self.__fourier_expansion = expansion | Abstract class for elements, which do not represent Fourier
expansions on their own, but encapsulate one.
SEE:
:class:`~fourier_expansion_framework.gradedexpansions.GradedExpansion_class`. | 625990542ae34c7f260ac603
class TestProcessChecker(AbstractServer): <NEW_LINE> <INDENT> def tearDown(self, annotate=True): <NEW_LINE> <INDENT> super(TestProcessChecker, self).tearDown(annotate=annotate) <NEW_LINE> if self.process: <NEW_LINE> <INDENT> self.process.terminate() <NEW_LINE> <DEDENT> <DEDENT> def setUp(self, annotate=True): <NEW_LINE> <INDENT> super(TestProcessChecker, self).setUp(annotate=annotate) <NEW_LINE> self.process = None <NEW_LINE> self.state_dir = self.getStateDir() <NEW_LINE> <DEDENT> def create_lock_file_with_pid(self, pid): <NEW_LINE> <INDENT> with open(os.path.join(self.state_dir, LOCK_FILE_NAME), 'wb') as lock_file: <NEW_LINE> <INDENT> lock_file.write(str(pid)) <NEW_LINE> <DEDENT> <DEDENT> def test_no_lock_file(self): <NEW_LINE> <INDENT> process_checker = ProcessChecker(state_directory=self.state_dir) <NEW_LINE> self.assertTrue(os.path.exists(os.path.join(self.state_dir, LOCK_FILE_NAME))) <NEW_LINE> self.assertFalse(process_checker.already_running) <NEW_LINE> <DEDENT> def test_invalid_pid_in_lock_file(self): <NEW_LINE> <INDENT> with open(os.path.join(self.state_dir, LOCK_FILE_NAME), 'wb') as lock_file: <NEW_LINE> <INDENT> lock_file.write("Hello world") <NEW_LINE> <DEDENT> process_checker = ProcessChecker() <NEW_LINE> self.assertGreater(int(process_checker.get_pid_from_lock_file()), 0) <NEW_LINE> <DEDENT> def test_own_pid_in_lock_file(self): <NEW_LINE> <INDENT> self.create_lock_file_with_pid(os.getpid()) <NEW_LINE> process_checker = ProcessChecker(state_directory=self.state_dir) <NEW_LINE> self.assertFalse(process_checker.already_running) <NEW_LINE> <DEDENT> def test_other_instance_running(self): <NEW_LINE> <INDENT> self.process = Process(target=process_dummy_function) <NEW_LINE> self.process.start() <NEW_LINE> self.create_lock_file_with_pid(self.process.pid) <NEW_LINE> process_checker = ProcessChecker(state_directory=self.state_dir) <NEW_LINE> self.assertTrue(process_checker.is_pid_running(self.process.pid)) <NEW_LINE> self.assertTrue(process_checker.already_running) <NEW_LINE> <DEDENT> def test_dead_pid_in_lock_file(self): <NEW_LINE> <INDENT> dead_pid = 134824733 <NEW_LINE> self.create_lock_file_with_pid(dead_pid) <NEW_LINE> process_checker = ProcessChecker(state_directory=self.state_dir) <NEW_LINE> self.assertFalse(process_checker.is_pid_running(dead_pid)) <NEW_LINE> self.assertFalse(process_checker.already_running) | A test class for the ProcessChecker which checks if the Tribler Core is already running. | 6259905491af0d3eaad3b345 |
class MainPage(Handler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> blogs = Blogs.all().order("-created") <NEW_LINE> self.render("/index.html", blogs=blogs, auth_user=self.user) | Renders home page with blog posts | 62599054097d151d1a2c258b |
class Date(Property): <NEW_LINE> <INDENT> def __init__(self, default=datetime.date.today()): <NEW_LINE> <INDENT> super(Date, self).__init__(default=default) <NEW_LINE> <DEDENT> def validate(self, value): <NEW_LINE> <INDENT> super(Date, self).validate(value) <NEW_LINE> if not (value is None or isinstance(value, (datetime.date,) + string_types + (float,) + bokeh_integer_types)): <NEW_LINE> <INDENT> raise ValueError("expected a date, string or timestamp, got %r" % value) <NEW_LINE> <DEDENT> <DEDENT> def transform(self, value): <NEW_LINE> <INDENT> value = super(Date, self).transform(value) <NEW_LINE> if isinstance(value, (float,) + bokeh_integer_types): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = datetime.date.fromtimestamp(value) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> value = datetime.date.fromtimestamp(value/1000) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(value, string_types): <NEW_LINE> <INDENT> value = dateutil.parser.parse(value).date() <NEW_LINE> <DEDENT> return value | Date (not datetime) type property. | 62599054baa26c4b54d507c1 |
class FCM: <NEW_LINE> <INDENT> __version__ = "0.2" <NEW_LINE> def __init__(self, title="FCM Example"): <NEW_LINE> <INDENT> self.title = title <NEW_LINE> self.graph = nx.MultiDiGraph() <NEW_LINE> self.ActivationFunction = lambda x: 1*(x >= 0.5) <NEW_LINE> return <NEW_LINE> <DEDENT> def add_edges(self, edge_lst): <NEW_LINE> <INDENT> self.graph.add_weighted_edges_from(edge_lst) <NEW_LINE> <DEDENT> def label_edges(self, label_dict): <NEW_LINE> <INDENT> self.graph = nx.relabel_nodes(self.graph, label_dict, copy=False) <NEW_LINE> <DEDENT> def set_activation(self, actvn): <NEW_LINE> <INDENT> self.ActivationFunction = actvn <NEW_LINE> return <NEW_LINE> <DEDENT> def get_FCM_Matrix(self): <NEW_LINE> <INDENT> return(nx.adjacency_matrix(self.graph).todense()) <NEW_LINE> <DEDENT> def VizFCM(self): <NEW_LINE> <INDENT> nx.draw(self.graph, with_labels=True, node_size=700, nodecolor='g', edge_color='b') <NEW_LINE> plt.title(self.title) <NEW_LINE> return <NEW_LINE> <DEDENT> def EvolveOnce(self, inp, mask): <NEW_LINE> <INDENT> assert(len(inp) == len(mask)) <NEW_LINE> return Clamp(self.ActivationFunction( np.asarray( np.matmul( Clamp(inp, mask), nx.adjacency_matrix(self.graph).todense())).ravel() ), mask) <NEW_LINE> <DEDENT> def EvolveToLimit(self, inp, mask, nmax=1000): <NEW_LINE> <INDENT> assert(len(inp) == len(mask)) <NEW_LINE> seq = NestWhileList( lambda inp_vec: self.EvolveOnce(inp_vec, mask), inp, stopcritQ, nmax ) <NEW_LINE> seq = [inp] + seq <NEW_LINE> return(np.matrix(seq)) <NEW_LINE> <DEDENT> def VizFCMEvol(self): <NEW_LINE> <INDENT> pass | Fuzzy Cognitive Map (FCM) class. Version 0.2
Handles FCM representation, manipulation, and visualization.
Representation: FCMs are inherently directed graphs (digraphs),
using networkx graphs as the base.
[OO-NB: consider inheriting nx.DiGraph directly]
Manipulation: Can evolve an FCM from an input state either one step at a
time or to a convergent fixed point, both subject to a node-clamping mask.
[OO-NB: need to figure out how to identify non-fixed-point limit cycles]
Visualization: Graph plot of the FCM structure, using the default layout.
Version 0.2 Changes:
- switch from DiGraph to MultiDiGraph to allow for self-loops
To Do:
- Evolve to limit-cycle sequence
- Visualize evolutions on FCM graph
- Backward Inference (!!!***) | 6259905416aa5153ce401a02
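Clamp and NestWhileList are not defined in the row above; a self-contained sketch of one EvolveOnce-style step, assuming Clamp pins masked concepts to fixed values (NumPy only, weights illustrative):

import numpy as np

W = np.array([[0.0, 0.8, 0.0],     # weight matrix: row i -> column j influence
              [0.0, 0.0, 0.6],
              [-0.5, 0.0, 0.0]])

def activate(x):                   # the default threshold activation from __init__
    return (x >= 0.5).astype(float)

def clamp(state, mask, clamped):   # mask == 1 pins a concept to its clamped value
    return np.where(mask == 1, clamped, state)

state = np.array([1.0, 0.0, 0.0])
mask = np.array([1, 0, 0])         # hold concept 0 "on" throughout
for _ in range(4):                 # iterate toward a fixed point
    state = clamp(activate(state @ W), mask, np.array([1.0, 0.0, 0.0]))
    print(state)                   # converges to [1. 1. 1.]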
class Foo(): <NEW_LINE> <INDENT> _i = 97 <NEW_LINE> def get_i(self): <NEW_LINE> <INDENT> return type(self)._i <NEW_LINE> <DEDENT> def set_i(self, val): <NEW_LINE> <INDENT> type(self)._i = val <NEW_LINE> <DEDENT> i = property(get_i, set_i) | Demo of a static data member of a Python class | 6259905429b78933be26ab53
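Usage sketch for the class above: because the property reads and writes type(self)._i, the value lives on the class and is shared by every instance.

a, b = Foo(), Foo()
a.i = 42            # the setter writes Foo._i, not an instance attribute
print(b.i, Foo._i)  # -> 42 42: the value is shared across all instances
b.i = 7
print(a.i)          # -> 7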
class PalletViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Pallet.objects.all() <NEW_LINE> serializer_class = PalletSerializer <NEW_LINE> permission_classes = (permissions.IsAuthenticatedOrReadOnly, IsStaff,) <NEW_LINE> def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save(create_user=self.request.user) | This viewset automatically provides `list`, `create`, `retrieve`,
`update` and `destroy` actions. | 6259905416aa5153ce401a03 |
class Notification(object): <NEW_LINE> <INDENT> def __init__(self, interval=60.0): <NEW_LINE> <INDENT> super(Notification, self).__init__() <NEW_LINE> self.interval = interval <NEW_LINE> self.t_notify = 0.0 <NEW_LINE> self.muted = False <NEW_LINE> <DEDENT> def notify(self, ap, alarm): <NEW_LINE> <INDENT> raise ValueError("Subclass should implement this method!") | Notification base class for an alarm. | 6259905415baa723494634b0 |
class Sensor(Device): <NEW_LINE> <INDENT> def get_value(self): <NEW_LINE> <INDENT> raise NotImplementedError() | Base sensor class | 62599054cc0a2c111447c50a |
class Agent: <NEW_LINE> <INDENT> def __init__(self, board, heuristic_f, display=None, ): <NEW_LINE> <INDENT> self.board = board <NEW_LINE> self.display = display <NEW_LINE> self.heuristic_f = heuristic_f <NEW_LINE> <DEDENT> def step(self): <NEW_LINE> <INDENT> direction = int(input("0: left, 1: down, 2: right, 3: up = ")) % 4 <NEW_LINE> return direction | Agent Base. | 62599054b7558d58954649b9 |
class PintJSONEncoder(DjangoJSONEncoder): <NEW_LINE> <INDENT> def default(self, obj): <NEW_LINE> <INDENT> if isinstance(obj, ureg.Quantity): <NEW_LINE> <INDENT> return to_raw_magnitude(obj) <NEW_LINE> <DEDENT> return super(PintJSONEncoder, self).default(obj) | Converts pint Quantity objects for Angular's benefit.
# TODO handle unit conversion on the server per-org | 6259905410dbd63aa1c72100 |
class PowerTool: <NEW_LINE> <INDENT> def __init__(self, config, system, api, force_user=None, force_pass=None, logger=None): <NEW_LINE> <INDENT> self.system = system <NEW_LINE> self.config = config <NEW_LINE> self.settings = config.settings() <NEW_LINE> self.api = api <NEW_LINE> self.logger = self.api.logger <NEW_LINE> self.force_user = force_user <NEW_LINE> self.force_pass = force_pass <NEW_LINE> if logger is None: <NEW_LINE> <INDENT> logger = clogger.Logger() <NEW_LINE> <DEDENT> self.logger = logger <NEW_LINE> <DEDENT> def power(self, desired_state): <NEW_LINE> <INDENT> power_command = utils.get_power(self.system.power_type) <NEW_LINE> if not power_command: <NEW_LINE> <INDENT> utils.die(self.logger, "no power type set for system") <NEW_LINE> <DEDENT> meta = utils.blender(self.api, False, self.system) <NEW_LINE> meta["power_mode"] = desired_state <NEW_LINE> if self.force_user is not None: <NEW_LINE> <INDENT> meta["power_user"] = self.force_user <NEW_LINE> <DEDENT> if self.force_pass is not None: <NEW_LINE> <INDENT> meta["power_pass"] = self.force_pass <NEW_LINE> <DEDENT> self.logger.info("cobbler power configuration is:") <NEW_LINE> self.logger.info(" type : %s" % self.system.power_type) <NEW_LINE> self.logger.info(" address: %s" % self.system.power_address) <NEW_LINE> self.logger.info(" user : %s" % self.system.power_user) <NEW_LINE> self.logger.info(" id : %s" % self.system.power_id) <NEW_LINE> if meta.get("power_user", "") == "": <NEW_LINE> <INDENT> meta["power_user"] = os.environ.get("COBBLER_POWER_USER", "") <NEW_LINE> <DEDENT> if meta.get("power_pass", "") == "": <NEW_LINE> <INDENT> meta["power_pass"] = os.environ.get("COBBLER_POWER_PASS", "") <NEW_LINE> <DEDENT> template = utils.get_power_template(self.system.power_type) <NEW_LINE> tmp = templar.Templar(self.api._config) <NEW_LINE> template_data = tmp.render(template, meta, None, self.system) <NEW_LINE> for x in range(0, 5): <NEW_LINE> <INDENT> output, rc = utils.subprocess_sp(self.logger, power_command, shell=False, input=template_data) <NEW_LINE> if rc == 0: <NEW_LINE> <INDENT> if desired_state == 'status': <NEW_LINE> <INDENT> match = re.match('^(Status:|.+power\s=)\s(on|off)$', output, re.IGNORECASE | re.MULTILINE) <NEW_LINE> if match: <NEW_LINE> <INDENT> power_status = match.groups()[1] <NEW_LINE> if power_status.lower() == 'on': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> utils.die(self.logger, "command succeeded (rc=%s), but output ('%s') was not understood" % (rc, output)) <NEW_LINE> return None <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> time.sleep(2) <NEW_LINE> <DEDENT> <DEDENT> if not rc == 0: <NEW_LINE> <INDENT> utils.die(self.logger, "command failed (rc=%s), please validate the physical setup and cobbler config" % rc) <NEW_LINE> <DEDENT> return rc | Handles conversion of internal state to the tftpboot tree layout | 625990548da39b475be04709 |
class PlanetViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Planet.objects.all() <NEW_LINE> serializer_class = PlanetSerializer | A simple ViewSet for listing or retrieving planets. | 62599054cb5e8a47e493cc16 |
class NetcatTransferIntegrityError(NetcatError): <NEW_LINE> <INDENT> def __init__(self, output): <NEW_LINE> <INDENT> super().__init__("Transfer integrity failed", output) | Remote error related to transfer integrity failure using netcat. | 6259905499cbb53fe6832409 |
class BI_append(LightBuiltIn, Function): <NEW_LINE> <INDENT> def evalObj(self, subj, queue, bindings, proof, query): <NEW_LINE> <INDENT> if not isinstance(subj, NonEmptyList): return None <NEW_LINE> r = [] <NEW_LINE> for x in subj: <NEW_LINE> <INDENT> if not isinstance(x, List): return None <NEW_LINE> r.extend([a for a in x]) <NEW_LINE> <DEDENT> return self.store.newList(r) | Takes a list of lists, and appends them together.
| 62599054d53ae8145f919980 |
class Texto(models.Model): <NEW_LINE> <INDENT> valor = models.CharField(max_length=300) <NEW_LINE> id_item = models.IntegerField() <NEW_LINE> nombre_atributo = models.CharField(max_length=20) <NEW_LINE> longitud = models.IntegerField() <NEW_LINE> obligatorio = models.BooleanField(default=False) | La clase Texto almacenara los valores para tipos de datos "Texto"
en el tipo de atributo
@author: Francisco Velloso | 62599054adb09d7d5dc0ba88 |
class AbsoluteMagnitudeConvertor: <NEW_LINE> <INDENT> name = "absolute_magnitude_little_h" <NEW_LINE> def __init__(self, attr, cosmo_data, cosmo_out): <NEW_LINE> <INDENT> if not isinstance(attr, str): <NEW_LINE> <INDENT> raise ValueError("Attribute '{}' must be of a str type" .format(attr)) <NEW_LINE> <DEDENT> self.attr = attr <NEW_LINE> self.cosmo_data = cosmo_data <NEW_LINE> self.cosmo_out = cosmo_out <NEW_LINE> <DEDENT> def __call__(self, X): <NEW_LINE> <INDENT> H0_ratio = (self.cosmo_data.H0 / self.cosmo_out.H0).value <NEW_LINE> return X - 5 * numpy.log10(H0_ratio) | An absolute magnitude little h convertor. Transforms the absolute
magnitude from survey's choice of little h to the user's choice of
little h.
Parameters
----------
attr : str
Selector's absolute magnitude attribute to be "little h" converted.
cosmo_data : `astropy.cosmology` object
Data cosmology.
cosmo_out : `astropy.cosmology` object
User cosmology. | 625990547b25080760ed876e |
@python_2_unicode_compatible <NEW_LINE> class Provider(UuidAuditedModel): <NEW_LINE> <INDENT> objects = ProviderManager() <NEW_LINE> PROVIDERS = ( ('ec2', 'Amazon Elastic Compute Cloud (EC2)'), ('mock', 'Mock Reference Provider'), ('rackspace', 'Rackspace Open Cloud'), ('static', 'Static Node'), ('digitalocean', 'Digital Ocean'), ) <NEW_LINE> owner = models.ForeignKey(settings.AUTH_USER_MODEL) <NEW_LINE> id = models.SlugField(max_length=64) <NEW_LINE> type = models.SlugField(max_length=16, choices=PROVIDERS) <NEW_LINE> creds = JSONField(blank=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = (('owner', 'id'),) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{}-{}".format(self.id, self.get_type_display()) | Cloud provider settings for a user.
Available as `user.provider_set`. | 625990544e4d562566373926 |
class OccurrenceSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> location = LocationSerializer() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Occurrence <NEW_LINE> fields = ( 'id', 'location', 'basket',) | Serializer for occurrence gives full location and document information, but no basket information.
Mostly for use on basket detail pages | 62599054097d151d1a2c258c |
class BaseTenkPlayer(Player): <NEW_LINE> <INDENT> def filename(self, ai: BaseTenkAi) -> str: <NEW_LINE> <INDENT> return "./Q/%s_%s_%i.pickle" % (ai.__class__.__name__, self.TAG, self.games) <NEW_LINE> <DEDENT> def __init__( self, ais: List[BaseTenkAi], tag: str = "", load: Optional[int] = None, save: Optional[int] = None, exit: Optional[int] = None, progress: int = 100000, ): <NEW_LINE> <INDENT> self.SAVE = save <NEW_LINE> self.TAG = tag <NEW_LINE> self.EXIT = exit <NEW_LINE> self.PROGRESS = progress <NEW_LINE> self.end = False <NEW_LINE> self.games = load if load else 0 <NEW_LINE> self.scores = [] <NEW_LINE> self.maxscore = 0 <NEW_LINE> self.old_mean = 1 <NEW_LINE> self.rolls = [] <NEW_LINE> self.cur_rolls = 0 <NEW_LINE> self.start_time = time.time() <NEW_LINE> self.ais = ais <NEW_LINE> if load: <NEW_LINE> <INDENT> for ai in ais: <NEW_LINE> <INDENT> ai.load(self.filename(ai)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def choose(self, score: int, dices: List[int]) -> List[int]: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def finish(self, score: int) -> bool: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def write(self, score): <NEW_LINE> <INDENT> self.games += 1 <NEW_LINE> self.scores.append(score) <NEW_LINE> self.rolls.append(self.cur_rolls) <NEW_LINE> self.cur_rolls = 0 <NEW_LINE> if score > self.maxscore: <NEW_LINE> <INDENT> self.maxscore = score <NEW_LINE> <DEDENT> if self.games % self.PROGRESS == 0: <NEW_LINE> <INDENT> mymean = round(mean(self.scores)) <NEW_LINE> print( "%3i[%3i] - %2i/%.2f - %4i - %s | %.2f" % ( mymean, mymean - self.old_mean, max(self.rolls), mean(self.rolls), self.maxscore, str("/".join([str(len(ai.Q)) for ai in self.ais])), time.time() - self.start_time, ) ) <NEW_LINE> self.old_mean = round(mean(self.scores)) <NEW_LINE> self.scores = [] <NEW_LINE> self.rolls = [] <NEW_LINE> self.maxscore = 0 <NEW_LINE> self.start_time = time.time() <NEW_LINE> <DEDENT> if self.SAVE and (self.games % self.SAVE == 0): <NEW_LINE> <INDENT> for ai in self.ais: <NEW_LINE> <INDENT> print("Saving %s" % self.filename(ai)) <NEW_LINE> ai.save(self.filename(ai)) <NEW_LINE> <DEDENT> <DEDENT> if self.EXIT and self.games >= self.EXIT: <NEW_LINE> <INDENT> self.end = True | Base ai TenK player implementation. | 6259905401c39578d7f141c7 |
class ObjectSeriesSpecs(SeriesSpecs): <NEW_LINE> <INDENT> data: pd.Series <NEW_LINE> def __init__( self, data: pd.Series, name: str, original_tz: Optional[tzinfo] = None, feature_transformation: Optional[ReversibleTransformation] = None, post_load_processing: Optional[Transformation] = None, resampling_config: Dict[str, Any] = None, interpolation_config: Dict[str, Any] = None, ): <NEW_LINE> <INDENT> super().__init__( name, original_tz, feature_transformation, post_load_processing, resampling_config, interpolation_config, ) <NEW_LINE> if not isinstance(data.index, pd.DatetimeIndex): <NEW_LINE> <INDENT> raise IncompatibleModelSpecs( "Please provide a DatetimeIndex. Only found %s." % type(data.index).__name__ ) <NEW_LINE> <DEDENT> self.data = data <NEW_LINE> <DEDENT> def _load_series(self) -> pd.Series: <NEW_LINE> <INDENT> if self.post_load_processing is not None: <NEW_LINE> <INDENT> return self.post_load_processing.transform_series(self.data) <NEW_LINE> <DEDENT> return self.data | Spec for a pd.Series object that is being passed in and is stored directly in the specs. | 62599054b5575c28eb71375b |
class InsufficientItemsError(FunctionExecutionError): <NEW_LINE> <INDENT> def __init__(self, number_required, msg=None): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.number_required = number_required <NEW_LINE> self.msg = msg <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.msg: <NEW_LINE> <INDENT> return self.msg <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return f"Insufficient items: needs at least {self.number_required}" | Raised directly by operations functions that request the entire stack to
indicate not enough items are on the stack to finish their work, or by
the menu logic when an operation requests more parameters than items on
the stack.
Functions may use the simplified form of the exception, providing an int
describing the number of items that should have been on the stack for the
``number_required`` constructor parameter. esc will then reraise the
exception with a more useful message; a fallback message is provided in
case this doesn't happen for some reason. | 6259905463d6d428bbee3cf2 |
class AbstractBootstrapper(QtCore.QObject): <NEW_LINE> <INDENT> PASSED_KEY = "passed" <NEW_LINE> ERROR_KEY = "error" <NEW_LINE> def __init__(self, bypass_checks=False): <NEW_LINE> <INDENT> QtCore.QObject.__init__(self) <NEW_LINE> leap_assert(self._gui_errback.im_func == AbstractBootstrapper._gui_errback.im_func, "Cannot redefine _gui_errback") <NEW_LINE> leap_assert(self._errback.im_func == AbstractBootstrapper._errback.im_func, "Cannot redefine _errback") <NEW_LINE> leap_assert(self._gui_notify.im_func == AbstractBootstrapper._gui_notify.im_func, "Cannot redefine _gui_notify") <NEW_LINE> self._fetcher = requests <NEW_LINE> self._session = self._fetcher.session() <NEW_LINE> self._bypass_checks = bypass_checks <NEW_LINE> self._signal_to_emit = None <NEW_LINE> self._err_msg = None <NEW_LINE> <DEDENT> def _gui_errback(self, failure): <NEW_LINE> <INDENT> if self._signal_to_emit: <NEW_LINE> <INDENT> err_msg = self._err_msg if self._err_msg is not None else str(failure.value) <NEW_LINE> self._signal_to_emit.emit({ self.PASSED_KEY: False, self.ERROR_KEY: err_msg }) <NEW_LINE> failure.trap(Exception) <NEW_LINE> <DEDENT> <DEDENT> def _errback(self, failure, signal=None): <NEW_LINE> <INDENT> if self._signal_to_emit is None: <NEW_LINE> <INDENT> self._signal_to_emit = signal <NEW_LINE> <DEDENT> return failure <NEW_LINE> <DEDENT> def _gui_notify(self, _, signal=None): <NEW_LINE> <INDENT> if signal: <NEW_LINE> <INDENT> signal.emit({self.PASSED_KEY: True, self.ERROR_KEY: ""}) <NEW_LINE> <DEDENT> <DEDENT> def _callback_threader(self, cb, res, *args, **kwargs): <NEW_LINE> <INDENT> return threads.deferToThread(cb, res, *args, **kwargs) <NEW_LINE> <DEDENT> def addCallbackChain(self, callbacks): <NEW_LINE> <INDENT> leap_assert_type(callbacks, list) <NEW_LINE> self._signal_to_emit = None <NEW_LINE> self._err_msg = None <NEW_LINE> d = None <NEW_LINE> for cb, sig in callbacks: <NEW_LINE> <INDENT> if d is None: <NEW_LINE> <INDENT> d = threads.deferToThread(cb) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> d.addCallback(partial(self._callback_threader, cb)) <NEW_LINE> <DEDENT> d.addErrback(self._errback, signal=sig) <NEW_LINE> d.addCallback(self._gui_notify, signal=sig) <NEW_LINE> <DEDENT> d.addErrback(self._gui_errback) <NEW_LINE> return d | Abstract Bootstrapper that implements the needed deferred callbacks | 62599054d486a94d0ba2d4e7 |
class InlineHiliteExtension(Extension): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.inlinehilite = [] <NEW_LINE> self.config = { 'style_plain_text': [ False, "Process inline code even when a language is not specified " "or langauge is specified as 'text'. " "When 'False', no classes will be added to 'text' code blocks" "and no scoping will performed. The content will just be escaped." "- Default: False" ], 'css_class': [ '', "Set class name for wrapper element. The default of CodeHilite or Highlight will be used" "if nothing is set. - " "Default: ''" ], 'custom_inline': [[], "Custom inline - default []"] } <NEW_LINE> super(InlineHiliteExtension, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def extendMarkdown(self, md): <NEW_LINE> <INDENT> config = self.getConfigs() <NEW_LINE> md.inlinePatterns.register(InlineHilitePattern(BACKTICK_CODE_RE, config, md), "backtick", 190) <NEW_LINE> md.registerExtensions(["pymdownx.highlight"], {"pymdownx.highlight": {"_enabled": False}}) | Add inline highlighting extension to Markdown class. | 625990547cff6e4e811b6f60 |
class DataSourceMixin(ShowFieldTypeAndContent, PolymorphicModel): <NEW_LINE> <INDENT> PRIORITY_REALTIME = 30 <NEW_LINE> PRIORITY_DAILY = 20 <NEW_LINE> PRIORITY_LOW = 10 <NEW_LINE> priority = models.IntegerField(default=PRIORITY_DAILY) <NEW_LINE> @classmethod <NEW_LINE> def is_higher_priority(cls, old, new): <NEW_LINE> <INDENT> if new == cls.PRIORITY_REALTIME: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return new >= old <NEW_LINE> <DEDENT> def _Retrieve(self, start, end): <NEW_LINE> <INDENT> return pandas.Series() | A mixin class for retrieving data in a well-defined format.
Just need to override a function that grabs the data. | 6259905415baa723494634b2 |
class SubscriptionCreateFormTestCase(TestCase): <NEW_LINE> <INDENT> longMessage = True <NEW_LINE> def test_save(self): <NEW_LINE> <INDENT> user = mixer.blend('auth.User') <NEW_LINE> dummy = mixer.blend('test_app.DummyModel') <NEW_LINE> form = SubscriptionCreateForm(user=user, content_object=dummy, data={}) <NEW_LINE> self.assertTrue(form.is_valid(), msg=( 'Errors: {0}'.format(form.errors.items()))) <NEW_LINE> instance = form.save() <NEW_LINE> self.assertTrue(instance.pk) | Tests for the ``SubscriptionCreateForm`` form class. | 62599054498bea3a75a59046 |
class KeyboardInput: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.stdscr = curses.initscr() <NEW_LINE> curses.noecho() <NEW_LINE> curses.cbreak() <NEW_LINE> self.stdscr.keypad(True) <NEW_LINE> self.q = Queue() <NEW_LINE> self.p = Process(target=self._read_events) <NEW_LINE> self.p.start() <NEW_LINE> self.translate = {"KEY_DOWN": DOWN, "KEY_UP": UP, "KEY_LEFT": LEFT, "KEY_RIGHT": RIGHT} <NEW_LINE> <DEDENT> def _read_events(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> self.q.put(self.stdscr.getkey()) <NEW_LINE> <DEDENT> <DEDENT> def get_events(self): <NEW_LINE> <INDENT> events = [] <NEW_LINE> while not self.q.empty(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> events.append(self.translate[self.q.get()]) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> events.append(BUTTON) <NEW_LINE> <DEDENT> <DEDENT> return events <NEW_LINE> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> self.p.terminate() <NEW_LINE> curses.nocbreak() <NEW_LINE> self.stdscr.keypad(False) <NEW_LINE> curses.echo() <NEW_LINE> curses.endwin() <NEW_LINE> print("Keyboard cleanup") | Uses Curses module to read input keys directly | 625990548e71fb1e983bcfe9 |
class ColumnStyle(TableLayoutStyle): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def __new__(self,sizeType=None,width=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> Width=property(lambda self: object(),lambda self,v: None,lambda self: None) | Represents the look and feel of a column in a table layout.
ColumnStyle(sizeType: SizeType,width: Single)
ColumnStyle()
ColumnStyle(sizeType: SizeType) | 6259905473bcbd0ca4bcb7b1 |
class McPrimaryKeyColumnException(McDatabaseHandlerException): <NEW_LINE> <INDENT> pass | primary_key_column() exception. | 625990548e71fb1e983bcfea |
@python_2_unicode_compatible <NEW_LINE> class HLSPlaybackEnabledFlag(ConfigurationModel): <NEW_LINE> <INDENT> enabled_for_all_courses = BooleanField(default=False) <NEW_LINE> @classmethod <NEW_LINE> def feature_enabled(cls, course_id): <NEW_LINE> <INDENT> if not HLSPlaybackEnabledFlag.is_enabled(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> elif not HLSPlaybackEnabledFlag.current().enabled_for_all_courses: <NEW_LINE> <INDENT> feature = (CourseHLSPlaybackEnabledFlag.objects .filter(course_id=course_id) .order_by('-change_date') .first()) <NEW_LINE> return feature.enabled if feature else False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> current_model = HLSPlaybackEnabledFlag.current() <NEW_LINE> return u"HLSPlaybackEnabledFlag: enabled {is_enabled}".format( is_enabled=current_model.is_enabled() ) | Enables HLS Playback across the platform.
When this feature flag is set to true, individual courses
must also have HLS Playback enabled for this feature to
take effect.
.. no_pii:
.. toggle_name: HLSPlaybackEnabledFlag.enabled_for_all_courses
.. toggle_implementation: ConfigurationModel
.. toggle_default: False
.. toggle_description: Add the "hls" profile to all displayed videos on the platform.
.. toggle_use_cases: open_edx
.. toggle_creation_date: 2017-04-19
.. toggle_target_removal_date: None
.. toggle_warnings: None
.. toggle_tickets: https://github.com/edx/edx-platform/pull/14924 | 6259905407f4c71912bb095b |
class FLOIPRenderer(JSONRenderer): <NEW_LINE> <INDENT> media_type = 'application/vnd.org.flowinterop.results+json' <NEW_LINE> format = 'json' <NEW_LINE> charset = 'utf-8' <NEW_LINE> def render(self, data, accepted_media_type=None, renderer_context=None): <NEW_LINE> <INDENT> request = renderer_context['request'] <NEW_LINE> response = renderer_context['response'] <NEW_LINE> results = data <NEW_LINE> if request.method == 'GET' and response.status_code == 200: <NEW_LINE> <INDENT> if isinstance(data, dict): <NEW_LINE> <INDENT> results = [i for i in floip_rows_list(data)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> results = [i for i in floip_list(data)] <NEW_LINE> <DEDENT> <DEDENT> return super(FLOIPRenderer, self).render(results, accepted_media_type, renderer_context) | FLOIP Results data renderer. | 62599054e76e3b2f99fd9f1f |
class WarehousesMock(Warehouses): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__(None) <NEW_LINE> self.load_by_name = pass_ <NEW_LINE> self.load_by_tvd = pass_ <NEW_LINE> self.update = pass_ <NEW_LINE> self.update_one = pass_ <NEW_LINE> self.update_request_body = pass_ | Mock of the warehouses collection in the DB | 62599054a219f33f346c7d25
class NotFoundError(APIClientError): <NEW_LINE> <INDENT> pass | Wrapper for HTTP 404 Not Found error. | 6259905463d6d428bbee3cf4
class BaseChunkedUpload(models.Model): <NEW_LINE> <INDENT> upload_id = models.CharField(max_length=32, unique=True, editable=False, default=generate_upload_id) <NEW_LINE> file = models.FileField(max_length=255, upload_to=UPLOAD_PATH, storage=STORAGE) <NEW_LINE> filename = models.CharField(max_length=255) <NEW_LINE> offset = models.BigIntegerField(default=0) <NEW_LINE> created_on = models.DateTimeField(auto_now_add=True) <NEW_LINE> status = models.PositiveSmallIntegerField(choices=CHUNKED_UPLOAD_CHOICES, default=UPLOADING) <NEW_LINE> completed_on = models.DateTimeField(null=True, blank=True) <NEW_LINE> completed_md5 = models.CharField(max_length=32, null=True, blank=True) <NEW_LINE> @property <NEW_LINE> def expires_on(self): <NEW_LINE> <INDENT> return self.created_on + EXPIRATION_DELTA <NEW_LINE> <DEDENT> @property <NEW_LINE> def expired(self): <NEW_LINE> <INDENT> return self.expires_on <= timezone.now() <NEW_LINE> <DEDENT> @property <NEW_LINE> def md5(self): <NEW_LINE> <INDENT> if getattr(self, '_md5', None) is None: <NEW_LINE> <INDENT> md5 = hashlib.md5() <NEW_LINE> for chunk in self.file.chunks(): <NEW_LINE> <INDENT> md5.update(chunk) <NEW_LINE> <DEDENT> self._md5 = md5.hexdigest() <NEW_LINE> <DEDENT> return self._md5 <NEW_LINE> <DEDENT> def delete(self, delete_file=True, *args, **kwargs): <NEW_LINE> <INDENT> storage, path = self.file.storage, self.file.path <NEW_LINE> super(BaseChunkedUpload, self).delete(*args, **kwargs) <NEW_LINE> if delete_file: <NEW_LINE> <INDENT> storage.delete(path) <NEW_LINE> <DEDENT> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u'<%s - upload_id: %s - bytes: %s - status: %s>' % ( self.filename, self.upload_id, self.offset, self.status) <NEW_LINE> <DEDENT> def close_file(self): <NEW_LINE> <INDENT> file_ = self.file <NEW_LINE> while file_ is not None: <NEW_LINE> <INDENT> file_.close() <NEW_LINE> file_ = getattr(file_, 'file', None) <NEW_LINE> <DEDENT> <DEDENT> def append_chunk(self, chunk, chunk_size=None, save=True): <NEW_LINE> <INDENT> self.close_file() <NEW_LINE> self.file.open(mode='ab') <NEW_LINE> self.file.write(chunk.read()) <NEW_LINE> if chunk_size is not None: <NEW_LINE> <INDENT> self.offset += chunk_size <NEW_LINE> <DEDENT> elif hasattr(chunk, 'size'): <NEW_LINE> <INDENT> self.offset += chunk.size <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.offset = self.file.size <NEW_LINE> <DEDENT> self._md5 = None <NEW_LINE> if save: <NEW_LINE> <INDENT> self.save() <NEW_LINE> <DEDENT> self.close_file() <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> abstract = True | Base chunked upload model. This model is abstract (doesn't create a table
in the database).
Inherit from this model to implement your own. | 62599054ac7a0e7691f73a02 |
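The md5 property above hashes the stored file chunk by chunk rather than reading it whole; the same pattern standalone (helper name is illustrative):

import hashlib

def file_md5(path, chunk_size=64 * 1024):
    # Stream the file so arbitrarily large uploads never load fully into memory.
    digest = hashlib.md5()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()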
class TaskWithArgs(object): <NEW_LINE> <INDENT> argv = [] <NEW_LINE> kwargs = {} <NEW_LINE> arg_prompt = None <NEW_LINE> def os_path_split_asunder(self, path): <NEW_LINE> <INDENT> parts = [] <NEW_LINE> while True: <NEW_LINE> <INDENT> newpath, tail = os.path.split(path) <NEW_LINE> if newpath == path: <NEW_LINE> <INDENT> assert not tail <NEW_LINE> if path: <NEW_LINE> <INDENT> parts.append(path) <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> parts.append(tail) <NEW_LINE> path = newpath <NEW_LINE> <DEDENT> parts.reverse() <NEW_LINE> return parts <NEW_LINE> <DEDENT> def __init__(self, *deco_args, **deco_kwargs): <NEW_LINE> <INDENT> self.deco_args = deco_args <NEW_LINE> self.deco_kwargs = deco_kwargs <NEW_LINE> self._settings = None <NEW_LINE> global name_spaces <NEW_LINE> self.namespace = "::".join(name_spaces) <NEW_LINE> <DEDENT> def __call__(self, func): <NEW_LINE> <INDENT> depends = self.deco_kwargs.get('depends', []) <NEW_LINE> if self.namespace: <NEW_LINE> <INDENT> depends = [self.namespace + '::' + name for name in depends] <NEW_LINE> <DEDENT> private = self.deco_kwargs.get('private', False) <NEW_LINE> dependent_of = self.deco_kwargs.get('dependent_of', None) <NEW_LINE> task_help = self.deco_kwargs.get('help', None) <NEW_LINE> task_kwargs = self.deco_kwargs.get('kwargs', None) <NEW_LINE> arg_prompt = self.deco_kwargs.get('arg_prompt', None) <NEW_LINE> name_space = self.deco_kwargs.get('namespace', self.namespace) <NEW_LINE> configured = self.deco_kwargs.get('configured', 'required').lower() <NEW_LINE> if configured not in ['no', 'optional', 'required']: <NEW_LINE> <INDENT> configured = 'required' <NEW_LINE> <DEDENT> full_name = func.__name__ <NEW_LINE> if name_space: <NEW_LINE> <INDENT> full_name = name_space + '::' + func.__name__ <NEW_LINE> <DEDENT> def _wrap(*args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return func(*args, **kwargs) <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> exc_type, exc_value, exc_traceback = sys.exc_info() <NEW_LINE> tb = ''.join(traceback.format_exception(exc_type, exc_value, exc_traceback)) <NEW_LINE> error("{name} - ERROR: {err}\n{tb}".format(name=func.__name__, err=str(ex), tb=tb)) <NEW_LINE> return 1 <NEW_LINE> <DEDENT> <DEDENT> HerringTasks[full_name] = { 'task': _wrap, 'depends': depends, 'dependent_of': dependent_of, 'private': private, 'help': task_help, 'description': func.__doc__, 'namespace': name_space, 'fullname': full_name, 'name': func.__name__, 'kwargs': task_kwargs, 'arg_prompt': arg_prompt, 'configured': configured, } <NEW_LINE> return _wrap | Task decorator
This gathers info about the task and stores it into the
HerringApp.HerringTasks dictionary. The decorator does not run the
task. The task will be invoked via the HerringApp.HerringTasks
dictionary. | 625990546e29344779b01b6b |
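A usage sketch for the decorator above (task name, dependency, and help string are illustrative; assumes the module's HerringTasks registry and name_spaces globals are in scope):

@TaskWithArgs(depends=['build'], help='run the test suite')
def test():
    """Run unit tests."""
    print('testing...')

# Decoration only registers the task; it is invoked later via the registry:
HerringTasks['test']['task']()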
class CartAdd(View): <NEW_LINE> <INDENT> def post(self, request): <NEW_LINE> <INDENT> user = request.user <NEW_LINE> if not user.is_authenticated(): <NEW_LINE> <INDENT> return JsonResponse({'res': 0, 'msg': '请先登陆'}) <NEW_LINE> <DEDENT> count = request.POST.get('count') <NEW_LINE> sku_id = request.POST.get('sku_id') <NEW_LINE> if not all([count, sku_id]): <NEW_LINE> <INDENT> return JsonResponse({'res': 1, 'msg': '数据不完整'}) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> count = int(count) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return JsonResponse({'res': 2, 'msg': '商品数量发生错误'}) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> sku = GoodsSKU.objects.get(id=sku_id, status=1) <NEW_LINE> <DEDENT> except GoodsSKU.DoesNotExist: <NEW_LINE> <INDENT> return JsonResponse({'res': 3, 'msg': '商品不存在或已下架'}) <NEW_LINE> <DEDENT> conn = get_redis_connection('default') <NEW_LINE> cart_key = 'cart_%s' % user.id <NEW_LINE> sku_count = conn.hget(cart_key, sku_id) <NEW_LINE> if sku_count: <NEW_LINE> <INDENT> count += int(sku_count) <NEW_LINE> <DEDENT> if count > int(sku.stock): <NEW_LINE> <INDENT> return JsonResponse({'res': 4, 'msg': '超出商品库存'}) <NEW_LINE> <DEDENT> conn.hset(cart_key, sku_id, count) <NEW_LINE> kind = conn.hlen(cart_key) <NEW_LINE> context = {'res': 5, 'cart_count': kind, 'msg': '商品添加成功'} <NEW_LINE> return JsonResponse(data=context) | Add an item to the shopping cart | 625990543eb6a72ae038bb81
class vertex(topological_representation_item): <NEW_LINE> <INDENT> def __init__( self , inherited0__name , ): <NEW_LINE> <INDENT> topological_representation_item.__init__(self , inherited0__name , ) | Entity vertex definition.
| 62599054d6c5a102081e3640 |
class _InterProcessLock(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.lockfile = None <NEW_LINE> self.fname = name <NEW_LINE> <DEDENT> def acquire(self): <NEW_LINE> <INDENT> basedir = os.path.dirname(self.fname) <NEW_LINE> if not os.path.exists(basedir): <NEW_LINE> <INDENT> fileutils.ensure_tree(basedir) <NEW_LINE> LOG.info(_('Created lock path: %s'), basedir) <NEW_LINE> <DEDENT> self.lockfile = open(self.fname, 'w') <NEW_LINE> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.trylock() <NEW_LINE> LOG.debug(_('Got file lock "%s"'), self.fname) <NEW_LINE> return True <NEW_LINE> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> if e.errno in (errno.EACCES, errno.EAGAIN): <NEW_LINE> <INDENT> time.sleep(0.01) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise threading.ThreadError(_("Unable to acquire lock on" " `%(filename)s` due to" " %(exception)s") % { 'filename': self.fname, 'exception': e, }) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.acquire() <NEW_LINE> return self <NEW_LINE> <DEDENT> def release(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.unlock() <NEW_LINE> self.lockfile.close() <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> LOG.exception(_("Could not release the acquired lock `%s`"), self.fname) <NEW_LINE> <DEDENT> LOG.debug(_('Released file lock "%s"'), self.fname) <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> self.release() <NEW_LINE> <DEDENT> def trylock(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def unlock(self): <NEW_LINE> <INDENT> raise NotImplementedError() | Lock implementation which allows multiple locks, working around
issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does
not require any cleanup. Since the lock is always held on a file
descriptor rather than outside of the process, the lock gets dropped
automatically if the process crashes, even if __exit__ is not executed.
There are no guarantees regarding usage by multiple green threads in a
single process here. This lock works only between processes. Exclusive
access between local threads should be achieved using the semaphores
in the @synchronized decorator.
Note these locks are released when the descriptor is closed, so it's not
safe to close the file descriptor while another green thread holds the
lock. Just opening and closing the lock file can break synchronisation,
so lock files must be accessed only using this abstraction. | 625990543617ad0b5ee07669 |
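One concrete realization of the abstract trylock()/unlock() hooks above, using POSIX fcntl (assumption: Linux/Unix; a Windows variant would use msvcrt.locking instead; the subclass name is hypothetical):

import fcntl

class FcntlLock(_InterProcessLock):
    def trylock(self):
        # Raises IOError with EAGAIN/EACCES while another process holds the
        # lock, which acquire() above catches and retries on.
        fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)

    def unlock(self):
        fcntl.lockf(self.lockfile, fcntl.LOCK_UN)

# with FcntlLock('/var/lock/myapp.lock'):
#     ...critical section...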
class People(tornado.web.RequestHandler): <NEW_LINE> <INDENT> def initialize(self): <NEW_LINE> <INDENT> with open('people.json') as people_fp: <NEW_LINE> <INDENT> self.people = json.load(people_fp)['people'] <NEW_LINE> <DEDENT> <DEDENT> def get(self, *args, **kwargs): <NEW_LINE> <INDENT> self.set_header("Content-Type", 'application/json; charset="utf-8"') <NEW_LINE> self.write(json.dumps(self.people)) | Send the list of people to the client.
NOTE: It would be nice to flatten the data a bit. | 6259905423e79379d538da1d |
class PostgressDatabase(): <NEW_LINE> <INDENT> dataSourceClassName = 'org.postgresql.ds.PGSimpleDataSource' <NEW_LINE> connectionPort = 5432 <NEW_LINE> connectionUrl = 'jdbc:postgresql://0.0.0.0:%d/corda' <NEW_LINE> username = 'postgresuser' <NEW_LINE> password = 'secretpassword' <NEW_LINE> driver = 'postgresql-42.2.5.jar' <NEW_LINE> @classmethod <NEW_LINE> def copyDriver(cls, dir): <NEW_LINE> <INDENT> url = 'https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.5/%s' % cls.driver <NEW_LINE> wget.download(url, out=dir, bar=None) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def getMappedPort(cls, testcase): <NEW_LINE> <INDENT> return testcase.getPort() | PostgreSQL database static configuration. | 6259905423849d37ff8525e6 |
class PINN_TC(Elasticity2D): <NEW_LINE> <INDENT> def net_uv(self, x, y): <NEW_LINE> <INDENT> X = tf.concat([x, y], 1) <NEW_LINE> uv = self.neural_net(X,self.weights,self.biases) <NEW_LINE> u = x*uv[:, 0:1] <NEW_LINE> v = y*uv[:, 1:2] <NEW_LINE> return u, v | Class including (symmetry) boundary conditions for the thick cylinder problem | 62599054e64d504609df9e61 |
class OrcSystemException(OrcException): <NEW_LINE> <INDENT> code = OrcException.code + exc_grade(0x2, 0x1) <NEW_LINE> def __init__(self, code, info=None): <NEW_LINE> <INDENT> self.code += code <NEW_LINE> self.info = info or "System exception." | System Exception 01 | 62599054435de62698e9d325 |
@xnmt.require_dynet <NEW_LINE> class MomentumSGDTrainer(XnmtOptimizerDynet, Serializable): <NEW_LINE> <INDENT> yaml_tag = '!MomentumSGDTrainer' <NEW_LINE> @serializable_init <NEW_LINE> def __init__(self, e0: numbers.Real = 0.01, mom: numbers.Real = 0.9, skip_noisy: bool = False, rescale_grads: numbers.Real = 5.0) -> None: <NEW_LINE> <INDENT> super().__init__(optimizer=dy.MomentumSGDTrainer(ParamManager.global_collection(), e0, mom), skip_noisy=skip_noisy, rescale_grads=rescale_grads) | Stochastic gradient descent with momentum
This is a modified version of the SGD algorithm with momentum to stabilize the gradient trajectory.
Args:
e0: Initial learning rate
mom: Momentum
skip_noisy: keep track of a moving average and a moving standard deviation of the log of the gradient norm
values, and abort a step if the norm of the gradient exceeds four standard deviations of the
moving average. Reference: https://arxiv.org/pdf/1804.09849.pdf
rescale_grads: rescale gradients if the observed norm is larger than this given norm | 625990544e4d562566373929
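The update rule behind momentum SGD, sketched in plain NumPy (toy quadratic objective; the DyNet trainer above applies the same rule internally):

import numpy as np

e0, mom = 0.01, 0.9
theta = np.array([1.0, -2.0])       # parameters
velocity = np.zeros_like(theta)

def grad(t):                        # gradient of f(t) = 0.5 * ||t||^2
    return t

for _ in range(200):
    velocity = mom * velocity - e0 * grad(theta)  # accumulate momentum
    theta = theta + velocity                      # take the smoothed step
print(theta)                        # approaches the minimum at the origin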
class Box(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.coord = (None, None) <NEW_LINE> self.big = False <NEW_LINE> self.big_color = None <NEW_LINE> self.big_rad = 80 <NEW_LINE> self.med = False <NEW_LINE> self.med_color = None <NEW_LINE> self.med_rad = 50 <NEW_LINE> self.small = False <NEW_LINE> self.small_color = None <NEW_LINE> self.small_rad = 25 <NEW_LINE> self.rect_color = (0, 0, 0) <NEW_LINE> self.center = None <NEW_LINE> self.rect = None <NEW_LINE> self.side_length = 200 <NEW_LINE> self.line_width = 10 <NEW_LINE> <DEDENT> def init_rect(self, left, top): <NEW_LINE> <INDENT> self.rect = pygame.Rect(left, top, self.side_length, self.side_length) <NEW_LINE> self.center = (int(left + self.side_length / 2), int(top + self.side_length / 2)) <NEW_LINE> <DEDENT> def add_piece(self, size, color): <NEW_LINE> <INDENT> if size == Sizes.BIG: <NEW_LINE> <INDENT> self.big = True <NEW_LINE> self.big_color = color <NEW_LINE> <DEDENT> elif size == Sizes.MED: <NEW_LINE> <INDENT> self.med = True <NEW_LINE> self.med_color = color <NEW_LINE> <DEDENT> elif size == Sizes.SMALL: <NEW_LINE> <INDENT> self.small = True <NEW_LINE> self.small_color = color <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('Error: invalid piece size') <NEW_LINE> <DEDENT> <DEDENT> def draw_box(self, surface): <NEW_LINE> <INDENT> pygame.draw.rect(surface, self.rect_color, self.rect, self.line_width) <NEW_LINE> if self.big is True: <NEW_LINE> <INDENT> pygame.draw.circle(surface, self.big_color, self.center, self.big_rad, self.line_width) <NEW_LINE> <DEDENT> if self.med is True: <NEW_LINE> <INDENT> pygame.draw.circle(surface, self.med_color, self.center, self.med_rad, self.line_width) <NEW_LINE> <DEDENT> if self.small is True: <NEW_LINE> <INDENT> pygame.draw.circle(surface, self.small_color, self.center, self.small_rad, self.line_width) | Contains all relevant info for each box in board grid | 62599054f7d966606f749349 |
class ResetPassword(View): <NEW_LINE> <INDENT> def get(self, request): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def post(self, request): <NEW_LINE> <INDENT> pass | Reset a user's password.
**Workflow:**
1] User enters in new password, confirms password, and submits.
2] Application registers the new password and the User can progress with their brand new password.
**Required arguments:**
new_password, re_new_password
**Model(s):**
User
**Validation:**
1] Validates new_password, re_new_password should be match.
**Processing:**
Reset password for a user in Users record.
**Context:**
Success message OR Error message
**Form:**
ResetPassword Form
**Template:**
resetpassword.html | 625990544e4d56256637392a |
class DummyModel(HasStrictTraits): <NEW_LINE> <INDENT> name = Str() <NEW_LINE> children = List(This) | Dummy model with children. | 6259905491af0d3eaad3b34b |
class Collection(Sum): <NEW_LINE> <INDENT> def __init__(self, level=logging.NOTSET, default=[], op=operator.add): <NEW_LINE> <INDENT> Sum.__init__(self, level=level, default=default, op=op) <NEW_LINE> <DEDENT> def getvalue(self, record): <NEW_LINE> <INDENT> return [Sum.getvalue(self, record)] | A collection of record values. | 625990543c8af77a43b689d1
class Solution: <NEW_LINE> <INDENT> def findSubtree(self, root): <NEW_LINE> <INDENT> self.subtreeSum = sys.maxsize <NEW_LINE> self.result = None <NEW_LINE> self.helper(root) <NEW_LINE> return self.result <NEW_LINE> <DEDENT> def helper(self, root): <NEW_LINE> <INDENT> if root is None: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> left_sum = self.helper(root.left) <NEW_LINE> right_sum = self.helper(root.right) <NEW_LINE> if left_sum + right_sum + root.val <= self.subtreeSum: <NEW_LINE> <INDENT> self.subtreeSum = left_sum + right_sum + root.val <NEW_LINE> self.result = root <NEW_LINE> <DEDENT> return left_sum + right_sum + root.val | @param root: the root of binary tree
@return: the root of the minimum subtree | 6259905401c39578d7f141c9 |
class SizeCheckWrapper: <NEW_LINE> <INDENT> def __init__(self, rfile, maxlen): <NEW_LINE> <INDENT> self.rfile = rfile <NEW_LINE> self.maxlen = maxlen <NEW_LINE> self.bytes_read = 0 <NEW_LINE> <DEDENT> def _check_length(self): <NEW_LINE> <INDENT> if self.maxlen and self.bytes_read > self.maxlen: <NEW_LINE> <INDENT> raise errors.MaxSizeExceeded() <NEW_LINE> <DEDENT> <DEDENT> def read(self, size=None): <NEW_LINE> <INDENT> data = self.rfile.read(size) <NEW_LINE> self.bytes_read += len(data) <NEW_LINE> self._check_length() <NEW_LINE> return data <NEW_LINE> <DEDENT> def readline(self, size=None): <NEW_LINE> <INDENT> if size is not None: <NEW_LINE> <INDENT> data = self.rfile.readline(size) <NEW_LINE> self.bytes_read += len(data) <NEW_LINE> self._check_length() <NEW_LINE> return data <NEW_LINE> <DEDENT> res = [] <NEW_LINE> while True: <NEW_LINE> <INDENT> data = self.rfile.readline(256) <NEW_LINE> self.bytes_read += len(data) <NEW_LINE> self._check_length() <NEW_LINE> res.append(data) <NEW_LINE> if len(data) < 256 or data[-1:] == LF: <NEW_LINE> <INDENT> return EMPTY.join(res) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def readlines(self, sizehint=0): <NEW_LINE> <INDENT> total = 0 <NEW_LINE> lines = [] <NEW_LINE> line = self.readline(sizehint) <NEW_LINE> while line: <NEW_LINE> <INDENT> lines.append(line) <NEW_LINE> total += len(line) <NEW_LINE> if 0 < sizehint <= total: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> line = self.readline(sizehint) <NEW_LINE> <DEDENT> return lines <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.rfile.close() <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> data = next(self.rfile) <NEW_LINE> self.bytes_read += len(data) <NEW_LINE> self._check_length() <NEW_LINE> return data <NEW_LINE> <DEDENT> next = __next__ | Wraps a file-like object, raising MaxSizeExceeded if too large.
:param rfile: ``file`` of a limited size
:param int maxlen: maximum length of the file being read | 62599054b5575c28eb71375d |
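Usage sketch for the wrapper above: any readable file object can be capped (io.BytesIO stands in for a socket's rfile here; errors.MaxSizeExceeded comes from the surrounding module):

import io

rfile = SizeCheckWrapper(io.BytesIO(b'x' * 100), maxlen=10)
try:
    rfile.read(50)             # reads 50 bytes, then the length check fires
except Exception as exc:
    print(type(exc).__name__)  # MaxSizeExceeded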
class ConfigurationTest(unittest2.TestCase): <NEW_LINE> <INDENT> def test_defaults(self): <NEW_LINE> <INDENT> self.assertEqual('br-int', cfg.CONF.OVS.integration_bridge) <NEW_LINE> self.assertEqual('sqlite://', cfg.CONF.DATABASE.sql_connection) <NEW_LINE> self.assertEqual(-1, cfg.CONF.DATABASE.sql_max_retries) <NEW_LINE> self.assertEqual(2, cfg.CONF.DATABASE.reconnect_interval) <NEW_LINE> self.assertEqual(2, cfg.CONF.AGENT.polling_interval) <NEW_LINE> self.assertEqual('sudo', cfg.CONF.AGENT.root_helper) <NEW_LINE> self.assertEqual('127.0.0.1:6633', cfg.CONF.OVS.openflow_controller) <NEW_LINE> self.assertEqual('127.0.0.1:8080', cfg.CONF.OVS.openflow_rest_api) | Configuration file Tests | 62599054dd821e528d6da402 |
class WarningLog(OnceLogger): <NEW_LINE> <INDENT> def __init__(self, btlevels=10, btdefault=False, maxcount=1, *args, **kwargs): <NEW_LINE> <INDENT> OnceLogger.__init__(self, *args, **kwargs) <NEW_LINE> self.__btlevels = btlevels <NEW_LINE> self.__btdefault = btdefault <NEW_LINE> self.__maxcount = maxcount <NEW_LINE> self.__explanation_seen = False <NEW_LINE> <DEDENT> def __call__(self, msg, bt=None): <NEW_LINE> <INDENT> import traceback <NEW_LINE> if bt is None: <NEW_LINE> <INDENT> bt = self.__btdefault <NEW_LINE> <DEDENT> tb = traceback.extract_stack(limit=2) <NEW_LINE> msgid = repr(tb[-2]) <NEW_LINE> fullmsg = "WARNING: %s" % msg <NEW_LINE> if not self.__explanation_seen: <NEW_LINE> <INDENT> self.__explanation_seen = True <NEW_LINE> fullmsg += "\n * Please note: warnings are " + "printed only once, but underlying problem might " + "occur many times *" <NEW_LINE> <DEDENT> if bt and self.__btlevels > 0: <NEW_LINE> <INDENT> fullmsg += "Top-most backtrace:\n" <NEW_LINE> fullmsg += reduce(lambda x, y: x + "\t%s:%d in %s where '%s'\n" % y, traceback.extract_stack(limit=self.__btlevels), "") <NEW_LINE> <DEDENT> OnceLogger.__call__(self, msgid, fullmsg, self.__maxcount) <NEW_LINE> <DEDENT> def _set_max_count(self, value): <NEW_LINE> <INDENT> self.__maxcount = value <NEW_LINE> <DEDENT> maxcount = property(fget=lambda x:x.__maxcount, fset=_set_max_count) | Logging class for messages to be printed just once per each message
| 62599054b830903b9686ef0f |
class EnergyLargestClusterFilter(Filter): <NEW_LINE> <INDENT> def __init__(self, energy=0): <NEW_LINE> <INDENT> self.criteria = "BE_LC" <NEW_LINE> Filter.__init__(self, self.criteria) <NEW_LINE> self.energyF = EnergyFilter(energy=energy) <NEW_LINE> <DEDENT> def filter(self, docking, rms=2.0): <NEW_LINE> <INDENT> d = docking <NEW_LINE> cl = d.clusterer <NEW_LINE> clg = cl.clustering_dict[rms] <NEW_LINE> conf = cl.data[cl.argsort[0]] <NEW_LINE> e_val = conf.energy <NEW_LINE> if not self.energyF.filter(docking,rms): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> cl_lengths = [] <NEW_LINE> for cl in clg: cl_lengths.append(len(cl)) <NEW_LINE> LC_ind = cl_lengths.index(max(cl_lengths)) <NEW_LINE> largest_cluster = clg[LC_ind] <NEW_LINE> return conf in largest_cluster | class for object to screen docking results
based on the magnitude of the best energy and on whether that conformation is in the largest cluster
input: an AutoDock docking
output: whether the docking passes these criteria | 62599054596a897236129041 |
class AvgTimesList(ListBox): <NEW_LINE> <INDENT> def __init__(self, key): <NEW_LINE> <INDENT> super(AvgTimesList, self).__init__(SimpleListWalker([])) <NEW_LINE> self.key = key <NEW_LINE> <DEDENT> def add_data(self, data): <NEW_LINE> <INDENT> while len(self.body): <NEW_LINE> <INDENT> self.body.pop(0) <NEW_LINE> <DEDENT> self.body.append(Text(("stat-hdr", " Average Times: "), align=RIGHT)) <NEW_LINE> overall = data.get(self.key).get('', KPISet()) <NEW_LINE> recv = overall[KPISet.AVG_RESP_TIME] <NEW_LINE> recv -= overall[KPISet.AVG_CONN_TIME] <NEW_LINE> recv -= overall[KPISet.AVG_LATENCY] <NEW_LINE> self.body.append( Text(("stat-txt", "Full: %.3f" % overall[KPISet.AVG_RESP_TIME]), align=RIGHT)) <NEW_LINE> self.body.append( Text(("stat-txt", "Connect: %.3f" % overall[KPISet.AVG_CONN_TIME]), align=RIGHT)) <NEW_LINE> self.body.append( Text(("stat-txt", "Latency: %.3f" % overall[KPISet.AVG_LATENCY]), align=RIGHT)) <NEW_LINE> self.body.append(Text(("stat-txt", "~Receive: %.3f" % recv), align=RIGHT)) | Average times block
:type key: str | 62599054be383301e0254d1e |
class Astar: <NEW_LINE> <INDENT> def __init__(self, startPoint, endPoint, blockList, size): <NEW_LINE> <INDENT> self.__start=startPoint[:] <NEW_LINE> self.__end=endPoint[:] <NEW_LINE> self.__closelist = blockList[:] <NEW_LINE> self.__size=size[:] <NEW_LINE> self.__openlist = [] <NEW_LINE> self.shortestPath=[] <NEW_LINE> self.__cost=1 <NEW_LINE> self.__findpath=False <NEW_LINE> g=0 <NEW_LINE> h=abs(self.__start[0]-self.__end[0])+abs(self.__start[1]-self.__end[1]) <NEW_LINE> f=g+h <NEW_LINE> parentPoint=[-1,-1] <NEW_LINE> cur_node=[startPoint]+[g]+[h]+[f]+[parentPoint] <NEW_LINE> self.__minNode=cur_node <NEW_LINE> self.__openlist.append(cur_node) <NEW_LINE> <DEDENT> def __AddOpen(self, expanded_point): <NEW_LINE> <INDENT> g=self.__minNode[1]+self.__cost <NEW_LINE> h=abs(expanded_point[0]-self.__end[0])+abs(expanded_point[1]-self.__end[1]) <NEW_LINE> f=g+h <NEW_LINE> cur_node=[expanded_point]+[g]+[h]+[f]+[self.__minNode[0]] <NEW_LINE> for node in self.__openlist: <NEW_LINE> <INDENT> if cur_node[0]==node[0] and cur_node[3]<node[3]: <NEW_LINE> <INDENT> node=cur_node <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> else: self.__openlist.append(cur_node) <NEW_LINE> <DEDENT> def __RemoveMin(self): <NEW_LINE> <INDENT> self.__closelist.append(self.__minNode) <NEW_LINE> self.__openlist.remove(self.__minNode) <NEW_LINE> if self.__openlist!=[]: <NEW_LINE> <INDENT> self.__minNode=[[0,0]]+[0]+[sys.maxsize]+[sys.maxsize]+[[-1,-1]] <NEW_LINE> for node in self.__openlist: <NEW_LINE> <INDENT> if node[3]<self.__minNode[3]: <NEW_LINE> <INDENT> self.__minNode=node <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def __shortestPath(self): <NEW_LINE> <INDENT> cur_index=-1 <NEW_LINE> self.shortestPath.insert(0,self.__closelist[cur_index][0]) <NEW_LINE> parent=self.__closelist[cur_index][4] <NEW_LINE> while parent!=[-1,-1]: <NEW_LINE> <INDENT> if self.__closelist[cur_index][0]==parent: <NEW_LINE> <INDENT> self.shortestPath.insert(0,parent) <NEW_LINE> parent=self.__closelist[cur_index][4] <NEW_LINE> <DEDENT> cur_index-=1 <NEW_LINE> <DEDENT> <DEDENT> def nextStep(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> if self.__minNode[0]==self.__end: <NEW_LINE> <INDENT> self.__RemoveMin() <NEW_LINE> self.__shortestPath() <NEW_LINE> if len(self.shortestPath)==1: <NEW_LINE> <INDENT> return self.__end <NEW_LINE> <DEDENT> else: return self.shortestPath[1] <NEW_LINE> <DEDENT> elif self.__openlist!=[]: <NEW_LINE> <INDENT> direct4=[[0,-1],[0,1],[-1,0],[1,0]] <NEW_LINE> for direction in direct4: <NEW_LINE> <INDENT> point_x=self.__minNode[0][0]+direction[0] <NEW_LINE> point_y=self.__minNode[0][1]+direction[1] <NEW_LINE> if 0<=point_x<self.__size[0] and 0<=point_y<self.__size[1]: <NEW_LINE> <INDENT> point=[point_x,point_y] <NEW_LINE> for node in self.__closelist: <NEW_LINE> <INDENT> if point==node[0] or point==node: break <NEW_LINE> <DEDENT> else: self.__AddOpen(point) <NEW_LINE> <DEDENT> <DEDENT> self.__RemoveMin() <NEW_LINE> <DEDENT> else: return | Perform A* algorithm to find the shortest path.
Finds the shortest path from the __start point to the __end point.
blockList contains the points that cannot be traversed;
size is the window size | 6259905416aa5153ce401a09
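A compact, self-contained A* on a 4-connected grid with the same Manhattan heuristic as the class above, using a heapq priority queue instead of a linear scan of the open list (grid and obstacles are illustrative):

import heapq

def astar(start, goal, blocked, size):
    h = lambda p: abs(p[0] - goal[0]) + abs(p[1] - goal[1])
    open_heap = [(h(start), 0, start)]      # entries are (f, g, position)
    parents, g_cost = {start: None}, {start: 0}
    while open_heap:
        _, g, cur = heapq.heappop(open_heap)
        if cur == goal:                     # rebuild the path via parents
            path = []
            while cur is not None:
                path.append(cur)
                cur = parents[cur]
            return path[::-1]
        for dx, dy in ((0, -1), (0, 1), (-1, 0), (1, 0)):
            nxt = (cur[0] + dx, cur[1] + dy)
            if not (0 <= nxt[0] < size[0] and 0 <= nxt[1] < size[1]):
                continue                    # off the grid
            if nxt in blocked or (nxt in g_cost and g_cost[nxt] <= g + 1):
                continue                    # obstacle, or no improvement
            g_cost[nxt], parents[nxt] = g + 1, cur
            heapq.heappush(open_heap, (g + 1 + h(nxt), g + 1, nxt))
    return None                             # goal unreachable

print(astar((0, 0), (3, 3), {(1, 0), (1, 1), (1, 2)}, (4, 4)))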
class _Appsrc(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._signals = _Signals() <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.prepare(None, None, None, None) <NEW_LINE> <DEDENT> def prepare(self, caps, need_data, enough_data, seek_data): <NEW_LINE> <INDENT> self._signals.clear() <NEW_LINE> self._source = None <NEW_LINE> self._caps = caps <NEW_LINE> self._need_data_callback = need_data <NEW_LINE> self._seek_data_callback = seek_data <NEW_LINE> self._enough_data_callback = enough_data <NEW_LINE> <DEDENT> def configure(self, source): <NEW_LINE> <INDENT> source.set_property('caps', self._caps) <NEW_LINE> source.set_property('format', b'time') <NEW_LINE> source.set_property('stream-type', b'seekable') <NEW_LINE> source.set_property('max-bytes', 1 * MB) <NEW_LINE> source.set_property('min-percent', 50) <NEW_LINE> if self._need_data_callback: <NEW_LINE> <INDENT> self._signals.connect(source, 'need-data', self._on_signal, self._need_data_callback) <NEW_LINE> <DEDENT> if self._seek_data_callback: <NEW_LINE> <INDENT> self._signals.connect(source, 'seek-data', self._on_signal, self._seek_data_callback) <NEW_LINE> <DEDENT> if self._enough_data_callback: <NEW_LINE> <INDENT> self._signals.connect(source, 'enough-data', self._on_signal, None, self._enough_data_callback) <NEW_LINE> <DEDENT> self._source = source <NEW_LINE> <DEDENT> def push(self, buffer_): <NEW_LINE> <INDENT> return self._source.emit('push-buffer', buffer_) == gst.FLOW_OK <NEW_LINE> <DEDENT> def end_of_stream(self): <NEW_LINE> <INDENT> self._source.emit('end-of-stream') <NEW_LINE> <DEDENT> def _on_signal(self, element, clocktime, func): <NEW_LINE> <INDENT> if clocktime is None: <NEW_LINE> <INDENT> func() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> func(utils.clocktime_to_millisecond(clocktime)) <NEW_LINE> <DEDENT> return True | Helper class for dealing with appsrc based playback. | 62599054a17c0f6771d5d633 |
class KerasClassifier(BaseWrapper): <NEW_LINE> <INDENT> def fit(self, x, y, **kwargs): <NEW_LINE> <INDENT> y = np.array(y) <NEW_LINE> if len(y.shape) != 1: <NEW_LINE> <INDENT> self.classes_ = np.arange(y.shape[1]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.classes_ = np.unique(y) <NEW_LINE> y = np.searchsorted(self.classes_, y) <NEW_LINE> <DEDENT> self.n_classes_ = len(self.classes_) <NEW_LINE> return super(KerasClassifier, self).fit(x, y, **kwargs) <NEW_LINE> <DEDENT> def predict(self, x, **kwargs): <NEW_LINE> <INDENT> kwargs = self.filter_sk_params(Sequential.predict_classes, kwargs) <NEW_LINE> classes = self.model.predict_classes(x, **kwargs) <NEW_LINE> return self.classes_[classes] <NEW_LINE> <DEDENT> def predict_proba(self, x, **kwargs): <NEW_LINE> <INDENT> kwargs = self.filter_sk_params(Sequential.predict_proba, kwargs) <NEW_LINE> probs = self.model.predict_proba(x, **kwargs) <NEW_LINE> if probs.shape[1] == 1: <NEW_LINE> <INDENT> probs = np.hstack([1 - probs, probs]) <NEW_LINE> <DEDENT> return probs <NEW_LINE> <DEDENT> def score(self, x, y, **kwargs): <NEW_LINE> <INDENT> y = np.searchsorted(self.classes_, y) <NEW_LINE> kwargs = self.filter_sk_params(Sequential.evaluate, kwargs) <NEW_LINE> loss_name = self.model.loss <NEW_LINE> if hasattr(loss_name, '__name__'): <NEW_LINE> <INDENT> loss_name = loss_name.__name__ <NEW_LINE> <DEDENT> if loss_name == 'categorical_crossentropy' and len(y.shape) != 2: <NEW_LINE> <INDENT> y = to_categorical(y) <NEW_LINE> <DEDENT> outputs = self.model.evaluate(x, y, **kwargs) <NEW_LINE> if not isinstance(outputs, list): <NEW_LINE> <INDENT> outputs = [outputs] <NEW_LINE> <DEDENT> for name, output in zip(self.model.metrics_names, outputs): <NEW_LINE> <INDENT> if name == 'acc': <NEW_LINE> <INDENT> return output <NEW_LINE> <DEDENT> <DEDENT> raise ValueError('The model is not configured to compute accuracy. ' 'You should pass `metrics=["accuracy"]` to ' 'the `model.compile()` method.') | Implementation of the scikit-learn classifier API for Keras.
| 6259905415baa723494634b6 |
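A usage sketch for the wrapper above (assumes keras and scikit-learn are installed; the build function and its layer sizes are illustrative, not from the source — note metrics=['accuracy'] is required by score()):

from keras.models import Sequential
from keras.layers import Dense

def build_model():
    model = Sequential([Dense(8, activation='relu', input_shape=(4,)),
                        Dense(3, activation='softmax')])
    model.compile(loss='categorical_crossentropy', optimizer='sgd',
                  metrics=['accuracy'])
    return model

clf = KerasClassifier(build_fn=build_model, epochs=5, verbose=0)
# clf.fit(X_train, y_train); clf.predict(X_test)   # standard estimator calls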