code: string (length 4 to 4.48k)
docstring: string (length 1 to 6.45k)
_id: string (length 24)
class Translate: <NEW_LINE> <INDENT> def __init__(self, src_lang: str, target_lang: str) -> None: <NEW_LINE> <INDENT> self.model = None <NEW_LINE> self.load_model(src_lang, target_lang) <NEW_LINE> <DEDENT> def load_model(self, src_lang: str, target_lang: str): <NEW_LINE> <INDENT> if src_lang == "th" and target_lang == "en": <NEW_LINE> <INDENT> from pythainlp.translate.en_th import ThEnTranslator <NEW_LINE> self.model = ThEnTranslator() <NEW_LINE> <DEDENT> elif src_lang == "en" and target_lang == "th": <NEW_LINE> <INDENT> from pythainlp.translate.en_th import EnThTranslator <NEW_LINE> self.model = EnThTranslator() <NEW_LINE> <DEDENT> elif src_lang == "th" and target_lang == "zh": <NEW_LINE> <INDENT> from pythainlp.translate.zh_th import ThZhTranslator <NEW_LINE> self.model = ThZhTranslator() <NEW_LINE> <DEDENT> elif src_lang == "zh" and target_lang == "th": <NEW_LINE> <INDENT> from pythainlp.translate.zh_th import ZhThTranslator <NEW_LINE> self.model = ZhThTranslator() <NEW_LINE> <DEDENT> elif src_lang == "th" and target_lang == "fr": <NEW_LINE> <INDENT> from pythainlp.translate.th_fr import ThFrTranslator <NEW_LINE> self.model = ThFrTranslator() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Not support language!") <NEW_LINE> <DEDENT> <DEDENT> def translate(self, text) -> str: <NEW_LINE> <INDENT> return self.model.translate(text)
Machine Translation :param str src_lang: source language :param str target_lang: target language **Options for source & target language** * *th* - *en* - Thai to English * *en* - *th* - English to Thai * *th* - *zh* - Thai to Chinese * *zh* - *th* - Chinese to Thai * *th* - *fr* - Thai to French :Example: Translate text from Thai to English:: from pythainlp.translate import Translate th2en = Translate('th', 'en') th2en.translate("ฉันรักแมว") # output: I love cat.
62599031be8e80087fbc0124
class LazyProperty(object): <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> self.func = func <NEW_LINE> <DEDENT> def __get__(self, instance, owner): <NEW_LINE> <INDENT> if instance is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = self.func(instance) <NEW_LINE> setattr(instance, self.func.__name__, value) <NEW_LINE> return value
Lazy initialization of an attribute (属性延迟初始化).
625990318c3a8732951f7600
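A minimal usage sketch of the LazyProperty descriptor above; the Expensive class and its data method are hypothetical, added only to show how the first access computes and caches the value.

class Expensive:  # hypothetical class used only for illustration
    @LazyProperty
    def data(self):
        print("computing...")
        return 42

obj = Expensive()
print(obj.data)  # prints "computing..." then 42; __get__ computes and caches the value on the instance
print(obj.data)  # prints 42 only; the instance attribute now shadows the non-data descriptor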
class SermonList(APIView): <NEW_LINE> <INDENT> def get(self, request, format=None): <NEW_LINE> <INDENT> sermons = Sermon.objects.all() <NEW_LINE> serializer = SermonSerializer(sermons, many=True) <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> permission_classes = (permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly,)
List all sermons, or create a new sermon.
625990318a43f66fc4bf322f
class ConfigSampleColumn(Config): <NEW_LINE> <INDENT> sample_column = ParamCreator.create_int( help_string="what column to sample by", )
Configuration options for sampling
6259903156b00c62f0fb396a
class NoteLine(models.Model): <NEW_LINE> <INDENT> note = models.CharField(max_length=400) <NEW_LINE> is_permanent = models.BooleanField(default=False) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "%s" % self.note <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = "Quoted Order Note"
Model to handle storage of note line.
625990315e10d32532ce4158
class DcmStack(NiftiGeneratorBase): <NEW_LINE> <INDENT> input_spec = DcmStackInputSpec <NEW_LINE> output_spec = DcmStackOutputSpec <NEW_LINE> def _get_filelist(self, trait_input): <NEW_LINE> <INDENT> if isinstance(trait_input, six.string_types): <NEW_LINE> <INDENT> if path.isdir(trait_input): <NEW_LINE> <INDENT> return glob(path.join(trait_input, '*.dcm')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return glob(trait_input) <NEW_LINE> <DEDENT> <DEDENT> return trait_input <NEW_LINE> <DEDENT> def _run_interface(self, runtime): <NEW_LINE> <INDENT> src_paths = self._get_filelist(self.inputs.dicom_files) <NEW_LINE> include_regexes = dcmstack.default_key_incl_res <NEW_LINE> if isdefined(self.inputs.include_regexes): <NEW_LINE> <INDENT> include_regexes += self.inputs.include_regexes <NEW_LINE> <DEDENT> exclude_regexes = dcmstack.default_key_excl_res <NEW_LINE> if isdefined(self.inputs.exclude_regexes): <NEW_LINE> <INDENT> exclude_regexes += self.inputs.exclude_regexes <NEW_LINE> <DEDENT> meta_filter = dcmstack.make_key_regex_filter(exclude_regexes, include_regexes) <NEW_LINE> stack = dcmstack.DicomStack(meta_filter=meta_filter) <NEW_LINE> for src_path in src_paths: <NEW_LINE> <INDENT> if not imghdr.what(src_path)=="gif": <NEW_LINE> <INDENT> src_dcm = dicom.read_file(src_path, force=self.inputs.force_read) <NEW_LINE> stack.add_dcm(src_dcm) <NEW_LINE> <DEDENT> <DEDENT> nii = stack.to_nifti(embed_meta=True) <NEW_LINE> nw = NiftiWrapper(nii) <NEW_LINE> self.out_path = self._get_out_path(nw.meta_ext.get_class_dict(('global', 'const'))) <NEW_LINE> if not self.inputs.embed_meta: <NEW_LINE> <INDENT> nw.remove_extension() <NEW_LINE> <DEDENT> nb.save(nii, self.out_path) <NEW_LINE> return runtime <NEW_LINE> <DEDENT> def _list_outputs(self): <NEW_LINE> <INDENT> outputs = self._outputs().get() <NEW_LINE> outputs["out_file"] = self.out_path <NEW_LINE> return outputs
Create one Nifti file from a set of DICOM files. Can optionally embed meta data. Example ------- >>> from nipype.interfaces.dcmstack import DcmStack >>> stacker = DcmStack() >>> stacker.inputs.dicom_files = 'path/to/series/' >>> result = stacker.run() # doctest: +SKIP >>> result.outputs.out_file # doctest: +SKIP '/path/to/cwd/sequence.nii.gz'
62599031d53ae8145f91950d
class Game: <NEW_LINE> <INDENT> def __init__(self, caption: str = "Game", size: ('x', 'y') = (600, 600), clear_screen: bool = True): <NEW_LINE> <INDENT> os.environ['SDL_VIDEO_WINDOW_POS'] = "15,30" <NEW_LINE> self.size = size <NEW_LINE> self.clear_screen = clear_screen <NEW_LINE> self.running = False <NEW_LINE> pygame.display.init() <NEW_LINE> pygame.display.set_caption(caption) <NEW_LINE> self.screen = pygame.display.set_mode(size=self.size) <NEW_LINE> <DEDENT> def kbin(self, code: str, key: "pygame constant") -> None: <NEW_LINE> <INDENT> print("Overwrite kbin") <NEW_LINE> <DEDENT> def update(self) -> None: <NEW_LINE> <INDENT> print("Overwrite update") <NEW_LINE> <DEDENT> def play(self) -> None: <NEW_LINE> <INDENT> self.running = True <NEW_LINE> while self.running: <NEW_LINE> <INDENT> for event in pygame.event.get(): <NEW_LINE> <INDENT> if event.type == pygame.QUIT: <NEW_LINE> <INDENT> self.running = False <NEW_LINE> <DEDENT> elif event.type == pygame.KEYDOWN: <NEW_LINE> <INDENT> self.kbin(event.unicode, event.key) <NEW_LINE> <DEDENT> <DEDENT> if self.clear_screen: self.screen.fill((0, 0, 0)) <NEW_LINE> self.update() <NEW_LINE> pygame.display.update()
A wrapper to make pygame games more easily. inherit this class and overwrite kbin and update fns
625990313eb6a72ae038b711
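A hedged sketch of subclassing the Game wrapper above, as its docstring suggests; it assumes pygame is installed and that the os/pygame imports of the original module are in scope. MyGame and the circle it draws are made up for illustration, and running it opens a window that blocks until closed.

class MyGame(Game):  # hypothetical subclass for illustration
    def kbin(self, code, key):
        print("key pressed:", code)

    def update(self):
        # draw a red circle each frame; the base loop clears the screen and flips the display
        pygame.draw.circle(self.screen, (255, 0, 0), (300, 300), 40)

if __name__ == "__main__":
    MyGame(caption="Demo").play()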
class Graph(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.vertices = [] <NEW_LINE> self.edges = [] <NEW_LINE> self.vertex_map = {} <NEW_LINE> <DEDENT> def new_vertex(self): <NEW_LINE> <INDENT> vertex = Vertex(len(self.vertices)) <NEW_LINE> self.vertices.append(vertex) <NEW_LINE> return vertex <NEW_LINE> <DEDENT> def get_vertex(self, key): <NEW_LINE> <INDENT> if key in self.vertex_map: <NEW_LINE> <INDENT> return self.vertex_map[key] <NEW_LINE> <DEDENT> vertex = self.new_vertex() <NEW_LINE> self.vertex_map[key] = vertex <NEW_LINE> return vertex <NEW_LINE> <DEDENT> def add_edge(self, source, target): <NEW_LINE> <INDENT> edge = Edge(len(self.edges)) <NEW_LINE> self.edges.append(edge) <NEW_LINE> source.out_edges.append(edge.idx) <NEW_LINE> target.in_edges.append(edge.idx) <NEW_LINE> edge.source = source.idx <NEW_LINE> edge.target = target.idx <NEW_LINE> return edge <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> return { "node": [v.to_dict() for v in self.vertices], "edge": [e.to_dict() for e in self.edges] }
A directed graph that can easily be JSON serialized for visualization. When serializing, it generates the following fields: edge: The list of all serialized Edge instances. node: The list of all serialized Vertex instances.
62599031d4950a0f3b111693
class NotInTeam(LaunchpadFault): <NEW_LINE> <INDENT> error_code = 250 <NEW_LINE> msg_template = '%(person_name)s is not a member of %(team_name)s.' <NEW_LINE> def __init__(self, person_name, team_name): <NEW_LINE> <INDENT> LaunchpadFault.__init__( self, person_name=person_name, team_name=team_name)
Raised when a person needs to be a member of a team, but is not. In particular, this is used when a user tries to register a branch as being owned by a team that they themselves are not a member of.
62599031d6c5a102081e31d1
class PortBinding(model_base.BASEV2): <NEW_LINE> <INDENT> __tablename__ = 'ml2_port_bindings' <NEW_LINE> port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id', ondelete="CASCADE"), primary_key=True) <NEW_LINE> host = sa.Column(sa.String(255), nullable=False, default='', server_default='') <NEW_LINE> vnic_type = sa.Column(sa.String(64), nullable=False, default=portbindings.VNIC_NORMAL, server_default=portbindings.VNIC_NORMAL) <NEW_LINE> profile = sa.Column(sa.String(BINDING_PROFILE_LEN), nullable=False, default='', server_default='') <NEW_LINE> vif_type = sa.Column(sa.String(64), nullable=False) <NEW_LINE> vif_details = sa.Column(sa.String(4095), nullable=False, default='', server_default='') <NEW_LINE> port = orm.relationship( models_v2.Port, backref=orm.backref("port_binding", lazy='joined', uselist=False, cascade='delete'))
Represent binding-related state of a port. A port binding stores the port attributes required for the portbindings extension, as well as internal ml2 state such as which MechanismDriver and which segment are used by the port binding.
6259903115baa72349463044
class NIPSDQNHead(chainer.ChainList): <NEW_LINE> <INDENT> def __init__(self, n_input_channels=4, n_output_channels=256, activation=F.relu, bias=0.1): <NEW_LINE> <INDENT> self.n_input_channels = n_input_channels <NEW_LINE> self.activation = activation <NEW_LINE> self.n_output_channels = n_output_channels <NEW_LINE> layers = [ L.Convolution2D(n_input_channels, 16, 8, stride=4, bias=bias), L.Convolution2D(16, 32, 4, stride=2, bias=bias), L.Linear(2592, n_output_channels, bias=bias), ] <NEW_LINE> super(NIPSDQNHead, self).__init__(*layers) <NEW_LINE> <DEDENT> def __call__(self, state): <NEW_LINE> <INDENT> h = state <NEW_LINE> for layer in self: <NEW_LINE> <INDENT> h = self.activation(layer(h)) <NEW_LINE> <DEDENT> return h
DQN's head (NIPS workshop version)
62599031c432627299fa40a1
class GradientDescentOptimizer(Optimizer): <NEW_LINE> <INDENT> def __init__(self, objective, *vars, minimize=True, eta=1.0, constraints={}, **kwargs): <NEW_LINE> <INDENT> super().__init__(objective, *vars, minimize=minimize, constraints=constraints, **kwargs) <NEW_LINE> self.eta = eta <NEW_LINE> <DEDENT> def grad_steps(self): <NEW_LINE> <INDENT> return tuple(self.eta*x for x in self.grads)
x <-- x - eta*grad(f)(x)
6259903171ff763f4b5e8843
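The docstring above is just the update rule; since the Optimizer base class and self.grads are not shown, here is a minimal numeric sketch of that rule on f(x) = x**2 (whose gradient is 2x), independent of the class.

eta = 0.1
x = 5.0
for _ in range(100):
    x = x - eta * (2 * x)  # x <- x - eta * grad(f)(x), with grad(f)(x) = 2x for f(x) = x**2
print(x)  # approaches 0.0, the minimizer of x**2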
class ListPort(quantumv20.ListCommand): <NEW_LINE> <INDENT> resource = 'port' <NEW_LINE> log = logging.getLogger(__name__ + '.ListPort') <NEW_LINE> _formatters = {'fixed_ips': _format_fixed_ips, } <NEW_LINE> list_columns = ['id', 'name', 'mac_address', 'fixed_ips'] <NEW_LINE> pagination_support = True <NEW_LINE> sorting_support = True
List ports that belong to a given tenant.
62599031be8e80087fbc0128
class Usuario(ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ('username', 'first_name', 'last_name', 'email')
Class that represents the Django User model. @type: ModelForm
6259903130c21e258be998b8
class Solution: <NEW_LINE> <INDENT> def postorderTraversal(self, root): <NEW_LINE> <INDENT> results = [] <NEW_LINE> self.traverse(root, results) <NEW_LINE> return results <NEW_LINE> <DEDENT> def traverse(self, root, results): <NEW_LINE> <INDENT> if root is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.traverse(root.left, results) <NEW_LINE> self.traverse(root.right, results) <NEW_LINE> results.append(root.val)
@param root: A Tree @return: Postorder in ArrayList which contains node values.
625990311d351010ab8f4bc5
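A small sketch exercising the postorder traversal above; the TreeNode helper is hypothetical, matching the val/left/right attributes the code expects.

class TreeNode:  # hypothetical helper matching the attributes used by Solution
    def __init__(self, val, left=None, right=None):
        self.val, self.left, self.right = val, left, right

root = TreeNode(1, TreeNode(2), TreeNode(3))
print(Solution().postorderTraversal(root))  # [2, 3, 1]: left subtree, right subtree, then root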
class CourseTeamMembership(models.Model): <NEW_LINE> <INDENT> class Meta(object): <NEW_LINE> <INDENT> unique_together = (('user', 'team'),) <NEW_LINE> <DEDENT> user = models.ForeignKey(User) <NEW_LINE> team = models.ForeignKey(CourseTeam, related_name='membership') <NEW_LINE> date_joined = models.DateTimeField(auto_now_add=True) <NEW_LINE> last_activity_at = models.DateTimeField() <NEW_LINE> immutable_fields = ('user', 'team', 'date_joined') <NEW_LINE> def __setattr__(self, name, value): <NEW_LINE> <INDENT> if name in self.immutable_fields: <NEW_LINE> <INDENT> current_value = getattr(self, name, None) <NEW_LINE> if current_value is not None: <NEW_LINE> <INDENT> raise ImmutableMembershipFieldException( "Field %r shouldn't change from %r to %r" % (name, current_value, value) ) <NEW_LINE> <DEDENT> <DEDENT> super(CourseTeamMembership, self).__setattr__(name, value) <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> should_reset_team_size = False <NEW_LINE> if self.pk is None: <NEW_LINE> <INDENT> should_reset_team_size = True <NEW_LINE> <DEDENT> if not self.last_activity_at: <NEW_LINE> <INDENT> self.last_activity_at = datetime.utcnow().replace(tzinfo=pytz.utc) <NEW_LINE> <DEDENT> super(CourseTeamMembership, self).save(*args, **kwargs) <NEW_LINE> if should_reset_team_size: <NEW_LINE> <INDENT> self.team.reset_team_size() <NEW_LINE> <DEDENT> <DEDENT> def delete(self, *args, **kwargs): <NEW_LINE> <INDENT> super(CourseTeamMembership, self).delete(*args, **kwargs) <NEW_LINE> self.team.reset_team_size() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_memberships(cls, username=None, course_ids=None, team_id=None): <NEW_LINE> <INDENT> queryset = cls.objects.all() <NEW_LINE> if username is not None: <NEW_LINE> <INDENT> queryset = queryset.filter(user__username=username) <NEW_LINE> <DEDENT> if course_ids is not None: <NEW_LINE> <INDENT> queryset = queryset.filter(team__course_id__in=course_ids) <NEW_LINE> <DEDENT> if team_id is not None: <NEW_LINE> <INDENT> queryset = queryset.filter(team__team_id=team_id) <NEW_LINE> <DEDENT> return queryset <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def user_in_team_for_course(cls, user, course_id): <NEW_LINE> <INDENT> return cls.objects.filter(user=user, team__course_id=course_id).exists() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def update_last_activity(cls, user, discussion_topic_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> membership = cls.objects.get(user=user, team__discussion_topic_id=discussion_topic_id) <NEW_LINE> <DEDENT> except ObjectDoesNotExist: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> now = datetime.utcnow().replace(tzinfo=pytz.utc) <NEW_LINE> membership.last_activity_at = now <NEW_LINE> membership.team.last_activity_at = now <NEW_LINE> membership.team.save() <NEW_LINE> membership.save() <NEW_LINE> emit_team_event('edx.team.activity_updated', membership.team.course_id, { 'team_id': membership.team_id, })
This model represents the membership of a single user in a single team.
625990318c3a8732951f7605
class ZorroT6(Structure): <NEW_LINE> <INDENT> _fields_ = [('time', c_double), ('fHigh', c_float), ('fLow', c_float), ('fOpen', c_float), ('fClose', c_float), ('fVal', c_float), ('fVol', c_float)]
struct { DATE time; // timestamp of the end of the tick in UTC, OLE date/time format (double float) float fHigh,fLow; float fOpen,fClose; float fVal,fVol; // additional data, like ask-bid spread, volume etc. }
6259903156b00c62f0fb396e
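A usage sketch of the ZorroT6 ctypes Structure above, assuming the usual `from ctypes import Structure, c_double, c_float` imports of the original module; the field values are arbitrary.

import ctypes

t6 = ZorroT6(time=44197.5, fOpen=1.22, fClose=1.23)  # unspecified fields default to 0.0
print(t6.fClose)                # c_float fields read back as Python floats (~1.23)
print(ctypes.sizeof(ZorroT6))   # 32 bytes: one 8-byte double plus six 4-byte floats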
class PyramidAdd(function_node.FunctionNode): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.rhs_ch = None <NEW_LINE> <DEDENT> def check_type_forward(self, in_types): <NEW_LINE> <INDENT> type_check.expect(in_types.size() == 2) <NEW_LINE> lhs = in_types[0] <NEW_LINE> rhs = in_types[1] <NEW_LINE> type_check.expect( lhs.ndim == 4, rhs.ndim == 4, lhs.shape[0] == rhs.shape[0], lhs.shape[1] >= rhs.shape[1], lhs.shape[2] == rhs.shape[2], lhs.shape[3] == rhs.shape[3], ) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> lhs, rhs = x[:2] <NEW_LINE> self.rhs_ch = rhs.shape[1] <NEW_LINE> if lhs.shape[1] > rhs.shape[1]: <NEW_LINE> <INDENT> lhs[:, :self.rhs_ch, :, :] += rhs <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lhs += rhs <NEW_LINE> <DEDENT> return lhs, <NEW_LINE> <DEDENT> def backward(self, indexes, gy): <NEW_LINE> <INDENT> return gy[0], gy[0][:, :self.rhs_ch, :, :]
Adds arrays with different channel counts. This function is not commutative; lhs and rhs play different roles. lhs is h and rhs is x, and h.shape[1] must always be equal to or larger than x.shape[1]. The output channel count is always h.shape[1]; when x.shape[1] is smaller than h.shape[1], x is virtually padded with zeros.
6259903191af0d3eaad3aeda
class ConfigProxy: <NEW_LINE> <INDENT> def __init__(self, cfg, user): <NEW_LINE> <INDENT> self.base = cfg <NEW_LINE> self.user = user <NEW_LINE> self.filename = None <NEW_LINE> if user is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.cfg = Config() <NEW_LINE> path = os.path.split(self.base.filename)[0] + '/users/%s.conf'%user <NEW_LINE> if not os.path.exists(path): <NEW_LINE> <INDENT> open(path, 'w').close() <NEW_LINE> <DEDENT> self.filename = path <NEW_LINE> self.cfg.load(path) <NEW_LINE> self.save = self.cfg.save <NEW_LINE> self.add_section = self.cfg.add_section <NEW_LINE> self.has_section = self.cfg.has_section <NEW_LINE> self.remove_section = self.cfg.remove_section <NEW_LINE> <DEDENT> def get(self, section, val=None, default=None): <NEW_LINE> <INDENT> if self.user is not None and self.cfg.has_option(section, val): <NEW_LINE> <INDENT> return self.cfg.get(section, val) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.base.get(section, val, default) <NEW_LINE> <DEDENT> <DEDENT> def set(self, section, val, value=None): <NEW_LINE> <INDENT> if self.user is None: <NEW_LINE> <INDENT> raise Exception('Cannot modify anonymous config') <NEW_LINE> <DEDENT> self.cfg.set(section, val, value) <NEW_LINE> <DEDENT> def has_option(self, section, name): <NEW_LINE> <INDENT> if self.base.has_option(section, name): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if self.user is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.cfg.has_option(section, name) <NEW_LINE> <DEDENT> def options(self, section): <NEW_LINE> <INDENT> r = [] <NEW_LINE> try: <NEW_LINE> <INDENT> r.extend(self.base.options(section)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> r.extend(self.cfg.options(section)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return r <NEW_LINE> <DEDENT> def remove_option(self, section, val): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.cfg.remove_option(section, val) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def remove_section(self, section): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.cfg.remove_section(section) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return False
A proxy class that directs all writes into user's personal config file while reading from both personal and common configs. - *cfg* - :class:`Config` common for all users, - *user* - user name
625990316fece00bbaccca5d
class GsCompareCommitShowDiffCommand(TextCommand, GitCommand): <NEW_LINE> <INDENT> def run(self, edit): <NEW_LINE> <INDENT> sublime.set_timeout_async(self.run_async) <NEW_LINE> <DEDENT> def run_async(self): <NEW_LINE> <INDENT> base_commit = self.view.settings().get("git_savvy.compare_commit_view.base_commit") <NEW_LINE> target_commit = self.view.settings().get("git_savvy.compare_commit_view.target_commit") <NEW_LINE> file_path = self.view.settings().get("git_savvy.compare_commit_view.file_path") <NEW_LINE> self.view.window().run_command("gs_diff", { "base_commit": base_commit, "target_commit": target_commit, "file_path": file_path, "disable_stage": True, "title": "DIFF: {}..{}".format(base_commit, target_commit) })
Show the diff between the base and target commits of the compare-commit view.
62599031287bf620b6272c96
class PortableModel(object): <NEW_LINE> <INDENT> def __init__(self, model_name, debug): <NEW_LINE> <INDENT> self.model_name = model_name <NEW_LINE> self.debug = debug <NEW_LINE> units_accepted, self.kim_model = model_create( kimpy.numbering.zeroBased, kimpy.length_unit.A, kimpy.energy_unit.eV, kimpy.charge_unit.e, kimpy.temperature_unit.K, kimpy.time_unit.ps, self.model_name, ) <NEW_LINE> if not units_accepted: <NEW_LINE> <INDENT> raise KIMModelInitializationError( "Requested units not accepted in kimpy.model.create" ) <NEW_LINE> <DEDENT> if self.debug: <NEW_LINE> <INDENT> l_unit, e_unit, c_unit, te_unit, ti_unit = self.kim_model.get_units() <NEW_LINE> print("Length unit is: {}".format(l_unit)) <NEW_LINE> print("Energy unit is: {}".format(e_unit)) <NEW_LINE> print("Charge unit is: {}".format(c_unit)) <NEW_LINE> print("Temperature unit is: {}".format(te_unit)) <NEW_LINE> print("Time unit is: {}".format(ti_unit)) <NEW_LINE> print() <NEW_LINE> <DEDENT> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.destroy() <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, value, traceback): <NEW_LINE> <INDENT> self.destroy() <NEW_LINE> <DEDENT> def get_model_supported_species_and_codes(self): <NEW_LINE> <INDENT> species = [] <NEW_LINE> codes = [] <NEW_LINE> num_kim_species = kimpy.species_name.get_number_of_species_names() <NEW_LINE> for i in range(num_kim_species): <NEW_LINE> <INDENT> species_name = get_species_name(i) <NEW_LINE> species_support, code = self.get_species_support_and_code(species_name) <NEW_LINE> if species_support: <NEW_LINE> <INDENT> species.append(str(species_name)) <NEW_LINE> codes.append(code) <NEW_LINE> <DEDENT> <DEDENT> return species, codes <NEW_LINE> <DEDENT> @check_call_wrapper <NEW_LINE> def compute(self, compute_args_wrapped, release_GIL): <NEW_LINE> <INDENT> return self.kim_model.compute(compute_args_wrapped.compute_args, release_GIL) <NEW_LINE> <DEDENT> @check_call_wrapper <NEW_LINE> def get_species_support_and_code(self, species_name): <NEW_LINE> <INDENT> return self.kim_model.get_species_support_and_code(species_name) <NEW_LINE> <DEDENT> def get_influence_distance(self): <NEW_LINE> <INDENT> return self.kim_model.get_influence_distance() <NEW_LINE> <DEDENT> def get_neighbor_list_cutoffs_and_hints(self): <NEW_LINE> <INDENT> return self.kim_model.get_neighbor_list_cutoffs_and_hints() <NEW_LINE> <DEDENT> def compute_arguments_create(self): <NEW_LINE> <INDENT> return ComputeArguments(self, self.debug) <NEW_LINE> <DEDENT> def compute_arguments_destroy(self, compute_args_wrapped): <NEW_LINE> <INDENT> compute_args_wrapped.destroy() <NEW_LINE> <DEDENT> def destroy(self): <NEW_LINE> <INDENT> if self.initialized: <NEW_LINE> <INDENT> kimpy.model.destroy(self.kim_model) <NEW_LINE> del self.kim_model <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def initialized(self): <NEW_LINE> <INDENT> return hasattr(self, "kim_model")
Creates a KIM API Portable Model object and provides a minimal interface to it
625990316fece00bbaccca5e
class TestSequenceIdentifier(unittest.TestCase): <NEW_LINE> <INDENT> def test_read_casava_id(self): <NEW_LINE> <INDENT> seqid_string = "@EAS139:136:FC706VJ:2:2104:15343:197393 1:Y:18:ATCACG" <NEW_LINE> seqid = SequenceIdentifier(seqid_string) <NEW_LINE> self.assertEqual(str(seqid),seqid_string) <NEW_LINE> self.assertEqual('casava',seqid.format) <NEW_LINE> self.assertEqual('EAS139',seqid.instrument_name) <NEW_LINE> self.assertEqual('136',seqid.run_id) <NEW_LINE> self.assertEqual('FC706VJ',seqid.flowcell_id) <NEW_LINE> self.assertEqual('2',seqid.flowcell_lane) <NEW_LINE> self.assertEqual('2104',seqid.tile_no) <NEW_LINE> self.assertEqual('15343',seqid.x_coord) <NEW_LINE> self.assertEqual('197393',seqid.y_coord) <NEW_LINE> self.assertEqual('1',seqid.pair_id) <NEW_LINE> self.assertEqual('Y',seqid.bad_read) <NEW_LINE> self.assertEqual('18',seqid.control_bit_flag) <NEW_LINE> self.assertEqual('ATCACG',seqid.index_sequence) <NEW_LINE> <DEDENT> def test_read_illumina_id(self): <NEW_LINE> <INDENT> seqid_string = "@HWUSI-EAS100R:6:73:941:1973#0/1" <NEW_LINE> seqid = SequenceIdentifier(seqid_string) <NEW_LINE> self.assertEqual(str(seqid),seqid_string) <NEW_LINE> self.assertEqual('illumina',seqid.format) <NEW_LINE> self.assertEqual('HWUSI-EAS100R',seqid.instrument_name) <NEW_LINE> self.assertEqual('6',seqid.flowcell_lane) <NEW_LINE> self.assertEqual('73',seqid.tile_no) <NEW_LINE> self.assertEqual('941',seqid.x_coord) <NEW_LINE> self.assertEqual('1973',seqid.y_coord) <NEW_LINE> self.assertEqual('0',seqid.multiplex_index_no) <NEW_LINE> self.assertEqual('1',seqid.pair_id) <NEW_LINE> <DEDENT> def test_unrecognised_id_format(self): <NEW_LINE> <INDENT> seqid_string = "@SEQID" <NEW_LINE> seqid = SequenceIdentifier(seqid_string) <NEW_LINE> self.assertEqual(str(seqid),seqid_string) <NEW_LINE> self.assertEqual(None,seqid.format)
Tests of the SequenceIdentifier class
62599031e76e3b2f99fd9abb
class CompanyCategoriesEditView(CompanyCategoriesAddView): <NEW_LINE> <INDENT> save_button = Button('Save') <NEW_LINE> reset_button = Button('Reset') <NEW_LINE> @property <NEW_LINE> def is_displayed(self): <NEW_LINE> <INDENT> return ( self.company_categories.is_active() and self.name.is_displayed and self.save_button.is_displayed )
Edit Company Categories View
625990319b70327d1c57fe33
class M2Html(object): <NEW_LINE> <INDENT> def __init__(self, args=[]): <NEW_LINE> <INDENT> self.logger = logging.getLogger(type(self).__name__) <NEW_LINE> self.parser = argparse.ArgumentParser() <NEW_LINE> def getonoffargs(isOn=False): <NEW_LINE> <INDENT> return dict(choices=['on', 'off'], default=isOn, action=OnOffAction) <NEW_LINE> <DEDENT> add_arg = self.parser.add_argument <NEW_LINE> add_arg('--mFiles', default=os.path.curdir) <NEW_LINE> add_arg('--htmlDir', default='doc') <NEW_LINE> add_arg('--recursive', **getonoffargs()) <NEW_LINE> add_arg('--source', **getonoffargs(True)) <NEW_LINE> add_arg('--download', **getonoffargs()) <NEW_LINE> add_arg('--syntaxHighlighting', **getonoffargs()) <NEW_LINE> add_arg('--tabs') <NEW_LINE> add_arg('--globalHypertextLinks', **getonoffargs()) <NEW_LINE> add_arg('--graph', **getonoffargs()) <NEW_LINE> add_arg('--todo', **getonoffargs()) <NEW_LINE> add_arg('--load') <NEW_LINE> add_arg('--save') <NEW_LINE> add_arg('--search', **getonoffargs()) <NEW_LINE> add_arg('--helptocxml', **getonoffargs()) <NEW_LINE> add_arg('--indexFile', default='index') <NEW_LINE> add_arg('--extension', default='html') <NEW_LINE> add_arg('--template', default='default') <NEW_LINE> add_arg('--rootdir', default=os.path.abspath(os.path.curdir)) <NEW_LINE> add_arg('--ignoreDir', default=['.svn', 'cvs', '.git']) <NEW_LINE> add_arg('--language', choices=['english'], default='english') <NEW_LINE> add_arg('--debug', action='store_true') <NEW_LINE> self.opts = self.parser.parse_args(args) <NEW_LINE> if self.opts.debug: <NEW_LINE> <INDENT> self.logger.setLevel(logging.DEBUG) <NEW_LINE> <DEDENT> self.logger.debug("Parsed options:% s", self.opts) <NEW_LINE> <DEDENT> def get_mfiles(self, topdir): <NEW_LINE> <INDENT> filepaths = [] <NEW_LINE> dirpaths = [] <NEW_LINE> for dirname, dirnames, filenames in os.walk(topdir): <NEW_LINE> <INDENT> new_dirpaths = [os.path.join(dirname, subdirname) for subdirname in dirnames] <NEW_LINE> dirpaths += new_dirpaths <NEW_LINE> new_filepaths = [os.path.join(dirname, filename) for filename in filenames if os.path.splitext(filename)[1] == '.m'] <NEW_LINE> filepaths += new_filepaths <NEW_LINE> ignored_dirs = [idir for idir in self.opts.ignoreDir if idir in dirnames] <NEW_LINE> for dirname in ignored_dirs: <NEW_LINE> <INDENT> dirnames.remove(dirname) <NEW_LINE> <DEDENT> if not self.opts.recursive: <NEW_LINE> <INDENT> break
Python port of m2html.
62599031d6c5a102081e31d5
class Celebrity(models.Model): <NEW_LINE> <INDENT> CELEBRITY_CHOICES = (('A','Actors'), ('M','Musicians'), ('T','TV'), ('R','Radio'), ('S','Sports'), ('P','Politicians')) <NEW_LINE> name = models.CharField(max_length=200) <NEW_LINE> slug = models.SlugField(unique=True) <NEW_LINE> specificity = models.CharField(max_length=1, choices=CELEBRITY_CHOICES) <NEW_LINE> description = models.TextField(blank=True) <NEW_LINE> image1 = models.ImageField(upload_to="images/celebritypics") <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name
Celebrity Model
6259903115baa72349463048
class TipViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Tip.objects.all() <NEW_LINE> serializer_class = TipSerializer <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> http_method_names = ('options', 'head', 'get', 'post') <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> queryset = super().get_queryset() <NEW_LINE> return self.get_serializer_class().setup_prefetch_related(queryset) <NEW_LINE> <DEDENT> def list(self, request, *args, **kwargs): <NEW_LINE> <INDENT> serializer = self.get_serializer(self.get_queryset().order_by('-latest_revision_created_at').filter(live=True), many=True) <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> def retrieve(self, request, pk=None, *args, **kwargs): <NEW_LINE> <INDENT> serializer = self.get_serializer(self.get_object()) <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> @list_route(methods=['get']) <NEW_LINE> def favourites(self, request, *args, **kwargs): <NEW_LINE> <INDENT> tips = self.get_queryset().filter(favourites__user_id=request.user.id, favourites__state=TipFavourite.TFST_ACTIVE, live=True).order_by('-first_published_at') <NEW_LINE> serializer = self.get_serializer(tips, many=True) <NEW_LINE> return Response(serializer.data, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> @detail_route(methods=['post']) <NEW_LINE> def favourite(self, request, pk=None, *args, **kwargs): <NEW_LINE> <INDENT> tip = self.get_object() <NEW_LINE> fav, created = TipFavourite.objects.get_or_create( user=request.user, tip=tip ) <NEW_LINE> if not created: <NEW_LINE> <INDENT> fav.favourite() <NEW_LINE> fav.save() <NEW_LINE> <DEDENT> return Response(status=status.HTTP_204_NO_CONTENT) <NEW_LINE> <DEDENT> @detail_route(methods=['post']) <NEW_LINE> def unfavourite(self, request, pk=None, *args, **kwargs): <NEW_LINE> <INDENT> tip = self.get_object() <NEW_LINE> try: <NEW_LINE> <INDENT> fav = TipFavourite.objects.get(tip_id=tip.id, user_id=request.user.id) <NEW_LINE> fav.unfavourite() <NEW_LINE> fav.save() <NEW_LINE> <DEDENT> except TipFavourite.DoesNotExist: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return Response(status=status.HTTP_204_NO_CONTENT)
Follow the article url to get the CMS page.
6259903150485f2cf55dc02d
class Tags: <NEW_LINE> <INDENT> __slots__ = ('tags', 'tagstr') <NEW_LINE> def __init__(self, *, tags=None, tagstr=None): <NEW_LINE> <INDENT> self.tags = tags <NEW_LINE> self.tagstr = tagstr <NEW_LINE> if not self.tags: <NEW_LINE> <INDENT> self = self.parse(self.tagstr) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def parse(cls, raw): <NEW_LINE> <INDENT> if not raw: <NEW_LINE> <INDENT> _logger.debug("No tags on this message") <NEW_LINE> return <NEW_LINE> <DEDENT> tags = dict() <NEW_LINE> for tag in raw.split(';'): <NEW_LINE> <INDENT> key, _, value = tag.partition('=') <NEW_LINE> if value == '': <NEW_LINE> <INDENT> value = None <NEW_LINE> <DEDENT> tags[key] = value <NEW_LINE> <DEDENT> return cls(tags=tags, tagstr=raw) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Tags(tags={})".format(repr(self.tags)) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> ret = [] <NEW_LINE> for key, value in self.tags.items(): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> value = '' <NEW_LINE> <DEDENT> ret.append('{}={}'.format(key, value)) <NEW_LINE> <DEDENT> return ';'.join(ret)
Stores message tags. Message tags are a new feature proposed by IRCv3 to add enhanced out-of-band data to messages. Not presently tested a whole lot due to the lack of conforming servers.
6259903171ff763f4b5e8847
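A small parsing sketch for the Tags class above, using an IRCv3-style tag string; it assumes the module-level _logger the class references is defined, as in the original module.

tags = Tags.parse("aaa=bbb;ccc;example.com/ddd=eee")
print(tags.tags)   # {'aaa': 'bbb', 'ccc': None, 'example.com/ddd': 'eee'}
print(str(tags))   # 'aaa=bbb;ccc=;example.com/ddd=eee' (None values serialize back as empty)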
class File(ParamType): <NEW_LINE> <INDENT> name = 'filename' <NEW_LINE> envvar_list_splitter = os.path.pathsep <NEW_LINE> def __init__(self, mode='r', encoding=None, errors='strict', lazy=None): <NEW_LINE> <INDENT> self.mode = mode <NEW_LINE> self.encoding = encoding <NEW_LINE> self.errors = errors <NEW_LINE> self.lazy = lazy <NEW_LINE> <DEDENT> def resolve_lazy_flag(self, value): <NEW_LINE> <INDENT> if self.lazy is not None: <NEW_LINE> <INDENT> return self.lazy <NEW_LINE> <DEDENT> if value == '-': <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> elif 'w' in self.mode: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def convert(self, value, param, ctx): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if hasattr(value, 'read') or hasattr(value, 'write'): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> lazy = self.resolve_lazy_flag(value) <NEW_LINE> if lazy: <NEW_LINE> <INDENT> f = LazyFile(value, self.mode, self.encoding, self.errors) <NEW_LINE> if ctx is not None: <NEW_LINE> <INDENT> ctx.call_on_close(f.close_intelligently) <NEW_LINE> <DEDENT> return f <NEW_LINE> <DEDENT> f, should_close = open_stream(value, self.mode, self.encoding, self.errors) <NEW_LINE> if ctx is not None: <NEW_LINE> <INDENT> if should_close: <NEW_LINE> <INDENT> ctx.call_on_close(safecall(f.close)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ctx.call_on_close(safecall(f.flush)) <NEW_LINE> <DEDENT> <DEDENT> return f <NEW_LINE> <DEDENT> except (IOError, OSError) as e: <NEW_LINE> <INDENT> self.fail('Could not open file: %s: %s' % ( filename_to_ui(value), get_streerror(e), ), param, ctx)
Declares a parameter to be a file for reading or writing. The file is automatically closed once the context tears down (after the command finished working). Files can be opened for reading or writing. The special value ``-`` indicates stdin or stdout depending on the mode. By default the file is opened for reading text data but it can also be opened in binary mode or for writing. The encoding parameter can be used to force a specific encoding. The `lazy` flag controls if the file should be opened immediately or upon first IO. The default is to be non lazy for standard input and output streams as well as files opened for reading, lazy otherwise. See :ref:`file-args` for more information.
62599031287bf620b6272c97
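A hedged usage sketch, assuming this File type is the one exposed as click.File in the Click CLI library (as the docstring's :ref:`file-args` wording suggests); the cat command below is made up for illustration.

import click

@click.command()
@click.argument("src", type=click.File("r"))  # hypothetical command using the File type
def cat(src):
    """Print SRC to stdout; pass '-' to read from stdin."""
    click.echo(src.read())

if __name__ == "__main__":
    cat()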
class SystemReady(unittest.TestCase): <NEW_LINE> <INDENT> def test_docker_installed(self): <NEW_LINE> <INDENT> self.assertTrue(application_is_installed('docker')) <NEW_LINE> <DEDENT> def test_java_installed(self): <NEW_LINE> <INDENT> self.assertTrue(application_is_installed('java')) <NEW_LINE> <DEDENT> def test_spark_installed(self): <NEW_LINE> <INDENT> self.assertTrue(application_is_installed('spark-submit'))
Check if everything is installed for lofn to run.
625990318a43f66fc4bf3237
class MockingDeviceDependenciesTestCase1(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> dev1 = DiskDevice("name", fmt=get_format("mdmember"), size=Size("1 GiB")) <NEW_LINE> dev2 = DiskDevice("other") <NEW_LINE> self.part = PartitionDevice("part", fmt=get_format("mdmember"), parents=[dev2]) <NEW_LINE> self.dev = MDRaidArrayDevice("dev", level="raid1", parents=[dev1, self.part], fmt=get_format("luks"), total_devices=2, member_devices=2) <NEW_LINE> self.luks = LUKSDevice("luks", parents=[self.dev], fmt=get_format("ext4")) <NEW_LINE> self.mdraid_method = availability.BLOCKDEV_MDRAID_PLUGIN._method <NEW_LINE> self.dm_method = availability.BLOCKDEV_DM_PLUGIN._method <NEW_LINE> self.hfsplus_method = availability.MKFS_HFSPLUS_APP._method <NEW_LINE> self.cache_availability = availability.CACHE_AVAILABILITY <NEW_LINE> self.addCleanup(self._clean_up) <NEW_LINE> <DEDENT> def test_availability_mdraidplugin(self): <NEW_LINE> <INDENT> availability.CACHE_AVAILABILITY = False <NEW_LINE> availability.BLOCKDEV_DM_PLUGIN._method = availability.AvailableMethod <NEW_LINE> self.assertIn(availability.BLOCKDEV_MDRAID_PLUGIN, self.luks.external_dependencies) <NEW_LINE> availability.BLOCKDEV_MDRAID_PLUGIN._method = availability.AvailableMethod <NEW_LINE> availability.MKFS_HFSPLUS_APP._method = availability.AvailableMethod <NEW_LINE> self.assertNotIn(availability.BLOCKDEV_MDRAID_PLUGIN, self.luks.unavailable_dependencies) <NEW_LINE> self.assertIsNotNone(ActionCreateDevice(self.luks)) <NEW_LINE> self.assertIsNotNone(ActionDestroyDevice(self.luks)) <NEW_LINE> self.assertIsNotNone(ActionCreateFormat(self.luks, fmt=get_format("macefi"))) <NEW_LINE> self.assertIsNotNone(ActionDestroyFormat(self.luks)) <NEW_LINE> availability.BLOCKDEV_MDRAID_PLUGIN._method = availability.UnavailableMethod <NEW_LINE> self.assertIn(availability.BLOCKDEV_MDRAID_PLUGIN, self.luks.unavailable_dependencies) <NEW_LINE> with self.assertRaises(DependencyError): <NEW_LINE> <INDENT> ActionCreateDevice(self.luks) <NEW_LINE> <DEDENT> with self.assertRaises(DependencyError): <NEW_LINE> <INDENT> ActionDestroyDevice(self.dev) <NEW_LINE> <DEDENT> <DEDENT> def _clean_up(self): <NEW_LINE> <INDENT> availability.BLOCKDEV_MDRAID_PLUGIN._method = self.mdraid_method <NEW_LINE> availability.BLOCKDEV_DM_PLUGIN._method = self.dm_method <NEW_LINE> availability.MKFS_HFSPLUS_APP._method = self.hfsplus_method <NEW_LINE> availability.CACHE_AVAILABILITY = False <NEW_LINE> availability.BLOCKDEV_MDRAID_PLUGIN.available <NEW_LINE> availability.BLOCKDEV_DM_PLUGIN.available <NEW_LINE> availability.MKFS_HFSPLUS_APP.available <NEW_LINE> availability.CACHE_AVAILABILITY = self.cache_availability
Test availability of external device dependencies.
62599031e76e3b2f99fd9abd
class ParticleObj(gameObjects.GameObject): <NEW_LINE> <INDENT> PARAMS = ["parent_id", "rect", "duration"] <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.parent_id = None <NEW_LINE> self.duration = None <NEW_LINE> self.dead = False <NEW_LINE> self.die_on_impact = False <NEW_LINE> self.t0 = 0.0 <NEW_LINE> self.update_rate = 0.1 <NEW_LINE> self._started = False <NEW_LINE> self._done = False <NEW_LINE> kwargs["moveable"] = False <NEW_LINE> super().__init__(**kwargs) <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self._started = True <NEW_LINE> thread = Thread(target = self.thread) <NEW_LINE> thread.start() <NEW_LINE> tup = make_gen_msg(self) <NEW_LINE> MESSAGES.put(tup) <NEW_LINE> <DEDENT> def set_done(self): <NEW_LINE> <INDENT> self._done = True <NEW_LINE> <DEDENT> def thread(self): <NEW_LINE> <INDENT> t0 = get_game_time() <NEW_LINE> tt = t0 <NEW_LINE> while not self._done: <NEW_LINE> <INDENT> dt = get_game_time() - tt <NEW_LINE> time.sleep(self.update_rate) <NEW_LINE> tt = get_game_time() <NEW_LINE> if (tt - t0) > self.duration: <NEW_LINE> <INDENT> self.set_done() <NEW_LINE> <DEDENT> <DEDENT> self.die() <NEW_LINE> <DEDENT> def die(self): <NEW_LINE> <INDENT> if not self.dead: <NEW_LINE> <INDENT> self.dead = True <NEW_LINE> tup = make_del_msg(self) <NEW_LINE> MESSAGES.put(tup)
generic particle object
625990313eb6a72ae038b717
class DP6(IterDataPipe[int]): <NEW_LINE> <INDENT> def __iter__(self) -> Iterator: <NEW_LINE> <INDENT> raise NotImplementedError
DataPipe with plain Iterator
625990314e696a045264e67a
class Project(object): <NEW_LINE> <INDENT> def __init__(self, name, id=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.id = id <NEW_LINE> if self.id == None: <NEW_LINE> <INDENT> self.id = re.sub(r'_+', '_', re.sub(r'[^A-z0-9]', '_', self.name)) <NEW_LINE> <DEDENT> self.active = True <NEW_LINE> self.disabled_plugins = [] <NEW_LINE> self.locations = {} <NEW_LINE> self.start = None <NEW_LINE> self.end = None <NEW_LINE> <DEDENT> def isActive(self, timestamp=None): <NEW_LINE> <INDENT> if self.active == False: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> elif self.active == True: <NEW_LINE> <INDENT> if timestamp == None: <NEW_LINE> <INDENT> timestamp = int(time.time()) <NEW_LINE> <DEDENT> if self.start != None and self.end == None: <NEW_LINE> <INDENT> return timestamp >= self.start <NEW_LINE> <DEDENT> elif self.start == None and self.end != None: <NEW_LINE> <INDENT> return timestamp < self.end <NEW_LINE> <DEDENT> elif self.start != None and self.end != None: <NEW_LINE> <INDENT> if self.end <= self.start: <NEW_LINE> <INDENT> raise ValueError("Start should predate end.") <NEW_LINE> <DEDENT> return self.start <= timestamp < self.end <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def addLocation(self, location): <NEW_LINE> <INDENT> self.locations[location.id] = location <NEW_LINE> location.projects.add(self)
Class that represents a project.
62599031711fe17d825e14f3
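A small sketch exercising the Project class above, in particular the id slugging in __init__ and the half-open [start, end) window that isActive checks; it assumes the re/time imports of the original module.

p = Project("Demo Project")
print(p.id)              # 'Demo_Project': non-alphanumerics replaced, runs of '_' collapsed
p.start, p.end = 1000, 2000
print(p.isActive(1500))  # True: 1000 <= 1500 < 2000
print(p.isActive(2000))  # False: the end bound is exclusive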
class KeyDoesNotExistError(Exception): <NEW_LINE> <INDENT> pass
Simple custom error
6259903191af0d3eaad3aede
class TestReleaseResources(TransactionTestCase): <NEW_LINE> <INDENT> fixtures = ['resource_ut.json'] <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.client = Client() <NEW_LINE> _, token_object = request(client=self.client, path="tests/get_token", method="get") <NEW_LINE> self.token = token_object.token <NEW_LINE> self.requester = partial(request, client=self.client, path="resources/release_resources") <NEW_LINE> <DEDENT> def test_resource_doesnt_exist(self): <NEW_LINE> <INDENT> response, _ = self.requester(json_data={ "resources": ["invalid_resource_name"], "token": self.token }) <NEW_LINE> self.assertEqual(response.status_code, http_client.BAD_REQUEST) <NEW_LINE> <DEDENT> def test_release_complex_resource_with_sub_resource_available(self): <NEW_LINE> <INDENT> resources = DemoComplexResourceData.objects.filter( name='complex_resource1') <NEW_LINE> resource, = resources <NEW_LINE> resource.owner = "localhost" <NEW_LINE> resource.save() <NEW_LINE> SESSIONS[self.token].resources = [resource] <NEW_LINE> response, content = self.requester(json_data={ "resources": ["complex_resource1"], "token": self.token }) <NEW_LINE> self.assertEqual(response.status_code, http_client.BAD_REQUEST) <NEW_LINE> self.assertEqual(len(content.errors["complex_resource1"]), 2) <NEW_LINE> resources = DemoComplexResourceData.objects.filter( name='complex_resource1') <NEW_LINE> resource, = resources <NEW_LINE> sub_resource = resource.demo1 <NEW_LINE> sub_resource2 = resource.demo2 <NEW_LINE> self.assertEqual(resource.owner, "") <NEW_LINE> self.assertEqual(sub_resource.owner, "") <NEW_LINE> self.assertEqual(sub_resource2.owner, "") <NEW_LINE> <DEDENT> def test_try_to_release_not_owned_resource(self): <NEW_LINE> <INDENT> resources = DemoResourceData.objects.filter( name='available_resource1') <NEW_LINE> resource, = resources <NEW_LINE> resource.owner = "unknown_user" <NEW_LINE> resource.save() <NEW_LINE> SESSIONS[self.token].resources = [resource] <NEW_LINE> response, _ = self.requester(json_data={ "resources": ["available_resource1"], "token": self.token }) <NEW_LINE> resources = DemoResourceData.objects.filter( name='available_resource1') <NEW_LINE> resource, = resources <NEW_LINE> self.assertEqual(response.status_code, http_client.BAD_REQUEST) <NEW_LINE> self.assertEqual(resource.owner, "unknown_user") <NEW_LINE> <DEDENT> def test_valid_release(self): <NEW_LINE> <INDENT> resources = DemoResourceData.objects.filter( name='available_resource1') <NEW_LINE> resource, = resources <NEW_LINE> resource.owner = "localhost" <NEW_LINE> resource.save() <NEW_LINE> SESSIONS[self.token].resources = [resource] <NEW_LINE> response, _ = self.requester(json_data={ "resources": ["available_resource1"], "token": self.token }) <NEW_LINE> resources = DemoResourceData.objects.filter( name='available_resource1') <NEW_LINE> resource, = resources <NEW_LINE> self.assertEqual(response.status_code, http_client.NO_CONTENT) <NEW_LINE> self.assertEqual(resource.owner, "")
Assert operations of release resources request.
6259903196565a6dacd2d7e7
class CharityCollectionForm(ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = CharityCollection <NEW_LINE> fields = ['purpose', 'items_needed', 'address', 'deadline'] <NEW_LINE> widgets = { 'deadline': forms.DateInput( attrs={'placeholder': 'Termin końcowy'}), 'purpose': forms.Textarea(attrs={'placeholder': 'Cel zbiórki'}), 'items_needed': forms.Textarea( attrs={'placeholder': 'Potrzebne rzeczy'}), 'address': forms.Textarea(attrs={'placeholder': 'Adres'}), }
New charity collection form.
6259903130c21e258be998be
class StockFdmtKpiItem(scrapy.Item): <NEW_LINE> <INDENT> date = scrapy.Field() <NEW_LINE> code = scrapy.Field() <NEW_LINE> current_assets = scrapy.Field() <NEW_LINE> total_fixed_assets = scrapy.Field() <NEW_LINE> total_assets = scrapy.Field() <NEW_LINE> current_liabilities = scrapy.Field() <NEW_LINE> non_current_liabilities = scrapy.Field() <NEW_LINE> total_liabilities = scrapy.Field() <NEW_LINE> equity_parent_company = scrapy.Field() <NEW_LINE> total_equity = scrapy.Field() <NEW_LINE> revenue = scrapy.Field() <NEW_LINE> operating_revenue = scrapy.Field() <NEW_LINE> total_expense = scrapy.Field() <NEW_LINE> operating_expense = scrapy.Field() <NEW_LINE> profit_before_tax = scrapy.Field() <NEW_LINE> net_profit = scrapy.Field() <NEW_LINE> net_profit_parent_company = scrapy.Field() <NEW_LINE> total_income = scrapy.Field() <NEW_LINE> total_income_parent_company = scrapy.Field() <NEW_LINE> cash_flow_from_operating_activities = scrapy.Field() <NEW_LINE> cash_flow_from_investing_activities = scrapy.Field() <NEW_LINE> cash_flow_from_financing_activities = scrapy.Field()
The fundamental KPIs of a stock. They change quarterly.
6259903130c21e258be998bf
class WordAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> Model = None <NEW_LINE> list_display = [ 'id', 'active', stats_field('num_funny_votes', 'NFV'), stats_field('percent_funny_votes', '%FV'), 'text' ] <NEW_LINE> list_editable = ['text'] <NEW_LINE> list_filter = ('active', 'tags') <NEW_LINE> list_per_page = 1000 <NEW_LINE> save_on_top = True <NEW_LINE> formfield_overrides = wider_tag_edit_fields <NEW_LINE> actions = ['mark_active', 'mark_inactive', 'create_using', 'create_from_active', 'refresh_stats', 'claim_to_have_created_these'] <NEW_LINE> def mark_active(self, request, queryset): <NEW_LINE> <INDENT> rows_updated = queryset.update(active=True) <NEW_LINE> if rows_updated == 1: <NEW_LINE> <INDENT> message_bit = "1 word was" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> message_bit = "%s words were" % rows_updated <NEW_LINE> <DEDENT> self.message_user(request, "%s successfully marked as active." % message_bit) <NEW_LINE> <DEDENT> def claim_to_have_created_these(self, request, queryset): <NEW_LINE> <INDENT> rows_updated = queryset.update(created_by=request.user) <NEW_LINE> self.message_user(request, "Successfully claimed creation of %d words." % rows_updated) <NEW_LINE> <DEDENT> def mark_inactive(self, request, queryset): <NEW_LINE> <INDENT> rows_updated = queryset.update(active=False) <NEW_LINE> if rows_updated == 1: <NEW_LINE> <INDENT> message_bit = "1 word was" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> message_bit = "%s words were" % rows_updated <NEW_LINE> <DEDENT> self.message_user(request, "%s successfully marked as inactive." % message_bit) <NEW_LINE> <DEDENT> def create_using(self, request, queryset): <NEW_LINE> <INDENT> request.session['validate_using'] = [q.id for q in queryset] <NEW_LINE> if self.model == Prompt: <NEW_LINE> <INDENT> return redirect('response_new') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return redirect('prompt_new') <NEW_LINE> <DEDENT> <DEDENT> def create_from_active(self, request, queryset): <NEW_LINE> <INDENT> request.session['validate_using'] = [q.id for q in self.Model.objects.filter(active=True)] <NEW_LINE> if self.Model == Prompt: <NEW_LINE> <INDENT> return redirect('prompt_new') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return redirect('response_new') <NEW_LINE> <DEDENT> <DEDENT> def refresh_stats(self, request, queryset): <NEW_LINE> <INDENT> if len(queryset) == 0: <NEW_LINE> <INDENT> queryset = self.Model.objects.all() <NEW_LINE> <DEDENT> for m in queryset: <NEW_LINE> <INDENT> m.refresh_stats()
Abstract base word admin class.
6259903123e79379d538d5bd
class AudioReader(object): <NEW_LINE> <INDENT> def __init__(self, audio_dir, sample_rate, batch_size, num_mini_batches, mini_batch_size, window_size): <NEW_LINE> <INDENT> self.audio_dir = audio_dir <NEW_LINE> self.sample_rate = sample_rate <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.num_mini_batches = num_mini_batches <NEW_LINE> self.mini_batch_size = mini_batch_size <NEW_LINE> self.window_size = window_size <NEW_LINE> files = find_files(audio_dir) <NEW_LINE> if not files: <NEW_LINE> <INDENT> raise ValueError("No audio files found in '{}'.".format(audio_dir)) <NEW_LINE> <DEDENT> <DEDENT> def get_batches(self): <NEW_LINE> <INDENT> batch = [] <NEW_LINE> for audio in self.load_generic_audio(): <NEW_LINE> <INDENT> batch.append(audio) <NEW_LINE> if len(batch) == self.batch_size: <NEW_LINE> <INDENT> yield batch <NEW_LINE> batch = [] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def load_generic_audio(self): <NEW_LINE> <INDENT> files = find_files(self.audio_dir) <NEW_LINE> print("files length: {}".format(len(files))) <NEW_LINE> randomized_files = randomize_files(files) <NEW_LINE> for filename in randomized_files: <NEW_LINE> <INDENT> size = (self.num_mini_batches * self.mini_batch_size + 1 + (self.window_size - 1)) <NEW_LINE> start = random.randint(0, 46000000 - size) <NEW_LINE> audio, _ = sf.read(filename, start=start, stop = start + size) <NEW_LINE> yield audio
Randomly load files from a directory and collect samples into a batch. Note that all files are assumed to be at least 48 minutes long with a 16000 Hz sample rate.
625990318c3a8732951f760b
class StartSignalledError(Exception): <NEW_LINE> <INDENT> pass
User's start method threw an exception
62599031c432627299fa40a9
class ModifierCell(BaseRNNCell): <NEW_LINE> <INDENT> def __init__(self, base_cell): <NEW_LINE> <INDENT> super(ModifierCell, self).__init__() <NEW_LINE> base_cell._modified = True <NEW_LINE> self.base_cell = base_cell <NEW_LINE> <DEDENT> @property <NEW_LINE> def params(self): <NEW_LINE> <INDENT> self._own_params = False <NEW_LINE> return self.base_cell.params <NEW_LINE> <DEDENT> @property <NEW_LINE> def state_shape(self): <NEW_LINE> <INDENT> return self.base_cell.state_shape <NEW_LINE> <DEDENT> def begin_state(self, init_sym=symbol.zeros, **kwargs): <NEW_LINE> <INDENT> assert not self._modified, "After applying modifier cells (e.g. DropoutCell) the base " "cell cannot be called directly. Call the modifier cell instead." <NEW_LINE> self.base_cell._modified = False <NEW_LINE> begin = self.base_cell.begin_state(init_sym, **kwargs) <NEW_LINE> self.base_cell._modified = True <NEW_LINE> return begin <NEW_LINE> <DEDENT> def unpack_weights(self, args): <NEW_LINE> <INDENT> return self.base_cell.unpack_weights(args) <NEW_LINE> <DEDENT> def pack_weights(self, args): <NEW_LINE> <INDENT> return self.base_cell.pack_weights(args) <NEW_LINE> <DEDENT> def __call__(self, inputs, states): <NEW_LINE> <INDENT> raise NotImplementedError
Base class for modifier cells. A modifier cell takes a base cell, applies modifications to it (e.g. Dropout), and returns a new cell. After applying modifiers, the base cell should no longer be called directly; the modifier cell should be used instead.
6259903150485f2cf55dc031
class LoginView(MethodView): <NEW_LINE> <INDENT> def post(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> data = json.loads(request.data.decode()) <NEW_LINE> user = AdminUser.query.filter_by(email=data['email']).first() <NEW_LINE> if user and user.password_is_valid(data['password']): <NEW_LINE> <INDENT> access_token = user.generate_token(user.id) <NEW_LINE> if access_token: <NEW_LINE> <INDENT> response = { 'message': 'You logged in successfully.', 'token': access_token.decode() } <NEW_LINE> return make_response(jsonify(response)), 200 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> response = { 'message': 'Invalid email or password, Please try again' } <NEW_LINE> return make_response(jsonify(response)), 401 <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> response = { 'message': str(e) } <NEW_LINE> return make_response(jsonify(response)), 500
This class-based view handles user login and access token generation.
625990311f5feb6acb163ca4
class CardDataIterator(): <NEW_LINE> <INDENT> def __init__(self, cards): <NEW_LINE> <INDENT> self.cards = cards <NEW_LINE> self.size = len(self.cards) <NEW_LINE> self.epochs = 0 <NEW_LINE> self.shuffle() <NEW_LINE> <DEDENT> def shuffle(self): <NEW_LINE> <INDENT> self.df = self.df.sample(frac=1).reset_index(drop=True) <NEW_LINE> self.cursor = 0 <NEW_LINE> <DEDENT> def next_batch(self, n): <NEW_LINE> <INDENT> if self.cursor+n-1 > self.size: <NEW_LINE> <INDENT> self.epochs += 1 <NEW_LINE> self.shuffle() <NEW_LINE> <DEDENT> res = self.df.ix[self.cursor:self.cursor+n-1] <NEW_LINE> self.cursor += n
This is the iterator we use to assemble cards into batches for training.
625990315e10d32532ce415d
class APIInfo(NamedTuple): <NEW_LINE> <INDENT> method: Callable <NEW_LINE> parameters: Set[str]
Parameter information about an API.
6259903196565a6dacd2d7e8
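A trivial construction sketch for the APIInfo NamedTuple above, assuming the typing imports (NamedTuple, Callable, Set) of the original module; print is used as a stand-in method.

info = APIInfo(method=print, parameters={"sep", "end"})  # print is an arbitrary stand-in callable
print(info.method.__name__, sorted(info.parameters))     # print ['end', 'sep']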
class EarlyStopping(Callback): <NEW_LINE> <INDENT> def __init__(self, monitor='acc', batch_period=2000, patience=2, min_delta=0.0002): <NEW_LINE> <INDENT> self.monitor = monitor <NEW_LINE> self.batch_period = batch_period <NEW_LINE> self.batch = 0 <NEW_LINE> self.patience = patience <NEW_LINE> self.wait = 0 <NEW_LINE> self.min_delta = min_delta <NEW_LINE> self._reset() <NEW_LINE> <DEDENT> def _reset(self): <NEW_LINE> <INDENT> if 'acc' not in self.monitor: <NEW_LINE> <INDENT> self.monitor_op = lambda a, b: np.less(a, b - self.min_delta) <NEW_LINE> self.best = np.Inf <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.monitor_op = lambda a, b: np.greater(a, b + self.min_delta) <NEW_LINE> self.best = -np.Inf <NEW_LINE> <DEDENT> self.wait = 0 <NEW_LINE> <DEDENT> def on_batch_end(self, batch, logs={}): <NEW_LINE> <INDENT> if batch-self.batch < self.batch_period: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.batch = batch <NEW_LINE> logs = logs or {} <NEW_LINE> current = logs.get(self.monitor) <NEW_LINE> if self.monitor_op(current, self.best): <NEW_LINE> <INDENT> self.best = current <NEW_LINE> self.wait = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.wait += 1 <NEW_LINE> if self.wait >= self.patience: <NEW_LINE> <INDENT> self.model.stop_training = True <NEW_LINE> print(f'\nEarly stopping: {self.monitor} did not improve. ' f'Before: {self.best:.4f}, afer: {current:.4f}, ' f'diff = {current-self.best:.4f}.\n')
Monitor a metric (accuracy by default), track the best value seen, and stop training early when it stops improving. Runs on batch end, checked periodically after a given number of training steps. Args: monitor (str): name of the metric to monitor. batch_period (int): check the monitored metric every this many batches. patience (int): number of checks with no improvement after which training will be stopped. min_delta (float): minimum change that counts as a new optimum.
6259903173bcbd0ca4bcb346
@attr.dataclass <NEW_LINE> class AuthExternalConfig: <NEW_LINE> <INDENT> tls_client_cert: str = attr.ib(validator=attr.validators.instance_of(str), default="./none")
External authentication configuration
6259903126238365f5fadc06
class RTmixclust(RPackage): <NEW_LINE> <INDENT> homepage = "https://bioconductor.org/packages/TMixClust/" <NEW_LINE> git = "https://git.bioconductor.org/packages/TMixClust.git" <NEW_LINE> version('1.0.1', commit='0ac800210e3eb9da911767a80fb5582ab33c0cad') <NEW_LINE> depends_on('r-gss', type=('build', 'run')) <NEW_LINE> depends_on('r-mvtnorm', type=('build', 'run')) <NEW_LINE> depends_on('r-zoo', type=('build', 'run')) <NEW_LINE> depends_on('r-cluster', type=('build', 'run')) <NEW_LINE> depends_on('r-biocparallel', type=('build', 'run')) <NEW_LINE> depends_on('r-flexclust', type=('build', 'run')) <NEW_LINE> depends_on('r-biobase', type=('build', 'run')) <NEW_LINE> depends_on('r-spem', type=('build', 'run')) <NEW_LINE> depends_on('[email protected]:3.4.9', when='@1.0.1')
Implementation of a clustering method for time series gene expression data based on mixed-effects models with Gaussian variables and non-parametric cubic splines estimation. The method can robustly account for the high levels of noise present in typical gene expression time series datasets.
6259903130c21e258be998c0
class A(Segment): <NEW_LINE> <INDENT> def __init__(self, dist: int): <NEW_LINE> <INDENT> super().__init__(dist)
This class itself is an instance of the metaclass Road. This class also has instances, each of which is a Segment. Similarly for the B and C classes.
6259903130c21e258be998c1
class FalseDateMatcher(object): <NEW_LINE> <INDENT> name = 'false_date' <NEW_LINE> regex_pat = re.compile(r"^([4-9][\d]|3[2-9]|(([0-9]{1,3},)*[0-9]{3}([.][0-9])?))$") <NEW_LINE> def __init__(self, nlp, pattern_list, match_id='FALSE_DATE', label='FALSE_DATE', regex_pat=regex_pat): <NEW_LINE> <INDENT> self.label = nlp.vocab.strings[label] <NEW_LINE> self.orig_label = nlp.vocab.strings['DATE'] <NEW_LINE> Token.set_extension('is_false_date', default=False, force=True) <NEW_LINE> self.matcher = Matcher(nlp.vocab) <NEW_LINE> self.matcher.add(match_id, None, pattern_list) <NEW_LINE> self.regex_pat = regex_pat <NEW_LINE> self.nlp=nlp <NEW_LINE> Doc.set_extension('has_false_date', getter=self.has_false_date, force=True) <NEW_LINE> Span.set_extension('has_false_date', getter=self.has_false_date, force=True) <NEW_LINE> <DEDENT> def __call__(self, doc): <NEW_LINE> <INDENT> matches = self.matcher(doc) <NEW_LINE> candidate_spans = [] <NEW_LINE> spans = [] <NEW_LINE> for match_id, start, end in matches: <NEW_LINE> <INDENT> candidate_spans.append(doc[start:end]) <NEW_LINE> <DEDENT> for span in candidate_spans: <NEW_LINE> <INDENT> for token in span: <NEW_LINE> <INDENT> if re.match(self.regex_pat, token.text): <NEW_LINE> <INDENT> entity = Span(doc, token.i, token.i + 1, label=self.label) <NEW_LINE> spans.append(entity) <NEW_LINE> token._.is_false_date = True <NEW_LINE> orig_span = [e for e in doc.ents if token in e][0] <NEW_LINE> new_ents = [] <NEW_LINE> if token.i > orig_span.start: <NEW_LINE> <INDENT> left_span = doc[orig_span.start: token.i] <NEW_LINE> left_ents = list(self.nlp(left_span.text).ents) <NEW_LINE> if left_ents: <NEW_LINE> <INDENT> new_ents.append(Span(doc, left_span.start, left_span.end, label=self.orig_label)) <NEW_LINE> <DEDENT> <DEDENT> new_ents.append(entity) <NEW_LINE> if token.i < orig_span.end: <NEW_LINE> <INDENT> right_span = doc[token.i + 1: orig_span.end + 1] <NEW_LINE> right_ents = list(self.nlp(right_span.text).ents) <NEW_LINE> if right_ents: <NEW_LINE> <INDENT> new_ents.append(Span(doc, right_span.start, right_span.end, label=self.orig_label)) <NEW_LINE> <DEDENT> <DEDENT> doc.ents = list(doc.ents) + new_ents <NEW_LINE> <DEDENT> <DEDENT> for span in spans: <NEW_LINE> <INDENT> span.merge() <NEW_LINE> <DEDENT> <DEDENT> return doc <NEW_LINE> <DEDENT> def has_false_date(self, tokens): <NEW_LINE> <INDENT> return any([t._.get('is_false_date') for t in tokens])
A spaCy pipeline component to flag Arabic numerals if they include commas or are greater than 31. Its main use is to mitigate spaCy NER false positives (numbers mis-tagged as DATE entities).
62599031796e427e5384f831
class PlainEntry(_BaseEntry): <NEW_LINE> <INDENT> implements(IKnownHostEntry) <NEW_LINE> def __init__(self, hostnames, keyType, publicKey, comment): <NEW_LINE> <INDENT> self._hostnames = hostnames <NEW_LINE> super(PlainEntry, self).__init__(keyType, publicKey, comment) <NEW_LINE> <DEDENT> def fromString(cls, string): <NEW_LINE> <INDENT> hostnames, keyType, key, comment = _extractCommon(string) <NEW_LINE> self = cls(hostnames.split(","), keyType, key, comment) <NEW_LINE> return self <NEW_LINE> <DEDENT> fromString = classmethod(fromString) <NEW_LINE> def matchesHost(self, hostname): <NEW_LINE> <INDENT> return hostname in self._hostnames <NEW_LINE> <DEDENT> def toString(self): <NEW_LINE> <INDENT> fields = [','.join(self._hostnames), self.keyType, _b64encode(self.publicKey.blob())] <NEW_LINE> if self.comment is not None: <NEW_LINE> <INDENT> fields.append(self.comment) <NEW_LINE> <DEDENT> return ' '.join(fields)
A L{PlainEntry} is a representation of a plain-text entry in a known_hosts file. @ivar _hostnames: the list of all host-names associated with this entry. @type _hostnames: L{list} of L{str}
6259903191af0d3eaad3aee2
class MembershipAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ["id", "user", "group", "created", "active", ] <NEW_LINE> list_filter = ["group", ] <NEW_LINE> date_hierarchy = "created" <NEW_LINE> readonly_fields = ["created", ] <NEW_LINE> list_editable = ["active", ] <NEW_LINE> fieldsets = ( [None, {"fields": ["user", "group", ], }, ], [_("Other"), {"fields": ["created", "active", ], }, ], ) <NEW_LINE> def activate(self, request, queryset): <NEW_LINE> <INDENT> queryset.update(active=True) <NEW_LINE> <DEDENT> activate.short_description = _("Activate selected memberships") <NEW_LINE> def deactivate(self, request, queryset): <NEW_LINE> <INDENT> queryset.update(active=False) <NEW_LINE> <DEDENT> deactivate.short_description = _("Deactivate selected memberships")
Customize Membership model for admin area.
6259903150485f2cf55dc033
class ArticleUpDown(models.Model): <NEW_LINE> <INDENT> nid = models.AutoField(primary_key=True) <NEW_LINE> user = models.ForeignKey('UserInfo', null=True) <NEW_LINE> article = models.ForeignKey("Article", null=True) <NEW_LINE> UporDown=models.BooleanField(verbose_name='是否赞',default=False) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.username
Article up/down vote (like) table.
62599031d164cc6175822028
class LieAlgebraWithGenerators(LieAlgebra): <NEW_LINE> <INDENT> def __init__(self, R, names=None, index_set=None, category=None, prefix='L', **kwds): <NEW_LINE> <INDENT> self._indices = index_set <NEW_LINE> LieAlgebra.__init__(self, R, names, category) <NEW_LINE> <DEDENT> @cached_method <NEW_LINE> def lie_algebra_generators(self): <NEW_LINE> <INDENT> return Family(self._indices, self.monomial, name="monomial map") <NEW_LINE> <DEDENT> @cached_method <NEW_LINE> def gens(self): <NEW_LINE> <INDENT> G = self.lie_algebra_generators() <NEW_LINE> try: <NEW_LINE> <INDENT> return tuple(G[i] for i in self.variable_names()) <NEW_LINE> <DEDENT> except (KeyError, IndexError): <NEW_LINE> <INDENT> return tuple(G[i] for i in self.indices()) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return tuple(G) <NEW_LINE> <DEDENT> <DEDENT> def gen(self, i): <NEW_LINE> <INDENT> return self.gens()[i] <NEW_LINE> <DEDENT> def indices(self): <NEW_LINE> <INDENT> return self._indices
A Lie algebra with distinguished generators.
625990318e05c05ec3f6f6b6
class SkillBonus(models.Model): <NEW_LINE> <INDENT> skill = models.ForeignKey(Skill) <NEW_LINE> bonusType = models.CharField(max_length=254) <NEW_LINE> bonusValue = models.FloatField()
Bonus values of skills (per skill level).
625990315e10d32532ce415e
class MedicationKnowledgeMonitoringProgram(backboneelement.BackboneElement): <NEW_LINE> <INDENT> resource_type = "MedicationKnowledgeMonitoringProgram" <NEW_LINE> def __init__(self, jsondict=None, strict=True, **kwargs): <NEW_LINE> <INDENT> self.name = None <NEW_LINE> self.type = None <NEW_LINE> super(MedicationKnowledgeMonitoringProgram, self).__init__(jsondict=jsondict, strict=strict, **kwargs) <NEW_LINE> <DEDENT> def elementProperties(self): <NEW_LINE> <INDENT> js = super(MedicationKnowledgeMonitoringProgram, self).elementProperties() <NEW_LINE> js.extend([ ("name", "name", str, False, None, False), ("type", "type", codeableconcept.CodeableConcept, False, None, False), ]) <NEW_LINE> return js
Program under which a medication is reviewed. The program under which the medication is reviewed.
625990318a349b6b436872f3
class InfoListView(ListView): <NEW_LINE> <INDENT> model = BasicInfo <NEW_LINE> @method_decorator(csrf_exempt) <NEW_LINE> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> return super().dispatch(request, *args, **kwargs) <NEW_LINE> <DEDENT> def get(self, request): <NEW_LINE> <INDENT> queryset = self.get_queryset() <NEW_LINE> data = [] <NEW_LINE> for info in queryset: <NEW_LINE> <INDENT> data.append({ 'name': info.name, 'last_name': info.last_name, 'telephone': info.tel, 'email': info.email, 'address': info.email, }) <NEW_LINE> <DEDENT> return JsonResponse({'data': data}, status=201) <NEW_LINE> <DEDENT> def post(self, request): <NEW_LINE> <INDENT> return HttpResponse( 'Method not allowed, request via GET instead', status=405 )
View for testing GET method
62599031287bf620b6272c9e
class RoleForm(FlaskForm): <NEW_LINE> <INDENT> name = StringField('Name', validators=[DataRequired()]) <NEW_LINE> description = StringField('Description', validators=[DataRequired()]) <NEW_LINE> submit = SubmitField('Submit')
Form to add or edit role
6259903130c21e258be998c3
class LossComputeBase(nn.Module): <NEW_LINE> <INDENT> def __init__(self,model,tgt_dict): <NEW_LINE> <INDENT> super(LossComputeBase,self).__init__() <NEW_LINE> self.model = model <NEW_LINE> self.tgt_dict = tgt_dict <NEW_LINE> self.padding_idx = Constant.PAD <NEW_LINE> <DEDENT> def _make_shard_state(self, batch, output, range_, attns=None): <NEW_LINE> <INDENT> return NotImplementedError <NEW_LINE> <DEDENT> def _compute_loss(self, batch, output, target, **kwargs): <NEW_LINE> <INDENT> return NotImplementedError <NEW_LINE> <DEDENT> def _compute_corr(self, output, origin): <NEW_LINE> <INDENT> return NotImplementedError <NEW_LINE> <DEDENT> def monolithic_compute_loss(self,batch,output,attns,target_size): <NEW_LINE> <INDENT> range_ = (0,batch['target'].shape[0]) <NEW_LINE> shard_state = self._make_shard_state(batch['target'],output,range_,attns) <NEW_LINE> loss = self._compute_loss(**shard_state,shard_size=batch['target'].shape[0],batch=batch).div(float(target_size)) <NEW_LINE> num_non_padding,num_correct = self._compute_corr(output,batch['origin']) <NEW_LINE> batch_stats = statistics.Statistics(loss.item(),num_non_padding,num_correct) <NEW_LINE> return batch_stats <NEW_LINE> <DEDENT> def sharded_compute_loss(self,batch,output,attns, cur_trunc,trunc_size,shard_size,normalization): <NEW_LINE> <INDENT> range_ = (cur_trunc,cur_trunc + trunc_size) <NEW_LINE> shard_state = self._make_shard_state(batch['target'],output,range_,attns) <NEW_LINE> loss_total = 0 <NEW_LINE> for i,shard in enumerate(shards(shard_state,shard_size)): <NEW_LINE> <INDENT> loss = self._compute_loss(**shard,shard_size=shard_size,batch=batch,now=i) <NEW_LINE> loss.div(float(normalization)).backward() <NEW_LINE> loss_total += loss.item() <NEW_LINE> <DEDENT> num_non_padding,corr = self._compute_corr(output,batch['origin']) <NEW_LINE> batch_stats = statistics.Statistics(loss_total,num_non_padding,corr) <NEW_LINE> return batch_stats <NEW_LINE> <DEDENT> def _compute_corr(self, output, target): <NEW_LINE> <INDENT> pred = output.max(-1)[1] <NEW_LINE> non_padding = target.ne(self.padding_idx) <NEW_LINE> num_correct = 0 <NEW_LINE> for i in range(output.shape[1]): <NEW_LINE> <INDENT> origin = set( np.array(target[i].detach().cpu())) <NEW_LINE> ans = set( np.array( pred[:,i].detach().cpu())) <NEW_LINE> num_correct += len(origin & ans) <NEW_LINE> <DEDENT> num_non_padding = non_padding.sum().item() <NEW_LINE> return num_non_padding,num_correct <NEW_LINE> <DEDENT> def _bottle(self,_v): <NEW_LINE> <INDENT> return _v.view(-1,_v.shape[-1]) <NEW_LINE> <DEDENT> def _unbottle(self,_v,batch_size): <NEW_LINE> <INDENT> return _v.view(-1,batch_size,_v.shape[1])
Class for managing efficient loss computation. Handles sharding next-step predictions and accumulating multiple loss computations. Users can implement their own loss computation strategy by making a subclass of this one. Users need to implement the _compute_loss() and _make_shard_state() methods. Args: generator (:obj:`nn.Module`) : module that maps the output of the decoder to a distribution over the target vocabulary. tgt_vocab (:obj:`Vocab`) : torchtext vocab object representing the target output. normalization (str): normalize by "sents" or "tokens"
625990311d351010ab8f4bce
class R_2_instances_name_replace_disks(baserlib.OpcodeResource): <NEW_LINE> <INDENT> POST_OPCODE = opcodes.OpInstanceReplaceDisks <NEW_LINE> def GetPostOpInput(self): <NEW_LINE> <INDENT> static = { "instance_name": self.items[0], } <NEW_LINE> if self.request_body: <NEW_LINE> <INDENT> data = self.request_body <NEW_LINE> <DEDENT> elif self.queryargs: <NEW_LINE> <INDENT> data = { "remote_node": self._checkStringVariable("remote_node", default=None), "mode": self._checkStringVariable("mode", default=None), "disks": self._checkStringVariable("disks", default=None), "iallocator": self._checkStringVariable("iallocator", default=None), } <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = {} <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> raw_disks = data.pop("disks") <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if raw_disks: <NEW_LINE> <INDENT> if ht.TListOf(ht.TInt)(raw_disks): <NEW_LINE> <INDENT> data["disks"] = raw_disks <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> data["disks"] = [int(part) for part in raw_disks.split(",")] <NEW_LINE> <DEDENT> except (TypeError, ValueError) as err: <NEW_LINE> <INDENT> raise http.HttpBadRequest("Invalid disk index passed: %s" % err) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return (data, static)
/2/instances/[instance_name]/replace-disks resource.
62599031d4950a0f3b111699
class City(PlaceRecord): <NEW_LINE> <INDENT> _valid_attributes = set(['confidence', 'geoname_id', 'names'])
Contains data for the city record associated with an IP address This class contains the city-level data associated with an IP address. This record is returned by ``city`` and ``insights``. Attributes: .. attribute:: confidence A value from 0-100 indicating MaxMind's confidence that the city is correct. This attribute is only available from the Insights end point. :type: int .. attribute:: geoname_id The GeoName ID for the city. :type: int .. attribute:: name The name of the city based on the locales list passed to the constructor. :type: unicode .. attribute:: names A dictionary where the keys are locale codes and the values are names. :type: dict
625990319b70327d1c57fe3b
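A brief, hedged usage sketch for the City record above, reached through the geoip2 database reader; the database path and lookup IP are placeholders, and the confidence attribute is only populated by the Insights web service::

    import geoip2.database

    # Hypothetical database path; any GeoIP2/GeoLite2 City database will do.
    with geoip2.database.Reader("/path/to/GeoIP2-City.mmdb") as reader:
        response = reader.city("128.101.101.101")   # placeholder lookup address
        city = response.city                        # an instance of the record above
        print(city.name, city.geoname_id, city.confidence)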
class Operation(object): <NEW_LINE> <INDENT> def __init__(self, uri, operation, http_client): <NEW_LINE> <INDENT> self.uri = uri <NEW_LINE> self.json = operation <NEW_LINE> self.http_client = http_client <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s(%s)" % (self.__class__.__name__, self.json['nickname']) <NEW_LINE> <DEDENT> def __call__(self, **kwargs): <NEW_LINE> <INDENT> log.info("%s?%r" % (self.json['nickname'], urllib.urlencode(kwargs))) <NEW_LINE> method = self.json['httpMethod'] <NEW_LINE> uri = self.uri <NEW_LINE> params = {} <NEW_LINE> for param in self.json.get('parameters', []): <NEW_LINE> <INDENT> pname = param['name'] <NEW_LINE> value = kwargs.get(pname) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> value = ",".join(value) <NEW_LINE> <DEDENT> if value is not None: <NEW_LINE> <INDENT> if param['paramType'] == 'path': <NEW_LINE> <INDENT> uri = uri.replace('{%s}' % pname, urllib.quote_plus(str(value))) <NEW_LINE> <DEDENT> elif param['paramType'] == 'query': <NEW_LINE> <INDENT> params[pname] = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AssertionError( "Unsupported paramType %s" % param.paramType) <NEW_LINE> <DEDENT> del kwargs[pname] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if param['required']: <NEW_LINE> <INDENT> raise TypeError( "Missing required parameter '%s' for '%s'" % (pname, self.json['nickname'])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if kwargs: <NEW_LINE> <INDENT> raise TypeError("'%s' does not have parameters %r" % (self.json['nickname'], kwargs.keys())) <NEW_LINE> <DEDENT> log.info("%s %s(%r)", method, uri, params) <NEW_LINE> if self.json['is_websocket']: <NEW_LINE> <INDENT> uri = re.sub('^http', "ws", uri) <NEW_LINE> return self.http_client.ws_connect(uri, params=params) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.http_client.request( method, uri, params=params)
Operation object.
625990313eb6a72ae038b71c
class Dni(Model): <NEW_LINE> <INDENT> def __init__(self, dni: int=None, nombre: str=None, apellidos: str=None, asignatura: str=None, plazos: int=None): <NEW_LINE> <INDENT> self.swagger_types = { 'dni': int, 'nombre': str, 'apellidos': str, 'asignatura': str, 'plazos': int } <NEW_LINE> self.attribute_map = { 'dni': 'dni', 'nombre': 'Nombre', 'apellidos': 'Apellidos', 'asignatura': 'Asignatura', 'plazos': 'Plazos' } <NEW_LINE> self._dni = dni <NEW_LINE> self._nombre = nombre <NEW_LINE> self._apellidos = apellidos <NEW_LINE> self._asignatura = asignatura <NEW_LINE> self._plazos = plazos <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dikt) -> 'Dni': <NEW_LINE> <INDENT> return deserialize_model(dikt, cls) <NEW_LINE> <DEDENT> @property <NEW_LINE> def dni(self) -> int: <NEW_LINE> <INDENT> return self._dni <NEW_LINE> <DEDENT> @dni.setter <NEW_LINE> def dni(self, dni: int): <NEW_LINE> <INDENT> if dni is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `dni`, must not be `None`") <NEW_LINE> <DEDENT> self._dni = dni <NEW_LINE> <DEDENT> @property <NEW_LINE> def nombre(self) -> str: <NEW_LINE> <INDENT> return self._nombre <NEW_LINE> <DEDENT> @nombre.setter <NEW_LINE> def nombre(self, nombre: str): <NEW_LINE> <INDENT> self._nombre = nombre <NEW_LINE> <DEDENT> @property <NEW_LINE> def apellidos(self) -> str: <NEW_LINE> <INDENT> return self._apellidos <NEW_LINE> <DEDENT> @apellidos.setter <NEW_LINE> def apellidos(self, apellidos: str): <NEW_LINE> <INDENT> self._apellidos = apellidos <NEW_LINE> <DEDENT> @property <NEW_LINE> def asignatura(self) -> str: <NEW_LINE> <INDENT> return self._asignatura <NEW_LINE> <DEDENT> @asignatura.setter <NEW_LINE> def asignatura(self, asignatura: str): <NEW_LINE> <INDENT> self._asignatura = asignatura <NEW_LINE> <DEDENT> @property <NEW_LINE> def plazos(self) -> int: <NEW_LINE> <INDENT> return self._plazos <NEW_LINE> <DEDENT> @plazos.setter <NEW_LINE> def plazos(self, plazos: int): <NEW_LINE> <INDENT> self._plazos = plazos
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62599031e76e3b2f99fd9ac3
class TestDocxTemplateApplicationRequest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testDocxTemplateApplicationRequest(self): <NEW_LINE> <INDENT> pass
DocxTemplateApplicationRequest unit test stubs
62599031ac7a0e7691f7359f
class QuickInsertActions(actioncollection.ShortcutCollection): <NEW_LINE> <INDENT> name = "quickinsert" <NEW_LINE> def __init__(self, panel): <NEW_LINE> <INDENT> super(QuickInsertActions, self).__init__(panel.mainwindow()) <NEW_LINE> self.panel = weakref.ref(panel) <NEW_LINE> <DEDENT> def createDefaultShortcuts(self): <NEW_LINE> <INDENT> self.setDefaultShortcuts('staccato', [QKeySequence('Ctrl+.')]) <NEW_LINE> self.setDefaultShortcuts('spanner_slur', [QKeySequence('Ctrl+(')]) <NEW_LINE> self.setDefaultShortcuts('breathe_rcomma', [QKeySequence("Alt+'")]) <NEW_LINE> <DEDENT> def realAction(self, name): <NEW_LINE> <INDENT> return self.panel().widget().actionForName(name) <NEW_LINE> <DEDENT> def title(self): <NEW_LINE> <INDENT> return _("Quick Insert")
Manages keyboard shortcuts for the QuickInsert module.
62599031c432627299fa40ad
class WebSocketProtocol: <NEW_LINE> <INDENT> def __init__(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter, server: "WebSocketServer"): <NEW_LINE> <INDENT> self.reader = reader <NEW_LINE> self.writer = writer <NEW_LINE> self.server = server <NEW_LINE> self.status = Status.OPEN <NEW_LINE> <DEDENT> async def recv(self): <NEW_LINE> <INDENT> message = WebSocketMessage() <NEW_LINE> await message.recv(self.reader) <NEW_LINE> return message <NEW_LINE> <DEDENT> async def send(self, data, text: bool=True): <NEW_LINE> <INDENT> if self.status == Status.OPEN: <NEW_LINE> <INDENT> message = WebSocketMessage() <NEW_LINE> if text: <NEW_LINE> <INDENT> message.data = data.encode() <NEW_LINE> message.opcode = OpCode.TEXT <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> message.data = bytes(data) <NEW_LINE> message.opcode = OpCode.BINARY <NEW_LINE> <DEDENT> await message.send(self.writer) <NEW_LINE> <DEDENT> <DEDENT> async def close(self, status_code: int=None, reason: str=None): <NEW_LINE> <INDENT> if self.status == Status.OPEN: <NEW_LINE> <INDENT> self.status = Status.CLOSING <NEW_LINE> message = WebSocketMessage() <NEW_LINE> message.opcode = OpCode.CLOSE <NEW_LINE> message.build_close_data(status_code, reason) <NEW_LINE> await message.send(self.writer) <NEW_LINE> await asyncio.wait_for(self.recv(), 10) <NEW_LINE> self.shutdown() <NEW_LINE> <DEDENT> <DEDENT> async def on_close(self): <NEW_LINE> <INDENT> if self.status == Status.OPEN: <NEW_LINE> <INDENT> self.status = Status.CLOSING <NEW_LINE> message = WebSocketMessage() <NEW_LINE> message.opcode = OpCode.CLOSE <NEW_LINE> await message.send(self.writer) <NEW_LINE> self.shutdown() <NEW_LINE> <DEDENT> <DEDENT> def shutdown(self): <NEW_LINE> <INDENT> if self.status == Status.CLOSING: <NEW_LINE> <INDENT> self.writer.close() <NEW_LINE> self.server.sockets.remove(self) <NEW_LINE> self.status = Status.CLOSED
The protocol that manages the WebSocket. Defaults to operating as a server.
625990318c3a8732951f7610
class MouseEventFlag: <NEW_LINE> <INDENT> Move = 0x0001 <NEW_LINE> LeftDown = 0x0002 <NEW_LINE> LeftUp = 0x0004 <NEW_LINE> RightDown = 0x0008 <NEW_LINE> RightUp = 0x0010 <NEW_LINE> MiddleDown = 0x0020 <NEW_LINE> MiddleUp = 0x0040 <NEW_LINE> XDown = 0x0080 <NEW_LINE> XUp = 0x0100 <NEW_LINE> Wheel = 0x0800 <NEW_LINE> HWheel = 0x1000 <NEW_LINE> MoveNoCoalesce = 0x2000 <NEW_LINE> VirtualDesk = 0x4000 <NEW_LINE> Absolute = 0x8000
MouseEventFlag from Win32.
6259903130c21e258be998c5
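A minimal sketch of how these flag values are typically consumed (Windows only): it feeds them to the legacy Win32 mouse_event API via ctypes to synthesise a left click at the current cursor position. The use of mouse_event here is an illustration, not necessarily how the surrounding library dispatches input::

    import ctypes

    user32 = ctypes.windll.user32                             # available only on Windows
    user32.mouse_event(MouseEventFlag.LeftDown, 0, 0, 0, 0)   # press left button
    user32.mouse_event(MouseEventFlag.LeftUp, 0, 0, 0, 0)     # release left button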
class GameObject: <NEW_LINE> <INDENT> pass
Base class for game objects.
62599031e76e3b2f99fd9ac5
class VisitViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Visit.objects.all() <NEW_LINE> serializer_class = srz.VisitSerializer <NEW_LINE> http_method_names = ['get', 'put', 'patch', 'delete'] <NEW_LINE> permission_classes = (IsOwnerOrReadOnly,)
Endpoints for a visit ('visits')
625990314e696a045264e67e
class Post(models.Model): <NEW_LINE> <INDENT> author = models.ForeignKey('auth.User') <NEW_LINE> title = models.CharField(max_length=200) <NEW_LINE> text = models.TextField(); <NEW_LINE> created_at = models.DateTimeField(default=timezone.now) <NEW_LINE> published_at = models.DateTimeField(blank=True, null=True) <NEW_LINE> def publish(self): <NEW_LINE> <INDENT> self.published_at = timezone.now() <NEW_LINE> self.save() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.title
Post Model
62599031b57a9660fecd2b3d
class ModifyAddressesBandwidthRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.EcmRegion = None <NEW_LINE> self.AddressIds = None <NEW_LINE> self.InternetMaxBandwidthOut = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.EcmRegion = params.get("EcmRegion") <NEW_LINE> self.AddressIds = params.get("AddressIds") <NEW_LINE> self.InternetMaxBandwidthOut = params.get("InternetMaxBandwidthOut")
ModifyAddressesBandwidth request parameter structure.
6259903121bff66bcd723d1e
class AutoMLDeleteDatasetOperator(BaseOperator): <NEW_LINE> <INDENT> template_fields = ("dataset_id", "location", "project_id") <NEW_LINE> @apply_defaults <NEW_LINE> def __init__( self, dataset_id: Union[str, List[str]], location: str, project_id: Optional[str] = None, metadata: Optional[MetaData] = None, timeout: Optional[float] = None, retry: Optional[Retry] = None, gcp_conn_id: str = "google_cloud_default", *args, **kwargs ) -> None: <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.dataset_id = dataset_id <NEW_LINE> self.location = location <NEW_LINE> self.project_id = project_id <NEW_LINE> self.metadata = metadata <NEW_LINE> self.timeout = timeout <NEW_LINE> self.retry = retry <NEW_LINE> self.gcp_conn_id = gcp_conn_id <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _parse_dataset_id(dataset_id: Union[str, List[str]]) -> List[str]: <NEW_LINE> <INDENT> if not isinstance(dataset_id, str): <NEW_LINE> <INDENT> return dataset_id <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return ast.literal_eval(dataset_id) <NEW_LINE> <DEDENT> except (SyntaxError, ValueError): <NEW_LINE> <INDENT> return dataset_id.split(",") <NEW_LINE> <DEDENT> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> hook = CloudAutoMLHook(gcp_conn_id=self.gcp_conn_id) <NEW_LINE> dataset_id_list = self._parse_dataset_id(self.dataset_id) <NEW_LINE> for dataset_id in dataset_id_list: <NEW_LINE> <INDENT> self.log.info("Deleting dataset %s", dataset_id) <NEW_LINE> hook.delete_dataset( dataset_id=dataset_id, location=self.location, project_id=self.project_id, retry=self.retry, timeout=self.timeout, metadata=self.metadata, ) <NEW_LINE> self.log.info("Dataset deleted.")
Deletes a dataset and all of its contents. .. seealso:: For more information on how to use this operator, take a look at the guide: :ref:`howto/operator:AutoMLDeleteDatasetOperator` :param dataset_id: Name of the dataset_id, a list of dataset_id values, or a comma-separated string of dataset_id values to be deleted. :type dataset_id: Union[str, List[str]] :param project_id: ID of the Google Cloud project where the dataset is located; if None, the default project_id is used. :type project_id: str :param location: The location of the project. :type location: str :param retry: A retry object used to retry requests. If `None` is specified, requests will not be retried. :type retry: Optional[google.api_core.retry.Retry] :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if `retry` is specified, the timeout applies to each individual attempt. :type timeout: Optional[float] :param metadata: Additional metadata that is provided to the method. :type metadata: Optional[Sequence[Tuple[str, str]]] :param gcp_conn_id: The connection ID to use to connect to Google Cloud Platform. :type gcp_conn_id: str
6259903156b00c62f0fb397b
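A minimal DAG sketch wiring up the delete operator above; the DAG id, project id, and dataset ids are placeholders, and a comma-separated string is split into a list by the operator itself::

    from datetime import datetime
    from airflow import DAG

    with DAG("automl_cleanup", start_date=datetime(2021, 1, 1), schedule_interval=None) as dag:
        delete_datasets = AutoMLDeleteDatasetOperator(
            task_id="delete_datasets",
            dataset_id="TBL123,TBL456",   # comma-separated string, parsed by the operator
            location="us-central1",
            project_id="my-gcp-project",  # hypothetical project
        )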
class Encoder(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _encode(self, message): <NEW_LINE> <INDENT> description = self._create_information(message) <NEW_LINE> result = '' <NEW_LINE> result += self.build_header(description) <NEW_LINE> result += self.build_body(message.fields) <NEW_LINE> result += self.build_footer(description) <NEW_LINE> return result <NEW_LINE> <DEDENT> def _create_information(self, message): <NEW_LINE> <INDENT> result = {} <NEW_LINE> result['size'] = message._message_size() <NEW_LINE> result['name'] = message._meta.encoded_name <NEW_LINE> result['field_count'] = len(message._meta.fields) <NEW_LINE> return result <NEW_LINE> <DEDENT> def build_header(self, message): <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> def build_body(self, fields): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def build_footer(self, message): <NEW_LINE> <INDENT> return ''
Base class for a serializer object
6259903150485f2cf55dc037
class InvalidRequestError(Error): <NEW_LINE> <INDENT> def __init__(self, msg): <NEW_LINE> <INDENT> self.code = 'INVALID_REQUEST' <NEW_LINE> self.msg = msg
Not all of the required parameters are present.
62599031be8e80087fbc0137
class ModifyDomainLockResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.LockInfo = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("LockInfo") is not None: <NEW_LINE> <INDENT> self.LockInfo = LockInfo() <NEW_LINE> self.LockInfo._deserialize(params.get("LockInfo")) <NEW_LINE> <DEDENT> self.RequestId = params.get("RequestId")
ModifyDomainLock response parameter structure.
6259903173bcbd0ca4bcb34c
class PDFTextbookTabs(TextbookTabsBase): <NEW_LINE> <INDENT> type = 'pdf_textbooks' <NEW_LINE> icon = 'icon-book' <NEW_LINE> def __init__(self, tab_dict=None): <NEW_LINE> <INDENT> super(PDFTextbookTabs, self).__init__( tab_id=self.type, ) <NEW_LINE> <DEDENT> def items(self, course): <NEW_LINE> <INDENT> for index, textbook in enumerate(course.pdf_textbooks): <NEW_LINE> <INDENT> yield SingleTextbookTab( name=textbook['tab_title'], tab_id='pdftextbook/{0}'.format(index), link_func=lambda course, reverse_func: reverse_func('pdf_book', args=[course.id.to_deprecated_string(), index]), )
A tab representing the collection of all PDF textbook tabs.
62599031a8ecb033258722d7
class F3(FeatureExtractor): <NEW_LINE> <INDENT> def run(self, series): <NEW_LINE> <INDENT> return round(np.mean(np.abs(series)), 2)
For the time-domain amplitude of a finite-length signal sequence, take the absolute value first, then compute the mean.
625990311d351010ab8f4bd3
class PatternSet(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.patterns = [] <NEW_LINE> self._all_files = False <NEW_LINE> <DEDENT> def _compute_all_files(self): <NEW_LINE> <INDENT> self._all_files = any(pat.all_files() for pat in self.patterns) <NEW_LINE> <DEDENT> def all_files(self): <NEW_LINE> <INDENT> if self._all_files is None: <NEW_LINE> <INDENT> self._compute_all_files() <NEW_LINE> <DEDENT> return self._all_files <NEW_LINE> <DEDENT> def append(self, pattern): <NEW_LINE> <INDENT> assert isinstance(pattern, Pattern) <NEW_LINE> self.patterns.append(pattern) <NEW_LINE> if self._all_files is not None: <NEW_LINE> <INDENT> self._all_files = self._all_files or pattern.all_files() <NEW_LINE> <DEDENT> <DEDENT> def extend(self, patterns): <NEW_LINE> <INDENT> assert patterns is not None <NEW_LINE> if isinstance(patterns, Pattern): <NEW_LINE> <INDENT> self.append(patterns) <NEW_LINE> return <NEW_LINE> <DEDENT> if isinstance(patterns, PatternSet): <NEW_LINE> <INDENT> patterns = patterns.patterns <NEW_LINE> <DEDENT> assert all(isinstance(pat, Pattern) for pat in patterns) <NEW_LINE> self.patterns.extend(patterns) <NEW_LINE> self._all_files = None <NEW_LINE> <DEDENT> def remove(self, pattern): <NEW_LINE> <INDENT> assert isinstance(pattern, Pattern) <NEW_LINE> self.patterns.remove(pattern) <NEW_LINE> self._all_files = None <NEW_LINE> <DEDENT> def match_files(self, matched, unmatched): <NEW_LINE> <INDENT> for pattern in self.iter(): <NEW_LINE> <INDENT> pattern.match_files(matched, unmatched) <NEW_LINE> if not unmatched: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def empty(self): <NEW_LINE> <INDENT> return len(self.patterns) == 0 <NEW_LINE> <DEDENT> def iter(self): <NEW_LINE> <INDENT> if self.patterns: <NEW_LINE> <INDENT> patterns = list(self.patterns) <NEW_LINE> for pattern in patterns: <NEW_LINE> <INDENT> yield pattern <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ("PatternSet (All files? {0}) [{1}] ". format(self.all_files(), ", ".join(str(pat) for pat in self.patterns))) <NEW_LINE> <DEDENT> def match_file(self, elements): <NEW_LINE> <INDENT> for pattern in self.iter(): <NEW_LINE> <INDENT> if pattern.match_file(elements): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False
A set of :class:`Pattern` instances; :class:`PatternSet` provides a number of operations over the entire set. :class:`PatternSet` contains a number of implementation optimizations and is an integral part of various optimizations in :class:`FileSet`. This class is *not* an implementation of Apache Ant PatternSet
6259903123e79379d538d5c5
class PackageReferenceBase(Model): <NEW_LINE> <INDENT> _validation = { 'type': {'required': True}, 'id': {'required': True}, } <NEW_LINE> _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, } <NEW_LINE> _subtype_map = { 'type': {'aptPackage': 'AptPackageReference', 'chocolateyPackage': 'ChocolateyPackageReference', 'yumPackage': 'YumPackageReference'} } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(PackageReferenceBase, self).__init__(**kwargs) <NEW_LINE> self.type = None <NEW_LINE> self.id = kwargs.get('id') <NEW_LINE> self.version = kwargs.get('version', None)
A reference to a package to be installed on the compute nodes using a package manager. :param str id: The name of the package. :param str version: The version of the package to be installed. If omitted, the latest version (according to the package repository) will be installed.
6259903196565a6dacd2d7eb
class NudgeButton(GLBackground): <NEW_LINE> <INDENT> is_gl_container = True <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> GLBackground.__init__(self) <NEW_LINE> nudgeLabel = Label("Nudge", margin=8) <NEW_LINE> self.add(nudgeLabel) <NEW_LINE> self.shrink_wrap() <NEW_LINE> keys = [config.config.get("Keys", k).upper() for k in ("Forward", "Back", "Left", "Right", "Up", "Down")] <NEW_LINE> nudgeLabel.tooltipText = _("Click and hold. While holding, use the movement keys ({0}{1}{2}{3}{4}{5}) to nudge. Hold SHIFT to nudge faster.").format( *keys) <NEW_LINE> <DEDENT> def mouse_down(self, event): <NEW_LINE> <INDENT> self.focus() <NEW_LINE> <DEDENT> def mouse_up(self, event): <NEW_LINE> <INDENT> self.get_root().mcedit.editor.focus_switch = None <NEW_LINE> <DEDENT> def key_down(self, evt): <NEW_LINE> <INDENT> keyname = key.name(evt.key) <NEW_LINE> if keyname == config.config.get("Keys", "Up"): <NEW_LINE> <INDENT> self.nudge(Vector(0, 1, 0)) <NEW_LINE> <DEDENT> if keyname == config.config.get("Keys", "Down"): <NEW_LINE> <INDENT> self.nudge(Vector(0, -1, 0)) <NEW_LINE> <DEDENT> Z = self.get_root().mcedit.editor.mainViewport.cameraVector <NEW_LINE> absZ = map(abs, Z) <NEW_LINE> if absZ[0] < absZ[2]: <NEW_LINE> <INDENT> forward = (0, 0, (-1 if Z[2] < 0 else 1)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> forward = ((-1 if Z[0] < 0 else 1), 0, 0) <NEW_LINE> <DEDENT> back = map(int.__neg__, forward) <NEW_LINE> left = forward[2], forward[1], -forward[0] <NEW_LINE> right = map(int.__neg__, left) <NEW_LINE> if keyname == config.config.get("Keys", "Forward"): <NEW_LINE> <INDENT> self.nudge(Vector(*forward)) <NEW_LINE> <DEDENT> if keyname == config.config.get("Keys", "Back"): <NEW_LINE> <INDENT> self.nudge(Vector(*back)) <NEW_LINE> <DEDENT> if keyname == config.config.get("Keys", "Left"): <NEW_LINE> <INDENT> self.nudge(Vector(*left)) <NEW_LINE> <DEDENT> if keyname == config.config.get("Keys", "Right"): <NEW_LINE> <INDENT> self.nudge(Vector(*right))
A button that captures movement keys while pressed and sends them to a listener as nudge events. Poorly planned.
625990315166f23b2e244492
class LookupInterface(resource.TrackableResource): <NEW_LINE> <INDENT> def __init__(self, key_dtype, value_dtype): <NEW_LINE> <INDENT> self._key_dtype = dtypes.as_dtype(key_dtype) <NEW_LINE> self._value_dtype = dtypes.as_dtype(value_dtype) <NEW_LINE> super(LookupInterface, self).__init__() <NEW_LINE> <DEDENT> def _create_resource(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @property <NEW_LINE> def key_dtype(self): <NEW_LINE> <INDENT> return self._key_dtype <NEW_LINE> <DEDENT> @property <NEW_LINE> def value_dtype(self): <NEW_LINE> <INDENT> return self._value_dtype <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return NotImplementedError <NEW_LINE> <DEDENT> def size(self, name=None): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def lookup(self, keys, name=None): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def __getitem__(self, keys): <NEW_LINE> <INDENT> return self.lookup(keys)
Represent a lookup table that persists across different steps.
62599031d99f1b3c44d0675f
class BoolSerialiser(Serialiser[bool]): <NEW_LINE> <INDENT> def __init__(self, true_byte: bytes = b'\x01', false_byte: bytes = b'\x00'): <NEW_LINE> <INDENT> if len(true_byte) != 1 or len(false_byte) != 1: <NEW_LINE> <INDENT> raise ValueError(f"true_byte and false_byte must be single-byte values") <NEW_LINE> <DEDENT> if true_byte == false_byte: <NEW_LINE> <INDENT> raise ValueError(f"true_byte and false_byte are indistinguishable") <NEW_LINE> <DEDENT> self._true_byte = true_byte <NEW_LINE> self._false_byte = false_byte <NEW_LINE> <DEDENT> def _check(self, obj: bool): <NEW_LINE> <INDENT> if not isinstance(obj, bool): <NEW_LINE> <INDENT> raise TypeError(f"{BoolSerialiser.__name__} serialises bools, got {type(obj)}") <NEW_LINE> <DEDENT> <DEDENT> def _serialise(self, obj: bool, stream: IO[bytes]): <NEW_LINE> <INDENT> stream.write(self._true_byte if obj else self._false_byte) <NEW_LINE> <DEDENT> def _deserialise(self, stream: IO[bytes]) -> bool: <NEW_LINE> <INDENT> byte = stream.read(1) <NEW_LINE> if byte == self._true_byte: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif byte == self._false_byte: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError(f"Received invalid byte '{byte}'")
Serialiser which serialises a boolean value to a single byte, with 0x00 representing False and 0x01 representing True.
62599031d164cc617582202d
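A small round-trip sketch for the serialiser above. The generic Serialiser base class is not shown here, so this calls the underscore-prefixed hooks directly; the real base class may expose public serialise/deserialise wrappers::

    import io

    ser = BoolSerialiser()
    buf = io.BytesIO()
    ser._serialise(True, buf)    # writes b'\x01'
    ser._serialise(False, buf)   # writes b'\x00'
    buf.seek(0)
    print(ser._deserialise(buf), ser._deserialise(buf))  # True False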
class Account(object): <NEW_LINE> <INDENT> address = None <NEW_LINE> aliases = [] <NEW_LINE> realname = None <NEW_LINE> gpg_key = None <NEW_LINE> signature = None <NEW_LINE> signature_filename = None <NEW_LINE> signature_as_attachment = None <NEW_LINE> abook = None <NEW_LINE> def __init__(self, address=None, aliases=None, realname=None, gpg_key=None, signature=None, signature_filename=None, signature_as_attachment=False, sent_box=None, sent_tags=['sent'], draft_box=None, draft_tags=['draft'], abook=None, sign_by_default=False, **rest): <NEW_LINE> <INDENT> self.address = address <NEW_LINE> self.aliases = aliases <NEW_LINE> self.realname = realname <NEW_LINE> self.gpg_key = gpg_key <NEW_LINE> self.signature = signature <NEW_LINE> self.signature_filename = signature_filename <NEW_LINE> self.signature_as_attachment = signature_as_attachment <NEW_LINE> self.sign_by_default = sign_by_default <NEW_LINE> self.sent_box = sent_box <NEW_LINE> self.sent_tags = sent_tags <NEW_LINE> self.draft_box = draft_box <NEW_LINE> self.draft_tags = draft_tags <NEW_LINE> self.abook = abook <NEW_LINE> <DEDENT> def get_addresses(self): <NEW_LINE> <INDENT> return [self.address] + self.aliases <NEW_LINE> <DEDENT> def store_mail(self, mbx, mail): <NEW_LINE> <INDENT> if not isinstance(mbx, mailbox.Mailbox): <NEW_LINE> <INDENT> logging.debug('Not a mailbox') <NEW_LINE> return False <NEW_LINE> <DEDENT> mbx.lock() <NEW_LINE> if isinstance(mbx, mailbox.Maildir): <NEW_LINE> <INDENT> logging.debug('Maildir') <NEW_LINE> msg = mailbox.MaildirMessage(mail) <NEW_LINE> msg.set_flags('S') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.debug('no Maildir') <NEW_LINE> msg = mailbox.Message(mail) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> message_id = mbx.add(msg) <NEW_LINE> mbx.flush() <NEW_LINE> mbx.unlock() <NEW_LINE> logging.debug('got mailbox msg id : %s' % message_id) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise StoreMailError(e) <NEW_LINE> <DEDENT> path = None <NEW_LINE> if isinstance(mbx, mailbox.Maildir): <NEW_LINE> <INDENT> plist = glob.glob1(os.path.join(mbx._path, 'new'), message_id + '*') <NEW_LINE> if plist: <NEW_LINE> <INDENT> path = os.path.join(mbx._path, 'new', plist[0]) <NEW_LINE> logging.debug('path of saved msg: %s' % path) <NEW_LINE> <DEDENT> <DEDENT> return path <NEW_LINE> <DEDENT> def store_sent_mail(self, mail): <NEW_LINE> <INDENT> if self.sent_box is not None: <NEW_LINE> <INDENT> return self.store_mail(self.sent_box, mail) <NEW_LINE> <DEDENT> <DEDENT> def store_draft_mail(self, mail): <NEW_LINE> <INDENT> if self.draft_box is not None: <NEW_LINE> <INDENT> return self.store_mail(self.draft_box, mail) <NEW_LINE> <DEDENT> <DEDENT> def send_mail(self, mail): <NEW_LINE> <INDENT> raise NotImplementedError
Datastructure that represents an email account. It manages this account's settings, can send and store mails to maildirs (drafts/send). .. note:: This is an abstract class that leaves :meth:`send_mail` unspecified. See :class:`SendmailAccount` for a subclass that uses a sendmail command to send out mails.
625990318c3a8732951f7614
class Answer(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(Respondent, verbose_name="Пользователь", on_delete=models.CASCADE) <NEW_LINE> quiz = models.ForeignKey(Quiz, verbose_name="Опрос", on_delete=models.CASCADE, related_name='quiz_answer') <NEW_LINE> question = models.ForeignKey(Question, verbose_name="Вопрос", on_delete=models.CASCADE, related_name='question_answer') <NEW_LINE> choices = models.ManyToManyField(Choice, verbose_name="Варианты", related_name='choice_answer', blank=True) <NEW_LINE> answer_text = models.CharField(verbose_name="Ответ", max_length=50, blank=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = "Ответ" <NEW_LINE> verbose_name_plural = "Ответы" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.answer_text == '': <NEW_LINE> <INDENT> return ', '.join([choice for choice in self.choices.all().values_list('choice_text', flat=True)]) <NEW_LINE> <DEDENT> return self.answer_text
Class for answers to questions.
6259903173bcbd0ca4bcb34e
class RunAutomationServiceEnabled(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Enabled = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Enabled = params.get("Enabled") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
Describes information related to the "Cloud Automation Assistant" service.
6259903166673b3332c314ae
class ProductsPropertiesRelation(models.Model): <NEW_LINE> <INDENT> product = models.ForeignKey(Product, verbose_name=_(u"Product"), related_name="productsproperties") <NEW_LINE> property = models.ForeignKey(Property, verbose_name=_(u"Property")) <NEW_LINE> position = models.IntegerField( _(u"Position"), default=999) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ("position", ) <NEW_LINE> unique_together = ("product", "property")
Represents the m:n relationship between Products and Properties. This is done via an explicit class to store the position of the property within the product. Attributes: - product The product of the relationship. - property The property of the relationship. - position The position of the property within the product.
62599031d4950a0f3b11169c
class UserListResponse(object): <NEW_LINE> <INDENT> TYPE = 0x04 <NEW_LINE> PSEUDOHEADER_FORMAT_REP = '>BB8sBBBBH' <NEW_LINE> PSEUDOHEADER_SIZE_REP = struct.calcsize(PSEUDOHEADER_FORMAT_REP) <NEW_LINE> def __init__(self, dictdata=None, rawdata=None): <NEW_LINE> <INDENT> if dictdata: <NEW_LINE> <INDENT> self.user_list = dictdata.get('user_list') <NEW_LINE> <DEDENT> elif rawdata: <NEW_LINE> <INDENT> self.user_list = list() <NEW_LINE> for i in range(len(rawdata) / self.PSEUDOHEADER_SIZE_REP): <NEW_LINE> <INDENT> offset = (i * self.PSEUDOHEADER_SIZE_REP) <NEW_LINE> rawclient = struct.unpack(self.PSEUDOHEADER_FORMAT_REP, rawdata[offset:(offset + self.PSEUDOHEADER_SIZE_REP)]) <NEW_LINE> dictclient = dict(client_id=rawclient[0], group_id=rawclient[1], username=rawclient[2].rstrip('\0'), ip_address='{}.{}.{}.{}'.format(rawclient[3],rawclient[4],rawclient[5],rawclient[6]), port=rawclient[7]) <NEW_LINE> self.user_list.append(dictclient) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise(ValueError) <NEW_LINE> <DEDENT> <DEDENT> def size(self): <NEW_LINE> <INDENT> return len(self.user_list) * self.PSEUDOHEADER_SIZE_REP <NEW_LINE> <DEDENT> def serialize(self): <NEW_LINE> <INDENT> serialization = '' <NEW_LINE> for user in self.user_list: <NEW_LINE> <INDENT> ip_bytes = user.get('ip_address').split('.') <NEW_LINE> serialization = '{}{}'.format(serialization, struct.pack(self.PSEUDOHEADER_FORMAT_REP, user.get('client_id'), user.get('group_id'), user.get('username'), int(ip_bytes[0]), int(ip_bytes[1]), int(ip_bytes[2]), int(ip_bytes[3]), user.get('port'))) <NEW_LINE> <DEDENT> return serialization <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '[user_list={}]'.format(self.user_list)
0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type |R|S|A| Source ID | Group ID | Header Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | | Client ID | Group ID | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | Username | + +-+-+-+-+ | | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | IP Address | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Port | +-+-+-+-+-+-+-+-+-+-+-+-+
625990311d351010ab8f4bd5
class Tessellation(object): <NEW_LINE> <INDENT> def __init__(self,*args,**kwarg): <NEW_LINE> <INDENT> raise Exception("An abstract class") <NEW_LINE> <DEDENT> def create_verts_and_H(self,dim_range, valid_outside ): <NEW_LINE> <INDENT> if self.type == 'I': <NEW_LINE> <INDENT> return self.create_verts_and_H_type_I(dim_range,valid_outside) <NEW_LINE> <DEDENT> elif self.type=='II': <NEW_LINE> <INDENT> return self.create_verts_and_H_type_II(dim_range) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError(self.type) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def make_it_hashable(arr): <NEW_LINE> <INDENT> return tuple([tuple(r.tolist()) for r in arr])
An abstract class
625990319b70327d1c57fe41
class EvolutionCandy(Item): <NEW_LINE> <INDENT> def __init__(self, name, description, coin_cost): <NEW_LINE> <INDENT> Item.__init__(self, name, description, coin_cost)
This class contains attributes of a candy to evolve a legendary creature.
6259903163f4b57ef00865d1
class MessageViewSet(SenyViewSet): <NEW_LINE> <INDENT> queryset = Message.objects.all() <NEW_LINE> permission_classes = [SenyAuth, MessagePermissions] <NEW_LINE> serializer_class = MessageSerializer <NEW_LINE> filterable_by = ['new', ['destination', 'username'], ['source', 'username']] <NEW_LINE> filter_backends = (filters.SearchFilter,) <NEW_LINE> search_fields = ('destination__username', 'source__username', 'thread__title', 'content') <NEW_LINE> def get_serializer_class(self): <NEW_LINE> <INDENT> if self.action in ['new']: <NEW_LINE> <INDENT> return MessageWithThreadSerializer <NEW_LINE> <DEDENT> if self.action in ['response']: <NEW_LINE> <INDENT> return MessageWithThreadAndResponseSerializer <NEW_LINE> <DEDENT> return self.serializer_class <NEW_LINE> <DEDENT> @list_route(methods=['POST', 'GET'], permission_classes=permission_classes) <NEW_LINE> def new(self, request, *args, **kwargs): <NEW_LINE> <INDENT> return self.create(request, *args, **kwargs) <NEW_LINE> <DEDENT> @list_route(methods=['GET'], permission_classes=permission_classes) <NEW_LINE> def user(self, request, *args, **kwargs): <NEW_LINE> <INDENT> page_size = self.request.QUERY_PARAMS.get('page_size', None) <NEW_LINE> page = self.request.QUERY_PARAMS.get('page', 0) <NEW_LINE> queryset = self.get_queryset(ignore_paging=True).filter(Q(source=request.user) | Q(destination=request.user)) <NEW_LINE> if page_size: <NEW_LINE> <INDENT> queryset = Paginator(queryset, page_size).page(page) <NEW_LINE> <DEDENT> serializer = self.get_serializer(data=queryset, many=True) <NEW_LINE> serializer.is_valid() <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> @list_route(methods=['POST', 'GET']) <NEW_LINE> def response(self, request, *args, **kwargs): <NEW_LINE> <INDENT> return self.create(request, *args, **kwargs) <NEW_LINE> <DEDENT> @detail_route(methods=['GET']) <NEW_LINE> def read(self, request, pk=None): <NEW_LINE> <INDENT> message = self.get_object() <NEW_LINE> message.new = False <NEW_LINE> message.save() <NEW_LINE> return Response(self.get_serializer_class()(message).data)
## Filterable by: ## + new - 1 for true, 0 for false + source/destination - username + search - checks if title, content, source, destination contain the given query string ## Special Endpoints ## ### new ### /api/version/messages/new Creates a new thread and message at the same time. ### User ### /api/version/messages/user Returns all messages the current user is involved in. ### Read ### get /api/version/messages/user/id/read Marks the message as not new and returns the updated object.
62599031796e427e5384f839
class MaintenanceModeMiddleware(MiddlewareMixin): <NEW_LINE> <INDENT> def process_request(self, request): <NEW_LINE> <INDENT> namespace = htk_setting('HTK_URLS_NAMESPACE') <NEW_LINE> url_name_suffix = htk_setting('HTK_MAINTENANCE_MODE_URL_NAME') <NEW_LINE> if namespace: <NEW_LINE> <INDENT> url_name = '%s:%s' % (namespace, url_name_suffix,) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> url_name = url_name_suffix <NEW_LINE> <DEDENT> maintenance_mode_page = reverse_lazy(url_name) <NEW_LINE> response = None <NEW_LINE> if request.path == maintenance_mode_page: <NEW_LINE> <INDENT> if not is_maintenance_mode(): <NEW_LINE> <INDENT> response = redirect('/') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if is_maintenance_mode(): <NEW_LINE> <INDENT> response = redirect(maintenance_mode_page) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return response
Checks whether HTK_MAINTENANCE_MODE is set. If so, redirects to the HTK_MAINTENANCE_MODE_URL_NAME page.
6259903130c21e258be998ca
class FilesystemEventHandler(watchdog.events.FileSystemEventHandler): <NEW_LINE> <INDENT> def on_created(self, event): <NEW_LINE> <INDENT> key = 'filesystem:file_created' <NEW_LINE> data = { 'filepath': event.src_path, 'is_directory': event.is_directory, 'dirpath': os.path.dirname(event.src_path) } <NEW_LINE> bmsg = BroadcastMessage(key=key, data=data) <NEW_LINE> BroadcastManager.broadcast(bmsg) <NEW_LINE> <DEDENT> def on_deleted(self, event): <NEW_LINE> <INDENT> key = 'filesystem:file_deleted' <NEW_LINE> data = { 'filepath': event.src_path, 'is_directory': event.is_directory, 'dirpath': os.path.dirname(event.src_path) } <NEW_LINE> bmsg = BroadcastMessage(key=key, data=data) <NEW_LINE> BroadcastManager.broadcast(bmsg) <NEW_LINE> <DEDENT> def on_moved(self, event): <NEW_LINE> <INDENT> key = 'filesystem:file_moved' <NEW_LINE> data = { 'src_filepath': event.src_path, 'dest_filepath': event.dest_path, 'is_directory': event.is_directory, 'src_dirpath': os.path.dirname(event.src_path), 'dest_dirpath': os.path.dirname(event.dest_path) } <NEW_LINE> bmsg = BroadcastMessage(key=key, data=data) <NEW_LINE> BroadcastManager.broadcast(bmsg) <NEW_LINE> <DEDENT> def on_modified(self, event): <NEW_LINE> <INDENT> key = 'filesystem:file_modified' <NEW_LINE> data = { 'filepath': event.src_path, 'is_directory': event.is_directory, 'dirpath': os.path.dirname(event.src_path) } <NEW_LINE> bmsg = BroadcastMessage(key=key, data=data) <NEW_LINE> BroadcastManager.broadcast(bmsg)
Subclass of `watchdog.events.FileSystemEventHandler`. Manages on_created, on_deleted, on_moved, and on_modified events, broadcasting each one via BroadcastManager.
62599031b830903b9686ecd9
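A brief sketch of attaching the broadcasting handler above to a watchdog observer; the watched path is a placeholder and the BroadcastManager/BroadcastMessage wiring is assumed to be configured elsewhere in the application::

    import time
    from watchdog.observers import Observer

    observer = Observer()
    observer.schedule(FilesystemEventHandler(), path="/tmp/watched", recursive=True)  # hypothetical path
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()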
class Meta: <NEW_LINE> <INDENT> managed = False <NEW_LINE> db_table = 'courts'
Django Meta class.
62599031287bf620b6272ca6
class VBackKey(VKey): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> VKey.__init__(self, u'\u21a9') <NEW_LINE> <DEDENT> def update_buffer(self, buffer): <NEW_LINE> <INDENT> return buffer[:-1]
Custom key for back.
6259903126068e7796d4da09
class TotalEnergy: <NEW_LINE> <INDENT> def __init__(self, core_hamiltonian): <NEW_LINE> <INDENT> self.core_hamiltonian = core_hamiltonian <NEW_LINE> <DEDENT> def restricted(self, density_matrix, hamiltonian): <NEW_LINE> <INDENT> length = density_matrix.shape[0] <NEW_LINE> total_energy = 0 <NEW_LINE> for i, j in itertools.product(range(length), repeat=2): <NEW_LINE> <INDENT> total_energy += 1/2 * density_matrix.item(i, j) * (self.core_hamiltonian.item(i, j) + hamiltonian.item(i, j)) <NEW_LINE> <DEDENT> return total_energy <NEW_LINE> <DEDENT> def unrestricted(self, density_matrix_alpha, density_matrix_beta, hamiltonian_alpha, hamiltonian_beta): <NEW_LINE> <INDENT> density_matrix_total = density_matrix_alpha + density_matrix_beta <NEW_LINE> length = density_matrix_total.shape[0] <NEW_LINE> total_energy = 0 <NEW_LINE> for i, j in itertools.product(range(length), repeat=2): <NEW_LINE> <INDENT> out1 = density_matrix_total.item(i, j) * self.core_hamiltonian.item(i, j) <NEW_LINE> out2 = density_matrix_alpha.item(i, j) * hamiltonian_alpha.item(i, j) <NEW_LINE> out3 = density_matrix_beta.item(i, j) * hamiltonian_beta.item(i, j) <NEW_LINE> total_energy += (1/2) * (out1 + out2 + out3) <NEW_LINE> <DEDENT> return total_energy
Calculates the total energy for Hartree-Fock methods. Attributes ---------- core_hamiltonian : np.matrix
62599031a8ecb033258722db
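For clarity, the two branches above implement the usual Hartree-Fock energy expressions, with P the density matrix, H^core the core Hamiltonian, and F the Fock (hamiltonian) matrix passed in:

    E_{\text{restricted}} = \frac{1}{2} \sum_{ij} P_{ij} \left( H^{\text{core}}_{ij} + F_{ij} \right)

    E_{\text{unrestricted}} = \frac{1}{2} \sum_{ij} \left[ (P^{\alpha}_{ij} + P^{\beta}_{ij}) H^{\text{core}}_{ij} + P^{\alpha}_{ij} F^{\alpha}_{ij} + P^{\beta}_{ij} F^{\beta}_{ij} \right]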
class DataSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Data <NEW_LINE> fields = ['pk', 'value', 'display']
Element Extra data CRUD serializer
62599031d4950a0f3b11169d
class DriverSchedule(BaseModel): <NEW_LINE> <INDENT> driver = ForeignKeyField(Driver) <NEW_LINE> schedule = ForeignKeyField(Schedule, backref='driver_schedules') <NEW_LINE> power_on = BooleanField(null=True, default=None)
Many-to-many relationship between drivers and schedules peewee docs: https://goo.gl/8SB4iY
6259903163f4b57ef00865d2
@context.configure(name="oslomsg", order=1000) <NEW_LINE> class OsloMsgContext(context.Context): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(OsloMsgContext, self).__init__(*args, **kwargs) <NEW_LINE> self.server_processes = [] <NEW_LINE> self.messages_received = multiprocessing.Queue() <NEW_LINE> <DEDENT> def set_config_opts(self): <NEW_LINE> <INDENT> config_opts = self.config.get('config_opts', {}) <NEW_LINE> for section, values in config_opts.items(): <NEW_LINE> <INDENT> if not section == "DEFAULT": <NEW_LINE> <INDENT> cfg.CONF.register_group(cfg.OptGroup(section)) <NEW_LINE> obj = getattr(cfg.CONF, section) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> obj = cfg.CONF <NEW_LINE> <DEDENT> for opt, value in values.items(): <NEW_LINE> <INDENT> setattr(obj, opt, value) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def setup(self): <NEW_LINE> <INDENT> url = self.context['admin']['credential'].auth_url <NEW_LINE> self.set_config_opts() <NEW_LINE> transport = messaging.get_transport(cfg.CONF, url=url) <NEW_LINE> self.context['servers'] = [] <NEW_LINE> num_servers = self.config.get('num_servers') <NEW_LINE> num_topics = self.config.get('num_topics') <NEW_LINE> self._start_servers(transport, num_servers, num_topics) <NEW_LINE> client.setup_clients(url, self.config['num_clients']) <NEW_LINE> client.init_random_generator(self.config['msg_length_file']) <NEW_LINE> <DEDENT> def _start_servers(self, transport, num_servers, num_topics): <NEW_LINE> <INDENT> topics = [petname.Generate(3, "_") for _i in range(num_topics)] <NEW_LINE> topics_iter = itertools.cycle(topics) <NEW_LINE> for i in range(num_servers): <NEW_LINE> <INDENT> topic = topics_iter.next() <NEW_LINE> server_name = 'profiler_server' <NEW_LINE> LOG.info("Starting server %s topic %s" % (server_name, topic)) <NEW_LINE> target = messaging.Target(topic=topic, server=server_name) <NEW_LINE> pr = multiprocessing.Process(target=self._start_server, args=(transport, target, self.messages_received)) <NEW_LINE> pr.start() <NEW_LINE> self.server_processes.append(pr) <NEW_LINE> self.context['servers'].append((topic, server_name)) <NEW_LINE> <DEDENT> <DEDENT> def _start_server(self, transport, target, messages): <NEW_LINE> <INDENT> server = rpc.get_rpc_server(transport, target, [RpcEndpoint(messages)], executor='eventlet') <NEW_LINE> server.start() <NEW_LINE> while 1: <NEW_LINE> <INDENT> time.sleep(3) <NEW_LINE> <DEDENT> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> for p in self.server_processes: <NEW_LINE> <INDENT> p.terminate() <NEW_LINE> <DEDENT> count = self.messages_received.qsize() <NEW_LINE> with open(self.config['msg_timestamp_file'], 'w+') as f: <NEW_LINE> <INDENT> while not self.messages_received.empty(): <NEW_LINE> <INDENT> f.write(str(self.messages_received.get())) <NEW_LINE> f.write('\n') <NEW_LINE> <DEDENT> <DEDENT> LOG.info("Messages count: %s" % count)
Oslo messaging default context. Creates transport and target; sets up servers.
6259903115baa72349463058
class PhaseSplitter(AbstractStringSplitter): <NEW_LINE> <INDENT> def __init__(self, string): <NEW_LINE> <INDENT> self._season_index = None <NEW_LINE> self._year_index = None <NEW_LINE> self._phase_type_index = None <NEW_LINE> super(PhaseSplitter, self).__init__(string, 3) <NEW_LINE> <DEDENT> @property <NEW_LINE> def season(self): <NEW_LINE> <INDENT> return self._parts[self._season_index] if self._season_index is not None else None <NEW_LINE> <DEDENT> @season.setter <NEW_LINE> def season(self, value): <NEW_LINE> <INDENT> if self._season_index is None: <NEW_LINE> <INDENT> self._season_index = self._last_index <NEW_LINE> self._last_index += 1 <NEW_LINE> <DEDENT> self._parts[self._season_index] = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def year(self): <NEW_LINE> <INDENT> return self._parts[self._year_index] if self._year_index is not None else None <NEW_LINE> <DEDENT> @year.setter <NEW_LINE> def year(self, value): <NEW_LINE> <INDENT> if self._year_index is None: <NEW_LINE> <INDENT> self._year_index = self._last_index <NEW_LINE> self._last_index += 1 <NEW_LINE> <DEDENT> self._parts[self._year_index] = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def phase_type(self): <NEW_LINE> <INDENT> return self._parts[self._phase_type_index] if self._phase_type_index is not None else None <NEW_LINE> <DEDENT> @phase_type.setter <NEW_LINE> def phase_type(self, value): <NEW_LINE> <INDENT> if self._phase_type_index is None: <NEW_LINE> <INDENT> self._phase_type_index = self._last_index <NEW_LINE> self._last_index += 1 <NEW_LINE> <DEDENT> self._parts[self._phase_type_index] = value <NEW_LINE> <DEDENT> def _split(self): <NEW_LINE> <INDENT> self.season = self._input_str[0] <NEW_LINE> self.year = int(self._input_str[1:-1]) <NEW_LINE> self.phase_type = self._input_str[-1]
Splits a phase into its components
6259903121bff66bcd723d24
class SAMLConfigurationFactory(DjangoModelFactory): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = SAMLConfiguration <NEW_LINE> <DEDENT> site = SubFactory(SiteFactory) <NEW_LINE> enabled = True
Factory for the SAMLConfiguration model in the third_party_auth app.
6259903130c21e258be998cc
class XTSProfile(TestProfile): <NEW_LINE> <INDENT> def setup(self): <NEW_LINE> <INDENT> XTSTest.RESULTS_PATH = self.results_dir <NEW_LINE> try: <NEW_LINE> <INDENT> os.mkdir(os.path.join(self.results_dir, 'images')) <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> if e.errno != 17: <NEW_LINE> <INDENT> raise
A subclass of TestProfile that provides a setup hook for XTS
625990318c3a8732951f7618