code | docstring | _id |
---|---|---|
class Connector(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.connections = {} <NEW_LINE> self._nextid = 0 <NEW_LINE> <DEDENT> def emit(self, signal_id, payload): <NEW_LINE> <INDENT> if signal_id in self.connections: <NEW_LINE> <INDENT> for action in self.connections[signal_id].values(): <NEW_LINE> <INDENT> action(self, payload) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def connect(self, signal_id, action): <NEW_LINE> <INDENT> if signal_id not in self.connections: <NEW_LINE> <INDENT> self.connections[signal_id] = {} <NEW_LINE> <DEDENT> connection_id = self._nextid <NEW_LINE> self._nextid += 1 <NEW_LINE> self.connections[signal_id][connection_id] = action <NEW_LINE> return connection_id <NEW_LINE> <DEDENT> def disconnect(self, connection_id): <NEW_LINE> <INDENT> for action_dict in self.connections.values(): <NEW_LINE> <INDENT> if connection_id in action_dict: <NEW_LINE> <INDENT> del action_dict[connection_id] <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> return False | Simple signal-slot connector | 6259906f4f88993c371f1164 |
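A minimal usage sketch for the `Connector` entry above, assuming its tokenized source has been restored to ordinary Python; the signal name and payload below are invented for illustration:

```python
connector = Connector()

def on_saved(sender, payload):
    # Slots receive the emitting Connector instance and the payload.
    print("saved:", payload)

conn_id = connector.connect("saved", on_saved)  # returns an integer connection id
connector.emit("saved", {"rows": 3})            # prints: saved: {'rows': 3}
connector.disconnect(conn_id)                   # returns True; the slot is removed
connector.emit("saved", {"rows": 4})            # nothing happens now
```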
class MeasurewiseQTarget(QTarget): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> def __init__(self, items: typing.Sequence[QTargetMeasure] | None = None): <NEW_LINE> <INDENT> super().__init__(items) <NEW_LINE> <DEDENT> def _notate( self, grace_handler: GraceHandler, attack_point_optimizer: AttackPointOptimizer, attach_tempos: bool = True, ) -> abjad.Voice: <NEW_LINE> <INDENT> voice = abjad.Voice() <NEW_LINE> q_target_measure = self._items[0] <NEW_LINE> assert isinstance(q_target_measure, QTargetMeasure) <NEW_LINE> time_signature = q_target_measure.time_signature <NEW_LINE> measure = abjad.Container() <NEW_LINE> for beat in q_target_measure.beats: <NEW_LINE> <INDENT> measure.extend(beat.q_grid(beat.beatspan)) <NEW_LINE> <DEDENT> leaf = abjad.get.leaf(measure, 0) <NEW_LINE> abjad.attach(time_signature, leaf) <NEW_LINE> if attach_tempos: <NEW_LINE> <INDENT> tempo = copy.deepcopy(q_target_measure.tempo) <NEW_LINE> leaf = abjad.get.leaf(measure, 0) <NEW_LINE> abjad.attach(tempo, leaf) <NEW_LINE> <DEDENT> voice.append(measure) <NEW_LINE> pairs = abjad.sequence.nwise(self.items) <NEW_LINE> for q_target_measure_one, q_target_measure_two in pairs: <NEW_LINE> <INDENT> measure = abjad.Container() <NEW_LINE> for beat in q_target_measure_two.beats: <NEW_LINE> <INDENT> measure.extend(beat.q_grid(beat.beatspan)) <NEW_LINE> <DEDENT> if ( q_target_measure_two.time_signature != q_target_measure_one.time_signature ): <NEW_LINE> <INDENT> time_signature = q_target_measure_two.time_signature <NEW_LINE> leaf = abjad.get.leaf(measure, 0) <NEW_LINE> abjad.attach(time_signature, leaf) <NEW_LINE> <DEDENT> if ( q_target_measure_two.tempo != q_target_measure_one.tempo ) and attach_tempos: <NEW_LINE> <INDENT> tempo = copy.deepcopy(q_target_measure_two.tempo) <NEW_LINE> leaf = abjad.get.leaf(measure, 0) <NEW_LINE> abjad.attach(tempo, leaf) <NEW_LINE> <DEDENT> voice.append(measure) <NEW_LINE> <DEDENT> q_events_attachments = self._notate_leaves( grace_handler=grace_handler, voice=voice ) <NEW_LINE> for index, measure in enumerate(voice): <NEW_LINE> <INDENT> if isinstance(attack_point_optimizer, MeasurewiseAttackPointOptimizer): <NEW_LINE> <INDENT> attack_point_optimizer(measure, self.items[index].time_signature) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> attack_point_optimizer(measure) <NEW_LINE> <DEDENT> <DEDENT> if isinstance(grace_handler, ConcatenatingGraceHandler): <NEW_LINE> <INDENT> self._attach_attachments_to_logical_ties(voice, q_events_attachments) <NEW_LINE> <DEDENT> return voice <NEW_LINE> <DEDENT> @property <NEW_LINE> def beats(self) -> tuple[QTargetBeat, ...]: <NEW_LINE> <INDENT> return tuple([beat for item in self.items for beat in item.beats]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def item_class(self) -> type[QTargetMeasure]: <NEW_LINE> <INDENT> return QTargetMeasure | Measurewise quantization target.
Not composer-safe.
Used internally by ``Quantizer``. | 6259906fe5267d203ee6d001 |
class ChatUser(models.Model): <NEW_LINE> <INDENT> room = models.ForeignKey(ChatRoom, verbose_name="房间名") <NEW_LINE> user = models.ForeignKey(User, verbose_name="用户名") <NEW_LINE> add_time = models.DateTimeField(default=datetime.now, verbose_name="添加时间") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = " 在线会员" <NEW_LINE> verbose_name_plural = verbose_name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.user.username | Online member: room id, room name, member id, member name, time | 6259906f71ff763f4b5e9031 |
class PluginRule(Rule.PluginRuleBase): <NEW_LINE> <INDENT> def __init__(self, module): <NEW_LINE> <INDENT> super().__init__(module) <NEW_LINE> self.param = { 'NAME':'G', 'ENABLE':False, 'TIMER':3000, 'FILTER':'', 'name_t':'T', 'name_h':'H', 'name_p':'P', 'abc':123, 'xyz':456, 'sel':2, 'qwe':'Lorem ipsum', 'asd':[1,2,3,4,5], } <NEW_LINE> <DEDENT> def init(self): <NEW_LINE> <INDENT> super().init() <NEW_LINE> <DEDENT> def exit(self): <NEW_LINE> <INDENT> super().exit() <NEW_LINE> <DEDENT> def timer(self): <NEW_LINE> <INDENT> log(5, 'rugTestR Timer') <NEW_LINE> <DEDENT> def variables(self, news:dict): <NEW_LINE> <INDENT> log(LOG_INFO, 'Variables in ruTestR: {}', news) | TODO | 6259906fd486a94d0ba2d847 |
class Login(Resource): <NEW_LINE> <INDENT> def post(self): <NEW_LINE> <INDENT> creditentials = request.json <NEW_LINE> userCheck = session.query(User) .filter(User.name == creditentials['username'], User.password == creditentials['password']) .first() <NEW_LINE> if userCheck != None: <NEW_LINE> <INDENT> userData = userSchemaObj.dump(userCheck) <NEW_LINE> access_token = create_access_token(identity=userData.data["id"]) <NEW_LINE> LOGGED_USERS.add(userData.data["id"]) <NEW_LINE> return { "access_token": access_token, "user_id": userData.data["id"]} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'Unauthorized', 401 | Route to log in a user | 6259906f97e22403b383c78c |
class NSNitroNserrGslbDbTimeout(NSNitroGslbErrors): <NEW_LINE> <INDENT> pass | Nitro error code 1975
Static proximity database server is not responding. | 6259906f1f5feb6acb16447a |
class ReadOnlyCursorWrapper(object): <NEW_LINE> <INDENT> def __init__(self, cursor, db): <NEW_LINE> <INDENT> self.cursor = cursor <NEW_LINE> self.db = db <NEW_LINE> <DEDENT> def execute(self, sql, params=()): <NEW_LINE> <INDENT> self.db.validate_no_broken_transaction() <NEW_LINE> with self.db.wrap_database_errors: <NEW_LINE> <INDENT> sql = sql.replace("_salesforce\".\"","_salesforce_") <NEW_LINE> if params is None: <NEW_LINE> <INDENT> return self.cursor.execute(sql) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.cursor.execute(sql, params) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def executemany(self, sql, param_list): <NEW_LINE> <INDENT> sql = sql.replace("_salesforce\".\"","_salesforce_") <NEW_LINE> return self.cursor.executemany(sql, param_list) <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> return getattr(self.cursor, attr) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.cursor) | This is a wrapper for a database cursor.
This sits between django's own wrapper at
`django.db.backends.util.CursorWrapper` and the database specific cursor at
`django.db.backends.*.base.*CursorWrapper`. It overrides two specific
methods: `execute` and `executemany`. If the site is in read-only mode,
then the SQL is examined to see if it contains any write actions. If a
write is detected, an exception is raised.
A site is put into read-only mode by setting the SITE_READ_ONLY setting. For
obvious reasons, this is False by default.
Raises a DatabaseWriteDenied exception if writes are disabled. | 6259906f283ffb24f3cf5132 |
class CategoryListView(LoginRequiredMixin, ListView): <NEW_LINE> <INDENT> queryset = (Category.objects .filter(_has_banners_q, level=1) .distinct() .order_by('parent__name', 'name')) <NEW_LINE> template_name = 'banners/generator/categories.html' <NEW_LINE> context_object_name = 'categories' | List all categories. | 6259906f4527f215b58eb5e4 |
class MinHeap: <NEW_LINE> <INDENT> def __init__(self, init_list=None): <NEW_LINE> <INDENT> self.priority_queue = [] <NEW_LINE> self.priority_queue.append(None) <NEW_LINE> if init_list is not None: <NEW_LINE> <INDENT> for i in init_list: <NEW_LINE> <INDENT> self.add(i) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def add(self, element): <NEW_LINE> <INDENT> self.priority_queue.append(element) <NEW_LINE> self._swim(element, len(self.priority_queue) - 1) <NEW_LINE> <DEDENT> def _swim(self, element, current_index): <NEW_LINE> <INDENT> parent_index = int(current_index / 2) <NEW_LINE> while current_index != 1 and element < self.priority_queue[parent_index]: <NEW_LINE> <INDENT> _switch_list_value(self.priority_queue, current_index, parent_index) <NEW_LINE> current_index = parent_index <NEW_LINE> parent_index = int(current_index / 2) <NEW_LINE> <DEDENT> <DEDENT> def pop(self): <NEW_LINE> <INDENT> pop_element = self.priority_queue[1] <NEW_LINE> _switch_list_value(self.priority_queue, 1, len(self.priority_queue) - 1) <NEW_LINE> self.priority_queue.pop() <NEW_LINE> self._sink(1) <NEW_LINE> return pop_element <NEW_LINE> <DEDENT> def _sink(self, current_index): <NEW_LINE> <INDENT> child_index_1 = current_index * 2 <NEW_LINE> child_index_2 = current_index * 2 + 1 <NEW_LINE> last_index = len(self.priority_queue) - 1 <NEW_LINE> while child_index_1 <= last_index: <NEW_LINE> <INDENT> if child_index_2 > last_index: <NEW_LINE> <INDENT> smaller_child_index = child_index_1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> smaller_child_index = child_index_1 if self.priority_queue[child_index_1] < self.priority_queue[ child_index_2] else child_index_2 <NEW_LINE> <DEDENT> if self.priority_queue[smaller_child_index] < self.priority_queue[current_index]: <NEW_LINE> <INDENT> _switch_list_value(self.priority_queue, smaller_child_index, current_index) <NEW_LINE> current_index = smaller_child_index <NEW_LINE> child_index_1 = current_index * 2 <NEW_LINE> child_index_2 = current_index * 2 + 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def size(self): <NEW_LINE> <INDENT> return len(self.priority_queue) - 1 | 最小堆
(min heap): a complete binary tree in which the root of the tree, and of every subtree, is always the smallest element it contains.
Adding or removing an element restores the heap property by sifting the affected node up or down.
Because the tree is complete, an array implementation is straightforward: index 0 of the array is left unused and indices start at 1;
for a node at index k, the parent is at index k/2 and the two children are at indices 2k and 2k+1. | 6259906ff548e778e596ce16 |
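A short sketch of the 1-based array layout the `MinHeap` docstring describes, assuming the class is pasted into a module alongside the `_switch_list_value` helper it calls (not shown in the entry; assumed here to swap two list slots):

```python
def _switch_list_value(lst, i, j):
    # Assumed helper: the MinHeap entry calls it but does not define it.
    lst[i], lst[j] = lst[j], lst[i]

heap = MinHeap([5, 3, 8, 1])
assert heap.size() == 4
# For the node stored at index k (k >= 1): parent at k // 2, children at 2k and 2k + 1.
k = 3
parent, left, right = k // 2, 2 * k, 2 * k + 1
print(heap.pop())  # 1 -- the root is always the smallest element
print(heap.pop())  # 3
```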
@dataclass() <NEW_LINE> class LDR(AbstractAccessMechanism): <NEW_LINE> <INDENT> component: str = "" <NEW_LINE> offset: int = 0 <NEW_LINE> name: str = "ldr" <NEW_LINE> def is_read(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def is_write(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def is_memory_mapped(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def is_valid(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def binary_encoded(self): <NEW_LINE> <INDENT> raise NotImplementedError() | Access mechanism for reading a system control coprocessor register | 6259906f26068e7796d4e1c5 |
class Player: <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.power = 100 <NEW_LINE> self.speciality = None <NEW_LINE> self.spells_available = [] <NEW_LINE> self.position = [] <NEW_LINE> self.this_spell = None <NEW_LINE> self.move_log = [] <NEW_LINE> <DEDENT> def hit(self, ind): <NEW_LINE> <INDENT> spell = self.spells_available.pop(ind) <NEW_LINE> self.move_log.append(sps.index(spell.name)) <NEW_LINE> self.this_spell = spell <NEW_LINE> self.power -= spell.power//2 <NEW_LINE> return spell | Player Class: gives player objects | 6259906f4c3428357761bb3d |
class State(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(settings.AUTH_USER_MODEL, verbose_name=_('user')) <NEW_LINE> state = models.BooleanField(verbose_name=_('state'), default=(False)) | Docstrings are only dragging us down. | 6259906f8da39b475be04a77 |
class NearestEmbedFunc(Function): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def forward(ctx, input, emb): <NEW_LINE> <INDENT> if input.size(1) != emb.size(0): <NEW_LINE> <INDENT> raise RuntimeError('invalid argument: input.size(1) ({}) must be equal to emb.size(0) ({})'. format(input.size(1), emb.size(0))) <NEW_LINE> <DEDENT> ctx.batch_size = input.size(0) <NEW_LINE> ctx.num_latents = int(np.prod(np.array(input.size()[2:]))) <NEW_LINE> ctx.emb_dim = emb.size(0) <NEW_LINE> ctx.num_emb = emb.size(1) <NEW_LINE> ctx.input_type = type(input) <NEW_LINE> ctx.dims = list(range(len(input.size()))) <NEW_LINE> x_expanded = input.unsqueeze(-1) <NEW_LINE> num_arbitrary_dims = len(ctx.dims) - 2 <NEW_LINE> if num_arbitrary_dims: <NEW_LINE> <INDENT> emb_expanded = emb.view(emb.shape[0], *([1] * num_arbitrary_dims), emb.shape[1]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> emb_expanded = emb <NEW_LINE> <DEDENT> dist = torch.norm(x_expanded - emb_expanded, 2, 1) <NEW_LINE> _, argmin = dist.min(-1) <NEW_LINE> shifted_shape = [input.shape[0], *list(input.shape[2:]) ,input.shape[1]] <NEW_LINE> result = emb.t().index_select(0, argmin.view(-1)).view(shifted_shape).permute(0, ctx.dims[-1], *ctx.dims[1:-1]) <NEW_LINE> ctx.argmin = argmin <NEW_LINE> return result.contiguous(), argmin <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def backward(ctx, grad_output, argmin=None): <NEW_LINE> <INDENT> grad_input = grad_emb = None <NEW_LINE> if ctx.needs_input_grad[0]: <NEW_LINE> <INDENT> grad_input = grad_output <NEW_LINE> <DEDENT> if ctx.needs_input_grad[1]: <NEW_LINE> <INDENT> latent_indices = torch.arange(ctx.num_emb).type_as(ctx.argmin) <NEW_LINE> idx_choices = (ctx.argmin.view(-1, 1) == latent_indices.view(1, -1)).type_as(grad_output.data) <NEW_LINE> n_idx_choice = idx_choices.sum(0) <NEW_LINE> n_idx_choice[n_idx_choice == 0] = 1 <NEW_LINE> idx_avg_choices = idx_choices / n_idx_choice <NEW_LINE> grad_output = grad_output.permute(0, *ctx.dims[2:], 1).contiguous() <NEW_LINE> grad_output = grad_output.view(ctx.batch_size * ctx.num_latents, ctx.emb_dim) <NEW_LINE> grad_emb = Variable(torch.sum( grad_output.data.view(-1, ctx.emb_dim, 1) * idx_avg_choices.view(-1, 1, ctx.num_emb), 0)) <NEW_LINE> <DEDENT> return grad_input, grad_emb, None, None | Input:
------
x - (batch_size, emb_dim, *)
Last dimensions may be arbitrary
emb - (emb_dim, num_emb) | 6259906fa8370b77170f1c52 |
class Environment: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.init_state() <NEW_LINE> <DEDENT> def init_state(self): <NEW_LINE> <INDENT> self.state = None <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_states(): <NEW_LINE> <INDENT> states = [] <NEW_LINE> return states <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_actions(state): <NEW_LINE> <INDENT> actions = [] <NEW_LINE> return actions <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def encode(state): <NEW_LINE> <INDENT> return state <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def decode(state): <NEW_LINE> <INDENT> return state <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_transition(state, action): <NEW_LINE> <INDENT> probs = [1] <NEW_LINE> states = [deepcopy(state)] <NEW_LINE> return probs, states <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_reward(state): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def is_terminal(state): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_model(state, action): <NEW_LINE> <INDENT> probs, states = Environment.get_transition(state, action) <NEW_LINE> rewards = [Environment.get_reward(state) for state in states] <NEW_LINE> return probs, states, rewards <NEW_LINE> <DEDENT> def step(self, action): <NEW_LINE> <INDENT> reward = None <NEW_LINE> stop = True <NEW_LINE> if action is not None and action in self.get_actions(self.state): <NEW_LINE> <INDENT> probs, states, rewards = self.get_model(self.state, action) <NEW_LINE> i = np.random.choice(len(probs), p=probs) <NEW_LINE> state = states[i] <NEW_LINE> self.state = state <NEW_LINE> reward = rewards[i] <NEW_LINE> stop = self.is_terminal(state) <NEW_LINE> <DEDENT> return reward, stop | Generic environment.
Reward only depends on the target state. | 6259906fa219f33f346c8093 |
class EmbeddingCollection(nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_dim, embedding_dim, max_len=5000): <NEW_LINE> <INDENT> super(EmbeddingCollection, self).__init__() <NEW_LINE> self.__input_dim = input_dim <NEW_LINE> self.__embedding_dim = embedding_dim <NEW_LINE> self.__max_len = max_len <NEW_LINE> self.__embedding_layer = nn.Embedding( self.__input_dim, self.__embedding_dim ) <NEW_LINE> <DEDENT> def forward(self, input_x): <NEW_LINE> <INDENT> embedding_x = self.__embedding_layer(input_x) <NEW_LINE> return embedding_x, embedding_x | Provide word vector and position vector encoding. | 6259906f5fdd1c0f98e5f810 |
class BasePenetrance(BaseOperator): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> _simuPOP_laop.BasePenetrance_swiginit(self, _simuPOP_laop.new_BasePenetrance(*args, **kwargs)) <NEW_LINE> <DEDENT> __swig_destroy__ = _simuPOP_laop.delete_BasePenetrance <NEW_LINE> def applyToIndividual(self, ind: 'Individual', pop: 'Population'=None) -> "bool": <NEW_LINE> <INDENT> return _simuPOP_laop.BasePenetrance_applyToIndividual(self, ind, pop) | Details:
A penetrance model models the probability that an individual has a
certain disease provided that he or she has certain genetic
(genotype) and environmental (information field) riske factors. A
penetrance operator calculates this probability according to
provided information and set his or her affection status randomly.
For example, an individual will have probability 0.8 to be
affected if the penetrance is 0.8. This class is the base class to
all penetrance operators and defines a common interface for all
penetrance operators. A penetrance operator can be applied at any
stage of an evolutionary cycle. If it is applied before or after
mating, it will set affection status of all parents and offspring,
respectively. If it is applied during mating, it will set the
affection status of each offspring. You can also apply a
penetrance operator to an individual using its applyToIndividual
member function. By default, a penetrance operator assigns
affection status of individuals but does not save the actual
penetrance value. However, if an information field is specified,
penetrance values will be saved to this field for future analysis.
When a penetrance operator is applied to a population, it is only
applied to the current generation. You can, however, use parameter
ancGens to set affection status for all ancestral generations
(ALL_AVAIL), or individuals in specified generations if a list of
ancestral generations is specified. Note that this parameter is
ignored if the operator is applied during mating. | 6259906f2ae34c7f260ac973 |
@script_interface_register <NEW_LINE> class Cluster(ScriptInterfaceHelper): <NEW_LINE> <INDENT> _so_name = "ClusterAnalysis::Cluster" <NEW_LINE> _so_bind_methods = ("particle_ids", "size", "longest_distance", "radius_of_gyration", "fractal_dimension", "center_of_mass") <NEW_LINE> _so_creation_policy = "LOCAL" <NEW_LINE> def particles(self): <NEW_LINE> <INDENT> return ParticleSlice(self.particle_ids()) | Class representing a cluster of particles.
Methods
-------
particle_ids()
Returns list of particle ids in the cluster
size()
Returns the number of particles in the cluster
center_of_mass()
Center of mass of the cluster (folded coordinates)
longest_distance()
Longest distance between any combination of two particles in the cluster
fractal_dimension(dr=None)
Estimates the cluster's fractal dimension by fitting the number of
particles :math:`n` in spheres of growing radius around the center of mass
to :math:`c*r_g^d`, where :math:`r_g` is the radius of gyration of the
particles within the sphere, and :math:`d` is the fractal dimension.
.. note::
Requires ``GSL`` external feature, enabled with ``-DWITH_GSL=ON``.
Parameters
----------
dr: :obj:`float`
Minimum increment for the radius of the spheres.
Returns
-------
:obj:`tuple`:
Fractal dimension and mean square residual. | 6259906f009cb60464d02dc1 |
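The `fractal_dimension` docstring above describes fitting the particle count to c*r_g^d. A hedged illustration of that relation using a plain log-log least-squares fit (this is not the GSL routine the class itself requires; the data are synthetic):

```python
import numpy as np

def fit_fractal_dimension(r_g, n):
    # log n = log c + d * log r_g, so the slope of a linear fit in log space is d.
    slope, _intercept = np.polyfit(np.log(r_g), np.log(n), 1)
    return slope

r_g = np.array([1.0, 2.0, 4.0, 8.0])
n = 3.0 * r_g**1.7                    # synthetic data with d = 1.7
print(fit_fractal_dimension(r_g, n))  # ~1.7
```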
class TestCheckFullPathSubFolder(TestCheckAbsoluteSubFolder): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def patch_site(self): <NEW_LINE> <INDENT> conf_path = os.path.join(self.target_dir, "conf.py") <NEW_LINE> with io.open(conf_path, "r", encoding="utf-8") as inf: <NEW_LINE> <INDENT> data = inf.read() <NEW_LINE> data = data.replace('SITE_URL = "https://example.com/"', 'SITE_URL = "https://example.com/foo/"') <NEW_LINE> data = data.replace("# URL_TYPE = 'rel_path'", "URL_TYPE = 'full_path'") <NEW_LINE> <DEDENT> with io.open(conf_path, "w+", encoding="utf8") as outf: <NEW_LINE> <INDENT> outf.write(data) <NEW_LINE> outf.flush() | Validate links in a site which is:
* built in URL_TYPE="full_path"
* deployable to a subfolder (BASE_URL="https://example.com/foo/") | 6259906f67a9b606de5476e8 |
class Prover(object): <NEW_LINE> <INDENT> rules = [] <NEW_LINE> goalId = 100 <NEW_LINE> trace = 0 <NEW_LINE> @staticmethod <NEW_LINE> def unify (srcTerm, srcEnv, destTerm, destEnv) : <NEW_LINE> <INDENT> nargs = len(srcTerm.args) <NEW_LINE> if nargs != len(destTerm.args) : return 0 <NEW_LINE> if srcTerm.pred != destTerm.pred : return 0 <NEW_LINE> for i in range(nargs) : <NEW_LINE> <INDENT> srcArg = srcTerm.args[i] <NEW_LINE> destArg = destTerm.args[i] <NEW_LINE> if srcArg <= 'Z' : <NEW_LINE> <INDENT> srcVal = srcEnv.get(srcArg) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> srcVal = srcArg <NEW_LINE> <DEDENT> if srcVal : <NEW_LINE> <INDENT> if destArg <= 'Z' : <NEW_LINE> <INDENT> destVal = destEnv.get(destArg) <NEW_LINE> if not destVal : <NEW_LINE> <INDENT> destEnv[destArg] = srcVal <NEW_LINE> <DEDENT> elif destVal != srcVal : return 0 <NEW_LINE> <DEDENT> elif destArg != srcVal : return 0 <NEW_LINE> <DEDENT> <DEDENT> return 1 <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def search (term) : <NEW_LINE> <INDENT> goalId = Prover.goalId <NEW_LINE> trace = Prover.trace <NEW_LINE> rules = Prover.rules <NEW_LINE> unify = Prover.unify <NEW_LINE> goalId = 0 <NEW_LINE> returnValue = False <NEW_LINE> if trace : print("search", term) <NEW_LINE> goal = Goal(Rule("got(goal):-x(y)")) <NEW_LINE> goal.rule.goals = [term] <NEW_LINE> if trace : print("stack", goal) <NEW_LINE> stack = [goal] <NEW_LINE> while stack : <NEW_LINE> <INDENT> c = stack.pop() <NEW_LINE> if trace : print(" pop", c) <NEW_LINE> if c.inx >= len(c.rule.goals) : <NEW_LINE> <INDENT> if c.parent == None : <NEW_LINE> <INDENT> if c.env : print(c.env) <NEW_LINE> else : <NEW_LINE> <INDENT> returnValue = True <NEW_LINE> <DEDENT> continue <NEW_LINE> <DEDENT> parent = deepcopy(c.parent) <NEW_LINE> unify (c.rule.head,c.env,parent.rule.goals[parent.inx],parent.env) <NEW_LINE> parent.inx = parent.inx+1 <NEW_LINE> if trace : print("stack", parent) <NEW_LINE> stack.append(parent) <NEW_LINE> continue <NEW_LINE> <DEDENT> term = c.rule.goals[c.inx] <NEW_LINE> for rule in rules : <NEW_LINE> <INDENT> if rule.head.pred != term.pred : continue <NEW_LINE> if len(rule.head.args) != len(term.args) : continue <NEW_LINE> child = Goal(rule, c) <NEW_LINE> ans = unify (term, c.env, rule.head, child.env) <NEW_LINE> if ans : <NEW_LINE> <INDENT> if trace : print("stack", child) <NEW_LINE> stack.append(child) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return returnValue <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def prove(facts,example,clause): <NEW_LINE> <INDENT> Prover.rules = [] <NEW_LINE> Prover.trace = 0 <NEW_LINE> Prover.goalId = 100 <NEW_LINE> Prover.rules += [Rule(fact) for fact in facts] <NEW_LINE> Prover.rules += [Rule(clause)] <NEW_LINE> proofOutcome = Prover.search(Term(example)) <NEW_LINE> return proofOutcome | class for prolog style proof of query | 6259906fe76e3b2f99fda28d |
class PluginInstaller: <NEW_LINE> <INDENT> def __init__(self, connector=None, loop=None): <NEW_LINE> <INDENT> self.connector = connector <NEW_LINE> self.loop = loop <NEW_LINE> <DEDENT> async def request_repo(self, pluginname): <NEW_LINE> <INDENT> url = ( GitHubRoute( "DecoraterBot-devs", "DecoraterBot-cogs", "master", "cogslist.json")).url <NEW_LINE> async with aiohttp.ClientSession( connector=self.connector, loop=self.loop) as session: <NEW_LINE> <INDENT> data = await session.get(url) <NEW_LINE> resp1 = await data.json(content_type='text/plain') <NEW_LINE> version = resp1[pluginname]['version'] <NEW_LINE> url2 = resp1[pluginname]['downloadurl'] <NEW_LINE> url3 = resp1[pluginname]['textjson'] <NEW_LINE> data2 = await session.get(url2) <NEW_LINE> data3 = await session.get(url3) <NEW_LINE> plugincode = await data2.text() <NEW_LINE> textjson = await data3.text() <NEW_LINE> return PluginData( plugincode=plugincode, version=version, textjson=textjson) <NEW_LINE> <DEDENT> <DEDENT> async def checkupdate(self, pluginname): <NEW_LINE> <INDENT> pluginversion = None <NEW_LINE> requestrepo = await self.request_repo(pluginname) <NEW_LINE> if requestrepo.version != pluginversion: <NEW_LINE> <INDENT> return requestrepo <NEW_LINE> <DEDENT> <DEDENT> async def checkupdates(self, pluginlist): <NEW_LINE> <INDENT> update_list = [] <NEW_LINE> for plugin in pluginlist: <NEW_LINE> <INDENT> update_list.append(await self.checkupdate(plugin)) <NEW_LINE> <DEDENT> return update_list <NEW_LINE> <DEDENT> async def install_plugin(self, pluginname): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> async def install_plugins(self, pluginlist): <NEW_LINE> <INDENT> for pluginname in pluginlist: <NEW_LINE> <INDENT> self.install_plugin(pluginname) | Class that implements all of the Plugin
Installation / updating system for
DecoraterBot. | 6259906f63b5f9789fe869ee |
class RandomNanoflares(object): <NEW_LINE> <INDENT> @u.quantity_input <NEW_LINE> def __init__(self, duration: u.s, stress): <NEW_LINE> <INDENT> self.duration = duration.to(u.s).value <NEW_LINE> self.stress = stress <NEW_LINE> <DEDENT> def calculate_event_properties(self, loop): <NEW_LINE> <INDENT> self.number_events = 1 <NEW_LINE> start_time = np.random.uniform(low=0, high=self.base_config['total_time'] - self.duration) <NEW_LINE> max_energy = (self.stress * loop.field_strength.mean().value)**2/(8.*np.pi) <NEW_LINE> return {'magnitude': np.array([max_energy/(self.duration/2.)]), 'rise_start': np.array([start_time]), 'rise_end': np.array([start_time+self.duration/2]), 'decay_start': np.array([start_time+self.duration/2]), 'decay_end': np.array([start_time+self.duration])} | Add a single nanoflare at a random time during the simulation period
Parameters
----------
duration : `~astropy.units.Quantity`
Duration of each event
stress : `float`
Fraction of field energy density to input into the loop | 6259906f097d151d1a2c28fc |
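Worked numbers for the heating model in the `RandomNanoflares` entry: the peak energy density is the stressed field energy (stress * <B>)^2 / (8*pi), released over half the event duration. The field strength, stress, and duration below are invented; units follow whatever `loop.field_strength` carries.

```python
import numpy as np

stress = 0.3                 # fraction of the field that is "stressed" (assumed)
mean_field_strength = 10.0   # mean field strength along the loop (assumed value)
duration = 200.0             # event duration in seconds (assumed)

max_energy = (stress * mean_field_strength) ** 2 / (8.0 * np.pi)
magnitude = max_energy / (duration / 2.0)  # same expression as 'magnitude' above
print(max_energy, magnitude)
```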
class ExportTwoPointOhLibraries(BaseView): <NEW_LINE> <INDENT> decorators = [advertise('scopes', 'rate_limit')] <NEW_LINE> scopes = ['user'] <NEW_LINE> rate_limit = [1000, 60*60*24] <NEW_LINE> def get(self, export): <NEW_LINE> <INDENT> if export not in current_app.config['HARBOUR_EXPORT_TYPES']: <NEW_LINE> <INDENT> return err(TWOPOINTOH_WRONG_EXPORT_TYPE) <NEW_LINE> <DEDENT> if not current_app.config['ADS_TWO_POINT_OH_LOADED_USERS']: <NEW_LINE> <INDENT> current_app.logger.error( 'Users from MongoDB have not been loaded into the app' ) <NEW_LINE> return err(TWOPOINTOH_AWS_PROBLEM) <NEW_LINE> <DEDENT> absolute_uid = self.helper_get_user_id() <NEW_LINE> try: <NEW_LINE> <INDENT> user = Users.query.filter(Users.absolute_uid == absolute_uid).one() <NEW_LINE> if not user.twopointoh_email: <NEW_LINE> <INDENT> raise NoResultFound <NEW_LINE> <DEDENT> <DEDENT> except NoResultFound: <NEW_LINE> <INDENT> current_app.logger.warning( 'User does not have an associated ADS Classic/2.0 account' ) <NEW_LINE> return err(NO_TWOPOINTOH_ACCOUNT) <NEW_LINE> <DEDENT> library_file_name = current_app.config['ADS_TWO_POINT_OH_USERS'].get( user.twopointoh_email, None ) <NEW_LINE> if not library_file_name: <NEW_LINE> <INDENT> current_app.logger.warning( 'User does not have any libraries in ADS 2.0' ) <NEW_LINE> return err(NO_TWOPOINTOH_LIBRARIES) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> s3 = boto3.client('s3') <NEW_LINE> s3_presigned_url = s3.generate_presigned_url( ClientMethod='get_object', Params={ 'Bucket': current_app.config['ADS_TWO_POINT_OH_S3_MONGO_BUCKET'], 'Key': library_file_name.replace('.json', '.{}.zip'.format(export)) }, ExpiresIn=1800 ) <NEW_LINE> <DEDENT> except Exception as error: <NEW_LINE> <INDENT> current_app.logger.error( 'Unknown error with AWS: {}'.format(error) ) <NEW_LINE> return err(TWOPOINTOH_AWS_PROBLEM) <NEW_LINE> <DEDENT> return {'url': s3_presigned_url}, 200 | End point to return ADS 2.0 libraries in a format that users can use to
import them to other services. Currently, the following third-party
services are supported:
- Zotero (https://www.zotero.org/)
- Papers (http://www.papersapp.com/)
- Mendeley (https://www.mendeley.com/) | 6259906f4527f215b58eb5e5 |
class Course(): <NEW_LINE> <INDENT> tasks = {} <NEW_LINE> exam_score = 0 <NEW_LINE> lab_max = 1 <NEW_LINE> def_task_num = 0 <NEW_LINE> def __init__(self, course_config): <NEW_LINE> <INDENT> self.exam_max = course_config["exam_max"] <NEW_LINE> self.lab_max = course_config["lab_max"] <NEW_LINE> self.lab_num = course_config["lab_num"] <NEW_LINE> self.threshold = course_config["k"] <NEW_LINE> <DEDENT> def make_task(self, score: int, task_number: int = def_task_num): <NEW_LINE> <INDENT> if (task_number < self.lab_num) & (task_number >= 0): <NEW_LINE> <INDENT> if (score <= self.lab_max) & (score >= 0): <NEW_LINE> <INDENT> self.tasks.update({task_number: score}) <NEW_LINE> self.def_task_num = self.default_task_number() <NEW_LINE> <DEDENT> <DEDENT> return self <NEW_LINE> <DEDENT> def default_task_number(self): <NEW_LINE> <INDENT> if len(self.tasks) <= self.lab_num: <NEW_LINE> <INDENT> made_tasks = {k[0] for k in self.tasks.items()} <NEW_LINE> rest_tasks = set(range(0, self.lab_num)).difference(made_tasks) <NEW_LINE> if rest_tasks: <NEW_LINE> <INDENT> return min(rest_tasks) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> <DEDENT> def make_exam(self, score: int): <NEW_LINE> <INDENT> if score <= self.exam_max: <NEW_LINE> <INDENT> self.exam_score = score <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def sum_score(self): <NEW_LINE> <INDENT> sum_score = 0 <NEW_LINE> for el in self.tasks.values(): <NEW_LINE> <INDENT> sum_score += el <NEW_LINE> <DEDENT> sum_score += self.exam_score <NEW_LINE> return sum_score <NEW_LINE> <DEDENT> def is_certified(self): <NEW_LINE> <INDENT> course_score = self.sum_score() <NEW_LINE> exam_pass = course_score/(self.exam_max + self.lab_max*self.lab_num) >= self.threshold <NEW_LINE> print(course_score/(self.exam_max + self.lab_max*self.lab_num)) <NEW_LINE> return (course_score, exam_pass) <NEW_LINE> <DEDENT> def get_def_task_num(self): <NEW_LINE> <INDENT> return self.def_task_num | Course | 6259906f7d847024c075dc69 |
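A usage sketch for the `Course` entry above; the configuration keys match its `__init__`, while the particular numbers are arbitrary:

```python
config = {"exam_max": 30, "lab_max": 10, "lab_num": 2, "k": 0.6}
course = Course(config)
course.make_task(8, 0).make_task(9, 1).make_exam(25)  # methods return self, so they chain
score, certified = course.is_certified()              # prints 0.84 (the score ratio)
print(score, certified)                               # 42 True, since 42/50 >= 0.6
```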
class Adafruit_BME680_I2C(Adafruit_BME680): <NEW_LINE> <INDENT> def __init__(self, i2c, address=0x77, debug=False, *, refresh_rate=10): <NEW_LINE> <INDENT> self._i2c = i2c <NEW_LINE> self._debug = debug <NEW_LINE> super().__init__(refresh_rate=refresh_rate) <NEW_LINE> <DEDENT> def _read(self, register, length): <NEW_LINE> <INDENT> with self._i2c as i2c: <NEW_LINE> <INDENT> i2c.write(bytes([register & 0xFF])) <NEW_LINE> result = bytearray(length) <NEW_LINE> i2c.readinto(result) <NEW_LINE> if self._debug: <NEW_LINE> <INDENT> print("\t$%02X => %s" % (register, [hex(i) for i in result])) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> <DEDENT> def _write(self, register, values): <NEW_LINE> <INDENT> with self._i2c as i2c: <NEW_LINE> <INDENT> buffer = bytearray(2 * len(values)) <NEW_LINE> for i, value in enumerate(values): <NEW_LINE> <INDENT> buffer[2 * i] = register + i <NEW_LINE> buffer[2 * i + 1] = value <NEW_LINE> <DEDENT> i2c.write(buffer) <NEW_LINE> if self._debug: <NEW_LINE> <INDENT> print("\t$%02X <= %s" % (values[0], [hex(i) for i in values[1:]])) | Driver for I2C connected BME680.
:param int address: I2C device address
:param bool debug: Print debug statements when True.
:param int refresh_rate: Maximum number of readings per second. Faster property reads
will be from the previous reading. | 6259906f167d2b6e312b81d4 |
class ServiceFee(): <NEW_LINE> <INDENT> def __init__(self, term): <NEW_LINE> <INDENT> self.fee_discount = np.repeat(0.99, term) | LendingClub charges a constant service fee of 1% of payments | 6259906fa8370b77170f1c54 |
class _ErrorReportingLoggingAPI(object): <NEW_LINE> <INDENT> def __init__( self, project, credentials=None, _http=None, client_info=None, client_options=None, ): <NEW_LINE> <INDENT> self.logging_client = google.cloud.logging.client.Client( project, credentials, _http=_http, client_info=client_info, client_options=client_options, ) <NEW_LINE> <DEDENT> def report_error_event(self, error_report): <NEW_LINE> <INDENT> logger = self.logging_client.logger("errors") <NEW_LINE> logger.log_struct(error_report) | Report to Stackdriver Error Reporting via Logging API
:type project: str
:param project: the project which the client acts on behalf of. If not
passed falls back to the default inferred from the
environment.
:type credentials: :class:`google.auth.credentials.Credentials` or
:class:`NoneType`
:param credentials: The authorization credentials to attach to requests.
These credentials identify this application to the service.
If none are specified, the client will attempt to ascertain
the credentials from the environment.
:type _http: :class:`~requests.Session`
:param _http: (Optional) HTTP object to make requests. Can be any object
that defines ``request()`` with the same interface as
:meth:`requests.Session.request`. If not passed, an
``_http`` object is created that is bound to the
``credentials`` for the current object.
This parameter should be considered private, and could
change in the future.
:type client_info:
:class:`google.api_core.client_info.ClientInfo` or
:class:`google.api_core.gapic_v1.client_info.ClientInfo`
:param client_info:
The client info used to send a user-agent string along with API
requests. If ``None``, then default info will be used. Generally,
you only need to set this if you're developing your own library
or partner tool.
:type client_options: :class:`~google.api_core.client_options.ClientOptions`
or :class:`dict`
:param client_options: (Optional) Client options used to set user options
on the client. API Endpoint should be set through client_options. | 6259906fa8370b77170f1c55 |
class RealActualClass: <NEW_LINE> <INDENT> doing_impossible_stuff = False <NEW_LINE> doing_possible_stuff = False <NEW_LINE> def impossibleMethod(self): <NEW_LINE> <INDENT> self.doing_impossible_stuff = True <NEW_LINE> raise AssertionError("Trying to do impossible stuff.") <NEW_LINE> <DEDENT> def testableMethod(self): <NEW_LINE> <INDENT> self.doing_possible_stuff = True <NEW_LINE> <DEDENT> def doComplicatedThings(self, argument): <NEW_LINE> <INDENT> self.impossibleMethod() <NEW_LINE> self.testableMethod() <NEW_LINE> return argument | A class that's hard to test. | 6259906fe1aae11d1e7cf452 |
class HelloApiView(APIView): <NEW_LINE> <INDENT> serializer_class = serializers.HelloSerializer <NEW_LINE> def get(self, request, format=None): <NEW_LINE> <INDENT> an_api_view = [ 'Uses HTTP methods as function (get, post, patch, put, delete)', 'It is similar to traditional Django View', 'Gives you the most control over yout logic', 'Is mapped manually to URLs' ] <NEW_LINE> return Response({'message': 'Hello', 'an_api_view': an_api_view}) <NEW_LINE> <DEDENT> def post(self, request): <NEW_LINE> <INDENT> serializer = serializers.HelloSerializer(data=request.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> name = serializer.data.get('name') <NEW_LINE> message = 'Hello {0}'.format(name) <NEW_LINE> return Response({'message': message}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> <DEDENT> def put(self, request, pk=None): <NEW_LINE> <INDENT> return Response({'method': 'put'}) <NEW_LINE> <DEDENT> def patch(self, request, pk=None): <NEW_LINE> <INDENT> return Response({'method': 'patch'}) <NEW_LINE> <DEDENT> def delete(self, request, pk=None): <NEW_LINE> <INDENT> return Response({'method': 'delete'}) | Test API View. | 6259906fa219f33f346c8095 |
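The docstring of `HelloApiView` notes that an `APIView` "is mapped manually to URLs". A typical wiring sketch, assuming the view lives in the app's `views` module; the URL path and name are invented:

```python
from django.urls import path

from . import views  # assumes HelloApiView is defined in this app's views.py

urlpatterns = [
    path('hello-view/', views.HelloApiView.as_view(), name='hello-view'),
]
```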
class UserListCreateAPIView(ListCreateAPIView): <NEW_LINE> <INDENT> serializer_class = UserRegisterSerializer <NEW_LINE> queryset = User.objects.all() <NEW_LINE> permission_classes = [CreateListUserPermission] <NEW_LINE> filter_backends = (SearchFilter, OrderingFilter) <NEW_LINE> search_fields = ('name', 'email') <NEW_LINE> ordering_fields = ('email', 'institution') | Controller that allows any logged-in user to list all users, and allows only
non-logged-in users and admin users to create a user. | 6259906f4428ac0f6e659dbf |
class Tags(BrowserView): <NEW_LINE> <INDENT> def __call__(self): <NEW_LINE> <INDENT> self.request.response.setHeader("Content-type", "application/json; charset=utf-8") <NEW_LINE> catalog = getToolByName(self.context, 'portal_catalog') <NEW_LINE> tags = catalog.uniqueValuesFor('Subject') <NEW_LINE> tags = [cgi.escape(tag) for tag in tags] <NEW_LINE> tags = [{'id': tag, 'text': tag} for tag in tags] <NEW_LINE> return json.dumps(tags) | Support view for select2subject.
Returns Subject tags in json | 6259906f67a9b606de5476e9 |
class VirtualWanSecurityProviders(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'supported_providers': {'key': 'supportedProviders', 'type': '[VirtualWanSecurityProvider]'}, } <NEW_LINE> def __init__( self, *, supported_providers: Optional[List["VirtualWanSecurityProvider"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(VirtualWanSecurityProviders, self).__init__(**kwargs) <NEW_LINE> self.supported_providers = supported_providers | Collection of SecurityProviders.
:param supported_providers: List of VirtualWAN security providers.
:type supported_providers:
list[~azure.mgmt.network.v2019_12_01.models.VirtualWanSecurityProvider] | 6259906fd268445f2663a7a3 |
class TokenManager(object): <NEW_LINE> <INDENT> def get_token(self): <NEW_LINE> <INDENT> return None | An abstract base class for token managers.
Every token manager must derive from this base class
and override the get_token method. | 6259906fcc0a2c111447c717 |
class RCServicer(object): <NEW_LINE> <INDENT> def ExecuteRC(self, request, context): <NEW_LINE> <INDENT> pass <NEW_LINE> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') | The greeting service definition.
| 6259906f460517430c432c9d |
class SupplierAttributes2201(object): <NEW_LINE> <INDENT> swagger_types = { 'enable': 'int', 'entry': 'SupplierAttributes2201Entry' } <NEW_LINE> attribute_map = { 'enable': 'enable', 'entry': 'entry' } <NEW_LINE> def __init__(self, enable=None, entry=None): <NEW_LINE> <INDENT> self._enable = None <NEW_LINE> self._entry = None <NEW_LINE> self.discriminator = None <NEW_LINE> if enable is not None: <NEW_LINE> <INDENT> self.enable = enable <NEW_LINE> <DEDENT> if entry is not None: <NEW_LINE> <INDENT> self.entry = entry <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def enable(self): <NEW_LINE> <INDENT> return self._enable <NEW_LINE> <DEDENT> @enable.setter <NEW_LINE> def enable(self, enable): <NEW_LINE> <INDENT> self._enable = enable <NEW_LINE> <DEDENT> @property <NEW_LINE> def entry(self): <NEW_LINE> <INDENT> return self._entry <NEW_LINE> <DEDENT> @entry.setter <NEW_LINE> def entry(self, entry): <NEW_LINE> <INDENT> self._entry = entry <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(SupplierAttributes2201, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, SupplierAttributes2201): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259906fe76e3b2f99fda28f |
class Phone(Region): <NEW_LINE> <INDENT> @property <NEW_LINE> def phone(self) -> str: <NEW_LINE> <INDENT> return self.phone_field.get_attribute('value') <NEW_LINE> <DEDENT> @phone.setter <NEW_LINE> def phone(self, number: Union[int, str]): <NEW_LINE> <INDENT> Utility.click_option(self.driver, element=self.phone_field) <NEW_LINE> self.phone_field.send_keys(str(number)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def phone_error(self) -> str: <NEW_LINE> <INDENT> if self.phone_has_error: <NEW_LINE> <INDENT> return self.find_element( *self._phone_error_message_locator).text <NEW_LINE> <DEDENT> return '' <NEW_LINE> <DEDENT> @property <NEW_LINE> def phone_field(self) -> WebElement: <NEW_LINE> <INDENT> return self.find_element(*self._phone_locator) <NEW_LINE> <DEDENT> @property <NEW_LINE> def phone_has_error(self) -> bool: <NEW_LINE> <INDENT> return 'has-error' in self.phone_field.get_attribute('class') | A telephone form field.
.. note::
Must define ``_phone_locator`` and ``_phone_error_message_locator`` | 6259906f63b5f9789fe869f0 |
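The `Phone` region's docstring requires subclasses to define `_phone_locator` and `_phone_error_message_locator`. A hypothetical subclass sketch with Selenium-style locator tuples; the CSS selectors are invented:

```python
from selenium.webdriver.common.by import By

class ContactPhone(Phone):
    # Both locators are required by the Phone docstring; the selectors are guesses.
    _phone_locator = (By.CSS_SELECTOR, 'input[name="phone"]')
    _phone_error_message_locator = (By.CSS_SELECTOR, '.invalid-message')
```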
class TestTask332(unittest.TestCase): <NEW_LINE> <INDENT> @parameterized.expand( [ (1, [1, 0, 0, 0]), (4, [2, 0, 0, 0]), (234, [15, 3, 0, 0]), (1646, [40, 6, 3, 1]), (2141, [46, 5, 0, 0]), (2137, [46, 4, 2, 1]), (2149, [46, 5, 2, 2]), (12412, [111, 9, 3, 1]), (90475, [300, 21, 5, 3]), ] ) <NEW_LINE> def test_task332(self, input_value, expected_value): <NEW_LINE> <INDENT> self.assertEqual(algo.Task332.main_logic(input_value), expected_value) | Testing task 332 class main logic | 6259906fadb09d7d5dc0bdf8 |
class ApasxolisiType(models.Model): <NEW_LINE> <INDENT> aptyp = models.CharField("Τύπος απασχόλησης", max_length=50) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ["id"] <NEW_LINE> verbose_name = "ΑΠΑΣΧΟΛΗΣΗ ΤΥΠΟΣ" <NEW_LINE> verbose_name_plural = "ΠΡΟΣΛΗΨΕΙΣ-ΤΥΠΟΣ ΑΠΑΣΧΟΛΗΣΗΣ" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"{self.aptyp}" | Indefinite-term, fixed-term, or project-based (employment types) | 6259906fec188e330fdfa12f |
class _OpenBLASModule(_Module): <NEW_LINE> <INDENT> def get_version(self): <NEW_LINE> <INDENT> get_config = getattr(self._dynlib, "openblas_get_config", lambda: None) <NEW_LINE> get_config.restype = ctypes.c_char_p <NEW_LINE> config = get_config().split() <NEW_LINE> if config[0] == b"OpenBLAS": <NEW_LINE> <INDENT> return config[1].decode("utf-8") <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def get_num_threads(self): <NEW_LINE> <INDENT> get_func = getattr(self._dynlib, "openblas_get_num_threads", lambda: None) <NEW_LINE> return get_func() <NEW_LINE> <DEDENT> def set_num_threads(self, num_threads): <NEW_LINE> <INDENT> set_func = getattr(self._dynlib, "openblas_set_num_threads", lambda num_threads: None) <NEW_LINE> return set_func(num_threads) <NEW_LINE> <DEDENT> def _get_extra_info(self): <NEW_LINE> <INDENT> self.threading_layer = self.get_threading_layer() <NEW_LINE> <DEDENT> def get_threading_layer(self): <NEW_LINE> <INDENT> threading_layer = self._dynlib.openblas_get_parallel() <NEW_LINE> if threading_layer == 2: <NEW_LINE> <INDENT> return "openmp" <NEW_LINE> <DEDENT> elif threading_layer == 1: <NEW_LINE> <INDENT> return "pthreads" <NEW_LINE> <DEDENT> return "disabled" | Module class for OpenBLAS | 6259906f4e4d562566373c94 |
class HelloApiView(APIView): <NEW_LINE> <INDENT> serializer_class = serializers.HelloSerializer <NEW_LINE> def get(self, request, format=None): <NEW_LINE> <INDENT> an_apiview = [ 'Uses HTTP methods as function (get, post, patch, put, delete)', 'Is similar to a traditional Django View', 'Gives you the most control over your application logic', 'Is mapped manually to URLs', ] <NEW_LINE> return Response({'message': 'Hello!', 'an_apiview': an_apiview}) <NEW_LINE> <DEDENT> def post(self, request): <NEW_LINE> <INDENT> serializer = self.serializer_class(data=request.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> name = serializer.validated_data.get('name') <NEW_LINE> message = f'Hello {name}!' <NEW_LINE> return Response({'message': message}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response( serializer.errors, status=status.HTTP_400_BAD_REQUEST ) <NEW_LINE> <DEDENT> <DEDENT> def put(self, request, pk=None): <NEW_LINE> <INDENT> return Response({'method': 'PUT'}) <NEW_LINE> <DEDENT> def patch(self, request, pk=None): <NEW_LINE> <INDENT> return Response({'method': 'PATCH'}) <NEW_LINE> <DEDENT> def delete(self, request, pk=None): <NEW_LINE> <INDENT> return Response({'method': 'DELETE'}) | Test API View | 6259906f627d3e7fe0e08715 |
class BackendAddressPool(SubResource): <NEW_LINE> <INDENT> _validation = { 'backend_ip_configurations': {'readonly': True}, 'load_balancing_rules': {'readonly': True}, 'outbound_nat_rule': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'backend_ip_configurations': {'key': 'properties.backendIPConfigurations', 'type': '[NetworkInterfaceIPConfiguration]'}, 'load_balancing_rules': {'key': 'properties.loadBalancingRules', 'type': '[SubResource]'}, 'outbound_nat_rule': {'key': 'properties.outboundNatRule', 'type': 'SubResource'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(BackendAddressPool, self).__init__(**kwargs) <NEW_LINE> self.name = kwargs.get('name', None) <NEW_LINE> self.etag = kwargs.get('etag', None) <NEW_LINE> self.backend_ip_configurations = None <NEW_LINE> self.load_balancing_rules = None <NEW_LINE> self.outbound_nat_rule = None <NEW_LINE> self.provisioning_state = kwargs.get('provisioning_state', None) | Pool of backend IP addresses.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Gets name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:ivar backend_ip_configurations: Gets collection of references to IP addresses defined in
network interfaces.
:vartype backend_ip_configurations:
list[~azure.mgmt.network.v2018_02_01.models.NetworkInterfaceIPConfiguration]
:ivar load_balancing_rules: Gets load balancing rules that use this backend address pool.
:vartype load_balancing_rules: list[~azure.mgmt.network.v2018_02_01.models.SubResource]
:ivar outbound_nat_rule: Gets outbound rules that use this backend address pool.
:vartype outbound_nat_rule: ~azure.mgmt.network.v2018_02_01.models.SubResource
:param provisioning_state: Get provisioning state of the public IP resource. Possible values
are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str | 6259906f26068e7796d4e1c9 |
class InertiaEffect(Effect): <NEW_LINE> <INDENT> def __init__(self, player): <NEW_LINE> <INDENT> super().__init__(player) <NEW_LINE> self.name = 'inertia' <NEW_LINE> <DEDENT> def set_effect(self, up, down, left, right): <NEW_LINE> <INDENT> if up: <NEW_LINE> <INDENT> if self.player.onGround: <NEW_LINE> <INDENT> self.player.yvel -= 10 <NEW_LINE> <DEDENT> <DEDENT> if down: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if left: <NEW_LINE> <INDENT> self.player.xvel += -0.3 <NEW_LINE> <DEDENT> if right: <NEW_LINE> <INDENT> self.player.xvel += 0.3 <NEW_LINE> <DEDENT> if not self.player.onGround: <NEW_LINE> <INDENT> self.player.yvel += 0.3 <NEW_LINE> if self.player.yvel > self.player.max_vel: <NEW_LINE> <INDENT> self.player.yvel = self.player.max_vel <NEW_LINE> <DEDENT> <DEDENT> if self.player.xvel < 0: <NEW_LINE> <INDENT> self.player.xvel += 0.1 <NEW_LINE> <DEDENT> if self.player.xvel > 0: <NEW_LINE> <INDENT> self.player.xvel -= 0.1 <NEW_LINE> <DEDENT> if abs(self.player.xvel) < 0.1: <NEW_LINE> <INDENT> self.player.xvel = 0 <NEW_LINE> <DEDENT> <DEDENT> def update(self, x, y): <NEW_LINE> <INDENT> label = self.font.render(self.name, 1, (255, 0, 255)) <NEW_LINE> self.player.gamestate.screen.blit(label, (x, y)) | level effect | 6259906fa05bb46b3848bd73 |
class ExprAssign(Expr): <NEW_LINE> <INDENT> __slots__ = Expr.__slots__ + ["_dst", "_src"] <NEW_LINE> def __init__(self, dst, src): <NEW_LINE> <INDENT> assert isinstance(dst, Expr) <NEW_LINE> assert isinstance(src, Expr) <NEW_LINE> if dst.size != src.size: <NEW_LINE> <INDENT> raise ValueError( "sanitycheck: ExprAssign args must have same size! %s" % ([(str(arg), arg.size) for arg in [dst, src]])) <NEW_LINE> <DEDENT> super(ExprAssign, self).__init__(self.dst.size) <NEW_LINE> <DEDENT> dst = property(lambda self: self._dst) <NEW_LINE> src = property(lambda self: self._src) <NEW_LINE> def __reduce__(self): <NEW_LINE> <INDENT> state = self._dst, self._src <NEW_LINE> return self.__class__, state <NEW_LINE> <DEDENT> def __new__(cls, dst, src): <NEW_LINE> <INDENT> if dst.is_slice() and dst.arg.size == src.size: <NEW_LINE> <INDENT> new_dst, new_src = dst.arg, src <NEW_LINE> <DEDENT> elif dst.is_slice(): <NEW_LINE> <INDENT> new_dst = dst.arg <NEW_LINE> rest = [(ExprSlice(dst.arg, r[0], r[1]), r[0], r[1]) for r in dst.slice_rest()] <NEW_LINE> all_a = [(src, dst.start, dst.stop)] + rest <NEW_LINE> all_a.sort(key=lambda x: x[1]) <NEW_LINE> args = [expr for (expr, _, _) in all_a] <NEW_LINE> new_src = ExprCompose(*args) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_dst, new_src = dst, src <NEW_LINE> <DEDENT> expr = Expr.get_object(cls, (new_dst, new_src)) <NEW_LINE> expr._dst, expr._src = new_dst, new_src <NEW_LINE> return expr <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s = %s" % (str(self._dst), str(self._src)) <NEW_LINE> <DEDENT> def get_w(self): <NEW_LINE> <INDENT> if isinstance(self._dst, ExprMem): <NEW_LINE> <INDENT> return set([self._dst]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._dst.get_w() <NEW_LINE> <DEDENT> <DEDENT> def _exprhash(self): <NEW_LINE> <INDENT> return hash((EXPRASSIGN, hash(self._dst), hash(self._src))) <NEW_LINE> <DEDENT> def _exprrepr(self): <NEW_LINE> <INDENT> return "%s(%r, %r)" % (self.__class__.__name__, self._dst, self._src) <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return ExprAssign(self._dst.copy(), self._src.copy()) <NEW_LINE> <DEDENT> def depth(self): <NEW_LINE> <INDENT> return max(self._src.depth(), self._dst.depth()) + 1 <NEW_LINE> <DEDENT> def graph_recursive(self, graph): <NEW_LINE> <INDENT> graph.add_node(self) <NEW_LINE> for arg in [self._src, self._dst]: <NEW_LINE> <INDENT> arg.graph_recursive(graph) <NEW_LINE> graph.add_uniq_edge(self, arg) <NEW_LINE> <DEDENT> <DEDENT> def is_aff(self): <NEW_LINE> <INDENT> warnings.warn('DEPRECATION WARNING: use is_assign()') <NEW_LINE> return True <NEW_LINE> <DEDENT> def is_assign(self): <NEW_LINE> <INDENT> return True | An ExprAssign represent an assignment from an Expression to another one.
Some use cases:
- var1 <- 2 | 6259906ff9cc0f698b1c5f11 |
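A hedged sketch of the "var1 <- 2" use case from the `ExprAssign` docstring. It assumes miasm-style `ExprId(name, size)` and `ExprInt(value, size)` constructors from the same expression module; the import path is a guess and may differ between miasm versions:

```python
from miasm.expression.expression import ExprAssign, ExprId, ExprInt  # path assumed

var1 = ExprId("var1", 32)                  # 32-bit identifier
assign = ExprAssign(var1, ExprInt(2, 32))  # var1 <- 2
print(assign)                              # rendered as "dst = src" by the __str__ shown above
```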
class SkodaSensor(SkodaEntity): <NEW_LINE> <INDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> _LOGGER.debug('Getting state of %s' % self.instrument.attr) <NEW_LINE> return self.instrument.state <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return self.instrument.unit | Representation of a Skoda Carnet Sensor. | 6259906f4c3428357761bb41 |
class PlmVectorFieldAnalysisLogic(ScriptedLoadableModuleLogic): <NEW_LINE> <INDENT> def hasImageData(self,volumeNode): <NEW_LINE> <INDENT> if not volumeNode: <NEW_LINE> <INDENT> print('no volume node') <NEW_LINE> return False <NEW_LINE> <DEDENT> if volumeNode.GetImageData() == None: <NEW_LINE> <INDENT> print('no image data') <NEW_LINE> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def RunVectorFieldAnalysis(self, inputWLIO): <NEW_LINE> <INDENT> import os, sys, vtk <NEW_LINE> loadablePath = os.path.join(slicer.modules.plastimatch_slicer_bspline.path,'..'+os.sep+'..'+os.sep+'qt-loadable-modules') <NEW_LINE> if loadablePath not in sys.path: <NEW_LINE> <INDENT> sys.path.append(loadablePath) <NEW_LINE> <DEDENT> import vtkSlicerPlastimatchPyModuleLogicPython <NEW_LINE> reg = vtkSlicerPlastimatchPyModuleLogicPython.vtkSlicerPlastimatchPyModuleLogic() <NEW_LINE> reg.SetMRMLScene(slicer.mrmlScene) <NEW_LINE> print("running RunVectorFieldAnalysis") <NEW_LINE> reg.SetOutputVolumeID(inputWLIO.outputJacobianNode.GetID() ) <NEW_LINE> reg.SetFixedImageID(inputWLIO.fixedImageNode.GetID() ) <NEW_LINE> reg.SetVFImageID(inputWLIO.vfImageNode.GetID() ) <NEW_LINE> print(inputWLIO.vfImageNode.GetID()) <NEW_LINE> print(inputWLIO.outputJacobianNode.GetID()) <NEW_LINE> print(inputWLIO.fixedImageNode.GetID()) <NEW_LINE> print("starting RunJacobian") <NEW_LINE> reg.RunJacobian() <NEW_LINE> print("control went past RunJacobian") <NEW_LINE> outputWLIO = PlmVectorFieldAnalysisWidgetLogicIO() <NEW_LINE> outputWLIO.SetMinJacobian(reg.GetJacobianMinString()) <NEW_LINE> outputWLIO.SetMaxJacobian(reg.GetJacobianMaxString()) <NEW_LINE> return outputWLIO | This class should implement all the actual
computation done by your module. The interface
should be such that other python code can import
this class and make use of the functionality without
requiring an instance of the Widget | 6259906f3539df3088ecdb2a |
class CardFactory(factory.django.DjangoModelFactory): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Card <NEW_LINE> <DEDENT> assigned_to = factory.SubFactory(UserFactory) <NEW_LINE> category = factory.SubFactory(CategoryFactory) <NEW_LINE> title = factory.Faker('word') <NEW_LINE> description = factory.Faker('paragraph') | Create a test card for writing tests. | 6259906f7b180e01f3e49cab |
class Material(Parameters): <NEW_LINE> <INDENT> pass | Material specific parameters.
Inherits from :class:`parameters.Parameters`. | 6259906ff548e778e596ce1b |
class update_detail_view_CBV_mixin(JsonResponseMixin, View): <NEW_LINE> <INDENT> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> data = { "count": 1000, "content": "using django JsonResponse with CBV" } <NEW_LINE> return self.render_to_json_response(data) | Using Django JsonResponse in a CBV via a mixin | 6259906f99fddb7c1ca63a19 |
class AbinsCASTEPIsotopes(stresstesting.MantidStressTest, HelperTestingClass): <NEW_LINE> <INDENT> tolerance = None <NEW_LINE> ref_result = None <NEW_LINE> def runTest(self): <NEW_LINE> <INDENT> HelperTestingClass.__init__(self) <NEW_LINE> name = "LiOH_H2O_2D2O_CASTEP" <NEW_LINE> self.ref_result = name + ".nxs" <NEW_LINE> self.set_ab_initio_program("CASTEP") <NEW_LINE> self.set_name(name) <NEW_LINE> self.set_order(AbinsConstants.QUANTUM_ORDER_ONE) <NEW_LINE> self.set_cross_section(cross_section="Incoherent") <NEW_LINE> self.set_bin_width(width=2.0) <NEW_LINE> self.case_from_scratch() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> self.tolerance = 1e-2 <NEW_LINE> return self._output_name, self.ref_result | In this benchmark it is tested if calculation of the system with isotopic substitutions: H -> 2H, Li -> 7Li,
produces correct results. Input data is generated by CASTEP. This system test should be fast, so there is no need for the
excludeInPullRequests flag. | 6259906f4f88993c371f1167 |
class SshfsAccess: <NEW_LINE> <INDENT> def name(self): <NEW_LINE> <INDENT> return 'sshfs' <NEW_LINE> <DEDENT> def make_available(self, user, server, remote_path, path): <NEW_LINE> <INDENT> if os.path.ismount(path): <NEW_LINE> <INDENT> return 201 <NEW_LINE> <DEDENT> if len(os.listdir(path)) > 0: <NEW_LINE> <INDENT> print("Error: %s has to be empty, otherwise mounting impossible." % path) <NEW_LINE> sys.exit(41) <NEW_LINE> <DEDENT> return execute('sshfs {}@{}:{} {}'.format(user, server, remote_path, path)) <NEW_LINE> <DEDENT> def make_unavailable(self, path): <NEW_LINE> <INDENT> execute('fusermount -u {}'.format(path), raise_on_error=True) | This class mounts and umounts the remote files using sshfs. This will
work on any system that fuse runs on, namely GNU/Linux, FreeBSD and Mac. | 6259906f67a9b606de5476ea |
class BootstrapQCPostProcessor(BootstrapPlugin): <NEW_LINE> <INDENT> def on_initial_bootstrap(self, process, config, **kwargs): <NEW_LINE> <INDENT> return <NEW_LINE> if os.environ.get('PYCC_MODE'): <NEW_LINE> <INDENT> log.info('PYCC_MODE: skipping qc_post_processor launch') <NEW_LINE> return <NEW_LINE> <DEDENT> if self.process_exists(process, 'qc_post_processor'): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.scheduler_service = SchedulerServiceProcessClient(process=process) <NEW_LINE> self.process_dispatcher = ProcessDispatcherServiceProcessClient(process=process) <NEW_LINE> self.run_interval = CFG.get_safe('service.qc_processing.run_interval', 24) <NEW_LINE> interval_key = uuid4().hex <NEW_LINE> config = DotDict() <NEW_LINE> config.process.interval_key = interval_key <NEW_LINE> process_definition = ProcessDefinition(name='qc_post_processor', executable={'module':'ion.processes.data.transforms.qc_post_processing', 'class':'QCPostProcessing'}) <NEW_LINE> process_definition_id = self.process_dispatcher.create_process_definition(process_definition) <NEW_LINE> process_id = self.process_dispatcher.create_process(process_definition_id) <NEW_LINE> self.process_dispatcher.schedule_process(process_definition_id, process_id=process_id, configuration=config) <NEW_LINE> timer_id = self.scheduler_service.create_interval_timer(start_time=str(time.time()), end_time='-1', interval=3600*self.run_interval, event_origin=interval_key) <NEW_LINE> <DEDENT> def process_exists(self, process, name): <NEW_LINE> <INDENT> proc_ids, meta = process.container.resource_registry.find_resources(restype=RT.Process, id_only=True) <NEW_LINE> return any([name in p['name'] for p in meta if p['name']]) | Sets up one QC Post Processing worker and initiates
the Scheduler Service's interval every 24 hours. | 6259906fe5267d203ee6d004 |
class ThreadLocals(object): <NEW_LINE> <INDENT> def process_request(self, request): <NEW_LINE> <INDENT> _thread_locals.sarvuser = getattr(request, 'sarvuser', None) | Middleware that gets various objects from the
request object and saves them in thread local storage. | 6259906fcc0a2c111447c718 |
class BroadcastCmdResponse(Base): <NEW_LINE> <INDENT> def __init__(self, msg, callback, on_done=None, num_retry=3): <NEW_LINE> <INDENT> super().__init__(on_done, num_retry) <NEW_LINE> self.addr = msg.to_addr <NEW_LINE> self.cmd = msg.cmd1 <NEW_LINE> self.callback = callback <NEW_LINE> self._device_ACK = False <NEW_LINE> <DEDENT> def msg_received(self, protocol, msg): <NEW_LINE> <INDENT> if not self._PLM_sent: <NEW_LINE> <INDENT> return Msg.UNKNOWN <NEW_LINE> <DEDENT> if isinstance(msg, Msg.OutStandard): <NEW_LINE> <INDENT> if msg.to_addr == self.addr and msg.cmd1 == self.cmd: <NEW_LINE> <INDENT> if not msg.is_ack: <NEW_LINE> <INDENT> LOG.warning("%s PLM NAK response", self.addr) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> LOG.debug("%s got PLM ACK", self.addr) <NEW_LINE> self._PLM_ACK = True <NEW_LINE> <DEDENT> return Msg.CONTINUE <NEW_LINE> <DEDENT> LOG.debug("%s handler unknown msg", self.addr) <NEW_LINE> return Msg.UNKNOWN <NEW_LINE> <DEDENT> elif (isinstance(msg, Msg.InpStandard) and msg.flags.type != Msg.Flags.Type.BROADCAST and self._PLM_ACK): <NEW_LINE> <INDENT> if msg.from_addr != self.addr or msg.cmd1 != self.cmd: <NEW_LINE> <INDENT> return Msg.UNKNOWN <NEW_LINE> <DEDENT> if msg.flags.type == Msg.Flags.Type.DIRECT_ACK: <NEW_LINE> <INDENT> LOG.info("%s device ACK response, waiting for broadcast " "payload", msg.from_addr) <NEW_LINE> self._device_ACK = True <NEW_LINE> return Msg.CONTINUE <NEW_LINE> <DEDENT> elif msg.flags.type == Msg.Flags.Type.DIRECT_NAK: <NEW_LINE> <INDENT> if msg.cmd2 == msg.NakType.PRE_NAK: <NEW_LINE> <INDENT> LOG.warning("%s Pre-NAK: %s, Message: %s", msg.from_addr, msg.nak_str(), msg) <NEW_LINE> return Msg.CONTINUE <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> LOG.error("%s device NAK error: %s", msg.from_addr, msg) <NEW_LINE> self.on_done(False, "Device command NAK", None) <NEW_LINE> return Msg.FINISHED <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> LOG.warning("%s device unexpected msg: %s", msg.from_addr, msg) <NEW_LINE> return Msg.UNKNOWN <NEW_LINE> <DEDENT> <DEDENT> elif (isinstance(msg, Msg.InpStandard) and msg.flags.type == Msg.Flags.Type.BROADCAST and self._PLM_ACK and self._device_ACK): <NEW_LINE> <INDENT> if msg.from_addr == self.addr: <NEW_LINE> <INDENT> self.callback(msg, on_done=self.on_done) <NEW_LINE> return Msg.FINISHED <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> LOG.info("Possible unexpected broadcast message from %s", msg.from_addr) <NEW_LINE> <DEDENT> <DEDENT> return Msg.UNKNOWN | Handles Broadcast Messages Received in Response to a Direct Request`.
This class handles responses from the device where the device sends an
ACK but a subsequent broadcast message is sent with the requested payload.
The handler watches for the proper PLM ACK, followed by a standard length
ACK from the device, and then only after these two prior ACKs have been
received, will it call the callback with a broadcast message received. | 6259906fadb09d7d5dc0bdfa |
class PrincipalPassword(form.EditForm): <NEW_LINE> <INDENT> interface.implements(IPrincipalPasswordForm, IPersonalPasswordForm) <NEW_LINE> ignoreContext = True <NEW_LINE> label = _('Change password') <NEW_LINE> fields = field.Fields(SChangePasswordForm, SPasswordForm) <NEW_LINE> def update(self, *args, **kw): <NEW_LINE> <INDENT> principal = self.context.user <NEW_LINE> info = IUserInfo(principal) <NEW_LINE> self.principal_login = info.login <NEW_LINE> self.principal_title = info.fullname <NEW_LINE> return super(PrincipalPassword, self).update() <NEW_LINE> <DEDENT> @button.buttonAndHandler(_(u"Change password")) <NEW_LINE> def applyChanges(self, action): <NEW_LINE> <INDENT> data, errors = self.extractData() <NEW_LINE> if errors: <NEW_LINE> <INDENT> self.status = self.formErrorsMessage <NEW_LINE> <DEDENT> elif data['password']: <NEW_LINE> <INDENT> self.context.changePassword(data['password']) <NEW_LINE> self.status = _('Password has been changed for ${title}.', mapping = {'title': self.principal_title}) | change password form | 6259906f38b623060ffaa49b |
class Nalu(CMakePackage): <NEW_LINE> <INDENT> homepage = "https://github.com/NaluCFD/Nalu" <NEW_LINE> git = "https://github.com/NaluCFD/Nalu.git" <NEW_LINE> version('master', branch='master') <NEW_LINE> variant('shared', default=(sys.platform != 'darwin'), description='Build dependencies as shared libraries') <NEW_LINE> variant('pic', default=True, description='Position independent code') <NEW_LINE> variant('tioga', default=False, description='Compile with Tioga support') <NEW_LINE> depends_on('mpi') <NEW_LINE> depends_on('[email protected]:', when='+shared') <NEW_LINE> depends_on('[email protected]:', when='~shared') <NEW_LINE> depends_on('trilinos+mpi+exodus+tpetra+muelu+belos+ifpack2+amesos2+zoltan+stk+boost~superlu-dist+superlu+hdf5+zlib+shards~hypre@master') <NEW_LINE> depends_on('trilinos~shared', when='~shared') <NEW_LINE> depends_on('tioga', when='+tioga+shared') <NEW_LINE> depends_on('tioga~shared', when='+tioga~shared') <NEW_LINE> def cmake_args(self): <NEW_LINE> <INDENT> spec = self.spec <NEW_LINE> options = [] <NEW_LINE> options.extend([ '-DTrilinos_DIR:PATH=%s' % spec['trilinos'].prefix, '-DYAML_DIR:PATH=%s' % spec['yaml-cpp'].prefix, '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc, '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx, '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc, '-DMPI_C_COMPILER=%s' % spec['mpi'].mpicc, '-DMPI_CXX_COMPILER=%s' % spec['mpi'].mpicxx, '-DMPI_Fortran_COMPILER=%s' % spec['mpi'].mpifc, self.define_from_variant('CMAKE_POSITION_INDEPENDENT_CODE', 'pic'), ]) <NEW_LINE> if '+tioga' in spec: <NEW_LINE> <INDENT> options.extend([ '-DENABLE_TIOGA:BOOL=ON', '-DTIOGA_DIR:PATH=%s' % spec['tioga'].prefix ]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> options.append('-DENABLE_TIOGA:BOOL=OFF') <NEW_LINE> <DEDENT> if 'darwin' in spec.architecture: <NEW_LINE> <INDENT> options.append('-DCMAKE_MACOSX_RPATH:BOOL=ON') <NEW_LINE> <DEDENT> return options | Nalu: a generalized unstructured massively parallel low Mach flow code
designed to support a variety of energy applications of interest
built on the Sierra Toolkit and Trilinos solver Tpetra/Epetra stack | 6259906f66673b3332c31c8d |
class MinAvgTwoSliceTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_min_avg(self): <NEW_LINE> <INDENT> self.assertEqual(solution(A), S) | The output from solution() must be equal to S | 6259906f167d2b6e312b81d6 |
class AttributeSearch(models.Model): <NEW_LINE> <INDENT> __metaclass__ = TransMeta <NEW_LINE> name = models.CharField(_('Name'), max_length=64) <NEW_LINE> categ = models.ManyToManyField('ProductCategory', null=True, blank=True, related_name='attribute_search_set', verbose_name=_('Categories')) <NEW_LINE> price = models.BooleanField(_('Price'), default=True, help_text=_("Available Price Search")) <NEW_LINE> active = models.BooleanField(_('Active'), default=True) <NEW_LINE> default_box = models.BooleanField(_('Default'), default=False, help_text=_("Default Attribute Search Box. If you don't select some category, use this search box")) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'attribute_search' <NEW_LINE> verbose_name = _('Attribute Search') <NEW_LINE> verbose_name_plural = _('Attributes Search') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> super(AttributeSearch, self).save() <NEW_LINE> current = 10 <NEW_LINE> for item in AttributeSearchItem.objects.filter(attribute=self).order_by('order'): <NEW_LINE> <INDENT> item.order = current <NEW_LINE> item.save() <NEW_LINE> current += 10 | Attribute Search | 6259906f097d151d1a2c2900 |
class FormatFlowedDecoder: <NEW_LINE> <INDENT> def __init__(self, delete_space=False, character_set='us-ascii', error_handling='strict'): <NEW_LINE> <INDENT> self.delete_space = delete_space <NEW_LINE> self.character_set = character_set <NEW_LINE> self.error_handling = error_handling <NEW_LINE> <DEDENT> def _stripquotes(self, line): <NEW_LINE> <INDENT> stripped = line.lstrip('>') <NEW_LINE> return len(line) - len(stripped), stripped <NEW_LINE> <DEDENT> def _stripstuffing(self, line): <NEW_LINE> <INDENT> if line.startswith(' '): <NEW_LINE> <INDENT> return line[1:] <NEW_LINE> <DEDENT> return line <NEW_LINE> <DEDENT> def _stripflow(self, line): <NEW_LINE> <INDENT> if self.delete_space and line.endswith(' '): <NEW_LINE> <INDENT> return line[:-1] <NEW_LINE> <DEDENT> return line <NEW_LINE> <DEDENT> def decode(self, flowed): <NEW_LINE> <INDENT> para = '' <NEW_LINE> pinfo = {'type': PARAGRAPH} <NEW_LINE> for line in flowed.split(b'\r\n'): <NEW_LINE> <INDENT> line = line.decode(self.character_set, self.error_handling) <NEW_LINE> quotedepth, line = self._stripquotes(line) <NEW_LINE> line = self._stripstuffing(line) <NEW_LINE> if line == '-- ': <NEW_LINE> <INDENT> if para: <NEW_LINE> <INDENT> yield (pinfo, para) <NEW_LINE> pinfo = {'type': PARAGRAPH} <NEW_LINE> para = '' <NEW_LINE> <DEDENT> yield ({'type': SIGNATURE_SEPARATOR, 'quotedepth': quotedepth}, line) <NEW_LINE> continue <NEW_LINE> <DEDENT> if line.endswith(' '): <NEW_LINE> <INDENT> if quotedepth != pinfo.get('quotedepth', quotedepth): <NEW_LINE> <INDENT> yield (pinfo, para) <NEW_LINE> pinfo = {'type': PARAGRAPH} <NEW_LINE> para = '' <NEW_LINE> <DEDENT> para += self._stripflow(line) <NEW_LINE> pinfo['quotedepth'] = quotedepth <NEW_LINE> continue <NEW_LINE> <DEDENT> if para: <NEW_LINE> <INDENT> if quotedepth != pinfo.get('quotedepth', quotedepth): <NEW_LINE> <INDENT> yield (pinfo, para) <NEW_LINE> pinfo = {'type': PARAGRAPH} <NEW_LINE> para = '' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yield (pinfo, para + line) <NEW_LINE> pinfo = {'type': PARAGRAPH} <NEW_LINE> para = '' <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> yield ({'type': FIXED, 'quotedepth': quotedepth}, line) <NEW_LINE> <DEDENT> if para: <NEW_LINE> <INDENT> yield (pinfo, para) | Object for converting a format=flowed bytestring to other formats
The following instance attributes influence the interpretation of
format=flowed bytestring:
delete_space (default: False)
Delete the trailing space before the CRLF on flowed lines before
interpreting the line on flowed input, corresponds to the DelSp mime
parameter
character_set (default: us-ascii)
The text encoding for the data. Text is decoded to unicode using this
encoding, using the error handling scheme specified below.
error_handling (default: strict)
The error handling scheme used when decoding the text. | 6259906f7047854f46340c47 |
class StrEnum(str, Enum): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.value) | Base class for str enums. | 6259906f23849d37ff852945 |
class ConcreteDiGraph(DiGraph): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.nodes = [] <NEW_LINE> self.edges = dict() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> result = "Di-graph with %d nodes." % len(self.nodes) <NEW_LINE> for node, outs in list(self.edges.items()): <NEW_LINE> <INDENT> node_str = "Node %s with outward edges:\n" % str(node) <NEW_LINE> node_str += "\n".join([str(edge) for edge in outs]) <NEW_LINE> result += ("\n" + node_str) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def addNode(self, node): <NEW_LINE> <INDENT> self.nodes.append(node) <NEW_LINE> assert node not in self.edges <NEW_LINE> self.edges[node] = [] <NEW_LINE> assert node.parent == self <NEW_LINE> <DEDENT> def addEdge(self, edge): <NEW_LINE> <INDENT> self.edges[edge.source].append(edge) <NEW_LINE> <DEDENT> def getNodes(self): <NEW_LINE> <INDENT> return self.nodes <NEW_LINE> <DEDENT> def getOutEdges(self, node): <NEW_LINE> <INDENT> return self.edges[node] | Represents a directed graph as a list of nodes and lists of edges. | 6259906f7c178a314d78e833 |
class AboutSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = About <NEW_LINE> fields = '__all__' | about serializer | 6259906fdd821e528d6da5c9 |
class Solution_Multiple_Source: <NEW_LINE> <INDENT> def wallsAndGates(self, rooms): <NEW_LINE> <INDENT> if not rooms or not rooms[0]: <NEW_LINE> <INDENT> return rooms <NEW_LINE> <DEDENT> queue = deque() <NEW_LINE> for row in range(len(rooms)): <NEW_LINE> <INDENT> for col in range(len(rooms[0])): <NEW_LINE> <INDENT> if rooms[row][col] == 0: <NEW_LINE> <INDENT> queue.append((row, col)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> while queue: <NEW_LINE> <INDENT> cx, cy = queue.popleft() <NEW_LINE> dx = [0, 0, -1, 1] <NEW_LINE> dy = [1, -1, 0, 0] <NEW_LINE> for nx, ny in zip(dx, dy): <NEW_LINE> <INDENT> nx += cx <NEW_LINE> ny += cy <NEW_LINE> if 0 <= nx < len(rooms) and 0 <= ny < len(rooms[0]) and rooms[nx][ny] == 2147483647: <NEW_LINE> <INDENT> rooms[nx][ny] = rooms[cx][cy] + 1 <NEW_LINE> queue.append((nx, ny)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return rooms | @param rooms: m x n 2D grid
@return: nothing | 6259906f4a966d76dd5f077a |
class Sound(object): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> @abstractmethod <NEW_LINE> def generate(self, fs, d): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def save_to_wav(self, fs, d): <NEW_LINE> <INDENT> x = self.generate(fs, d) <NEW_LINE> dir_name = 'sound_generation/generated_sounds/' <NEW_LINE> if not os.path.exists(dir_name): <NEW_LINE> <INDENT> os.makedirs(dir_name) <NEW_LINE> <DEDENT> wavfile.write(dir_name + str(self), fs, x) | Abstract class for all modules creating sound_generation | 6259906f2ae34c7f260ac979 |
class CeleryLoader(BaseLoader): <NEW_LINE> <INDENT> def now(self): <NEW_LINE> <INDENT> return timezone.now() <NEW_LINE> <DEDENT> def read_configuration(self): <NEW_LINE> <INDENT> self.configured = True <NEW_LINE> return settings.CELERY_SETTINGS <NEW_LINE> <DEDENT> def on_worker_init(self): <NEW_LINE> <INDENT> if settings.DEBUG: <NEW_LINE> <INDENT> import warnings <NEW_LINE> warnings.warn("Using settings.DEBUG leads to a memory leak, never " "use this setting in production environments!") <NEW_LINE> <DEDENT> self.import_default_modules() <NEW_LINE> <DEDENT> def import_default_modules(self): <NEW_LINE> <INDENT> super(CeleryLoader, self).import_default_modules() <NEW_LINE> self.autodiscover() <NEW_LINE> <DEDENT> def autodiscover(self): <NEW_LINE> <INDENT> apps.populate(["theory.apps",]) <NEW_LINE> cmdImportPath = [ cmd.moduleImportPath for cmd in Command.objects.only('app', 'name'). filter(runMode=Command.RUN_MODE_ASYNC) ] <NEW_LINE> for path in cmdImportPath: <NEW_LINE> <INDENT> importModule(path) <NEW_LINE> <DEDENT> import theory.apps.command.baseCommand <NEW_LINE> self.task_modules.update(["theory.apps.command.baseCommand"]) | Modified from celery default loader and django-celery DjangoLoader | 6259906f009cb60464d02dc7 |
class UpdateProjectCardInput(sgqlc.types.Input): <NEW_LINE> <INDENT> __schema__ = github_schema <NEW_LINE> __field_names__ = ('project_card_id', 'is_archived', 'note', 'client_mutation_id') <NEW_LINE> project_card_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name='projectCardId') <NEW_LINE> is_archived = sgqlc.types.Field(Boolean, graphql_name='isArchived') <NEW_LINE> note = sgqlc.types.Field(String, graphql_name='note') <NEW_LINE> client_mutation_id = sgqlc.types.Field(String, graphql_name='clientMutationId') | Autogenerated input type of UpdateProjectCard | 6259906f4f88993c371f1168 |
class BuildConfig(CMakeProjectConfig): <NEW_LINE> <INDENT> DEFAULT_NAME = "default" <NEW_LINE> CMAKE_BUILD_TYPE_AUTO_DETECT = True <NEW_LINE> def __init__(self, name=None, data=None, **kwargs): <NEW_LINE> <INDENT> CMakeProjectConfig.__init__(self, data, **kwargs) <NEW_LINE> self.name = name or self.DEFAULT_NAME <NEW_LINE> self._cmake_build_type = self.cmake_build_type <NEW_LINE> self.derive_cmake_build_type_if_unconfigured() <NEW_LINE> <DEDENT> def derive_cmake_build_type(self): <NEW_LINE> <INDENT> return map_build_config_to_cmake_build_type(self.name) <NEW_LINE> <DEDENT> def derive_cmake_build_type_and_assign(self, force=False): <NEW_LINE> <INDENT> if force or self.CMAKE_BUILD_TYPE_AUTO_DETECT: <NEW_LINE> <INDENT> self.cmake_build_type = self.derive_cmake_build_type() <NEW_LINE> <DEDENT> return self.cmake_build_type <NEW_LINE> <DEDENT> def derive_cmake_build_type_if_unconfigured(self): <NEW_LINE> <INDENT> not_configured = not self._cmake_build_type <NEW_LINE> if not_configured: <NEW_LINE> <INDENT> self.derive_cmake_build_type_and_assign() <NEW_LINE> <DEDENT> return self.cmake_build_type <NEW_LINE> <DEDENT> @property <NEW_LINE> def cmake_build_type(self): <NEW_LINE> <INDENT> return self.cmake_defines.get("CMAKE_BUILD_TYPE", None) <NEW_LINE> <DEDENT> @cmake_build_type.setter <NEW_LINE> def cmake_build_type(self, value): <NEW_LINE> <INDENT> self.cmake_defines["CMAKE_BUILD_TYPE"] = value <NEW_LINE> self._cmake_build_type = value | Represent the configuration data related to a build-configuration.
.. code-block:: YAML
# -- FILE: cmake_build.yaml
...
build_configs:
- Linux_arm64_Debug:
cmake_build_type: Debug
cmake_toolchain: cmake/toolchain/linux_gcc_arm64.cmake
cmake_defines:
- CMAKE_BUILD_TYPE=Debug
- cmake_init_args: --warn-uninitialized --check-system-vars
- Linux_arm64_Release:
cmake_build_type: MinSizeRel
cmake_toolchain: cmake/toolchain/linux_gcc_arm64.cmake
cmake_generator: ninja | 6259906f7b25080760ed892b |
class OrderAsk(Order): <NEW_LINE> <INDENT> pass | AKA sell | 6259906f4428ac0f6e659dc4 |
class AutoConnectEngine(BaseProxyEngine): <NEW_LINE> <INDENT> def __init__(self, dburi, **kwargs): <NEW_LINE> <INDENT> BaseProxyEngine.__init__(self) <NEW_LINE> self.dburi = dburi <NEW_LINE> self.kwargs = kwargs <NEW_LINE> self._engine = None <NEW_LINE> <DEDENT> def get_engine(self): <NEW_LINE> <INDENT> if self._engine is None: <NEW_LINE> <INDENT> if callable(self.dburi): <NEW_LINE> <INDENT> dburi = self.dburi() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dburi = self.dburi <NEW_LINE> <DEDENT> self._engine = create_engine(dburi, **self.kwargs) <NEW_LINE> <DEDENT> return self._engine | An SQLEngine proxy that automatically connects when necessary. | 6259906fd268445f2663a7a5 |
class check_dep(Command): <NEW_LINE> <INDENT> description = "Check if dependencies are installed" <NEW_LINE> user_options = [] <NEW_LINE> test_commands = {} <NEW_LINE> def initialize_options(self): pass <NEW_LINE> def finalize_options(self): pass <NEW_LINE> def run(self): <NEW_LINE> <INDENT> packages = self.distribution.install_requires <NEW_LINE> for v in self.distribution.extras_require.values(): <NEW_LINE> <INDENT> if isinstance(v, list): packages.extend(v) <NEW_LINE> else: packages.append(v) <NEW_LINE> <DEDENT> sep=['>', '<', '>=', '<=', '=='] <NEW_LINE> found = [] <NEW_LINE> notfound=[] <NEW_LINE> for package in packages: <NEW_LINE> <INDENT> for s in sep: <NEW_LINE> <INDENT> idx = package.find(s) <NEW_LINE> if idx > -1: <NEW_LINE> <INDENT> package=package[:idx] <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> try: mod = __import__(package) <NEW_LINE> except: notfound.append(package) <NEW_LINE> else: found.append((package, mod)) <NEW_LINE> <DEDENT> for package in notfound: <NEW_LINE> <INDENT> log.info("Checking for %s: not found"%(package)) <NEW_LINE> <DEDENT> log.info('---') <NEW_LINE> for package, mod in found: <NEW_LINE> <INDENT> version = ' - '+mod.__version__ if hasattr(mod, '__version__') else '' <NEW_LINE> log.info("Checking for %s: found%s"%(package, version)) <NEW_LINE> <DEDENT> sys.exit(len(notfound)) | Check if the dependencies listed in `install_requires` and `extras_require`
are currently installed and on the Python path. | 6259906f16aa5153ce401d6a |
class TestUserListTestCase(APITestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.url = reverse('user-list') <NEW_LINE> self.user_data = {'username': 'test', 'password': 'test'} <NEW_LINE> <DEDENT> def test_post_request_with_no_data_fails(self): <NEW_LINE> <INDENT> response = self.client.post(self.url, {}) <NEW_LINE> eq_(response.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_post_request_with_valid_data_succeeds(self): <NEW_LINE> <INDENT> response = self.client.post(self.url, self.user_data) <NEW_LINE> eq_(response.status_code, status.HTTP_201_CREATED) <NEW_LINE> user = User.objects.get(pk=response.data.get('id')) <NEW_LINE> eq_(user.username, self.user_data.get('username')) <NEW_LINE> ok_(check_password(self.user_data.get('password'), user.password)) | Tests /users list operations. | 6259906f4e4d562566373c97 |
class Store(object): <NEW_LINE> <INDENT> @property <NEW_LINE> def current_appliance(self): <NEW_LINE> <INDENT> from utils import appliance <NEW_LINE> return appliance.current_appliance <NEW_LINE> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> self.config = None <NEW_LINE> self.session = None <NEW_LINE> self.parallelizer_role = None <NEW_LINE> self._terminalreporter = None <NEW_LINE> self.ssh_clients_to_close = [] <NEW_LINE> <DEDENT> @property <NEW_LINE> def has_config(self): <NEW_LINE> <INDENT> return self.config is not None <NEW_LINE> <DEDENT> @property <NEW_LINE> def base_url(self): <NEW_LINE> <INDENT> return self.current_appliance.url <NEW_LINE> <DEDENT> def _maybe_get_plugin(self, name): <NEW_LINE> <INDENT> return self.pluginmanager and self.pluginmanager.getplugin(name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def in_pytest_session(self): <NEW_LINE> <INDENT> return self.session is not None <NEW_LINE> <DEDENT> @property <NEW_LINE> def fixturemanager(self): <NEW_LINE> <INDENT> return self.session and self.session._fixturemanager <NEW_LINE> <DEDENT> @property <NEW_LINE> def capturemanager(self): <NEW_LINE> <INDENT> return self._maybe_get_plugin('capturemanager') <NEW_LINE> <DEDENT> @property <NEW_LINE> def pluginmanager(self): <NEW_LINE> <INDENT> return self.config and self.config.pluginmanager <NEW_LINE> <DEDENT> @property <NEW_LINE> def terminalreporter(self): <NEW_LINE> <INDENT> if self._terminalreporter is not None: <NEW_LINE> <INDENT> return self._terminalreporter <NEW_LINE> <DEDENT> reporter = self._maybe_get_plugin('terminalreporter') <NEW_LINE> if reporter and isinstance(reporter, TerminalReporter): <NEW_LINE> <INDENT> self._terminalreporter = reporter <NEW_LINE> return reporter <NEW_LINE> <DEDENT> return FlexibleTerminalReporter(self.config) <NEW_LINE> <DEDENT> @property <NEW_LINE> def terminaldistreporter(self): <NEW_LINE> <INDENT> return self._maybe_get_plugin('terminaldistreporter') <NEW_LINE> <DEDENT> @property <NEW_LINE> def parallel_session(self): <NEW_LINE> <INDENT> return self._maybe_get_plugin('parallel_session') <NEW_LINE> <DEDENT> @property <NEW_LINE> def slave_manager(self): <NEW_LINE> <INDENT> return self._maybe_get_plugin('slave_manager') <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def my_ip_address(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return os.environ['CFME_MY_IP_ADDRESS'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return self.current_appliance.ssh_client.client_address() <NEW_LINE> <DEDENT> <DEDENT> def write_line(self, line, **kwargs): <NEW_LINE> <INDENT> return write_line(line, **kwargs) | pytest object store
If a property isn't available for any reason (including being accessed outside of a pytest run),
it will be None. | 6259906faad79263cf430046 |
class NeedsStart(object): <NEW_LINE> <INDENT> started = False <NEW_LINE> def prepare(self): <NEW_LINE> <INDENT> if self.api.prepare(): <NEW_LINE> <INDENT> if not self.started: <NEW_LINE> <INDENT> self.Start() <NEW_LINE> <DEDENT> return self.started <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def Start(self): <NEW_LINE> <INDENT> if self.started: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if self.api.prepare(): <NEW_LINE> <INDENT> self.started = self.cls.Start(self) <NEW_LINE> if self.started: <NEW_LINE> <INDENT> self.api.started.append(self) <NEW_LINE> <DEDENT> return self.started <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> start = Start <NEW_LINE> def Finish(self): <NEW_LINE> <INDENT> if self.started: <NEW_LINE> <INDENT> self.cls.Finish(self) <NEW_LINE> self.started = False <NEW_LINE> if self in self.api.started: <NEW_LINE> <INDENT> self.api.started.remove(self) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> finish = Finish | This mixin enforces Start/Finish routines | 6259906f5166f23b2e244c64 |
class PackagePathEntry(Fixture): <NEW_LINE> <INDENT> def __init__(self, packagename, directory): <NEW_LINE> <INDENT> self.packagename = packagename <NEW_LINE> self.directory = directory <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> Fixture.setUp(self) <NEW_LINE> path = sys.modules[self.packagename].__path__ <NEW_LINE> if self.directory in path: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.addCleanup(path.remove, self.directory) <NEW_LINE> path.append(self.directory) | Add a path to the path of a python package.
The python package needs to be already imported.
If this new path is already in the packages __path__ list then the __path__
list will not be altered. | 6259906f1f5feb6acb164482 |
class CLI(object): <NEW_LINE> <INDENT> POLL_INTERVAL = int(os.environ.get('POLL_INTERVAL', 5)) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.cgc = ambassador.cgc.ticlient.TiClient.from_env() <NEW_LINE> self.notifier = Notifier(tries_threshold=3) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> while not self.cgc.ready(): <NEW_LINE> <INDENT> self.notifier.api_is_down() <NEW_LINE> time.sleep(self.POLL_INTERVAL) <NEW_LINE> <DEDENT> self.notifier.api_is_up() <NEW_LINE> status_retriever = StatusRetriever(self.cgc) <NEW_LINE> status_retriever.run() <NEW_LINE> LOG.info("Round #%d", status_retriever.current_round.num) <NEW_LINE> ConsensusEvaluationRetriever(self.cgc, status_retriever.current_round).run() <NEW_LINE> if not status_retriever.current_round.is_ready(): <NEW_LINE> <INDENT> status_retriever.current_round.ready() <NEW_LINE> <DEDENT> FeedbackRetriever(self.cgc, status_retriever.current_round).run() <NEW_LINE> CBSubmitter(self.cgc, status_retriever.current_round).run() <NEW_LINE> POVSubmitter(self.cgc).run() <NEW_LINE> <DEDENT> except ambassador.cgc.tierror.TiError: <NEW_LINE> <INDENT> self.notifier.api_is_down() <NEW_LINE> <DEDENT> <DEDENT> return 0 | A docstring | 6259906f97e22403b383c794 |
class Article2Tag(models.Model): <NEW_LINE> <INDENT> nid = models.AutoField(primary_key=True) <NEW_LINE> article = models.ForeignKey(to='Article', to_field='nid') <NEW_LINE> tag = models.ForeignKey(to='Tag', to_field='nid') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = (('article', 'tag'), ) | Article and tag relationship table | 6259906fbe8e80087fbc0920
@ft.total_ordering <NEW_LINE> class Match: <NEW_LINE> <INDENT> def __init__(self, filename, template, variables): <NEW_LINE> <INDENT> self.__filename = filename <NEW_LINE> self.__template = template <NEW_LINE> self.__variables = variables <NEW_LINE> <DEDENT> @property <NEW_LINE> def filename(self): <NEW_LINE> <INDENT> return self.__filename <NEW_LINE> <DEDENT> @property <NEW_LINE> def template(self): <NEW_LINE> <INDENT> return self.__template <NEW_LINE> <DEDENT> @property <NEW_LINE> def variables(self): <NEW_LINE> <INDENT> return dict(self.__variables) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return (isinstance(other, Match) and self.filename == other.filename and self.template == other.template and self.variables == other.variables) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return isinstance(other, Match) and self.filename < other.filename <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Match({}: {})'.format(self.template, self.filename) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr(self) | A ``Match`` object represents a file with a name matching a template in
a ``FileTree``. The :meth:`FileTree.query` method returns ``Match``
objects. | 6259906ff9cc0f698b1c5f13 |
class webshellPostSpiderRootShell(AbstractWebshellPostSpider): <NEW_LINE> <INDENT> name = "webshell_post_RootShell" <NEW_LINE> url = "http://10.108.114.132/dvwa/hackable/uploads/php-webshells-master/rootshell.php" <NEW_LINE> allowed_domains = AbstractWebshellPostSpider.allowed_domains <NEW_LINE> if url in AbstractWebshellPostSpider.available_urls: <NEW_LINE> <INDENT> start_urls = [url] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start_urls = [] <NEW_LINE> <DEDENT> def submitPost(self, url): <NEW_LINE> <INDENT> command_list = self.get_command_list() <NEW_LINE> for cmd in command_list: <NEW_LINE> <INDENT> payload = {'cmd': cmd} <NEW_LINE> yield self.submitPayload(url, payload) | webshell functionality
-- shell | 6259906f99cbb53fe683277a |
@PBRTv3Addon.addon_register_class <NEW_LINE> class world(world_panel): <NEW_LINE> <INDENT> bl_label = 'PBRTv3 World Settings' <NEW_LINE> display_property_groups = [ ( ('scene',), 'pbrtv3_world' ) ] | PBRTv3 World Settings | 6259906f2c8b7c6e89bd5078 |
class BlocklistPermission(permissions.BasePermission): <NEW_LINE> <INDENT> def has_permission(self, request, view): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if str(view.__class__.__name__).lower().find('store') != -1: <NEW_LINE> <INDENT> if request.method == 'GET': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if request.user.is_authenticated and request.user.is_staff: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif str(view.__class__.__name__).lower().find('scan') != -1: <NEW_LINE> <INDENT> if request.method == 'POST': <NEW_LINE> <INDENT> user_id = request.user.username <NEW_LINE> user = User.objects.get(phone_number=user_id) <NEW_LINE> if user.isManager == True: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> elif str(view.__class__.__name__).lower().find('ticket') != -1: <NEW_LINE> <INDENT> if request.method == 'POST': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> return False | Global permission check for blocked IPs. | 6259906f5fdd1c0f98e5f818 |
class Command(BaseCommand): <NEW_LINE> <INDENT> help = "Load Sequence Ontology" <NEW_LINE> def add_arguments(self, parser): <NEW_LINE> <INDENT> parser.add_argument( "--file", help="Sequence Ontology file obo." "Available at https://github.com/" "The-Sequence-Ontology/SO-Ontologies", required=True, type=str, ) <NEW_LINE> <DEDENT> def handle(self, file: str, verbosity: int = 1, **options): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> FileValidator().validate(file) <NEW_LINE> <DEDENT> except ImportingError as e: <NEW_LINE> <INDENT> raise CommandError(e) <NEW_LINE> <DEDENT> with open(file) as obo_file: <NEW_LINE> <INDENT> G = obonet.read_obo(obo_file) <NEW_LINE> <DEDENT> if verbosity > 0: <NEW_LINE> <INDENT> self.stdout.write("Preprocessing") <NEW_LINE> <DEDENT> cv_name = G.graph["default-namespace"][0] <NEW_LINE> cv_definition = G.graph["data-version"] <NEW_LINE> ontology = OntologyLoader(cv_name, cv_definition) <NEW_LINE> if verbosity > 0: <NEW_LINE> <INDENT> self.stdout.write("Loading typedefs") <NEW_LINE> <DEDENT> for typedef in tqdm( G.graph["typedefs"], disable=False if verbosity > 0 else True ): <NEW_LINE> <INDENT> ontology.store_type_def(typedef) <NEW_LINE> <DEDENT> if verbosity > 0: <NEW_LINE> <INDENT> self.stdout.write("Loading terms") <NEW_LINE> <DEDENT> for n, data in tqdm( G.nodes(data=True), disable=False if verbosity > 0 else True ): <NEW_LINE> <INDENT> ontology.store_term(n, data) <NEW_LINE> <DEDENT> if verbosity > 0: <NEW_LINE> <INDENT> self.stdout.write("Loading relationships") <NEW_LINE> <DEDENT> for u, v, type in tqdm( G.edges(keys=True), disable=False if verbosity > 0 else True ): <NEW_LINE> <INDENT> ontology.store_relationship(u, v, type) <NEW_LINE> <DEDENT> if verbosity > 0: <NEW_LINE> <INDENT> self.stdout.write(self.style.SUCCESS("Done")) | Load sequence ontology. | 6259906fa219f33f346c809b |
class MaintenanceActionPlugin(mb.FenixBase): <NEW_LINE> <INDENT> __tablename__ = 'action_plugins' <NEW_LINE> id = _id_column() <NEW_LINE> session_id = sa.Column(sa.String(36), sa.ForeignKey('sessions.session_id'), nullable=False) <NEW_LINE> plugin = sa.Column(sa.String(length=255), nullable=False) <NEW_LINE> type = sa.Column(sa.String(length=32), nullable=True) <NEW_LINE> meta = sa.Column(MediumText(), nullable=False) <NEW_LINE> def to_dict(self): <NEW_LINE> <INDENT> return super(MaintenanceActionPlugin, self).to_dict() | Maintenance action plugin | 6259906f7b180e01f3e49cad |
class AllEventsFeed(ICalFeed): <NEW_LINE> <INDENT> product_id = '-//happening.com//Example//EN' <NEW_LINE> timezone = 'UTC' <NEW_LINE> file_name = "events.ics" <NEW_LINE> def items(self): <NEW_LINE> <INDENT> return Event.objects.all().order_by('-start') <NEW_LINE> <DEDENT> def item_title(self, item): <NEW_LINE> <INDENT> return item.title <NEW_LINE> <DEDENT> def item_description(self, item): <NEW_LINE> <INDENT> return Description(item).get() <NEW_LINE> <DEDENT> def item_start_datetime(self, item): <NEW_LINE> <INDENT> return item.start | Feed of all events. | 6259906fe5267d203ee6d006 |
class TemplatesHandler(XMLHandlerBase): <NEW_LINE> <INDENT> def __init__(self, templateViewer=None): <NEW_LINE> <INDENT> XMLHandlerBase.__init__(self) <NEW_LINE> self.startDocumentSpecific = self.startDocumentTemplates <NEW_LINE> self.elements.update({ 'Templates' : (self.startTemplates, self.defaultEndElement), 'TemplateGroup' : (self.startTemplateGroup, self.defaultEndElement), 'Template' : (self.startTemplate, self.endTemplate), 'TemplateDescription' : (self.defaultStartElement, self.endTemplateDescription), 'TemplateText' : (self.defaultStartElement, self.endTemplateText), }) <NEW_LINE> if templateViewer: <NEW_LINE> <INDENT> self.viewer = templateViewer <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.viewer = e4App().getObject("TemplateViewer") <NEW_LINE> <DEDENT> <DEDENT> def startDocumentTemplates(self): <NEW_LINE> <INDENT> self.version = '' <NEW_LINE> <DEDENT> def startTemplateGroup(self, attrs): <NEW_LINE> <INDENT> self.groupName = attrs.get('name', "DEFAULT") <NEW_LINE> language = attrs.get('language', "All") <NEW_LINE> self.viewer.addGroup(self.groupName, language) <NEW_LINE> <DEDENT> def startTemplate(self, attrs): <NEW_LINE> <INDENT> self.templateName = attrs.get('name', '') <NEW_LINE> self.templateDescription = "" <NEW_LINE> self.templateText = "" <NEW_LINE> <DEDENT> def endTemplate(self): <NEW_LINE> <INDENT> if self.templateName and self.templateText: <NEW_LINE> <INDENT> self.viewer.addEntry(self.groupName, self.templateName, self.templateDescription, self.templateText, quiet=True) <NEW_LINE> <DEDENT> <DEDENT> def endTemplateText(self): <NEW_LINE> <INDENT> self.templateText = self.unescape(self.utf8_to_code(self.buffer)) <NEW_LINE> <DEDENT> def endTemplateDescription(self): <NEW_LINE> <INDENT> self.templateDescription = self.unescape(self.utf8_to_code(self.buffer)) <NEW_LINE> <DEDENT> def startTemplates(self, attrs): <NEW_LINE> <INDENT> self.version = attrs.get('version', templatesFileFormatVersion) <NEW_LINE> <DEDENT> def getVersion(self): <NEW_LINE> <INDENT> return self.version | Class implementing a sax handler to read an XML templates file. | 6259906f4f6381625f19a0f1 |
class MAVLink_mission_request_message(MAVLink_message): <NEW_LINE> <INDENT> id = MAVLINK_MSG_ID_MISSION_REQUEST <NEW_LINE> name = 'MISSION_REQUEST' <NEW_LINE> fieldnames = ['target_system', 'target_component', 'seq'] <NEW_LINE> ordered_fieldnames = [ 'seq', 'target_system', 'target_component' ] <NEW_LINE> format = '<HBB' <NEW_LINE> native_format = bytearray('<HBB', 'ascii') <NEW_LINE> orders = [1, 2, 0] <NEW_LINE> lengths = [1, 1, 1] <NEW_LINE> array_lengths = [0, 0, 0] <NEW_LINE> crc_extra = 230 <NEW_LINE> def __init__(self, target_system, target_component, seq): <NEW_LINE> <INDENT> MAVLink_message.__init__(self, MAVLink_mission_request_message.id, MAVLink_mission_request_message.name) <NEW_LINE> self._fieldnames = MAVLink_mission_request_message.fieldnames <NEW_LINE> self.target_system = target_system <NEW_LINE> self.target_component = target_component <NEW_LINE> self.seq = seq <NEW_LINE> <DEDENT> def pack(self, mav, force_mavlink1=False): <NEW_LINE> <INDENT> return MAVLink_message.pack(self, mav, 230, struct.pack('<HBB', self.seq, self.target_system, self.target_component), force_mavlink1=force_mavlink1) | Request the information of the mission item with the sequence
number seq. The response of the system to this message should
be a MISSION_ITEM message.
http://qgroundcontrol.org/mavlink/waypoint_protocol | 6259906f8e7ae83300eea923 |
class PortfolioData(object): <NEW_LINE> <INDENT> def __init__(self, port_id, cur_holding, hist_holding, av_ts): <NEW_LINE> <INDENT> self.id = port_id <NEW_LINE> self.curholding = cur_holding <NEW_LINE> self.histholding = hist_holding <NEW_LINE> self.assetvalue_ts = av_ts <NEW_LINE> <DEDENT> @property <NEW_LINE> def update_time(self): <NEW_LINE> <INDENT> return self.assetvalue_ts.index[-1] <NEW_LINE> <DEDENT> @property <NEW_LINE> def last_asset_value(self): <NEW_LINE> <INDENT> return self.assetvalue_ts.iloc[-1] <NEW_LINE> <DEDENT> def __setstate__(self, state): <NEW_LINE> <INDENT> self.__dict__.update(state) <NEW_LINE> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> return self.__dict__ <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return PortfolioData(self.id, deepcopy(self.curholding), deepcopy(self.histholding), self.assetvalue_ts.copy()) | Stores a portfolio's data, including the portfolio's latest holdings, historical holdings, and the time series of the portfolio's total asset value | 6259906f2ae34c7f260ac97c
class package(dict): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> @classmethod <NEW_LINE> def nested(cls, src:dict, delim:str, root_key:str) -> "package": <NEW_LINE> <INDENT> pkg = cls() <NEW_LINE> for k, v in src.items(): <NEW_LINE> <INDENT> d = pkg <NEW_LINE> *first, last = k.split(delim) <NEW_LINE> for i in first: <NEW_LINE> <INDENT> if i not in d: <NEW_LINE> <INDENT> d[i] = cls() <NEW_LINE> <DEDENT> elif not isinstance(d[i], cls): <NEW_LINE> <INDENT> d[i] = cls({root_key: d[i]}) <NEW_LINE> <DEDENT> d = d[i] <NEW_LINE> <DEDENT> if last not in d or not isinstance(d[last], cls): <NEW_LINE> <INDENT> d[last] = v <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> d[last][root_key] = v <NEW_LINE> <DEDENT> <DEDENT> return pkg <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_modules(cls) -> "package": <NEW_LINE> <INDENT> return cls.nested(sys.modules, delim=".", root_key="__init__") | Dict for nesting objects (specifically Python modules) under path-like string keys. Has a special icon. | 6259906f9c8ee82313040dd1 |
class NameSerializer(TagListSerializerField): <NEW_LINE> <INDENT> default_error_messages = { 'invalid': _("Not permitted create 'name' instances") } <NEW_LINE> @staticmethod <NEW_LINE> def update_or_create(photo, names, cleanup=True): <NEW_LINE> <INDENT> if cleanup: <NEW_LINE> <INDENT> photo.names.clear() <NEW_LINE> <DEDENT> if is_iterable(names) and len(names) > 0: <NEW_LINE> <INDENT> photo.names.add(*names) <NEW_LINE> <DEDENT> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> raise ValidationError(self.default_error_messages) <NEW_LINE> <DEDENT> def update(self, instance, validated_data): <NEW_LINE> <INDENT> raise ValidationError(self.default_error_messages) | Name serializer to create, update or render Tagged names of photos. | 6259906f091ae356687064c8 |
class DigitalStoreError(object): <NEW_LINE> <INDENT> def __init__(self, code, message, status_code=400): <NEW_LINE> <INDENT> self.error_code = code <NEW_LINE> self.error_message = message <NEW_LINE> self.status_code = status_code <NEW_LINE> <DEDENT> def as_response(self, status_code=None): <NEW_LINE> <INDENT> if status_code is None: <NEW_LINE> <INDENT> status_code = self.status_code <NEW_LINE> <DEDENT> error = { "code": self.error_code, "msg": self.error_message, } <NEW_LINE> return Response({'errors': [error]}, status_code) | Base Error class for Connyct | 6259906fe76e3b2f99fda295 |
class Command(command.DirectOnlyCommand): <NEW_LINE> <INDENT> def collect_args(self, message): <NEW_LINE> <INDENT> return re.search(r'(?:remove|delete)\s*filter\s*(\S+)', message.content, re.I) <NEW_LINE> <DEDENT> def matches(self, message): <NEW_LINE> <INDENT> return self.collect_args(message) is not None <NEW_LINE> <DEDENT> def action(self, message): <NEW_LINE> <INDENT> args = self.collect_args(message) <NEW_LINE> remove_flag = re.search(r'\B-r\b', message.content) is not None <NEW_LINE> self.public_namespace.db.execute('SELECT owner, active, channels FROM filters WHERE name=?', (args.group(1), )) <NEW_LINE> rows = self.public_namespace.db.cursor.fetchall() <NEW_LINE> if len(rows) == 0: <NEW_LINE> <INDENT> yield from self.send_message(message.channel, 'Could not find a filter with name `%s`' % args.group(1)) <NEW_LINE> return <NEW_LINE> <DEDENT> if rows[0]['owner'] != message.author.id or (rows[0]['owner'] == message.author.id and remove_flag): <NEW_LINE> <INDENT> if message.channel.id in rows[0]['channels']: <NEW_LINE> <INDENT> new_channels = rows[0]['channels'].replace(message.channel.id, '') <NEW_LINE> new_channels = new_channels.replace(',,', ',').strip(',') <NEW_LINE> self.public_namespace.db.execute('UPDATE filters SET channels=? WHERE name=?', (new_channels, args.group(1))) <NEW_LINE> self.public_namespace.db.save() <NEW_LINE> yield from self.send_message(message.channel, 'Successfully removed this channel from `%s`' % args.group(1)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yield from self.send_message(message.channel, 'You are not the owner of `%s` and/or this channel is not in the filter' % args.group(1)) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> if rows[0]['active'] != 1: <NEW_LINE> <INDENT> yield from self.send_message(message.channel, '`%s` has already been removed' % args.group(1)) <NEW_LINE> return <NEW_LINE> <DEDENT> self.public_namespace.db.execute('UPDATE filters SET active=0 WHERE name=? AND owner=?', (args.group(1), message.author.id)) <NEW_LINE> self.public_namespace.db.save() <NEW_LINE> yield from self.send_message(message.channel, 'Successfully deleted filter `%s`' % args.group(1)) | Remove a filter from the current channel.
If you are the owner of the filter, this also deletes the filter.
**Usage**
```@Idea delete filter <name>```
Where
**`<name>`** is the name of the filter you want to remove
If you own the filter you want to remove, include `-r` in your message. | 6259906f8a43f66fc4bf3a27 |
class RpmRepoTestBase(ComponentTestBase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setup_class(cls): <NEW_LINE> <INDENT> super(RpmRepoTestBase, cls).setup_class() <NEW_LINE> cls.manifest = RepoManifest(os.path.join(RPM_TEST_DATA_DIR, 'test-repo-manifest.xml')) <NEW_LINE> cls.orig_repos = {} <NEW_LINE> for prj, brs in cls.manifest.projects_iter(): <NEW_LINE> <INDENT> repo = GitRepository.create(os.path.join(cls._tmproot, '%s.repo' % prj)) <NEW_LINE> try: <NEW_LINE> <INDENT> repo.add_remote_repo('origin', RPM_TEST_DATA_DIR, fetch=True) <NEW_LINE> <DEDENT> except GitRepositoryError: <NEW_LINE> <INDENT> gitfile = os.path.join(RPM_TEST_DATA_DIR, '.git') <NEW_LINE> if os.path.isfile(gitfile): <NEW_LINE> <INDENT> with open(gitfile) as fobj: <NEW_LINE> <INDENT> link = fobj.readline().replace('gitdir:', '').strip() <NEW_LINE> <DEDENT> link_dir = os.path.join(RPM_TEST_DATA_DIR, link) <NEW_LINE> repo.remove_remote_repo('origin') <NEW_LINE> repo.add_remote_repo('origin', link_dir, fetch=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> repo.fetch('origin', tags=True, refspec='refs/remotes/*:refs/upstream/*') <NEW_LINE> for branch, rev in six.iteritems(brs): <NEW_LINE> <INDENT> repo.create_branch(branch, rev) <NEW_LINE> <DEDENT> repo.force_head('master', hard=True) <NEW_LINE> cls.orig_repos[prj] = repo <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> @nottest <NEW_LINE> def init_test_repo(cls, pkg_name): <NEW_LINE> <INDENT> dirname = os.path.basename(cls.orig_repos[pkg_name].path) <NEW_LINE> shutil.copytree(cls.orig_repos[pkg_name].path, dirname) <NEW_LINE> os.chdir(dirname) <NEW_LINE> return GitRepository('.') | Baseclass for tests run in a Git repository with packaging data | 6259906faad79263cf430048 |
class ListFilteredMixin(object): <NEW_LINE> <INDENT> filter_set = None <NEW_LINE> def get_filter_set(self): <NEW_LINE> <INDENT> if self.filter_set: <NEW_LINE> <INDENT> return self.filter_set <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception( "ListFilterMixin requires either a definition of " "'filter_set' or an implementation of 'get_filter()'") <NEW_LINE> <DEDENT> <DEDENT> def get_filter_set_kwargs(self): <NEW_LINE> <INDENT> return { 'data': self.request.GET, 'queryset': self.get_base_queryset(), } <NEW_LINE> <DEDENT> def get_base_queryset(self): <NEW_LINE> <INDENT> return super(ListFilteredMixin, self).get_queryset() <NEW_LINE> <DEDENT> def get_constructed_filter(self): <NEW_LINE> <INDENT> if getattr(self, 'constructed_filter', None): <NEW_LINE> <INDENT> return self.constructed_filter <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> f = self.get_filter_set()(**self.get_filter_set_kwargs()) <NEW_LINE> self.constructed_filter = f <NEW_LINE> return f <NEW_LINE> <DEDENT> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> return self.get_constructed_filter().qs <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> kwargs.update({'filter': self.get_constructed_filter()}) <NEW_LINE> return super(ListFilteredMixin, self).get_context_data(**kwargs) | Mixin that adds support for django-filter | 6259906f4e4d562566373c99 |
class MilesApp(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> app = QApplication(sys.argv) <NEW_LINE> app.aboutToQuit.connect(self.cleanUp) <NEW_LINE> car = Car() <NEW_LINE> model = World(car) <NEW_LINE> self.preloadedPaths = preloadedPath.PreloadedPaths(model) <NEW_LINE> self.spi = StmController.StmController(model) <NEW_LINE> self.cam = camera.Camera(model) <NEW_LINE> self.check_distance = checkDistance.CheckDistance(model) <NEW_LINE> self.window = window.Window(model, self.preloadedPaths) <NEW_LINE> self.window.show() <NEW_LINE> mixer.init() <NEW_LINE> mixer.music.load('little_helper.mp3') <NEW_LINE> mixer.music.set_volume(1.0) <NEW_LINE> mixer.music.play() <NEW_LINE> time.sleep(5) <NEW_LINE> self.spi.start() <NEW_LINE> self.preloadedPaths.start() <NEW_LINE> self.cam.start() <NEW_LINE> self.check_distance.start() <NEW_LINE> sys.exit(app.exec_()) <NEW_LINE> <DEDENT> def cleanUp(self): <NEW_LINE> <INDENT> self.window.stop() <NEW_LINE> print("window closed") <NEW_LINE> self.preloadedPaths.stop() <NEW_LINE> self.cam.stop() <NEW_LINE> self.check_distance.stop() <NEW_LINE> time.sleep(0.01) <NEW_LINE> self.spi.stop() <NEW_LINE> print("close ok") | This is the main class initializing the model and the controllers. Architecture MVC. | 6259906fd486a94d0ba2d851 |
class Singleton(Borg): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> Borg.__init__(self) <NEW_LINE> self._shared_state.update(kwargs) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self._shared_state) | docstring for Singleton | 6259906f01c39578d7f1437e |
class SettingsApplicationController(Gtk.Application): <NEW_LINE> <INDENT> def __init__(self, args=[]): <NEW_LINE> <INDENT> super(SettingsApplicationController, self).__init__( application_id=APPLICATION_ID) <NEW_LINE> self._args = args <NEW_LINE> self.connect("activate", self.setup_ui) <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> return ConfigurationProxy() <NEW_LINE> <DEDENT> def get_uisettings(self): <NEW_LINE> <INDENT> return UISettings(Gio.Settings) <NEW_LINE> <DEDENT> def on_notify(self, message): <NEW_LINE> <INDENT> notification = Notify.Notification.new(NOTIFY_ID, message, "dialog-information") <NEW_LINE> notification.show() <NEW_LINE> <DEDENT> def on_error(self, message): <NEW_LINE> <INDENT> notification = Notify.Notification.new(NOTIFY_ID, message, "dialog-information") <NEW_LINE> notification.show() <NEW_LINE> <DEDENT> def on_succeed(self, action=None): <NEW_LINE> <INDENT> if action: <NEW_LINE> <INDENT> message = action <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> message = _("Success.") <NEW_LINE> <DEDENT> notification = Notify.Notification.new(NOTIFY_ID, message, "dialog-information") <NEW_LINE> notification.show() <NEW_LINE> <DEDENT> def on_fail(self, action=None): <NEW_LINE> <INDENT> if action: <NEW_LINE> <INDENT> message = action <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> message = _("Failure.") <NEW_LINE> <DEDENT> notification = Notify.Notification.new(NOTIFY_ID, message, "dialog-information") <NEW_LINE> notification.show() <NEW_LINE> <DEDENT> def setup_ui(self, data=None, asynchronous=True): <NEW_LINE> <INDENT> Notify.init(NOTIFY_ID) <NEW_LINE> config = self.get_config() <NEW_LINE> uisettings = self.get_uisettings() <NEW_LINE> model = ConfigurationModel(proxy=config, proxy_loadargs=self._args, uisettings=uisettings) <NEW_LINE> controller = ConfigController(model) <NEW_LINE> if controller.load(): <NEW_LINE> <INDENT> self.settings_dialog = ClientSettingsDialog(controller) <NEW_LINE> if self.settings_dialog.run() == Gtk.ResponseType.OK: <NEW_LINE> <INDENT> controller.persist(self.on_notify, self.on_error, self.on_succeed, self.on_fail) <NEW_LINE> <DEDENT> controller.exit(asynchronous=asynchronous) <NEW_LINE> self.settings_dialog.destroy() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.on_fail(action=_("Authentication failed")) <NEW_LINE> sys.stderr.write("Authentication failed.\n") | Core application controller for the landscape settings application. | 6259906fbaa26c4b54d50b3e |
class DarkSkyData: <NEW_LINE> <INDENT> def __init__(self, api_key, latitude, longitude, units): <NEW_LINE> <INDENT> self._api_key = api_key <NEW_LINE> self.latitude = latitude <NEW_LINE> self.longitude = longitude <NEW_LINE> self.requested_units = units <NEW_LINE> self.data = None <NEW_LINE> self.currently = None <NEW_LINE> self.hourly = None <NEW_LINE> self.daily = None <NEW_LINE> self._connect_error = False <NEW_LINE> <DEDENT> @Throttle(MIN_TIME_BETWEEN_UPDATES) <NEW_LINE> def update(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.data = forecastio.load_forecast( self._api_key, self.latitude, self.longitude, units=self.requested_units ) <NEW_LINE> self.currently = self.data.currently() <NEW_LINE> self.hourly = self.data.hourly() <NEW_LINE> self.daily = self.data.daily() <NEW_LINE> if self._connect_error: <NEW_LINE> <INDENT> self._connect_error = False <NEW_LINE> _LOGGER.info("Reconnected to Dark Sky") <NEW_LINE> <DEDENT> <DEDENT> except (ConnectError, HTTPError, Timeout, ValueError) as error: <NEW_LINE> <INDENT> if not self._connect_error: <NEW_LINE> <INDENT> self._connect_error = True <NEW_LINE> _LOGGER.error("Unable to connect to Dark Sky. %s", error) <NEW_LINE> <DEDENT> self.data = None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def units(self): <NEW_LINE> <INDENT> if self.data is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.data.json.get("flags").get("units") | Get the latest data from Dark Sky. | 6259906f23849d37ff852949 |
class ManageUserView(generics.RetrieveUpdateAPIView): <NEW_LINE> <INDENT> serializer_class = UserSerializer <NEW_LINE> authentication_classes = (authentication.TokenAuthentication,) <NEW_LINE> permission_classes = (permissions.IsAuthenticated,) <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> return self.request.user | Manage the authenticated user | 6259906fa8370b77170f1c5c |
class SubCategory(CategoryTemplate): <NEW_LINE> <INDENT> category = models.ForeignKey('Category', on_delete=models.CASCADE, default='id') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'sub-category' <NEW_LINE> verbose_name_plural = 'sub-categories' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.id) | subcategory model | 6259906f7b180e01f3e49cae |
class Player(object): <NEW_LINE> <INDENT> def blast(self, enemy): <NEW_LINE> <INDENT> print("The player blasts an enemy.\n") <NEW_LINE> enemy.die() | A player in a shooter game. | 6259906fdd821e528d6da5cb |
class Pipeline: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.message = 0 <NEW_LINE> self.producer_lock = threading.Lock() <NEW_LINE> self.consumer_lock = threading.Lock() <NEW_LINE> self.consumer_lock.acquire() <NEW_LINE> <DEDENT> def get_message(self, name): <NEW_LINE> <INDENT> logging.debug(f'{name}:about to acquire consumer_lock') <NEW_LINE> self.consumer_lock.acquire() <NEW_LINE> logging.debug(f'{name}:has consumer_lock') <NEW_LINE> message = self.message <NEW_LINE> logging.debug(f'{name}:about to release producer_lock') <NEW_LINE> self.producer_lock.release() <NEW_LINE> logging.debug(f'{name}:producer_lock released') <NEW_LINE> return message <NEW_LINE> <DEDENT> def set_message(self, message, name): <NEW_LINE> <INDENT> logging.debug(f'{name}:about to acquire producer_lock') <NEW_LINE> self.producer_lock.acquire() <NEW_LINE> logging.debug(f'{name}:has producer_lock') <NEW_LINE> self.message = message <NEW_LINE> logging.debug(f'{name}:about to release consumer_lock') <NEW_LINE> self.consumer_lock.release() <NEW_LINE> logging.debug(f'{name}:consumer_lock released') | Class to allow a single element pipeline between producer and consumer. | 6259906f99cbb53fe683277d |
class MreTes4(MreHeaderBase): <NEW_LINE> <INDENT> rec_sig = b'TES4' <NEW_LINE> melSet = MelSet( MelStruct(b'HEDR', [u'f', u'2I'], (u'version', 1.0), u'numRecords', (u'nextObject', 0x800)), MelNull(b'OFST'), MelBase(b'DELE','dele_p',), MreHeaderBase.MelAuthor(), MreHeaderBase.MelDescription(), MreHeaderBase.MelMasterNames(), MelNull(b'DATA'), ) <NEW_LINE> __slots__ = melSet.getSlotsUsed() | TES4 Record. File header. | 6259906f4f6381625f19a0f2 |
class TestTransformRegistration(IntegrationTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.portal = self.layer['portal'] <NEW_LINE> setRoles(self.portal, TEST_USER_ID, ['Manager']) <NEW_LINE> folder = self.portal[self.portal.invokeFactory('Folder', 'folder')] <NEW_LINE> self.board = _createObjectByType('Ploneboard', folder, 'board') <NEW_LINE> <DEDENT> def testDefaultRegistrations(self): <NEW_LINE> <INDENT> tool = getToolByName(self.portal, PLONEBOARD_TOOL) <NEW_LINE> self.failUnlessEqual(len(tool.getTransforms()), 3) <NEW_LINE> self.failUnlessEqual(len(tool.getEnabledTransforms()), 3) <NEW_LINE> <DEDENT> def testDisabling(self): <NEW_LINE> <INDENT> tool = getToolByName(self.portal, PLONEBOARD_TOOL) <NEW_LINE> tool.enableTransform('safe_html', enabled=False) <NEW_LINE> self.failIf('safe_html' in tool.getEnabledTransforms()) <NEW_LINE> tool.enableTransform('safe_html') <NEW_LINE> self.failUnless('safe_html' in tool.getEnabledTransforms()) <NEW_LINE> <DEDENT> def testUnregisteringAllRemovesOnlyThoseAdded(self): <NEW_LINE> <INDENT> tool = getToolByName(self.portal, PLONEBOARD_TOOL) <NEW_LINE> tool.unregisterAllTransforms() <NEW_LINE> transforms = getToolByName(self.portal, 'portal_transforms') <NEW_LINE> self.failIf('url_to_hyperlink' in transforms.objectIds()) <NEW_LINE> self.failIf('text_to_emoticons' in transforms.objectIds()) <NEW_LINE> self.failUnless('safe_html' in transforms.objectIds()) <NEW_LINE> <DEDENT> def testUnregisteringIndividualRemovesOnlyThoseAdded(self): <NEW_LINE> <INDENT> tool = getToolByName(self.portal, PLONEBOARD_TOOL) <NEW_LINE> transforms = getToolByName(self.portal, 'portal_transforms') <NEW_LINE> tool.unregisterTransform('url_to_hyperlink') <NEW_LINE> self.failIf('url_to_hyperlink' in transforms.objectIds()) <NEW_LINE> tool.unregisterTransform('safe_html') <NEW_LINE> self.failUnless('safe_html' in transforms.objectIds()) | Test transform registration | 6259906f4428ac0f6e659dc8 |