code (stringlengths 4-4.48k) | docstring (stringlengths 1-6.45k) | _id (stringlengths 24-24) |
---|---|---|
class Agent(Base): <NEW_LINE> <INDENT> def execute(self): <NEW_LINE> <INDENT> server_info = PluginManager().exec_plugin() <NEW_LINE> if server_info['basic']['status']: <NEW_LINE> <INDENT> hostname = server_info['basic']['data']['hostname'] <NEW_LINE> certname = open(settings.CERT_PATH,'r',encoding='utf-8').read().strip() <NEW_LINE> if not certname: <NEW_LINE> <INDENT> with open(settings.CERT_PATH,'w',encoding='utf-8') as f: <NEW_LINE> <INDENT> f.write(hostname) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> server_info['basic']['data']['hostname'] = certname <NEW_LINE> <DEDENT> <DEDENT> for key in list(server_info.keys()): <NEW_LINE> <INDENT> if not server_info[key]['status']: <NEW_LINE> <INDENT> print('>>',server_info[key]['data']) <NEW_LINE> <DEDENT> <DEDENT> self.post_asset(server_info) | In Agent mode, find the unique identifier (the hostname stored in the config/cert file serves as the unique identifier).
On the first collection, if the config/cert file is empty, the hostname from the submitted info is used;
on subsequent collections, the hostname in the cert file always takes precedence. | 62599072e76e3b2f99fda2f1 |
class FieldFunctionTestCase(unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> activate_module('tests') <NEW_LINE> <DEDENT> @with_transaction() <NEW_LINE> def test_accessor(self): <NEW_LINE> <INDENT> pool = Pool() <NEW_LINE> Model = pool.get('test.function.accessor') <NEW_LINE> Target = pool.get('test.function.accessor.target') <NEW_LINE> target = Target() <NEW_LINE> target.save() <NEW_LINE> record = Model() <NEW_LINE> record.target = target <NEW_LINE> self.assertEqual(record.function, target) | Test Field Function | 62599072a219f33f346c80f9 |
class IActionParam(Interface): <NEW_LINE> <INDENT> name = Attribute("Parameter name unique for all action's parameters. " "@type: unicode") <NEW_LINE> label = Attribute("Parameter label or None. @type: unicode or None") <NEW_LINE> desc = Attribute("Parameter description or None. @type: unicode or None") <NEW_LINE> value_info = Attribute("Information about the parameter value. " "@type: IValueInfo") <NEW_LINE> is_required = Attribute("If the parameter is required or optional. " "@type: bool") | Action parameter descriptor. | 625990721f5feb6acb1644e2 |
class MagAvgServer(Server): <NEW_LINE> <INDENT> def aggregation(self, reports): <NEW_LINE> <INDENT> return self.magnetude_fed_avg(reports) <NEW_LINE> <DEDENT> def magnetude_fed_avg(self, reports): <NEW_LINE> <INDENT> import fl_model <NEW_LINE> updates = self.extract_client_updates(reports) <NEW_LINE> magnetudes = [] <NEW_LINE> for update in updates: <NEW_LINE> <INDENT> magnetude = 0 <NEW_LINE> for _, weight in update: <NEW_LINE> <INDENT> magnetude += weight.norm() ** 2 <NEW_LINE> <DEDENT> magnetudes.append(np.sqrt(magnetude)) <NEW_LINE> <DEDENT> avg_update = [torch.zeros(x.size()) for _, x in updates[0]] <NEW_LINE> for i, update in enumerate(updates): <NEW_LINE> <INDENT> for j, (_, delta) in enumerate(update): <NEW_LINE> <INDENT> avg_update[j] += delta * (magnetudes[i] / sum(magnetudes)) <NEW_LINE> <DEDENT> <DEDENT> baseline_weights = fl_model.extract_weights(self.model) <NEW_LINE> updated_weights = [] <NEW_LINE> for i, (name, weight) in enumerate(baseline_weights): <NEW_LINE> <INDENT> updated_weights.append((name, weight + avg_update[i])) <NEW_LINE> <DEDENT> return updated_weights | Federated learning server that performs magnetude weighted federated averaging. | 625990724527f215b58eb617 |
class TFMultipleChoiceDataset: <NEW_LINE> <INDENT> features: List[InputFeatures] <NEW_LINE> def __init__( self, data_dir: str, tokenizer: PreTrainedTokenizer, task: str, max_seq_length: Optional[int] = 128, overwrite_cache=False, mode: Split = Split.train, ): <NEW_LINE> <INDENT> processor = processors[task]() <NEW_LINE> logger.info(f"Creating features from dataset file at {data_dir}") <NEW_LINE> label_list = processor.get_labels() <NEW_LINE> if mode == Split.dev: <NEW_LINE> <INDENT> examples = processor.get_dev_examples(data_dir) <NEW_LINE> <DEDENT> elif mode == Split.test: <NEW_LINE> <INDENT> examples = processor.get_test_examples(data_dir) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> examples = processor.get_train_examples(data_dir) <NEW_LINE> <DEDENT> logger.info("Training examples: %s", len(examples)) <NEW_LINE> self.features = convert_examples_to_features( examples, label_list, max_seq_length, tokenizer, pad_on_left=bool(tokenizer.padding_side == "left"), pad_token=tokenizer.pad_token_id, pad_token_segment_id=tokenizer.pad_token_type_id, ) <NEW_LINE> def gen(): <NEW_LINE> <INDENT> for (ex_index, ex) in tqdm.tqdm(enumerate(self.features), desc="convert examples to features"): <NEW_LINE> <INDENT> if ex_index % 10000 == 0: <NEW_LINE> <INDENT> logger.info("Writing example %d of %d" % (ex_index, len(examples))) <NEW_LINE> <DEDENT> yield ( { "example_id": 0, "input_ids": ex.input_ids, "attention_mask": ex.attention_mask, "token_type_ids": ex.token_type_ids, }, ex.label, ) <NEW_LINE> <DEDENT> <DEDENT> self.dataset = tf.data.Dataset.from_generator( gen, ( { "example_id": tf.int32, "input_ids": tf.int32, "attention_mask": tf.int32, "token_type_ids": tf.int32, }, tf.int64, ), ( { "example_id": tf.TensorShape([]), "input_ids": tf.TensorShape([None, None]), "attention_mask": tf.TensorShape([None, None]), "token_type_ids": tf.TensorShape([None, None]), }, tf.TensorShape([]), ), ) <NEW_LINE> <DEDENT> def get_dataset(self): <NEW_LINE> <INDENT> return self.dataset <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.features) <NEW_LINE> <DEDENT> def __getitem__(self, i) -> InputFeatures: <NEW_LINE> <INDENT> return self.features[i] | This will be superseded by a framework-agnostic approach
soon. | 625990724a966d76dd5f07d9 |
class IndexView(View): <NEW_LINE> <INDENT> def get(self, request): <NEW_LINE> <INDENT> context = cache.get('index_page_data') <NEW_LINE> if context is None: <NEW_LINE> <INDENT> types = GoodsType.objects.all() <NEW_LINE> goods_banners = IndexGoodsBanner.objects.all().order_by('index') <NEW_LINE> promotion_banners = IndexPromotionBanner.objects.all().order_by('index') <NEW_LINE> for tp in types: <NEW_LINE> <INDENT> image_banners = IndexTypeGoodsBanner.objects.filter( type=tp, display_type=1).order_by('index') <NEW_LINE> title_banners = IndexTypeGoodsBanner.objects.filter( type=tp, display_type=0).order_by('index') <NEW_LINE> tp.image_banners = image_banners <NEW_LINE> tp.title_banners = title_banners <NEW_LINE> <DEDENT> context = { 'types': types, 'goods_banners': goods_banners, 'promotion_banners': promotion_banners } <NEW_LINE> cache.set('index_page_data', context, 3600) <NEW_LINE> <DEDENT> return render(request, 'index.html', context) | /index
Home page | 62599072dd821e528d6da5f9 |
class OverrideConfigTestCase(MyTestCase): <NEW_LINE> <INDENT> class Config(TestingConfig): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> with mock.patch.dict("privacyidea.config.config", {"testing": cls.Config}): <NEW_LINE> <INDENT> MyTestCase.setUpClass() | Helper class that allows modifying the app config processed by ``create_app``.
This can be useful if config values need to be adjusted *for app creation*.
For that, just override the inner ``Config`` class. | 6259907238b623060ffaa4cc |
class iPerfUDPBiDirTestIPV6(ipv6_setup.Set_IPv6_Addresses, iPerfUDPBiDirTest): <NEW_LINE> <INDENT> def reverse_ip(self): <NEW_LINE> <INDENT> return "4aaa::6" <NEW_LINE> <DEDENT> def forward_ip(self): <NEW_LINE> <INDENT> return "5aaa::6" <NEW_LINE> <DEDENT> def server_opts_forward(self): <NEW_LINE> <INDENT> return "-V -B %s" % self.forward_ip() <NEW_LINE> <DEDENT> def server_opts_reverse(self, node): <NEW_LINE> <INDENT> board.uci_forward_traffic_rule("udp", "5001", "4aaa::6") <NEW_LINE> return "-V -B %s" % self.reverse_ip() <NEW_LINE> <DEDENT> def client_opts(self): <NEW_LINE> <INDENT> return "-V" <NEW_LINE> <DEDENT> def runTest(self): <NEW_LINE> <INDENT> ipv6_setup.Set_IPv6_Addresses.runTest(self) <NEW_LINE> iPerfUDPBiDirTest.runTest(self) | iPerf IPV6 from LAN to/from WAN | 62599072be8e80087fbc0980 |
class TrueTypeFont(PdfBaseFont): <NEW_LINE> <INDENT> def text_space_coords(self, x, y): <NEW_LINE> <INDENT> return x/1000., y/1000. | For our purposes, these are just a more restricted form of the Type 1
Fonts, so...we're done here. | 625990723539df3088ecdb86 |
class getNextPosition(smach.State): <NEW_LINE> <INDENT> def __init__(self,point_list): <NEW_LINE> <INDENT> smach.State.__init__(self, outcomes=['succeeded','aborted'], output_keys=['next_x','next_y']) <NEW_LINE> self.ptr = 0 <NEW_LINE> self.point_list = point_list <NEW_LINE> <DEDENT> def execute(self, userdata): <NEW_LINE> <INDENT> if len(self.point_list) > 0 : <NEW_LINE> <INDENT> rospy.loginfo("Setting goal to point %d: (%f,%f) ", self.ptr, self.point_list[self.ptr].x, self.point_list[self.ptr].y) <NEW_LINE> userdata.next_x = self.point_list[self.ptr].x <NEW_LINE> userdata.next_y = self.point_list[self.ptr].y <NEW_LINE> self.ptr = self.ptr + 1 <NEW_LINE> if self.ptr > len(self.point_list) - 1 : <NEW_LINE> <INDENT> self.ptr = 0 <NEW_LINE> <DEDENT> return 'succeeded' <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return 'aborted' | Temporary implementation. State giving the next position goal
TODO: Must be implemented as a service interfacing to a file | 62599072cc0a2c111447c749 |
class ChildrenWatch(object): <NEW_LINE> <INDENT> def __init__(self, client, path, func=None, allow_session_lost=True, send_event=False): <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._path = path <NEW_LINE> self._func = func <NEW_LINE> self._send_event = send_event <NEW_LINE> self._stopped = False <NEW_LINE> self._watch_established = False <NEW_LINE> self._allow_session_lost = allow_session_lost <NEW_LINE> self._run_lock = client.handler.lock_object() <NEW_LINE> self._prior_children = None <NEW_LINE> self._used = False <NEW_LINE> if func is not None: <NEW_LINE> <INDENT> self._used = True <NEW_LINE> if allow_session_lost: <NEW_LINE> <INDENT> self._client.add_listener(self._session_watcher) <NEW_LINE> <DEDENT> self._get_children() <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, func): <NEW_LINE> <INDENT> if self._used: <NEW_LINE> <INDENT> raise KazooException( "A function has already been associated with this " "ChildrenWatch instance.") <NEW_LINE> <DEDENT> self._func = func <NEW_LINE> self._used = True <NEW_LINE> if self._allow_session_lost: <NEW_LINE> <INDENT> self._client.add_listener(self._session_watcher) <NEW_LINE> <DEDENT> self._get_children() <NEW_LINE> return func <NEW_LINE> <DEDENT> @_ignore_closed <NEW_LINE> def _get_children(self, event=None): <NEW_LINE> <INDENT> with self._run_lock: <NEW_LINE> <INDENT> if self._stopped: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> children = self._client.retry(self._client.get_children, self._path, self._watcher) <NEW_LINE> <DEDENT> except NoNodeError: <NEW_LINE> <INDENT> self._stopped = True <NEW_LINE> return <NEW_LINE> <DEDENT> if not self._watch_established: <NEW_LINE> <INDENT> self._watch_established = True <NEW_LINE> if self._prior_children is not None and self._prior_children == children: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> self._prior_children = children <NEW_LINE> try: <NEW_LINE> <INDENT> if self._send_event: <NEW_LINE> <INDENT> result = self._func(children, event) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = self._func(children) <NEW_LINE> <DEDENT> if result is False: <NEW_LINE> <INDENT> self._stopped = True <NEW_LINE> self._func = None <NEW_LINE> <DEDENT> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> log.exception(exc) <NEW_LINE> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _watcher(self, event): <NEW_LINE> <INDENT> if event.type != "NONE": <NEW_LINE> <INDENT> self._get_children(event) <NEW_LINE> <DEDENT> <DEDENT> def _session_watcher(self, state): <NEW_LINE> <INDENT> if state in (KazooState.LOST, KazooState.SUSPENDED): <NEW_LINE> <INDENT> self._watch_established = False <NEW_LINE> <DEDENT> elif (state == KazooState.CONNECTED and not self._watch_established and not self._stopped): <NEW_LINE> <INDENT> self._client.handler.spawn(self._get_children) | Watches a node for children updates and calls the specified
function each time it changes.
The function will also be called the very first time it's
registered to get children.
Returning `False` from the registered function will disable future
children change calls. If the client connection is closed (using
the close command), the ChildrenWatch will no longer get updates.
If send_event=True in __init__, then the function will always be
called with a second parameter, ``event``. Upon the initial call or when
recovering a lost session, the ``event`` is always ``None``.
Otherwise it's a :class:`~kazoo.protocol.states.WatchedEvent`
instance.
Example with client:
.. code-block:: python
@client.ChildrenWatch('/path/to/watch')
def my_func(children):
print "Children are %s" % children
# Above function is called immediately and prints children | 62599072a17c0f6771d5d823 |
class SemimonomialActionVec(Action): <NEW_LINE> <INDENT> def __init__(self, G, V, check=True): <NEW_LINE> <INDENT> if check: <NEW_LINE> <INDENT> from sage.modules.free_module import FreeModule_generic <NEW_LINE> if not isinstance(G, SemimonomialTransformationGroup): <NEW_LINE> <INDENT> raise ValueError('%s is not a semimonomial group' % G) <NEW_LINE> <DEDENT> if not isinstance(V, FreeModule_generic): <NEW_LINE> <INDENT> raise ValueError('%s is not a free module' % V) <NEW_LINE> <DEDENT> if V.ambient_module() != V: <NEW_LINE> <INDENT> raise ValueError('%s is not equal to its ambient module' % V) <NEW_LINE> <DEDENT> if V.dimension() != G.degree(): <NEW_LINE> <INDENT> raise ValueError('%s has a dimension different to the degree of %s' % (V, G)) <NEW_LINE> <DEDENT> if V.base_ring() != G.base_ring(): <NEW_LINE> <INDENT> raise ValueError('%s and %s have different base rings' % (V, G)) <NEW_LINE> <DEDENT> <DEDENT> Action.__init__(self, G, V.dense_module()) <NEW_LINE> <DEDENT> def _call_(self, a, b): <NEW_LINE> <INDENT> b = b.apply_map(a.get_autom()) <NEW_LINE> b = self.codomain()(a.get_perm().action(b)) <NEW_LINE> b = b.pairwise_product(self.codomain()(a.get_v_inverse())) <NEW_LINE> return b | The natural action of the semimonomial group on vectors.
The action is defined by:
`(\phi, \pi, \alpha)*(v_0, \ldots, v_{n-1}) :=
(\alpha(v_{\pi(1)-1}) \cdot \phi_0^{-1}, \ldots, \alpha(v_{\pi(n)-1}) \cdot \phi_{n-1}^{-1})`.
(The indexing of vectors is `0`-based here, so
`\psi = (\psi_0, \psi_1, \ldots, \psi_{n-1})`.) | 625990724e4d562566373cf7 |
class HorizontalFlipWithHomo(A.HorizontalFlip): <NEW_LINE> <INDENT> def apply_to_homo(self, homo, **params): <NEW_LINE> <INDENT> return horizontal_flip_homo(homo, **params) | Class based on albumentations.HorizontalFlip, to allow it to deal with homographies.
| 625990724428ac0f6e659e25 |
class Marsh(pygame.sprite.Sprite): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.respawn() <NEW_LINE> <DEDENT> def redraw(self): <NEW_LINE> <INDENT> pygame.sprite.Sprite.__init__(self) <NEW_LINE> self.image, self.rect = Resources.load_png('marsh.png') <NEW_LINE> originalsize = self.image.get_size() <NEW_LINE> self.fullsize = (int(originalsize[0]/2), int(originalsize[1]/2)) <NEW_LINE> self.image = pygame.transform.scale( self.image, (int((self.health/100.0) * self.fullsize[0]), int((self.health/100.0) * self.fullsize[1]))) <NEW_LINE> newsize = self.image.get_size() <NEW_LINE> screen = pygame.display.get_surface() <NEW_LINE> centerx = screen.get_width()/2 - newsize[0]/2 <NEW_LINE> centery = screen.get_height()/2 - newsize[1]/2 <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> self.rect.move_ip(centerx, centery) <NEW_LINE> <DEDENT> def respawn(self): <NEW_LINE> <INDENT> self.health = INITIAL_HEALTH <NEW_LINE> self.redraw() <NEW_LINE> self.healthbar = self.rect.width * min(1, (self.health/100.0)) <NEW_LINE> <DEDENT> def gethealth(self): <NEW_LINE> <INDENT> return self.health <NEW_LINE> <DEDENT> def gethealthlevel(self): <NEW_LINE> <INDENT> if self.health < LOW_HEALTH_THRESHOLD: <NEW_LINE> <INDENT> return Constants.BEAVER_STATE_MARSH_HEALTH_LOW <NEW_LINE> <DEDENT> elif self.health < MED_HEALTH_THRESHOLD: <NEW_LINE> <INDENT> return Constants.BEAVER_STATE_MARSH_HEALTH_MED <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Constants.BEAVER_STATE_MARSH_HEALTH_HIGH <NEW_LINE> <DEDENT> <DEDENT> def improve(self): <NEW_LINE> <INDENT> self.health += HEALTH_LUMBER_GAIN <NEW_LINE> if self.health > MAX_HEALTH: <NEW_LINE> <INDENT> self.health = MAX_HEALTH <NEW_LINE> <DEDENT> self.healthbar = self.rect.width * min(1, (self.health/100.0)) <NEW_LINE> <DEDENT> def updatehealth(self): <NEW_LINE> <INDENT> self.health -= HEALTH_IDLE_COST <NEW_LINE> if self.health < MIN_HEALTH: <NEW_LINE> <INDENT> self.health = MIN_HEALTH <NEW_LINE> <DEDENT> self.healthbar = self.rect.width * min(1, (self.health/100.0)) <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.updatehealth() <NEW_LINE> self.redraw() | A marsh
Returns: marsh object
Functions: update
Attributes: health, healthbar, scale | 625990721f5feb6acb1644e4 |
class RandomizedMedianOfMedians: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.comparisons = 0 <NEW_LINE> self.swaps = 0 <NEW_LINE> <DEDENT> def median_of_medians(self, array, index): <NEW_LINE> <INDENT> if len(array) <= 10: <NEW_LINE> <INDENT> array.sort() <NEW_LINE> return array[index] <NEW_LINE> <DEDENT> pivot = array[random.randint(0, len(array) - 1)] <NEW_LINE> sys.stderr.write("Chosen pivot: {}\n".format(pivot)) <NEW_LINE> lesser = [] <NEW_LINE> greater = [] <NEW_LINE> equal = [] <NEW_LINE> for item in array: <NEW_LINE> <INDENT> if item < pivot: <NEW_LINE> <INDENT> lesser.append(item) <NEW_LINE> self.comparisons += 1 <NEW_LINE> self.swaps += 1 <NEW_LINE> sys.stderr.write("random select: item: {} < pivot: {}\n".format(item, pivot)) <NEW_LINE> <DEDENT> elif item > pivot: <NEW_LINE> <INDENT> greater.append(item) <NEW_LINE> self.comparisons += 1 <NEW_LINE> self.swaps += 1 <NEW_LINE> sys.stderr.write("random select: item: {} > pivot: {}\n".format(item, pivot)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> equal.append(item) <NEW_LINE> self.comparisons += 1 <NEW_LINE> self.swaps += 1 <NEW_LINE> sys.stderr.write("random select: item: {} == pivot: {}\n".format(item, pivot)) <NEW_LINE> <DEDENT> <DEDENT> if index < len(lesser): <NEW_LINE> <INDENT> self.comparisons += 1 <NEW_LINE> sys.stderr.write("select: index: {} is in lesser\n".format(index)) <NEW_LINE> return self.median_of_medians(lesser, index) <NEW_LINE> <DEDENT> elif index < len(lesser) + len(equal): <NEW_LINE> <INDENT> self.comparisons += 1 <NEW_LINE> sys.stderr.write("select: index: {} is in equal\n".format(index)) <NEW_LINE> return equal[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.comparisons += 1 <NEW_LINE> sys.stderr.write("select: index: {} is in greater\n".format(index)) <NEW_LINE> greater_index = index - (len(lesser) + len(equal)) <NEW_LINE> return self.median_of_medians(greater, greater_index) | Implementation of the RandomizedSelect algorithm in Python | 6259907232920d7e50bc7938 |
class BasicPresence(xmppim.AvailabilityPresence): <NEW_LINE> <INDENT> history = None <NEW_LINE> password = None <NEW_LINE> def toElement(self): <NEW_LINE> <INDENT> element = xmppim.AvailabilityPresence.toElement(self) <NEW_LINE> muc = element.addElement((NS_MUC, 'x')) <NEW_LINE> if self.password: <NEW_LINE> <INDENT> muc.addElement('password', content=self.password) <NEW_LINE> <DEDENT> if self.history: <NEW_LINE> <INDENT> muc.addChild(self.history.toElement()) <NEW_LINE> <DEDENT> return element | Availability presence sent from MUC client to service.
@type history: L{HistoryOptions} | 6259907267a9b606de54771c |
class Meter(object): <NEW_LINE> <INDENT> def __init__(self, objectified_meter): <NEW_LINE> <INDENT> self.objectified = objectified_meter <NEW_LINE> self._warnings = {} <NEW_LINE> <DEDENT> @property <NEW_LINE> def objectified(self): <NEW_LINE> <INDENT> return self._objectified <NEW_LINE> <DEDENT> @objectified.setter <NEW_LINE> def objectified(self, value): <NEW_LINE> <INDENT> self._objectified = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def errors(self): <NEW_LINE> <INDENT> self._errors = {} <NEW_LINE> if self.objectified.get('ErrCat'): <NEW_LINE> <INDENT> self._errors = { 'errcat': self.objectified.get('ErrCat'), 'errcode': self.objectified.get('ErrCode') } <NEW_LINE> <DEDENT> return self._errors <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.objectified.get('Id') <NEW_LINE> <DEDENT> @property <NEW_LINE> def report_type(self): <NEW_LINE> <INDENT> raise NotImplementedError('This method is not implemented!') <NEW_LINE> <DEDENT> @property <NEW_LINE> def measure_class(self): <NEW_LINE> <INDENT> return Measure <NEW_LINE> <DEDENT> @property <NEW_LINE> def measures(self): <NEW_LINE> <INDENT> measures = [] <NEW_LINE> if hasattr(self.objectified, self.report_type): <NEW_LINE> <INDENT> objectified = getattr(self.objectified, self.report_type) <NEW_LINE> measures = map(self.measure_class, objectified) <NEW_LINE> <DEDENT> return measures <NEW_LINE> <DEDENT> @property <NEW_LINE> def values(self): <NEW_LINE> <INDENT> values = [] <NEW_LINE> for measure in self.measures: <NEW_LINE> <INDENT> values.append(measure.value()) <NEW_LINE> <DEDENT> return values <NEW_LINE> <DEDENT> @property <NEW_LINE> def warnings(self): <NEW_LINE> <INDENT> return self._warnings | Base class for a meter. | 625990724c3428357761bba6 |
class ProjectGalleryCreate(GalleryAccessMixin, View): <NEW_LINE> <INDENT> template_name = 'projects/gallery_form.html' <NEW_LINE> form_class = ProjectGalleryForm <NEW_LINE> def get(self, request, **kwargs): <NEW_LINE> <INDENT> context = super(ProjectGalleryCreate, self).get_context_data(**kwargs) <NEW_LINE> context['form'] = self.form_class(initial={ 'content_type': ContentType.objects.get_for_model(SocialProject), 'object_id': self.object.pk, }) <NEW_LINE> return render(request, self.template_name, context) <NEW_LINE> <DEDENT> def post(self, request, **kwargs): <NEW_LINE> <INDENT> form = self.form_class(request.POST) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> gallery = form.save() <NEW_LINE> created_gallery(request.user, gallery) <NEW_LINE> <DEDENT> return redirect(reverse('projects:gallery-list', kwargs={'slug': self.object.slug, })) | Create new gallery for selected project.
| 62599072fff4ab517ebcf10c |
class Charge(_1D): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def func(cls, t, t0, tau, amp, offset): <NEW_LINE> <INDENT> return offset + amp * ((1 - np.exp(- (t - t0) / tau)) * np.heaviside(t - t0, 0.5)) <NEW_LINE> <DEDENT> def approx(self): <NEW_LINE> <INDENT> offset = np.mean(self.ydata[:3]) <NEW_LINE> rng_min = abs(np.min(self.ydata) - offset) <NEW_LINE> rng_max = abs(np.max(self.ydata) - offset) <NEW_LINE> if rng_min > rng_max: <NEW_LINE> <INDENT> amp = -rng_min <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> amp = rng_max <NEW_LINE> <DEDENT> t0 = self.xdata[np.argmin(abs(self.ydata - offset - 0.05 * amp))] <NEW_LINE> tau = 1.2 * (self.xdata[np.argmin(abs(self.ydata - offset - 0.5 * amp))] - t0) <NEW_LINE> return t0, tau, amp, offset | Fit a capacitive charging curve to 1D data
| 62599072a8370b77170f1cbc |
class Object(Element): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.compute_matrix() <NEW_LINE> <DEDENT> def plot(self, ax, elem_pos, beam_path, **kwargs): <NEW_LINE> <INDENT> self._plot_var(ax, **kwargs) <NEW_LINE> ymin = self._Element__ymin <NEW_LINE> ymax = self._Element__ymax <NEW_LINE> sc = self._Element__scale_vert <NEW_LINE> tx = self._Element__text_offset <NEW_LINE> col = self._Element__color['object'] <NEW_LINE> pos = beam_path.position[elem_pos] <NEW_LINE> ax.plot([pos, pos], [ymin * sc, ymax * sc], ls='--', color=col) <NEW_LINE> ax.text(pos, ymax * (sc + tx), 'Object', color=col, ha='center', va='bottom') | Object of beam path | 625990722c8b7c6e89bd50d9 |
class OneTimestepLayer(lasagne.layers.Layer): <NEW_LINE> <INDENT> def __init__(self, input_layer): <NEW_LINE> <INDENT> super(OneTimestepLayer, self).__init__(input_layer) <NEW_LINE> self.input_shape = lasagne.layers.get_output_shape(input_layer) <NEW_LINE> <DEDENT> def get_output_for(self, input, timesteps=None, *args, **kwargs): <NEW_LINE> <INDENT> if timesteps != None: <NEW_LINE> <INDENT> return input[T.arange(start=0,stop=self.input_shape[0]),timesteps,:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return input[:,-1,:] <NEW_LINE> <DEDENT> <DEDENT> def get_output_shape_for(self, input_shape): <NEW_LINE> <INDENT> return (input_shape[0], input_shape[2]) | Only forward outputs at certain/last sequence positions
Parameters
-------
input_layer : lasagne layer type
Input layer with shape: [samples, sequence positions, features] | 62599072283ffb24f3cf519b |
class PatchScore(BaseModel, CBScoreMixin): <NEW_LINE> <INDENT> cs = ForeignKeyField(ChallengeSet, related_name='patch_scores') <NEW_LINE> num_polls = BigIntegerField(null=False) <NEW_LINE> polls_included = BinaryJSONField(null=True) <NEW_LINE> has_failed_polls = BooleanField(null=False, default=False) <NEW_LINE> failed_polls = BinaryJSONField(null=True) <NEW_LINE> round = ForeignKeyField(Round, related_name='patch_scores') <NEW_LINE> perf_score = BinaryJSONField(null=False) <NEW_LINE> patch_type = ForeignKeyField(PatchType, related_name='estimated_scores') <NEW_LINE> @property <NEW_LINE> def security(self): <NEW_LINE> <INDENT> return 2 - self.patch_type.exploitability <NEW_LINE> <DEDENT> @property <NEW_LINE> def success(self): <NEW_LINE> <INDENT> if self.has_failed_polls: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 1 - self.patch_type.functionality_risk <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def time_overhead(self): <NEW_LINE> <INDENT> rep_tsk_clk = self.perf_score['score']['rep']['task_clock'] <NEW_LINE> ref_tsk_clk = self.perf_score['score']['ref']['task_clock'] <NEW_LINE> exec_time_overhead = 9999 <NEW_LINE> if ref_tsk_clk != 0: <NEW_LINE> <INDENT> exec_time_overhead = (rep_tsk_clk * 1.0) / ref_tsk_clk <NEW_LINE> <DEDENT> return exec_time_overhead - 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def memory_overhead(self): <NEW_LINE> <INDENT> rep_max_rss = self.perf_score['score']['rep']['rss'] <NEW_LINE> ref_max_rss = self.perf_score['score']['ref']['rss'] <NEW_LINE> rep_min_flt = self.perf_score['score']['rep']['flt'] <NEW_LINE> ref_min_flt = self.perf_score['score']['ref']['flt'] <NEW_LINE> term1 = 9999 <NEW_LINE> if ref_max_rss != 0: <NEW_LINE> <INDENT> term1 = (rep_max_rss * 1.0) / ref_max_rss <NEW_LINE> <DEDENT> term2 = 9999 <NEW_LINE> if ref_min_flt != 0: <NEW_LINE> <INDENT> term2 = (rep_min_flt * 1.0) / ref_min_flt <NEW_LINE> <DEDENT> return 0.5 * (term1 + term2) - 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def size_overhead(self): <NEW_LINE> <INDENT> rep_file_size = self.perf_score['score']['rep']['file_size'] <NEW_LINE> ref_file_size = self.perf_score['score']['ref']['file_size'] <NEW_LINE> return ((rep_file_size * 1.0) / ref_file_size) - 1 | Score of a patched CB | 625990724f6381625f19a122 |
class Meta: <NEW_LINE> <INDENT> verbose_name_plural = "Wall Posts" | meta | 625990727b180e01f3e49cdd |
@injected <NEW_LINE> @setup(IBlogService, name='blogService') <NEW_LINE> class BlogServiceAlchemy(EntityCRUDServiceAlchemy, IBlogService): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> EntityCRUDServiceAlchemy.__init__(self, BlogMapped) <NEW_LINE> <DEDENT> def getBlog(self, blogId): <NEW_LINE> <INDENT> sql = self.session().query(BlogMapped) <NEW_LINE> sql = sql.filter(BlogMapped.Id == blogId) <NEW_LINE> try: return sql.one() <NEW_LINE> except NoResultFound: raise InputError(Ref(_('Unknown id'), ref=Blog.Id)) <NEW_LINE> <DEDENT> def getAll(self, languageId=None, userId=None, offset=None, limit=None, detailed=False, q=None): <NEW_LINE> <INDENT> sql = self._buildQuery(languageId, userId, q) <NEW_LINE> sqlLimit = buildLimits(sql, offset, limit) <NEW_LINE> if detailed: return IterPart(sqlLimit.all(), sql.count(), offset, limit) <NEW_LINE> return sqlLimit.all() <NEW_LINE> <DEDENT> def getLive(self, languageId=None, userId=None, q=None): <NEW_LINE> <INDENT> sql = self._buildQuery(languageId, userId, q) <NEW_LINE> sql = sql.filter((BlogMapped.ClosedOn == None) & (BlogMapped.LiveOn != None)) <NEW_LINE> return sql.all() <NEW_LINE> <DEDENT> def putLive(self, blogId): <NEW_LINE> <INDENT> blog = self.session().query(BlogMapped).get(blogId) <NEW_LINE> if not blog: raise InputError(_('Invalid blog or credentials')) <NEW_LINE> assert isinstance(blog, Blog), 'Invalid blog %s' % blog <NEW_LINE> blog.LiveOn = current_timestamp() if blog.LiveOn is None else None <NEW_LINE> self.session().merge(blog) <NEW_LINE> <DEDENT> def insert(self, blog): <NEW_LINE> <INDENT> assert isinstance(blog, Blog), 'Invalid blog %s' % blog <NEW_LINE> if blog.CreatedOn is None: blog.CreatedOn = current_timestamp() <NEW_LINE> return super().insert(blog) <NEW_LINE> <DEDENT> def _buildQuery(self, languageId=None, userId=None, q=None): <NEW_LINE> <INDENT> sql = self.session().query(BlogMapped) <NEW_LINE> if languageId: sql = sql.filter(BlogMapped.Language == languageId) <NEW_LINE> if userId: <NEW_LINE> <INDENT> userFilter = (BlogMapped.Creator == userId) | exists().where((CollaboratorMapped.User == userId) & (BlogCollaboratorMapped.blogCollaboratorId == CollaboratorMapped.Id) & (BlogCollaboratorMapped.Blog == BlogMapped.Id)) <NEW_LINE> sql = sql.filter(userFilter) <NEW_LINE> <DEDENT> if q: <NEW_LINE> <INDENT> assert isinstance(q, QBlog), 'Invalid query %s' % q <NEW_LINE> sql = buildQuery(sql, q, BlogMapped) <NEW_LINE> <DEDENT> return sql | Implementation for @see: IBlogService | 62599072d268445f2663a7d6 |
class AdvancedModelChoiceIterator(forms.models.ModelChoiceIterator): <NEW_LINE> <INDENT> def __iter__(self): <NEW_LINE> <INDENT> if self.field.empty_label is not None: <NEW_LINE> <INDENT> yield ("", self.field.empty_label) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yield ("", "---------") <NEW_LINE> <DEDENT> choices = [] <NEW_LINE> for item in self.queryset.select_related('entidade', 'entidade__entidade'): <NEW_LINE> <INDENT> label = mark_safe(item.sigla_tabulada().replace(' ', ' ')) <NEW_LINE> sortValue = item.sigla_completa() <NEW_LINE> choices.append((item.pk, label, sortValue)) <NEW_LINE> <DEDENT> choices = sorted(choices, key=lambda obj: obj[2]) <NEW_LINE> for item in choices: <NEW_LINE> <INDENT> yield (item[0], item[1]) | Class for displaying the Entidade in Select format, ordered by sigla_completa,
which includes the parent Entidade, in the format "<parent entity acronym> - <entity acronym>" | 62599072cc0a2c111447c74a |
class Call(AlgebraicLeaf): <NEW_LINE> <INDENT> init_arg_names = ("function", "parameters",) <NEW_LINE> def __init__(self, function, parameters): <NEW_LINE> <INDENT> self.function = function <NEW_LINE> self.parameters = parameters <NEW_LINE> try: <NEW_LINE> <INDENT> arg_count = self.function.arg_count <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if len(self.parameters) != arg_count: <NEW_LINE> <INDENT> raise TypeError( f"{self.function} called with wrong number of arguments " f"(need {arg_count}, got {len(parameters)})") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __getinitargs__(self): <NEW_LINE> <INDENT> return self.function, self.parameters <NEW_LINE> <DEDENT> mapper_method = intern("map_call") | A function invocation.
.. attribute:: function
A :class:`Expression` that evaluates to a function.
.. attribute:: parameters
A :class:`tuple` of positional parameters, each element
of which is a :class:`Expression` or a constant. | 62599072091ae3566870652a |
class FPS(object): <NEW_LINE> <INDENT> def __init__(self, fps, usar_modo_economico): <NEW_LINE> <INDENT> self.cuadros_por_segundo = "??" <NEW_LINE> self.frecuencia = 1000.0 / fps <NEW_LINE> self.timer = QtCore.QTime() <NEW_LINE> self.timer.start() <NEW_LINE> self.siguiente = self.timer.elapsed() + self.frecuencia <NEW_LINE> self.cuadros = 0 <NEW_LINE> self.ultimo_reporte_fps = 0 <NEW_LINE> self.cuadros_por_segundo_numerico = 0 <NEW_LINE> <DEDENT> def actualizar(self): <NEW_LINE> <INDENT> actual = self.timer.elapsed() <NEW_LINE> if actual > self.siguiente: <NEW_LINE> <INDENT> cantidad = 0 <NEW_LINE> while actual > self.siguiente: <NEW_LINE> <INDENT> self.siguiente += self.frecuencia <NEW_LINE> cantidad += 1 <NEW_LINE> self._procesar_fps(actual) <NEW_LINE> <DEDENT> if cantidad > 10: <NEW_LINE> <INDENT> cantidad = 10 <NEW_LINE> <DEDENT> self.cuadros += 1 <NEW_LINE> return cantidad <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> def _procesar_fps(self, actual): <NEW_LINE> <INDENT> if actual - self.ultimo_reporte_fps > 1000.0: <NEW_LINE> <INDENT> self.ultimo_reporte_fps += 1000.0 <NEW_LINE> self.cuadros_por_segundo = str(self.cuadros) <NEW_LINE> self.cuadros_por_segundo_numerico = self.cuadros <NEW_LINE> self.cuadros = 0 <NEW_LINE> <DEDENT> <DEDENT> def obtener_cuadros_por_segundo(self): <NEW_LINE> <INDENT> return self.cuadros_por_segundo | Represents a time controller for the pilas mainloop. | 62599072a8370b77170f1cbd |
class DataCDBWriteCommand(Command, ResponseParserMixIn): <NEW_LINE> <INDENT> name = "Write value to CDB" <NEW_LINE> result_type = DataCDBWriteResult <NEW_LINE> response_fields = { 'Length' : {}, 'Cid' : {}, 'Value' : {} } <NEW_LINE> @property <NEW_LINE> def ipmitool_args(self): <NEW_LINE> <INDENT> return ["cxoem", "data", "cdb", "write", self._params['length'], self._params['cid'], self._params['value']] | Describes the cxoem data cdb write command
| 62599072ac7a0e7691f73ddc |
class GLGridItem(GLGraphicsItem): <NEW_LINE> <INDENT> def __init__(self, size=None, color=None, antialias=True, glOptions='translucent'): <NEW_LINE> <INDENT> GLGraphicsItem.__init__(self) <NEW_LINE> self.setGLOptions(glOptions) <NEW_LINE> self.antialias = antialias <NEW_LINE> if size is None: <NEW_LINE> <INDENT> size = QtGui.QVector3D(20,20,1) <NEW_LINE> <DEDENT> self.setSize(size=size) <NEW_LINE> self.setSpacing(1, 1, 1) <NEW_LINE> <DEDENT> def setSize(self, x=None, y=None, z=None, size=None): <NEW_LINE> <INDENT> if size is not None: <NEW_LINE> <INDENT> x = size.x() <NEW_LINE> y = size.y() <NEW_LINE> z = size.z() <NEW_LINE> <DEDENT> self.__size = [x,y,z] <NEW_LINE> self.update() <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> return self.__size[:] <NEW_LINE> <DEDENT> def setSpacing(self, x=None, y=None, z=None, spacing=None): <NEW_LINE> <INDENT> if spacing is not None: <NEW_LINE> <INDENT> x = spacing.x() <NEW_LINE> y = spacing.y() <NEW_LINE> z = spacing.z() <NEW_LINE> <DEDENT> self.__spacing = [x,y,z] <NEW_LINE> self.update() <NEW_LINE> <DEDENT> def spacing(self): <NEW_LINE> <INDENT> return self.__spacing[:] <NEW_LINE> <DEDENT> def paint(self): <NEW_LINE> <INDENT> self.setupGLState() <NEW_LINE> if self.antialias: <NEW_LINE> <INDENT> glEnable(GL_LINE_SMOOTH) <NEW_LINE> glEnable(GL_BLEND) <NEW_LINE> glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA) <NEW_LINE> glHint(GL_LINE_SMOOTH_HINT, GL_NICEST) <NEW_LINE> <DEDENT> glBegin( GL_LINES ) <NEW_LINE> x,y,z = self.size() <NEW_LINE> xs,ys,zs = self.spacing() <NEW_LINE> xvals = np.arange(-x/2., x/2. + xs*0.001, xs) <NEW_LINE> yvals = np.arange(-y/2., y/2. + ys*0.001, ys) <NEW_LINE> glColor4f(1, 1, 1, .3) <NEW_LINE> for x in xvals: <NEW_LINE> <INDENT> glVertex3f(x, yvals[0], 0) <NEW_LINE> glVertex3f(x, yvals[-1], 0) <NEW_LINE> <DEDENT> for y in yvals: <NEW_LINE> <INDENT> glVertex3f(xvals[0], y, 0) <NEW_LINE> glVertex3f(xvals[-1], y, 0) <NEW_LINE> <DEDENT> glEnd() | **Bases:** :class:`GLGraphicsItem <pyqtgraph.opengl.GLGraphicsItem>`
Displays a wire-frame grid. | 625990724e4d562566373cf9 |
class ChallengeBase(ABC): <NEW_LINE> <INDENT> def load_input(self, input_file): <NEW_LINE> <INDENT> with open(input_file, "rt") as f: <NEW_LINE> <INDENT> self.lines = f.readlines() <NEW_LINE> <DEDENT> <DEDENT> @abstractmethod <NEW_LINE> def challenge1(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def challenge2(self): <NEW_LINE> <INDENT> pass | Base class for Advent of Code challenges | 625990724527f215b58eb619 |
class PythonScriptBuildManager(InterpretedLanguageBuildManager): <NEW_LINE> <INDENT> source_extension = '.py' <NEW_LINE> language = 'python-script' <NEW_LINE> def syntax_check(self): <NEW_LINE> <INDENT> python3_syntax_check(self.source) | Python 3.x builder that executes code in a separate interpreter. | 625990727d43ff248742808c |
class Node(object): <NEW_LINE> <INDENT> def __init__(self, move=None, parent=None, state=None): <NEW_LINE> <INDENT> self.move = move <NEW_LINE> self.parent_node = parent <NEW_LINE> self.child_nodes = [] <NEW_LINE> self.wins = 0 <NEW_LINE> self.visits = 0 <NEW_LINE> self.untried_moves = state.get_moves() <NEW_LINE> self.player_just_moved = state.player_just_moved <NEW_LINE> <DEDENT> def uct_select_child(self): <NEW_LINE> <INDENT> s = sorted(self.child_nodes, key=lambda c: c.wins / c.visits + sqrt( 2 * log(self.visits) / c.visits))[-1] <NEW_LINE> return s <NEW_LINE> <DEDENT> def add_child(self, m, s): <NEW_LINE> <INDENT> n = Node(move=m, parent=self, state=s) <NEW_LINE> self.untried_moves.remove(m) <NEW_LINE> self.child_nodes.append(n) <NEW_LINE> return n <NEW_LINE> <DEDENT> def update(self, result): <NEW_LINE> <INDENT> self.visits += 1 <NEW_LINE> self.wins += result <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '[M:' + str(self.move) + ' W/V:' + str(self.wins) + '/' + str(self.visits) + ' U:' + str(self.untried_moves) + ']' <NEW_LINE> <DEDENT> def tree_to_string(self, indent): <NEW_LINE> <INDENT> s = self.indent_string(indent) + str(self) <NEW_LINE> for c in self.child_nodes: <NEW_LINE> <INDENT> s += c.tree_to_string(indent + 1) <NEW_LINE> <DEDENT> return s <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def indent_string(indent): <NEW_LINE> <INDENT> s = '\n' <NEW_LINE> for i in range(1, indent + 1): <NEW_LINE> <INDENT> s += '| ' <NEW_LINE> <DEDENT> return s <NEW_LINE> <DEDENT> def children_to_string(self): <NEW_LINE> <INDENT> s = '' <NEW_LINE> for c in self.child_nodes: <NEW_LINE> <INDENT> s += str(c) + '\n' <NEW_LINE> <DEDENT> return s | A node in the game tree.
Note wins is always from the viewpoint of playerJustMoved.
Crashes if state not specified. | 6259907267a9b606de54771d |
class Predictor(): <NEW_LINE> <INDENT> def __init__(self, predict_dataset, trained_model, serendipity_dic=None, output=None): <NEW_LINE> <INDENT> self.predict_dataset = predict_dataset <NEW_LINE> f = open(trained_model) <NEW_LINE> self.model = cPickle.load(f) <NEW_LINE> f.close() <NEW_LINE> try: <NEW_LINE> <INDENT> f = open(serendipity_dic) <NEW_LINE> self.serendipity_dic = cPickle.load(f) <NEW_LINE> f.close() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.serendipity_dic = None <NEW_LINE> <DEDENT> self.output = output <NEW_LINE> <DEDENT> def predict(self): <NEW_LINE> <INDENT> (p, p_names, r, r_names) = self.predict_dataset <NEW_LINE> assert p.dtype == 'float32' <NEW_LINE> assert r.dtype == 'float32' <NEW_LINE> y_hat = self.model.predict(p, r) <NEW_LINE> ordering = sorted(range(len(y_hat)), key=lambda x: y_hat[x], reverse=True) <NEW_LINE> p_names = p_names[ordering] <NEW_LINE> r_names = r_names[ordering] <NEW_LINE> y_hat = y_hat[ordering] <NEW_LINE> if self.output is None: <NEW_LINE> <INDENT> print("RBP\ttarget\ty_hat\tserendipity") <NEW_LINE> if self.serendipity_dic is None: <NEW_LINE> <INDENT> for (p_, r_, s_) in izip(p_names, r_names, y_hat): <NEW_LINE> <INDENT> print("%s\t%s\t%.3f\t---" % (p_, r_, s_)) <NEW_LINE> sys.stdout.flush() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for (p_, r_, s_) in izip(p_names, r_names, y_hat): <NEW_LINE> <INDENT> print("%s\t%s\t%.3f\t%.2f" % (p_, r_, s_, get_serendipity_val(self.serendipity_dic, r_))) <NEW_LINE> sys.stdout.flush() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> nf = open(self.output, "w") <NEW_LINE> nf.write("RBP\ttarget\ty_hat\tserendipity\n") <NEW_LINE> if self.serendipity_dic is None: <NEW_LINE> <INDENT> for (p_, r_, s_) in izip(p_names, r_names, y_hat): <NEW_LINE> <INDENT> nf.write("%s\t%s\t%.3f\t---\n" % (p_, r_, s_)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for (p_, r_, s_) in izip(p_names, r_names, y_hat): <NEW_LINE> <INDENT> nf.write("%s\t%s\t%.3f\t%.2f\n" % (p_, r_, s_, get_serendipity_val(self.serendipity_dic, r_))) <NEW_LINE> <DEDENT> <DEDENT> nf.close() | Predict interactions. | 6259907255399d3f05627e0c |
class LoadGraph(unittest.TestCase): <NEW_LINE> <INDENT> def test_a(self): <NEW_LINE> <INDENT> TOWNS_EXPECTED = 5 <NEW_LINE> n = trains.Network() <NEW_LINE> n.load_graph('tests/data/graph1') <NEW_LINE> msg = "expected {0} towns loaded".format(TOWNS_EXPECTED) <NEW_LINE> self.assertEqual(n.towns_count(), TOWNS_EXPECTED, msg) <NEW_LINE> for i in range(ord('A'), ord('E')+1): <NEW_LINE> <INDENT> c = chr(i) <NEW_LINE> self.assertIn(c, n.town_map, "town {0} is loaded".format(c)) <NEW_LINE> <DEDENT> c = 'F' <NEW_LINE> self.assertNotIn(c, n.town_map, "town {0} is NOT loaded".format(c)) <NEW_LINE> <DEDENT> def test_b(self): <NEW_LINE> <INDENT> n = trains.Network() <NEW_LINE> with self.assertRaisesRegexp(trains.LoadError, "not a valid route-definition"): <NEW_LINE> <INDENT> n.load_graph('tests/data/graph2') <NEW_LINE> <DEDENT> <DEDENT> def test_c(self): <NEW_LINE> <INDENT> n = trains.Network() <NEW_LINE> with self.assertRaisesRegexp(trains.LoadError, "route definition already known"): <NEW_LINE> <INDENT> n.load_graph('tests/data/graph3') <NEW_LINE> <DEDENT> <DEDENT> def test_d(self): <NEW_LINE> <INDENT> n = trains.Network() <NEW_LINE> with self.assertRaisesRegexp(trains.LoadError, "doesn't go anywhere"): <NEW_LINE> <INDENT> n.load_graph('tests/data/graph4') | Prove that an input file containing route directives gets loaded and parsed correctly | 6259907201c39578d7f143ae |
class TimeLimit(object): <NEW_LINE> <INDENT> def __init__(self, timeout, env=None, msg=None): <NEW_LINE> <INDENT> self.timeout = timeout <NEW_LINE> self.env = env <NEW_LINE> self.msg = msg <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> signal.signal(signal.SIGALRM, self.handler) <NEW_LINE> signal.setitimer(signal.ITIMER_REAL, self.timeout, 0) <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_value, traceback): <NEW_LINE> <INDENT> signal.setitimer(signal.ITIMER_REAL, 0) <NEW_LINE> signal.signal(signal.SIGALRM, signal.SIG_DFL) <NEW_LINE> <DEDENT> def handler(self, signum, frame): <NEW_LINE> <INDENT> if self.env is not None: <NEW_LINE> <INDENT> self.env.assertTrue(False, message='Timedout %s' % (str(self.msg) if self.msg is not None else 'Error')) <NEW_LINE> <DEDENT> raise Exception('timeout') | A context manager that fires a TimeExpired exception if it does not
return within the specified amount of time. | 6259907299cbb53fe68327dd |
class ExtremeValueCopula(object): <NEW_LINE> <INDENT> def __init__(self, transform): <NEW_LINE> <INDENT> self.transform = transform <NEW_LINE> <DEDENT> def cdf(self, u, args=()): <NEW_LINE> <INDENT> u, v = np.asarray(u).T <NEW_LINE> cdfv = np.exp(np.log(u * v) * self.transform(np.log(u)/np.log(u*v), *args)) <NEW_LINE> return cdfv <NEW_LINE> <DEDENT> def pdf(self, u, args=()): <NEW_LINE> <INDENT> tr = self.transform <NEW_LINE> u1, u2 = np.asarray(u).T <NEW_LINE> log_u12 = np.log(u1 * u2) <NEW_LINE> t = np.log(u1) / log_u12 <NEW_LINE> cdf = self.cdf(u, args) <NEW_LINE> dep = tr(t, *args) <NEW_LINE> d1 = tr.deriv(t, *args) <NEW_LINE> d2 = tr.deriv2(t, *args) <NEW_LINE> pdf_ = cdf / (u1 * u2) * ((dep + (1 - t) * d1) * (dep - t * d1) - d2 * (1 - t) * t / log_u12) <NEW_LINE> return pdf_ <NEW_LINE> <DEDENT> def logpdf(self, u, args=()): <NEW_LINE> <INDENT> return np.log(self.pdf(u, args=args)) <NEW_LINE> <DEDENT> def conditional_2g1(self, u, args=()): <NEW_LINE> <INDENT> raise NotImplementedError | Extreme value copula constructed from Pickand's dependence function.
Currently only bivariate copulas are available.
Parameters
----------
transform: instance of transformation class
Pickand's dependence function with required methods including first
and second derivatives
Notes
-----
Currently the following dependence functions and copulas are available:
- AsymLogistic
- AsymNegLogistic
- AsymMixed
- HR
TEV and AsymBiLogistic currently do not have the required derivatives.
See Also
--------
dep_func_ev | 62599072097d151d1a2c2966 |
class NewTicketView(LoginRequiredMixin, View): <NEW_LINE> <INDENT> template_new_ticket = "ticket/pages/new_ticket.html" <NEW_LINE> def get(self, *args, **kwargs): <NEW_LINE> <INDENT> form = AddTicketForm(initial={"assigned_user": self.request.user}) <NEW_LINE> content = { "form": form, } <NEW_LINE> return render(self.request, self.template_new_ticket, content) <NEW_LINE> <DEDENT> def post(self, *args, **kwargs): <NEW_LINE> <INDENT> form = AddTicketForm(self.request.POST) <NEW_LINE> user = self.request.user <NEW_LINE> recurrences = self.request.POST.get('recurrences', '') <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> instance = form.save(commit=False) <NEW_LINE> instance.creator_user = user <NEW_LINE> instance.recurrences = recurrences <NEW_LINE> if instance.assigned_user == self.request.user: <NEW_LINE> <INDENT> instance.accepted = True <NEW_LINE> <DEDENT> elif instance.assigned_user: <NEW_LINE> <INDENT> instance.dispatcher = user <NEW_LINE> instance.save() <NEW_LINE> send_assigned_notification(user, instance) <NEW_LINE> <DEDENT> instance.save() <NEW_LINE> if instance.assigned_group: <NEW_LINE> <INDENT> send_new_group_ticket(user, instance) <NEW_LINE> <DEDENT> ticket = form.instance <NEW_LINE> messages.success(self.request, _(get_ticket(ticket) + 'wurde erfolgreich erstellt'), extra_tags='safe') <NEW_LINE> create_ticket_log(ticket, user, "add") <NEW_LINE> return HttpResponseRedirect(reverse('dashboard')) <NEW_LINE> <DEDENT> content = { "form": form, "recurrences": recurrences, } <NEW_LINE> messages.error(self.request, _('Ticket konnte nicht erstellt werden')) <NEW_LINE> return render(self.request, self.template_new_ticket, content) | Backend to create a new ticket template in new_ticket.html | 62599072dd821e528d6da5fb |
class UserAsk(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=50, verbose_name='姓名') <NEW_LINE> mobile = models.CharField(max_length=11, verbose_name='手机号') <NEW_LINE> course_name = models.CharField(max_length=50, verbose_name='课程名') <NEW_LINE> add_time = models.DateTimeField(default=datetime.now, verbose_name='添加时间') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = '用户咨询' <NEW_LINE> verbose_name_plural = verbose_name | User inquiry | 625990723346ee7daa3382d9 |
class DebianPackagesStatusParser( parsers.SingleFileParser[rdf_client.SoftwarePackages]): <NEW_LINE> <INDENT> output_types = [rdf_client.SoftwarePackages] <NEW_LINE> supported_artifacts = ["DebianPackagesStatus"] <NEW_LINE> installed_re = re.compile(r"^\w+ \w+ installed$") <NEW_LINE> def __init__(self, deb822): <NEW_LINE> <INDENT> self._deb822 = deb822 <NEW_LINE> <DEDENT> def ParseFile( self, knowledge_base: rdf_client.KnowledgeBase, pathspec: rdf_paths.PathSpec, filedesc: IO[bytes], ) -> Iterator[rdf_client.SoftwarePackages]: <NEW_LINE> <INDENT> del knowledge_base <NEW_LINE> del pathspec <NEW_LINE> packages = [] <NEW_LINE> sw_data = utils.ReadFileBytesAsUnicode(filedesc) <NEW_LINE> try: <NEW_LINE> <INDENT> for pkg in self._deb822.Packages.iter_paragraphs(sw_data.splitlines()): <NEW_LINE> <INDENT> if self.installed_re.match(pkg["Status"]): <NEW_LINE> <INDENT> packages.append( rdf_client.SoftwarePackage( name=pkg["Package"], description=pkg["Description"], version=pkg["Version"], architecture=pkg["Architecture"], publisher=pkg["Maintainer"], install_state="INSTALLED")) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except SystemError: <NEW_LINE> <INDENT> yield rdf_anomaly.Anomaly( type="PARSER_ANOMALY", symptom="Invalid dpkg status file") <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if packages: <NEW_LINE> <INDENT> yield rdf_client.SoftwarePackages(packages=packages) | Parser for /var/lib/dpkg/status. Yields SoftwarePackage semantic values. | 62599072435de62698e9d6fa |
class ProblemResetStatement(ProblemStatement): <NEW_LINE> <INDENT> def get_verb(self, event): <NEW_LINE> <INDENT> return Verb( id=constants.XAPI_VERB_INITIALIZED, display=LanguageMap({'en': 'reset'}), ) <NEW_LINE> <DEDENT> def get_result(self, event): <NEW_LINE> <INDENT> event_data = self.get_event_data(event) <NEW_LINE> return Result( completion=False, response=json.dumps("[]") ) | Statement for student resetting answer to a problem. | 6259907292d797404e3897d5 |
class TestGETAccountSummarySubscriptionRatePlanType(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testGETAccountSummarySubscriptionRatePlanType(self): <NEW_LINE> <INDENT> pass | GETAccountSummarySubscriptionRatePlanType unit test stubs | 62599072be8e80087fbc0984 |
class TestAuroraSensorSetUp(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.hass = get_test_home_assistant() <NEW_LINE> self.lat = 37.8267 <NEW_LINE> self.lon = -122.423 <NEW_LINE> self.hass.config.latitude = self.lat <NEW_LINE> self.hass.config.longitude = self.lon <NEW_LINE> self.entities = [] <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.hass.stop() <NEW_LINE> <DEDENT> @requests_mock.Mocker() <NEW_LINE> def test_setup_and_initial_state(self, mock_req): <NEW_LINE> <INDENT> uri = re.compile( r"http://services\.swpc\.noaa\.gov/text/aurora-nowcast-map\.txt" ) <NEW_LINE> mock_req.get(uri, text=load_fixture('aurora.txt')) <NEW_LINE> entities = [] <NEW_LINE> def mock_add_entities(new_entities, update_before_add=False): <NEW_LINE> <INDENT> if update_before_add: <NEW_LINE> <INDENT> for entity in new_entities: <NEW_LINE> <INDENT> entity.update() <NEW_LINE> <DEDENT> <DEDENT> for entity in new_entities: <NEW_LINE> <INDENT> entities.append(entity) <NEW_LINE> <DEDENT> <DEDENT> config = { "name": "Test", "forecast_threshold": 75 } <NEW_LINE> aurora.setup_platform(self.hass, config, mock_add_entities) <NEW_LINE> aurora_component = entities[0] <NEW_LINE> assert len(entities) == 1 <NEW_LINE> assert aurora_component.name == "Test" <NEW_LINE> assert aurora_component.device_state_attributes["visibility_level"] == '0' <NEW_LINE> assert aurora_component.device_state_attributes["message"] == "nothing's out" <NEW_LINE> assert not aurora_component.is_on <NEW_LINE> <DEDENT> @requests_mock.Mocker() <NEW_LINE> def test_custom_threshold_works(self, mock_req): <NEW_LINE> <INDENT> uri = re.compile( r"http://services\.swpc\.noaa\.gov/text/aurora-nowcast-map\.txt" ) <NEW_LINE> mock_req.get(uri, text=load_fixture('aurora.txt')) <NEW_LINE> entities = [] <NEW_LINE> def mock_add_entities(new_entities, update_before_add=False): <NEW_LINE> <INDENT> if update_before_add: <NEW_LINE> <INDENT> for entity in new_entities: <NEW_LINE> <INDENT> entity.update() <NEW_LINE> <DEDENT> <DEDENT> for entity in new_entities: <NEW_LINE> <INDENT> entities.append(entity) <NEW_LINE> <DEDENT> <DEDENT> config = { "name": "Test", "forecast_threshold": 1 } <NEW_LINE> self.hass.config.longitude = 5 <NEW_LINE> self.hass.config.latitude = 5 <NEW_LINE> aurora.setup_platform(self.hass, config, mock_add_entities) <NEW_LINE> aurora_component = entities[0] <NEW_LINE> assert aurora_component.aurora_data.visibility_level == '5' <NEW_LINE> assert aurora_component.is_on | Test the aurora platform. | 62599072e5267d203ee6d037 |
class UserProfileManager(BaseUserManager): <NEW_LINE> <INDENT> def create_user(self, email, name, password=None): <NEW_LINE> <INDENT> if not email: <NEW_LINE> <INDENT> raise ValueError('Users must have an email address.') <NEW_LINE> <DEDENT> email = self.normalize_email(email) <NEW_LINE> user = self.model(email=email, name=name) <NEW_LINE> user.set_password(password) <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user <NEW_LINE> <DEDENT> def create_superuser(self, email, name, password): <NEW_LINE> <INDENT> user = self.create_user(email, name, password) <NEW_LINE> user.is_superuser = True <NEW_LINE> user.is_staff = True <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user | Helps Django work with our custom user models | 62599072cc0a2c111447c74b |
class inspectionViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = models.inspection.objects.all() <NEW_LINE> serializer_class = serializers.inspectionSerializer <NEW_LINE> permission_classes = [permissions.IsAuthenticated] | ViewSet for the inspection class | 62599072adb09d7d5dc0be5f |
class StgkStarterApp(Application): <NEW_LINE> <INDENT> def init_app(self): <NEW_LINE> <INDENT> app_payload = self.import_module("app") <NEW_LINE> menu_callback = lambda: app_payload.dialog.show_dialog(self) <NEW_LINE> self.engine.register_command("Show Starter Template App...", menu_callback) | The app entry point. This class is responsible for initializing and tearing down
the application, handling menu registration, etc. | 6259907223849d37ff8529ab |
class ErrorWrongEncoding(ClientError): <NEW_LINE> <INDENT> pass | Invalid encoding from client. | 62599072e76e3b2f99fda2f7 |
class ModelCopySelected(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.__label = "Un moment..." <NEW_LINE> self.startSignal = Signal() <NEW_LINE> self.refreshSignal = Signal() <NEW_LINE> self.finishSignal = Signal() <NEW_LINE> self.NbrJobsSignal = Signal() <NEW_LINE> <DEDENT> def start(self, lstFiles): <NEW_LINE> <INDENT> self.startSignal.emit(self.__label, max(1, len(lstFiles))) <NEW_LINE> if config.Filigrane: <NEW_LINE> <INDENT> filigrane = Signature(config.FiligraneSource) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> filigrane = None <NEW_LINE> <DEDENT> SelectedDir = os.path.join(config.DefaultRepository, config.SelectedDirectory) <NEW_LINE> self.refreshSignal.emit(-1, "copie des fichiers existants") <NEW_LINE> if not os.path.isdir(SelectedDir): fileutils.mkdir(SelectedDir) <NEW_LINE> for day in os.listdir(SelectedDir): <NEW_LINE> <INDENT> for File in os.listdir(os.path.join(SelectedDir, day)): <NEW_LINE> <INDENT> if File.find(config.PagePrefix) == 0: <NEW_LINE> <INDENT> if os.path.isdir(os.path.join(SelectedDir, day, File)): <NEW_LINE> <INDENT> for strImageFile in os.listdir(os.path.join(SelectedDir, day, File)): <NEW_LINE> <INDENT> src = os.path.join(SelectedDir, day, File, strImageFile) <NEW_LINE> dst = os.path.join(SelectedDir, day, strImageFile) <NEW_LINE> if os.path.isfile(src) and not os.path.exists(dst): <NEW_LINE> <INDENT> shutil.move(src, dst) <NEW_LINE> <DEDENT> if (os.path.isdir(src)) and (os.path.split(src)[1] in [config.ScaledImages["Suffix"], config.Thumbnails["Suffix"]]): <NEW_LINE> <INDENT> shutil.rmtree(src) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> globalCount = 0 <NEW_LINE> for File in lstFiles: <NEW_LINE> <INDENT> dest = os.path.join(SelectedDir, File) <NEW_LINE> src = os.path.join(config.DefaultRepository, File) <NEW_LINE> destdir = os.path.dirname(dest) <NEW_LINE> self.refreshSignal.emit(globalCount, File) <NEW_LINE> globalCount += 1 <NEW_LINE> if not os.path.isdir(destdir): <NEW_LINE> <INDENT> fileutils.makedir(destdir) <NEW_LINE> <DEDENT> if not os.path.exists(dest): <NEW_LINE> <INDENT> if filigrane: <NEW_LINE> <INDENT> image = Image.open(src) <NEW_LINE> filigrane.substract(image).save(dest, quality=config.FiligraneQuality, optimize=config.FiligraneOptimize, progressive=config.FiligraneOptimize) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> shutil.copy(src, dest) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> os.chmod(dest, config.DefaultFileMode) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> logger.warning("In ModelCopySelected: unable to chmod %s", dest) <NEW_LINE> <DEDENT> <DEDENT> else : <NEW_LINE> <INDENT> logger.info("In ModelCopySelected: %s already exists", dest) <NEW_LINE> <DEDENT> <DEDENT> AlreadyDone = [] <NEW_LINE> for File in lstFiles: <NEW_LINE> <INDENT> directory = os.path.split(File)[0] <NEW_LINE> if directory in AlreadyDone: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> AlreadyDone.append(directory) <NEW_LINE> dst = os.path.join(SelectedDir, directory, config.CommentFile) <NEW_LINE> src = os.path.join(config.DefaultRepository, directory, config.CommentFile) <NEW_LINE> if os.path.isfile(src): <NEW_LINE> <INDENT> shutil.copy(src, dst) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.finishSignal.emit() | MVC implementation of the copySelected procedure | 6259907299cbb53fe68327de |
class TestMemoryRead: <NEW_LINE> <INDENT> def test_calculated_length(self): <NEW_LINE> <INDENT> payload = MemoryRead() <NEW_LINE> assert payload.calculated_length() == 3 <NEW_LINE> <DEDENT> def test_from_knx(self): <NEW_LINE> <INDENT> payload = MemoryRead() <NEW_LINE> payload.from_knx(bytes([0x02, 0x0B, 0x12, 0x34])) <NEW_LINE> assert payload == MemoryRead(address=0x1234, count=11) <NEW_LINE> <DEDENT> def test_to_knx(self): <NEW_LINE> <INDENT> payload = MemoryRead(address=0x1234, count=11) <NEW_LINE> assert payload.to_knx() == bytes([0x02, 0x0B, 0x12, 0x34]) <NEW_LINE> <DEDENT> def test_to_knx_conversion_error(self): <NEW_LINE> <INDENT> payload = MemoryRead(address=0xAABBCCDD, count=11) <NEW_LINE> with pytest.raises(ConversionError, match=r".*Address.*"): <NEW_LINE> <INDENT> payload.to_knx() <NEW_LINE> <DEDENT> payload = MemoryRead(address=0x1234, count=255) <NEW_LINE> with pytest.raises(ConversionError, match=r".*Count.*"): <NEW_LINE> <INDENT> payload.to_knx() <NEW_LINE> <DEDENT> <DEDENT> def test_str(self): <NEW_LINE> <INDENT> payload = MemoryRead(address=0x1234, count=11) <NEW_LINE> assert str(payload) == '<MemoryRead address="0x1234" count="11" />' | Test class for MemoryRead objects. | 62599072fff4ab517ebcf10f |
class Akai(protocol_base.IrProtocolBase): <NEW_LINE> <INDENT> irp = '{38k,289,lsb}<1,-2.6|1,-6.3>(D:3,F:7,1,^25.3m)*' <NEW_LINE> frequency = 38000 <NEW_LINE> bit_count = 10 <NEW_LINE> encoding = 'lsb' <NEW_LINE> _lead_out = [TIMING, 25300] <NEW_LINE> _bursts = [ [TIMING, -int(round(TIMING * 2.6))], [TIMING, -int(round(TIMING * 6.3))] ] <NEW_LINE> _code_order = [ ['D', 3], ['F', 7] ] <NEW_LINE> _parameters = [ ['D', 0, 2], ['F', 3, 9], ] <NEW_LINE> encode_parameters = [ ['device', 0, 7], ['function', 0, 127], ] <NEW_LINE> def encode( self, device: int, function: int, repeat_count: int = 0 ) -> protocol_base.IRCode: <NEW_LINE> <INDENT> packet = self._build_packet( D=device, F=function ) <NEW_LINE> params = dict( frequency=self.frequency, D=device, F=function ) <NEW_LINE> code = protocol_base.IRCode( self, packet[:] * (repeat_count + 1), [packet[:]] * (repeat_count + 1), params, repeat_count ) <NEW_LINE> return code | IR decoder for the Akai protocol. | 62599072f9cc0f698b1c5f45 |
class JDoodle(JDoodleWrapper): <NEW_LINE> <INDENT> def __init__(self, bot): <NEW_LINE> <INDENT> super().__init__(bot) <NEW_LINE> <DEDENT> async def __error(self, ctx, error): <NEW_LINE> <INDENT> if isinstance(error, (commands.BadArgument, JDoodleRequestFailedError)): <NEW_LINE> <INDENT> await ctx.send(error) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> async def send_embed(ctx, lang, emote, result): <NEW_LINE> <INDENT> title = f'Code evaluation: {lang.capitalize()} {emote}' <NEW_LINE> if not result.memory: <NEW_LINE> <INDENT> result.memory = 0 <NEW_LINE> <DEDENT> if not result.cpu_time: <NEW_LINE> <INDENT> result.cpu_time = 0.00 <NEW_LINE> <DEDENT> embed = (discord.Embed(title=title, description='━━━━━━━━━━━━━━━━━━━', color=random_color()) .add_field(name='Memory usage:', value=f'{int(result.memory) / 1000}mb') .add_field(name='Evaluated in:', value=f'{result.cpu_time}ms') .add_field(name='Output:', value=f'```\n{truncate(result.output, 1013, "...")}```', inline=False) .set_footer(text='Evaluated using the JDoodle API.', icon_url='https://bit.ly/2CvWRiA')) <NEW_LINE> await ctx.send(embed=embed) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def clean_code(code): <NEW_LINE> <INDENT> if code.startswith('```') and code.endswith('```'): <NEW_LINE> <INDENT> base = code.split('\n') <NEW_LINE> return [base[0].strip('```'), base[1:-1]] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise commands.BadArgument('You need to use multiline codeblocks.') <NEW_LINE> <DEDENT> <DEDENT> @commands.command(name='exec', aliases=['execute', 'run', 'debug']) <NEW_LINE> @commands.cooldown(1, 60.0, commands.BucketType.user) <NEW_LINE> async def _execute(self, ctx, *, code: JDoodleCode): <NEW_LINE> <INDENT> body = self.clean_code(code) <NEW_LINE> try: <NEW_LINE> <INDENT> language, version, emoji = languages[body[0]] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return await ctx.send('Unsupported language!') <NEW_LINE> <DEDENT> to_eval = '\n'.join(body[1]) <NEW_LINE> await ctx.trigger_typing() <NEW_LINE> result = await self._request('execute', script=to_eval, language=language, version_index=version) <NEW_LINE> await self.send_embed(ctx, language, emoji, result) <NEW_LINE> <DEDENT> @commands.command(name='jdoodle') <NEW_LINE> @commands.is_owner() <NEW_LINE> async def _jdoodle(self, ctx): <NEW_LINE> <INDENT> result = await self._request('credit-spent') <NEW_LINE> embed = (discord.Embed(description=f'The bot made **{result.used}** requests, **{200 - result.used}** remaining.', color=random_color()) .set_author(name='JDoodle API requests', icon_url=ctx.author.avatar_url)) <NEW_LINE> await ctx.send(embed=embed) | Commands to interact with the JDoodle compiler API. | 62599072f548e778e596ce83 |
@tf_export( "initializers.uniform_unit_scaling", v1=[ "initializers.uniform_unit_scaling", "uniform_unit_scaling_initializer" ]) <NEW_LINE> @deprecation.deprecated_endpoints("uniform_unit_scaling_initializer") <NEW_LINE> class UniformUnitScaling(Initializer): <NEW_LINE> <INDENT> @deprecated(None, "Use tf.initializers.variance_scaling instead with distribution=" "uniform to get equivalent behavior.") <NEW_LINE> def __init__(self, factor=1.0, seed=None, dtype=dtypes.float32): <NEW_LINE> <INDENT> self.factor = factor <NEW_LINE> self.seed = seed <NEW_LINE> self.dtype = _assert_float_dtype(dtypes.as_dtype(dtype)) <NEW_LINE> <DEDENT> def __call__(self, shape, dtype=None, partition_info=None): <NEW_LINE> <INDENT> if dtype is None: <NEW_LINE> <INDENT> dtype = self.dtype <NEW_LINE> <DEDENT> scale_shape = shape <NEW_LINE> if partition_info is not None: <NEW_LINE> <INDENT> scale_shape = partition_info.full_shape <NEW_LINE> <DEDENT> input_size = 1.0 <NEW_LINE> for dim in scale_shape[:-1]: <NEW_LINE> <INDENT> input_size *= float(dim) <NEW_LINE> <DEDENT> input_size = max(input_size, 1.0) <NEW_LINE> max_val = math.sqrt(3 / input_size) * self.factor <NEW_LINE> return random_ops.random_uniform( shape, -max_val, max_val, dtype, seed=self.seed) <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> return {"factor": self.factor, "seed": self.seed, "dtype": self.dtype.name} | Initializer that generates tensors without scaling variance.
When initializing a deep network, it is in principle advantageous to keep
the scale of the input variance constant, so it does not explode or diminish
by reaching the final layer. If the input is `x` and the operation `x * W`,
and we want to initialize `W` uniformly at random, we need to pick `W` from
[-sqrt(3) / sqrt(dim), sqrt(3) / sqrt(dim)]
to keep the scale intact, where `dim = W.shape[0]` (the size of the input).
A similar calculation for convolutional networks gives an analogous result
with `dim` equal to the product of the first 3 dimensions. When
nonlinearities are present, we need to multiply this by a constant `factor`.
See (Sussillo et al., 2014) for deeper motivation, experiments
and the calculation of constants. In section 2.3 there, the constants were
numerically computed: for a linear layer it's 1.0, relu: ~1.43, tanh: ~1.15.
Args:
factor: Float. A multiplicative factor by which the values will be scaled.
seed: A Python integer. Used to create random seeds. See
`tf.set_random_seed`
for behavior.
dtype: The data type. Only floating point types are supported.
References:
[Sussillo et al., 2014](https://arxiv.org/abs/1412.6558)
([pdf](http://arxiv.org/pdf/1412.6558.pdf)) | 6259907297e22403b383c7f8 |
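A minimal NumPy sketch of the bound described in the docstring above (the function name and the NumPy dependency are assumptions, not part of the dataset row): max_val = sqrt(3 / dim) * factor, with dim the product of all but the last dimension.

    import numpy as np

    def uniform_unit_scaling(shape, factor=1.0, seed=None):
        # dim is the product of all but the last dimension, as described above
        dim = max(1.0, float(np.prod(shape[:-1])))
        max_val = np.sqrt(3.0 / dim) * factor
        rng = np.random.default_rng(seed)
        return rng.uniform(-max_val, max_val, size=shape)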
class SeriesPieValid(Validted): <NEW_LINE> <INDENT> def validte(self, instance, value): <NEW_LINE> <INDENT> if isinstance(value, list): <NEW_LINE> <INDENT> __temp = [] <NEW_LINE> for serie in value: <NEW_LINE> <INDENT> if serie.__class__.__name__ == "SeriesPie": <NEW_LINE> <INDENT> __temp.append(eval(str(serie))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("series pie属性必须是SeriesPie的实例") <NEW_LINE> <DEDENT> <DEDENT> return __temp <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("series pie属性必须是列表类型") | Validate that the pie chart's series attribute is correct | 6259907260cbc95b063659e8 |
class Plan: <NEW_LINE> <INDENT> def __init__(self, security, barSize, fileName): <NEW_LINE> <INDENT> self.str_security = str(security) <NEW_LINE> self.barSize = barSize <NEW_LINE> self.fullFilePath = fileName <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.str_security + ' ' + self.barSize + ' ' + self.fullFilePath | Each plan contains the loading setup of each security | 6259907266673b3332c31cf4 |
class GenericX86HyperVNoWifi(GenericX86NoWifi): <NEW_LINE> <INDENT> identifier = 'generic-x86hyperv-nowifi' <NEW_LINE> name = "x86 Hyper-V (no wireless)" <NEW_LINE> architecture = 'x86_hyperv' <NEW_LINE> profiles = { 'openwrt': { 'name': 'Generic', 'files': [ 'openwrt-x86-generic-combined-ext4.img.gz' ] } } | Generic x86 Hyper-V device descriptor. | 62599072ad47b63b2c5a9143 |
class Area(BaseModel, db.Model): <NEW_LINE> <INDENT> __tablename__ = 'ihome_area' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> name = db.Column(db.String(32), nullable=False) <NEW_LINE> houses = db.relationship('House', backref='area') <NEW_LINE> def to_dict(self): <NEW_LINE> <INDENT> return { 'id': self.id, 'name': self.name } | Area (region) | 6259907271ff763f4b5e909e |
class clear_log(ProtectedPage): <NEW_LINE> <INDENT> def GET(self): <NEW_LINE> <INDENT> with io.open(u"./data/log.json", u"w") as f: <NEW_LINE> <INDENT> f.write(u"") <NEW_LINE> <DEDENT> raise web.seeother(u"/vl") | Delete all log records | 625990725fdd1c0f98e5f87d |
class Network: <NEW_LINE> <INDENT> network_range, addresses, mask_length = {}, 0, 0 <NEW_LINE> routers = None <NEW_LINE> uid, name = -1, None <NEW_LINE> def __init__(self, starting_ip, mask, uid, name=None): <NEW_LINE> <INDENT> inst_ = IPv4Network().init_from_couple(starting_ip, mask) <NEW_LINE> self.uid = uid <NEW_LINE> self.name = name if name else None <NEW_LINE> self.cidr = f"{starting_ip}/{inst_.mask_length}" <NEW_LINE> self.routers = {} <NEW_LINE> self.network_range = inst_.network_range <NEW_LINE> self.mask_length = inst_.mask_length <NEW_LINE> self.addresses = inst_.addresses <NEW_LINE> <DEDENT> def connect(self, router_uid, router_ip): <NEW_LINE> <INDENT> self.routers[router_uid] = router_ip <NEW_LINE> <DEDENT> def disconnect(self, router_uid): <NEW_LINE> <INDENT> if router_uid in self.routers: <NEW_LINE> <INDENT> del self.routers[router_uid] | The virtual subnetwork class
Used to create virtual subnetworks and link them with routers | 62599072627d3e7fe0e0877d |
class MarketDataAveragePriceField(Base): <NEW_LINE> <INDENT> _fields_ = [ ('AveragePrice', ctypes.c_double), ] <NEW_LINE> def __init__(self, AveragePrice=0.0): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.AveragePrice = float(AveragePrice) | Average traded price | 6259907292d797404e3897d6 |
class SqlAlchemyTask(Task): <NEW_LINE> <INDENT> abstract = True <NEW_LINE> def after_return(self, *args, **kwargs): <NEW_LINE> <INDENT> db_session.remove() <NEW_LINE> super(SqlAlchemyTask, self).after_return(*args, **kwargs) | An abstract Celery Task that ensures that the connection to the
database is closed on task completion | 625990724a966d76dd5f07e0 |
class BaLayerNorm(nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_size, learnable=True, epsilon=1e-5): <NEW_LINE> <INDENT> super(BaLayerNorm, self).__init__() <NEW_LINE> self.input_size = input_size <NEW_LINE> self.learnable = learnable <NEW_LINE> self.epsilon = epsilon <NEW_LINE> self.alpha = T(1, input_size).fill_(0) <NEW_LINE> self.beta = T(1, input_size).fill_(0) <NEW_LINE> if learnable: <NEW_LINE> <INDENT> W = P <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> W = V <NEW_LINE> <DEDENT> self.alpha = W(self.alpha) <NEW_LINE> self.beta = W(self.beta) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> size = x.size() <NEW_LINE> x = x.view(x.size(0), -1) <NEW_LINE> mean = th.mean(x, 1).expand_as(x) <NEW_LINE> center = x - mean <NEW_LINE> std = th.sqrt(th.mean(th.square(center), 1)).expand_as(x) <NEW_LINE> output = center / (std + self.epsilon) <NEW_LINE> if self.learnable: <NEW_LINE> <INDENT> output = self.alpha * output + self.beta <NEW_LINE> <DEDENT> return output.view(size) | Layer Normalization based on Ba & al.:
'Layer Normalization'
https://arxiv.org/pdf/1607.06450.pdf
This implementation mimics the original torch implementation at:
https://github.com/ryankiros/layer-norm/blob/master/torch_modules/LayerNormalization.lua | 62599072442bda511e95d9d2 |
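For reference, a hedged NumPy sketch of the layer-norm computation performed by the class above (flatten per sample, subtract the mean, divide by the standard deviation plus epsilon, then optionally scale and shift):

    import numpy as np

    def layer_norm(x, alpha=None, beta=None, epsilon=1e-5):
        # x: (batch, features); each row is normalized independently
        mean = x.mean(axis=1, keepdims=True)
        center = x - mean
        std = np.sqrt((center ** 2).mean(axis=1, keepdims=True))
        out = center / (std + epsilon)
        if alpha is not None:   # learnable gain
            out = alpha * out
        if beta is not None:    # learnable bias
            out = out + beta
        return out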
class PunktSentenceTokenizer(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.trained = nltk.data.load('tokenizers/punkt/english.pickle') <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise RuntimeError("Could not train the Punkt sentence tokenizer") <NEW_LINE> <DEDENT> <DEDENT> def tokenize(self, value): <NEW_LINE> <INDENT> return self.trained.tokenize(value) | A trained PunktSentenceTokenizer
| 625990724e4d562566373cfd |
class TestContact(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testContact(self): <NEW_LINE> <INDENT> pass | Contact unit test stubs | 625990724428ac0f6e659e2b |
class MAVLink_gopro_set_response_message(MAVLink_message): <NEW_LINE> <INDENT> id = MAVLINK_MSG_ID_GOPRO_SET_RESPONSE <NEW_LINE> name = 'GOPRO_SET_RESPONSE' <NEW_LINE> fieldnames = ['cmd_id', 'status'] <NEW_LINE> ordered_fieldnames = [ 'cmd_id', 'status' ] <NEW_LINE> format = '<BB' <NEW_LINE> native_format = bytearray('<BB', 'ascii') <NEW_LINE> orders = [0, 1] <NEW_LINE> lengths = [1, 1] <NEW_LINE> array_lengths = [0, 0] <NEW_LINE> crc_extra = 162 <NEW_LINE> def __init__(self, cmd_id, status): <NEW_LINE> <INDENT> MAVLink_message.__init__(self, MAVLink_gopro_set_response_message.id, MAVLink_gopro_set_response_message.name) <NEW_LINE> self._fieldnames = MAVLink_gopro_set_response_message.fieldnames <NEW_LINE> self.cmd_id = cmd_id <NEW_LINE> self.status = status <NEW_LINE> <DEDENT> def pack(self, mav, force_mavlink1=False): <NEW_LINE> <INDENT> return MAVLink_message.pack(self, mav, 162, struct.pack('<BB', self.cmd_id, self.status), force_mavlink1=force_mavlink1) | Response from a GOPRO_COMMAND set request | 62599072bf627c535bcb2dc3 |
class IUUpdateFailedError(IpaacaError): <NEW_LINE> <INDENT> def __init__(self, iu): <NEW_LINE> <INDENT> super(IUUpdateFailedError, self).__init__('Remote update failed for IU ' + str(iu.uid) + '.') | Error indicating that a remote IU update failed. | 625990721f5feb6acb1644ea |
class LoggerWriter(object): <NEW_LINE> <INDENT> def __init__(self, level): <NEW_LINE> <INDENT> self.level = level <NEW_LINE> <DEDENT> def write(self, message): <NEW_LINE> <INDENT> msg = message.strip() <NEW_LINE> if msg != "\n": <NEW_LINE> <INDENT> self.level(msg) <NEW_LINE> <DEDENT> <DEDENT> def flush(self): <NEW_LINE> <INDENT> pass | Python logger that looks and acts like a file.
| 62599072a219f33f346c8101 |
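A short usage sketch for the file-like logger above (the logger name is illustrative): anything written to sys.stdout is forwarded to the logging call passed as `level`.

    import logging, sys

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger("app")          # illustrative logger name
    sys.stdout = LoggerWriter(log.info)     # print() output now goes through the logger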
class MD5PasswordHasher(BasePasswordHasher): <NEW_LINE> <INDENT> algorithm = "md5" <NEW_LINE> def encode(self, password, salt): <NEW_LINE> <INDENT> assert password <NEW_LINE> assert salt and '$' not in salt <NEW_LINE> hash = hashlib.md5(force_bytes(salt + password)).hexdigest() <NEW_LINE> return "%s$%s$%s" % (self.algorithm, salt, hash) <NEW_LINE> <DEDENT> def verify(self, password, encoded): <NEW_LINE> <INDENT> algorithm, salt, hash = encoded.split('$', 2) <NEW_LINE> assert algorithm == self.algorithm <NEW_LINE> encoded_2 = self.encode(password, salt) <NEW_LINE> return constant_time_compare(encoded, encoded_2) <NEW_LINE> <DEDENT> def safe_summary(self, encoded): <NEW_LINE> <INDENT> algorithm, salt, hash = encoded.split('$', 2) <NEW_LINE> assert algorithm == self.algorithm <NEW_LINE> return SortedDict([ (_('algorithm'), algorithm), (_('salt'), mask_hash(salt, show=2)), (_('hash'), mask_hash(hash)), ]) | The Salted MD5 password hashing algorithm (not recommended) | 62599072fff4ab517ebcf111 |
class Processor(object): <NEW_LINE> <INDENT> def __init__(self, graph): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def query(self, strOrQuery, initBindings={}, initNs={}, DEBUG=False): <NEW_LINE> <INDENT> pass | Query plugin interface.
This module is useful for those wanting to write a query processor
that can plug in to rdflib. If you want to execute a query, you
likely want to do so through the Graph class query method. | 625990727d43ff248742808e |
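As the docstring notes, queries are normally executed through the Graph class rather than by instantiating a Processor directly; a hedged rdflib usage sketch (the data URL is hypothetical):

    from rdflib import Graph

    g = Graph()
    g.parse("http://example.org/data.ttl")              # hypothetical source
    for row in g.query("SELECT ?s WHERE { ?s ?p ?o } LIMIT 5"):
        print(row.s)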
class ASTAssignmentExpr(ASTNode): <NEW_LINE> <INDENT> def __init__(self,ID,expression): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.children = [ASTID(ID), expression] <NEW_LINE> <DEDENT> @property <NEW_LINE> def binding(self): <NEW_LINE> <INDENT> return self.children[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self.children[1] | A class that takes an ID to assign to and an expression to be assigned as its arguments
and defines an assignment expression node in an abstract syntax tree
Parameters
----------
ID: string
expression: an AST node
Returns
-------
__init__(ID,expression): None
returns nothing, but sets the children of ASTAssignmentExpr to ASTID(ID) and expression
binding: self.children[0]
returns the ASTID node representing the variable to be assigned to
value: self.children[1]
returns the AST node representing the value of the variable
Examples
--------
>>> a = ASTAssignmentExpr('var1',ASTNode())
>>> a.binding.__class__.__name__
'ASTID'
>>> a.value.__class__.__name__
'ASTNode' | 625990724e4d562566373cfe |
class Solution: <NEW_LINE> <INDENT> def checkPerfectNumber(self, num): <NEW_LINE> <INDENT> if not num: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> numSum = 1 <NEW_LINE> i = 2 <NEW_LINE> while i * i <= num: <NEW_LINE> <INDENT> if num % i == 0: <NEW_LINE> <INDENT> numSum += i <NEW_LINE> numSum += num // i <NEW_LINE> <DEDENT> if numSum == num: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif numSum > num: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> i += 1 <NEW_LINE> <DEDENT> return False | @param num: an integer
@return: returns true when it is a perfect number and false when it is not | 625990724f88993c371f119c |
class CliAgentRunErrorPredicate(jp.ValuePredicate): <NEW_LINE> <INDENT> def __init__(self, title, error_regex): <NEW_LINE> <INDENT> super(CliAgentRunErrorPredicate, self).__init__() <NEW_LINE> self.__title = title <NEW_LINE> self.__error_regex = error_regex <NEW_LINE> <DEDENT> def export_to_json_snapshot(self, snapshot, entity): <NEW_LINE> <INDENT> entity.add_metadata('_title', self.__title) <NEW_LINE> snapshot.edge_builder.make(entity, 'Title', self.__title) <NEW_LINE> snapshot.edge_builder.make_control(entity, 'Regex', self.__error_regex) <NEW_LINE> <DEDENT> def __call__(self, context, value): <NEW_LINE> <INDENT> if not isinstance(value, CliAgentRunError): <NEW_LINE> <INDENT> return jp.JsonError('Expected program to fail, but it did not') <NEW_LINE> <DEDENT> ok = value.match_regex(self.__error_regex) <NEW_LINE> return jp.PredicateResult(ok is not None) | A predicate that expects a specific CliAgentRunError. | 625990725166f23b2e244ccb |
class ProgressLogger(logging.Logger): <NEW_LINE> <INDENT> def __init__(self, name='Progress Log', level=logging.INFO, format='%(asctime)s\t%(levelname)s\t%(message)s', dateformat='%H:%M:%S', logToConsole=False, logToFile=False, maxprogress=100, logfile=None, mode='w', callback=lambda:None): <NEW_LINE> <INDENT> self._logfile=logfile <NEW_LINE> self.mode=mode <NEW_LINE> self.level=level <NEW_LINE> self.dateformat=dateformat <NEW_LINE> self.logging=True <NEW_LINE> logging.Logger.__init__(self,name,level=level-1) <NEW_LINE> self.format = logging.Formatter(format, dateformat) <NEW_LINE> if logToConsole: <NEW_LINE> <INDENT> ch = logging.StreamHandler() <NEW_LINE> ch.setLevel(level) <NEW_LINE> ch.setFormatter(self.format) <NEW_LINE> self.addHandler(ch) <NEW_LINE> <DEDENT> if logToFile: <NEW_LINE> <INDENT> if logfile: <NEW_LINE> <INDENT> fh = logging.FileHandler(logfile, mode=mode) <NEW_LINE> fh.setLevel(level) <NEW_LINE> fh.setFormatter(self.format) <NEW_LINE> self.addHandler(fh) <NEW_LINE> <DEDENT> <DEDENT> warnings.simplefilter('always') <NEW_LINE> warnings.showwarning = self.showwarning <NEW_LINE> <DEDENT> def showwarning(self, msg, cat, fname, lno, file=None, line=None): <NEW_LINE> <INDENT> self.warn(msg) <NEW_LINE> <DEDENT> def shutdown(self): <NEW_LINE> <INDENT> self.logging=False <NEW_LINE> for h in self.handlers: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> h.flush() <NEW_LINE> h.close() <NEW_LINE> <DEDENT> except:pass <NEW_LINE> <DEDENT> <DEDENT> def __classproperty__(fcn): <NEW_LINE> <INDENT> try:return property( **fcn() ) <NEW_LINE> except:pass <NEW_LINE> <DEDENT> @__classproperty__ <NEW_LINE> def logfile(): <NEW_LINE> <INDENT> def fget(self): <NEW_LINE> <INDENT> return self._logfile <NEW_LINE> <DEDENT> def fset(self, *args, **kwargs): <NEW_LINE> <INDENT> self._logfile=args[0] <NEW_LINE> for h in self.handlers: <NEW_LINE> <INDENT> if isinstance(h, logging.FileHandler): <NEW_LINE> <INDENT> h.flush() <NEW_LINE> h.close() <NEW_LINE> self.removeHandler(h) <NEW_LINE> fh = logging.FileHandler(self._logfile, mode=self.mode) <NEW_LINE> fh.setLevel(self.level) <NEW_LINE> fh.setFormatter(self.format) <NEW_LINE> self.addHandler(fh) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def fdel(self):pass <NEW_LINE> return locals() | Provide logger interface | 6259907299cbb53fe68327e1 |
class DeletePackageException(Exception): <NEW_LINE> <INDENT> def __init__(self, details=None): <NEW_LINE> <INDENT> self.message = "An error occurred during package deletion." <NEW_LINE> self.details = details <NEW_LINE> display = self.details <NEW_LINE> if display is None: <NEW_LINE> <INDENT> display = self.message <NEW_LINE> <DEDENT> super().__init__(display) | Exception raised during package deletion.
| 6259907297e22403b383c7fa |
class ExternalRequestFailed(Exception): <NEW_LINE> <INDENT> pass | Raised when a call to an external service fails. | 62599072fff4ab517ebcf112 |
class PresignedUrlProvider(object): <NEW_LINE> <INDENT> request: DownloadRequest <NEW_LINE> _cached_info: PresignedUrlInfo <NEW_LINE> _TIME_BUFFER: datetime.timedelta = datetime.timedelta(seconds=5) <NEW_LINE> def __init__(self, client, request: DownloadRequest): <NEW_LINE> <INDENT> self.client = client <NEW_LINE> self.request: DownloadRequest = request <NEW_LINE> self._cached_info: PresignedUrlInfo = self._get_pre_signed_info() <NEW_LINE> self._lock = _threading.Lock() <NEW_LINE> <DEDENT> def get_info(self) -> PresignedUrlInfo: <NEW_LINE> <INDENT> with self._lock: <NEW_LINE> <INDENT> if datetime.datetime.utcnow() + PresignedUrlProvider._TIME_BUFFER >= self._cached_info.expiration_utc: <NEW_LINE> <INDENT> self._cached_info = self._get_pre_signed_info() <NEW_LINE> <DEDENT> return self._cached_info <NEW_LINE> <DEDENT> <DEDENT> def _get_pre_signed_info(self) -> PresignedUrlInfo: <NEW_LINE> <INDENT> response = self.client._getFileHandleDownload( self.request.file_handle_id, self.request.object_id, objectType=self.request.object_type, ) <NEW_LINE> file_name = response["fileHandle"]["fileName"] <NEW_LINE> pre_signed_url = response["preSignedURL"] <NEW_LINE> return PresignedUrlInfo(file_name, pre_signed_url, _pre_signed_url_expiration_time(pre_signed_url)) | Provides an unexpired pre-signed URL to download a file
class delete_with_version_args: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRING, 'url', None, None, ), (2, TType.BYTE, 'version', None, None, ), ) <NEW_LINE> def __init__(self, url=None, version=None,): <NEW_LINE> <INDENT> self.url = url <NEW_LINE> self.version = version <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.url = iprot.readString() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.BYTE: <NEW_LINE> <INDENT> self.version = iprot.readByte() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('delete_with_version_args') <NEW_LINE> if self.url is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('url', TType.STRING, 1) <NEW_LINE> oprot.writeString(self.url) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.version is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('version', TType.BYTE, 2) <NEW_LINE> oprot.writeByte(self.version) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.url) <NEW_LINE> value = (value * 31) ^ hash(self.version) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- url
- version | 6259907291f36d47f2231b0a |
class ForeignKeyField(models.ForeignKey): <NEW_LINE> <INDENT> parent_fields = None <NEW_LINE> title_field = None <NEW_LINE> dialog_title = None <NEW_LINE> show_simple = False <NEW_LINE> def __init__(self, to, to_field=None, rel_class=models.ManyToOneRel, **kwargs): <NEW_LINE> <INDENT> self.parent_fields = kwargs.pop('parents') if kwargs.has_key('parents') else None <NEW_LINE> self.title_field = kwargs.pop('title_field') if kwargs.has_key('title_field') else None <NEW_LINE> self.dialog_title = kwargs.pop('dialog_title') if kwargs.has_key('dialog_title') else None <NEW_LINE> self.show_simple = kwargs.pop('show_simple') if kwargs.has_key('show_simple') else None <NEW_LINE> super(ForeignKeyField, self).__init__(to, to_field, rel_class, **kwargs) <NEW_LINE> <DEDENT> def formfield(self, **kwargs): <NEW_LINE> <INDENT> content_type = ContentType.objects.get_for_model(self.model) <NEW_LINE> field = self.name <NEW_LINE> if self.parent_fields or is_treebeard(self.rel.to): <NEW_LINE> <INDENT> kwargs['widget'] = ForeignKeyTreeWidget(content_type=content_type, field=field) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> kwargs['widget'] = ForeignKeyListWidget(content_type=content_type, field=field) <NEW_LINE> <DEDENT> return super(ForeignKeyField, self).formfield(**kwargs) | Selection field for a ForeignKey
Selection can be done in a tree if parent fields are specified in parent_fields
title_field is needed for filtering by characters | 625990721f037a2d8b9e54e6 |
class XenAPISRSelectionTestCase(test.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(XenAPISRSelectionTestCase, self).setUp() <NEW_LINE> xenapi_fake.reset() <NEW_LINE> <DEDENT> def test_safe_find_sr_raise_exception(self): <NEW_LINE> <INDENT> self.flags(sr_matching_filter='yadayadayada') <NEW_LINE> stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests) <NEW_LINE> session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass') <NEW_LINE> self.assertRaises(exception.StorageRepositoryNotFound, vm_utils.safe_find_sr, session) <NEW_LINE> <DEDENT> def test_safe_find_sr_local_storage(self): <NEW_LINE> <INDENT> self.flags(sr_matching_filter='other-config:i18n-key=local-storage') <NEW_LINE> stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests) <NEW_LINE> session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass') <NEW_LINE> host_ref = xenapi_fake.get_all('host')[0] <NEW_LINE> local_sr = xenapi_fake.create_sr( name_label='Fake Storage', type='lvm', other_config={'i18n-original-value-name_label': 'Local storage', 'i18n-key': 'local-storage'}, host_ref=host_ref) <NEW_LINE> expected = vm_utils.safe_find_sr(session) <NEW_LINE> self.assertEqual(local_sr, expected) <NEW_LINE> <DEDENT> def test_safe_find_sr_by_other_criteria(self): <NEW_LINE> <INDENT> self.flags(sr_matching_filter='other-config:my_fake_sr=true') <NEW_LINE> stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests) <NEW_LINE> session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass') <NEW_LINE> host_ref = xenapi_fake.get_all('host')[0] <NEW_LINE> local_sr = xenapi_fake.create_sr(name_label='Fake Storage', type='lvm', other_config={'my_fake_sr': 'true'}, host_ref=host_ref) <NEW_LINE> expected = vm_utils.safe_find_sr(session) <NEW_LINE> self.assertEqual(local_sr, expected) <NEW_LINE> <DEDENT> def test_safe_find_sr_default(self): <NEW_LINE> <INDENT> self.flags(sr_matching_filter='default-sr:true') <NEW_LINE> stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests) <NEW_LINE> session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass') <NEW_LINE> pool_ref = xenapi_fake.create_pool('') <NEW_LINE> expected = vm_utils.safe_find_sr(session) <NEW_LINE> self.assertEqual(session.call_xenapi('pool.get_default_SR', pool_ref), expected) | Unit tests for testing we find the right SR. | 6259907238b623060ffaa4d0 |
class MainWindow(QtGui.QDialog): <NEW_LINE> <INDENT> def __init__(self, parent, win_parent=None): <NEW_LINE> <INDENT> QtGui.QDialog.__init__(self, win_parent) <NEW_LINE> self.parent = parent <NEW_LINE> self.ui = Ui_dlgMain() <NEW_LINE> self.ui.setupUi(self) <NEW_LINE> QtCore.QObject.connect(self.ui.btnNext, QtCore.SIGNAL("clicked()"), self.__click_btn_next) <NEW_LINE> QtCore.QObject.connect(self.ui.btnPrev, QtCore.SIGNAL("clicked()"), self.__click_btn_prev) <NEW_LINE> QtCore.QObject.connect(self.ui.btnLaunch, QtCore.SIGNAL("clicked()"), self.__click_btn_launch) <NEW_LINE> QtCore.QObject.connect(self.ui.btnAddGame, QtCore.SIGNAL("clicked()"), self.__click_btn_add_game) <NEW_LINE> self.game_pointer = 0 <NEW_LINE> self.games = ["Home"] <NEW_LINE> self.__refresh_games() <NEW_LINE> <DEDENT> def __refresh_games(self): <NEW_LINE> <INDENT> self.games = ["Home"] <NEW_LINE> path = "../../../object_files" <NEW_LINE> for f in os.listdir(path): <NEW_LINE> <INDENT> if fnmatch.fnmatch(f, '*.gme'): <NEW_LINE> <INDENT> game_path = path + os.sep + f <NEW_LINE> self.games.append(game.load(game_path)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __update_game_info(self): <NEW_LINE> <INDENT> if self.game_pointer == 0: <NEW_LINE> <INDENT> self.ui.lblTitle.setText("Home") <NEW_LINE> self.ui.lblInfo.setText("Welcome to Tunnel, scroll through " + "your games with the arrow buttons") <NEW_LINE> self.ui.lblPlayTime.setText("") <NEW_LINE> self.ui.btnLaunch.setEnabled(False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ui.lblTitle.setText(self.games[self.game_pointer].name) <NEW_LINE> self.ui.lblInfo.setText(self.games[self.game_pointer].description) <NEW_LINE> self.ui.lblPlayTime.setText("Play time: " + str(self.games[self.game_pointer].total_play_time)) <NEW_LINE> self.ui.btnLaunch.setEnabled(True) <NEW_LINE> <DEDENT> <DEDENT> def __change_game(self, game_number): <NEW_LINE> <INDENT> self.game_pointer = game_number <NEW_LINE> self.__update_game_info() <NEW_LINE> <DEDENT> def __click_btn_next(self): <NEW_LINE> <INDENT> if self.game_pointer == (len(self.games) - 1): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.game_pointer += 1 <NEW_LINE> <DEDENT> self.__update_game_info() <NEW_LINE> <DEDENT> def __click_btn_prev(self): <NEW_LINE> <INDENT> if self.game_pointer == 0: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.game_pointer -= 1 <NEW_LINE> <DEDENT> self.__update_game_info() <NEW_LINE> <DEDENT> def __click_btn_launch(self): <NEW_LINE> <INDENT> self.games[self.game_pointer].launch() <NEW_LINE> self.ui.lblPlayTime.setText("Play time: " + str(self.games[self.game_pointer].total_play_time)) <NEW_LINE> game.save(self.games[self.game_pointer]) <NEW_LINE> <DEDENT> def __click_btn_add_game(self): <NEW_LINE> <INDENT> addgame = AddGameWindow(self.parent) <NEW_LINE> addgame.exec_() <NEW_LINE> self.__refresh_games() <NEW_LINE> self.__change_game((len(self.games) - 1)) <NEW_LINE> <DEDENT> def closeEvent(self, e): <NEW_LINE> <INDENT> sys.exit() | Game launcher dialog and associated methods | 62599072796e427e53850070 |
class TimeMemoize(object): <NEW_LINE> <INDENT> _caches = {} <NEW_LINE> _delays = {} <NEW_LINE> def __init__(self, delay=10): <NEW_LINE> <INDENT> self.delay = delay <NEW_LINE> <DEDENT> def collect(self): <NEW_LINE> <INDENT> for func in self._caches: <NEW_LINE> <INDENT> cache = {} <NEW_LINE> for key in self._caches[func]: <NEW_LINE> <INDENT> if (datetime.now().timestamp() - self._caches[func][key][1]) < self._delays[func]: <NEW_LINE> <INDENT> cache[key] = self._caches[func][key] <NEW_LINE> <DEDENT> <DEDENT> self._caches[func] = cache <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, f): <NEW_LINE> <INDENT> self.cache = self._caches[f] = {} <NEW_LINE> self._delays[f] = self.delay <NEW_LINE> def func(*args, **kwargs): <NEW_LINE> <INDENT> kw = sorted(kwargs.items()) <NEW_LINE> key = (args, tuple(kw)) <NEW_LINE> time = datetime.now().timestamp() <NEW_LINE> try: <NEW_LINE> <INDENT> v = self.cache[key] <NEW_LINE> if (time - v[1]) > self.delay: <NEW_LINE> <INDENT> raise KeyError <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> v = self.cache[key] = f(*args, **kwargs), time <NEW_LINE> <DEDENT> return v[0] <NEW_LINE> <DEDENT> func.func_name = f.__name__ <NEW_LINE> return func | Memoize with timeout | 62599072435de62698e9d6fe |
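A usage sketch for the memoizing decorator above (the 30-second delay and the function are illustrative): results are cached per argument tuple and recomputed once the delay has passed.

    @TimeMemoize(delay=30)       # cache results for 30 seconds
    def load_config(path):
        with open(path) as fh:   # only re-read once the cached entry has expired
            return fh.read()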
class SoQtExaminerViewer(SoQtFullViewer): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def initClass(): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_initClass() <NEW_LINE> <DEDENT> initClass = staticmethod(initClass) <NEW_LINE> def getClassTypeId(): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_getClassTypeId() <NEW_LINE> <DEDENT> getClassTypeId = staticmethod(getClassTypeId) <NEW_LINE> def getTypeId(self): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_getTypeId(self) <NEW_LINE> <DEDENT> def createInstance(): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_createInstance() <NEW_LINE> <DEDENT> createInstance = staticmethod(createInstance) <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> this = _soqt.new_SoQtExaminerViewer(*args) <NEW_LINE> try: self.this.append(this) <NEW_LINE> except: self.this = this <NEW_LINE> <DEDENT> __swig_destroy__ = _soqt.delete_SoQtExaminerViewer <NEW_LINE> __del__ = lambda self : None; <NEW_LINE> def setAnimationEnabled(self, *args): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_setAnimationEnabled(self, *args) <NEW_LINE> <DEDENT> def isAnimationEnabled(self): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_isAnimationEnabled(self) <NEW_LINE> <DEDENT> def stopAnimating(self): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_stopAnimating(self) <NEW_LINE> <DEDENT> def isAnimating(self): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_isAnimating(self) <NEW_LINE> <DEDENT> def setFeedbackVisibility(self, *args): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_setFeedbackVisibility(self, *args) <NEW_LINE> <DEDENT> def isFeedbackVisible(self): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_isFeedbackVisible(self) <NEW_LINE> <DEDENT> def setFeedbackSize(self, *args): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_setFeedbackSize(self, *args) <NEW_LINE> <DEDENT> def getFeedbackSize(self): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_getFeedbackSize(self) <NEW_LINE> <DEDENT> def setViewing(self, *args): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_setViewing(self, *args) <NEW_LINE> <DEDENT> def setCamera(self, *args): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_setCamera(self, *args) <NEW_LINE> <DEDENT> def setCursorEnabled(self, *args): <NEW_LINE> <INDENT> return _soqt.SoQtExaminerViewer_setCursorEnabled(self, *args) | Proxy of C++ SoQtExaminerViewer class | 625990722ae34c7f260ac9dc |
class ComplexDateTimeField(LongField): <NEW_LINE> <INDENT> def _convert_from_datetime(self, val): <NEW_LINE> <INDENT> result = self._datetime_to_microseconds_since_epoch(value=val) <NEW_LINE> return result <NEW_LINE> <DEDENT> def _convert_from_db(self, value): <NEW_LINE> <INDENT> result = self._microseconds_since_epoch_to_datetime(data=value) <NEW_LINE> return result <NEW_LINE> <DEDENT> def _microseconds_since_epoch_to_datetime(self, data): <NEW_LINE> <INDENT> result = datetime.datetime.fromtimestamp(data // SECOND_TO_MICROSECONDS) <NEW_LINE> microseconds_reminder = (data % SECOND_TO_MICROSECONDS) <NEW_LINE> result = result.replace(microsecond=microseconds_reminder) <NEW_LINE> return result <NEW_LINE> <DEDENT> def _datetime_to_microseconds_since_epoch(self, value): <NEW_LINE> <INDENT> seconds = time.mktime(value.timetuple()) <NEW_LINE> microseconds_reminder = value.time().microsecond <NEW_LINE> result = (int(seconds * SECOND_TO_MICROSECONDS) + microseconds_reminder) <NEW_LINE> return result <NEW_LINE> <DEDENT> def __get__(self, instance, owner): <NEW_LINE> <INDENT> data = super(ComplexDateTimeField, self).__get__(instance, owner) <NEW_LINE> if data is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if isinstance(data, datetime.datetime): <NEW_LINE> <INDENT> return data <NEW_LINE> <DEDENT> return self._convert_from_db(data) <NEW_LINE> <DEDENT> def __set__(self, instance, value): <NEW_LINE> <INDENT> value = self._convert_from_datetime(value) if value else value <NEW_LINE> return super(ComplexDateTimeField, self).__set__(instance, value) <NEW_LINE> <DEDENT> def validate(self, value): <NEW_LINE> <INDENT> value = self.to_python(value) <NEW_LINE> if not isinstance(value, datetime.datetime): <NEW_LINE> <INDENT> self.error('Only datetime objects may used in a ' 'ComplexDateTimeField') <NEW_LINE> <DEDENT> <DEDENT> def to_python(self, value): <NEW_LINE> <INDENT> original_value = value <NEW_LINE> try: <NEW_LINE> <INDENT> return self._convert_from_db(value) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return original_value <NEW_LINE> <DEDENT> <DEDENT> def to_mongo(self, value): <NEW_LINE> <INDENT> value = self.to_python(value) <NEW_LINE> return self._convert_from_datetime(value) <NEW_LINE> <DEDENT> def prepare_query_value(self, op, value): <NEW_LINE> <INDENT> return self._convert_from_datetime(value) | Date time field which handles microseconds exactly and internally stores
the timestamp as the number of microseconds since the Unix epoch.
Note: We need to do this because mongoengine serializes this field as a
comma-delimited string, which breaks sorting. | 62599072be8e80087fbc0988 |
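The round trip that the field above relies on, as a standalone standard-library sketch (SECOND_TO_MICROSECONDS = 10 ** 6 is an assumption matching the code):

    import datetime, time

    SECOND_TO_MICROSECONDS = 10 ** 6

    def to_microseconds(dt):
        return int(time.mktime(dt.timetuple())) * SECOND_TO_MICROSECONDS + dt.microsecond

    def from_microseconds(value):
        dt = datetime.datetime.fromtimestamp(value // SECOND_TO_MICROSECONDS)
        return dt.replace(microsecond=value % SECOND_TO_MICROSECONDS)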
class ProductOfSimplicialSets_finite(ProductOfSimplicialSets, PullbackOfSimplicialSets_finite): <NEW_LINE> <INDENT> def __init__(self, factors=None): <NEW_LINE> <INDENT> PullbackOfSimplicialSets_finite.__init__(self, [space.constant_map() for space in factors]) <NEW_LINE> self._factors = tuple([f.domain() for f in self._maps]) <NEW_LINE> <DEDENT> def projection_map(self, i): <NEW_LINE> <INDENT> return self.structure_map(i) <NEW_LINE> <DEDENT> def wedge_as_subset(self): <NEW_LINE> <INDENT> basept_factors = [sset.base_point() for sset in self.factors()] <NEW_LINE> to_factors = dict((v,k) for k,v in self._translation) <NEW_LINE> simps = [] <NEW_LINE> for x in self.nondegenerate_simplices(): <NEW_LINE> <INDENT> simplices = to_factors[x] <NEW_LINE> not_base_pt = 0 <NEW_LINE> for sigma, star in zip(simplices, basept_factors): <NEW_LINE> <INDENT> if not_base_pt > 1: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if sigma[0].nondegenerate() != star: <NEW_LINE> <INDENT> not_base_pt += 1 <NEW_LINE> <DEDENT> <DEDENT> if not_base_pt <= 1: <NEW_LINE> <INDENT> simps.append(x) <NEW_LINE> <DEDENT> <DEDENT> return self.subsimplicial_set(simps) <NEW_LINE> <DEDENT> def fat_wedge_as_subset(self): <NEW_LINE> <INDENT> basept_factors = [sset.base_point() for sset in self.factors()] <NEW_LINE> to_factors = {v: k for k, v in self._translation} <NEW_LINE> simps = [] <NEW_LINE> for x in self.nondegenerate_simplices(): <NEW_LINE> <INDENT> simplices = to_factors[x] <NEW_LINE> combined = zip(simplices, basept_factors) <NEW_LINE> if any(sigma[0] == pt for (sigma, pt) in combined): <NEW_LINE> <INDENT> simps.append(x) <NEW_LINE> <DEDENT> <DEDENT> return self.subsimplicial_set(simps) | The product of finite simplicial sets.
When the factors are all finite, there are more methods available
for the resulting product, as compared to products with infinite
factors: projection maps, the wedge as a subcomplex, and the fat
wedge as a subcomplex. See :meth:`projection_map`,
:meth:`wedge_as_subset`, and :meth:`fat_wedge_as_subset` | 62599072d268445f2663a7d9 |
class Login(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'routes': {'key': 'routes', 'type': 'LoginRoutes'}, 'token_store': {'key': 'tokenStore', 'type': 'TokenStore'}, 'preserve_url_fragments_for_logins': {'key': 'preserveUrlFragmentsForLogins', 'type': 'bool'}, 'allowed_external_redirect_urls': {'key': 'allowedExternalRedirectUrls', 'type': '[str]'}, 'cookie_expiration': {'key': 'cookieExpiration', 'type': 'CookieExpiration'}, 'nonce': {'key': 'nonce', 'type': 'Nonce'}, } <NEW_LINE> def __init__( self, *, routes: Optional["LoginRoutes"] = None, token_store: Optional["TokenStore"] = None, preserve_url_fragments_for_logins: Optional[bool] = None, allowed_external_redirect_urls: Optional[List[str]] = None, cookie_expiration: Optional["CookieExpiration"] = None, nonce: Optional["Nonce"] = None, **kwargs ): <NEW_LINE> <INDENT> super(Login, self).__init__(**kwargs) <NEW_LINE> self.routes = routes <NEW_LINE> self.token_store = token_store <NEW_LINE> self.preserve_url_fragments_for_logins = preserve_url_fragments_for_logins <NEW_LINE> self.allowed_external_redirect_urls = allowed_external_redirect_urls <NEW_LINE> self.cookie_expiration = cookie_expiration <NEW_LINE> self.nonce = nonce | The configuration settings of the login flow of users using App Service Authentication/Authorization.
:ivar routes: The routes that specify the endpoints used for login and logout requests.
:vartype routes: ~azure.mgmt.web.v2020_12_01.models.LoginRoutes
:ivar token_store: The configuration settings of the token store.
:vartype token_store: ~azure.mgmt.web.v2020_12_01.models.TokenStore
:ivar preserve_url_fragments_for_logins: :code:`<code>true</code>` if the fragments from the
request are preserved after the login request is made; otherwise, :code:`<code>false</code>`.
:vartype preserve_url_fragments_for_logins: bool
:ivar allowed_external_redirect_urls: External URLs that can be redirected to as part of
logging in or logging out of the app. Note that the query string part of the URL is ignored.
This is an advanced setting typically only needed by Windows Store application backends.
Note that URLs within the current domain are always implicitly allowed.
:vartype allowed_external_redirect_urls: list[str]
:ivar cookie_expiration: The configuration settings of the session cookie's expiration.
:vartype cookie_expiration: ~azure.mgmt.web.v2020_12_01.models.CookieExpiration
:ivar nonce: The configuration settings of the nonce used in the login flow.
:vartype nonce: ~azure.mgmt.web.v2020_12_01.models.Nonce | 62599072091ae35668706530 |
class SKUSimpleView(ListAPIView): <NEW_LINE> <INDENT> serializer_class = SKUSimpleSerializer <NEW_LINE> queryset = SKU.objects.all() <NEW_LINE> pagination_class = None | Get SKU product data | 6259907256ac1b37e630395e |
class ThreadSynchronizationFactory(object): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> @abstractmethod <NEW_LINE> def create_lock(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def create_wait_condition(self, lock): <NEW_LINE> <INDENT> raise NotImplementedError() | Factory interface for creating synchronization primitives. | 625990724e4d562566373cff |
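One possible concrete implementation of the abstract factory above, backed by the standard threading module (a sketch, not taken from the dataset):

    import threading

    class ThreadingSynchronizationFactory(ThreadSynchronizationFactory):
        def create_lock(self):
            return threading.Lock()

        def create_wait_condition(self, lock):
            return threading.Condition(lock)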
class List(base.HighlanderLister): <NEW_LINE> <INDENT> def _get_format_function(self): <NEW_LINE> <INDENT> return format <NEW_LINE> <DEDENT> def _get_resources(self, parsed_args): <NEW_LINE> <INDENT> return maccleods.MaccleodManager(self.app.client).list() | List all maccleods dialog. | 62599072bf627c535bcb2dc5 |
class Movie(): <NEW_LINE> <INDENT> VALID_RATINGS = ["G","PG","PG-13","R"] <NEW_LINE> def __init__(self,movie_title,movie_storyline,poster_image,trailer_youtube): <NEW_LINE> <INDENT> self.title = movie_title <NEW_LINE> self.storyline = movie_storyline <NEW_LINE> self.poster_image_url = poster_image <NEW_LINE> self.trailer_youtube_url = trailer_youtube <NEW_LINE> <DEDENT> def show_trailer(self): <NEW_LINE> <INDENT> webbrowser.open(self.trailer_youtube_url) | This class helps to store movie related information | 625990729c8ee82313040e03 |
class AudioStreamInfo(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Bitrate = None <NEW_LINE> self.SamplingRate = None <NEW_LINE> self.Codec = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Bitrate = params.get("Bitrate") <NEW_LINE> self.SamplingRate = params.get("SamplingRate") <NEW_LINE> self.Codec = params.get("Codec") | Audio stream information.
| 62599072d486a94d0ba2d8b5 |
class ik(): <NEW_LINE> <INDENT> def __init__(self,lengths, ee_pos): <NEW_LINE> <INDENT> self.length = np.asarray(lengths, dtype=np.float32) <NEW_LINE> self.ee_pos = np.asarray(ee_pos, dtype= np.float32) <NEW_LINE> self.q0 = [np.pi/3,-np.pi/4,np.pi/6] <NEW_LINE> self.q = self.q0[:] <NEW_LINE> <DEDENT> def inv_kin(self, ee_pos=None): <NEW_LINE> <INDENT> if ee_pos != None: <NEW_LINE> <INDENT> self.ee_pos = ee_pos <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ee_pos = self.ee_pos <NEW_LINE> <DEDENT> def distance_to_default(q, *args): <NEW_LINE> <INDENT> weight = [1, 1, 1.3] <NEW_LINE> return np.sqrt(np.sum([(qi - q0i)**2 * wi for qi, q0i, wi in zip(q, self.q0, weight)])) <NEW_LINE> <DEDENT> def x_constraint(q,ee_pos): <NEW_LINE> <INDENT> x = (self.length[0]*np.cos(q[0]) + self.length[1]*np.cos(q[0]+q[1]) + self.length[2]*np.cos(np.sum(q))) - self.ee_pos[0] <NEW_LINE> return x <NEW_LINE> <DEDENT> def y_constraint(q,ee_pos): <NEW_LINE> <INDENT> y = (self.length[0]*np.sin(q[0]) + self.length[1]*np.sin(q[0]+q[1]) + self.length[2]*np.sin(np.sum(q))) - self.ee_pos[1] <NEW_LINE> return y <NEW_LINE> <DEDENT> def joint_limits_upper_constraint(q,ee_pos): <NEW_LINE> <INDENT> return self.max_angles - q <NEW_LINE> <DEDENT> def joint_limits_lower_constraint(q,ee_pos): <NEW_LINE> <INDENT> return q - self.min_angles <NEW_LINE> <DEDENT> return scipy.optimize.fmin_slsqp( func=distance_to_default, x0=self.q, eqcons=[x_constraint, y_constraint], args=(self.ee_pos,), iprint=0) | Inverse Kinematics class for 3 links.
Pass a list of link lengths and a list of default joint angles.
Assumptions:
1) All rotations are about the z-axis.
2) Translation is along the x-axis.
Reference:
1) https://github.com/AliShug/EvoArm/tree/master/PyIK/src
2) https://github.com/lanius/tinyik/tree/master/tinyik
*3) https://github.com/studywolf/blog/blob/master/InvKin/Arm.py
*4) https://docs.scipy.org/doc/scipy-0.13.0/reference/tutorial/optimize.html#tutorial-sqlsp | 625990727047854f46340cb0 |
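The x/y constraints inside inv_kin encode the planar forward kinematics of the 3-link arm; shown here as a standalone sketch (the function name is illustrative):

    import numpy as np

    def forward_kinematics(q, lengths):
        # planar 3-link arm; every joint rotates about the z-axis
        x = (lengths[0] * np.cos(q[0])
             + lengths[1] * np.cos(q[0] + q[1])
             + lengths[2] * np.cos(q[0] + q[1] + q[2]))
        y = (lengths[0] * np.sin(q[0])
             + lengths[1] * np.sin(q[0] + q[1])
             + lengths[2] * np.sin(q[0] + q[1] + q[2]))
        return x, y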
class PP_STATUS(object): <NEW_LINE> <INDENT> SUCCESS = 0 <NEW_LINE> VERIFY_FAIL = 1 <NEW_LINE> UNPACK_FAIL = 2 <NEW_LINE> VERIFY_UNPACK_FAIL = 3 <NEW_LINE> FAILURE = -1 | This provides the Post Process Status of SABNzbd set in the
SAB_PP_STATUS environment variable | 6259907299cbb53fe68327e3 |
class WrongDatabaseError(Exception): <NEW_LINE> <INDENT> def __init__(self, clientDatabaseName, serverDatabaseName): <NEW_LINE> <INDENT> Exception.__init__(self, clientDatabaseName, serverDatabaseName) <NEW_LINE> self.clientDatabaseName = clientDatabaseName <NEW_LINE> self.serverDatabaseName = serverDatabaseName | The client's database name doesn't match our database. | 6259907263b5f9789fe86a5d |
class NoDeletes(Exception): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> __weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) | Raised when the document no longer contains any pending deletes
(DEL_START/DEL_END) | 625990725fcc89381b266dd4 |
class ProgressLineEdit(QLineEdit): <NEW_LINE> <INDENT> INITIAL_PROGRESS_OPACITY = 0.25 <NEW_LINE> text_changed = Signal(str) <NEW_LINE> def __init__(self, parent=None): <NEW_LINE> <INDENT> super(ProgressLineEdit, self).__init__(parent) <NEW_LINE> self._progress = 0 <NEW_LINE> self.text_changed.connect(self.setText) <NEW_LINE> self.setProperty("_progress_opacity", self.INITIAL_PROGRESS_OPACITY) <NEW_LINE> self._progress_finished_anim = QPropertyAnimation(self, "_progress_opacity") <NEW_LINE> self._progress_finished_anim.setStartValue(self.INITIAL_PROGRESS_OPACITY) <NEW_LINE> self._progress_finished_anim.setEndValue(0) <NEW_LINE> self._progress_finished_anim.setDuration(1000) <NEW_LINE> self._progress_finished_anim.valueChanged.connect(self.repaint) <NEW_LINE> <DEDENT> def setProgress(self, progress): <NEW_LINE> <INDENT> self._progress = progress <NEW_LINE> if progress == 100: <NEW_LINE> <INDENT> self._progress_finished_anim.start() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.setProperty("_progress_opacity", self.INITIAL_PROGRESS_OPACITY) <NEW_LINE> self.repaint() <NEW_LINE> <DEDENT> <DEDENT> def paintEvent(self, pe): <NEW_LINE> <INDENT> super(ProgressLineEdit, self).paintEvent(pe) <NEW_LINE> painter = QPainter(self) <NEW_LINE> painter.setOpacity(self.property("_progress_opacity")) <NEW_LINE> sopb = QStyleOptionProgressBarV2() <NEW_LINE> sopb.minimum = 0 <NEW_LINE> sopb.maximum = 100 <NEW_LINE> sopb.progress = self._progress <NEW_LINE> sopb.initFrom(self) <NEW_LINE> self.style().drawControl(QStyle.CE_ProgressBarContents, sopb, painter, self) | A lineedit with a progress bar overlaid | 62599072627d3e7fe0e08781 |
class SchemaTheme(DictTheme): <NEW_LINE> <INDENT> @property <NEW_LINE> def tag(self): <NEW_LINE> <INDENT> return self.get_attribute('tag') <NEW_LINE> <DEDENT> @property <NEW_LINE> def maptype(self): <NEW_LINE> <INDENT> return self.get_attribute('maptype') <NEW_LINE> <DEDENT> @property <NEW_LINE> def tileformat(self): <NEW_LINE> <INDENT> return self.get_attribute('tileformat') <NEW_LINE> <DEDENT> @property <NEW_LINE> def pyramid(self): <NEW_LINE> <INDENT> return self.get_attribute('pyramid') <NEW_LINE> <DEDENT> @property <NEW_LINE> def storage(self): <NEW_LINE> <INDENT> return self.get_attribute('storage') <NEW_LINE> <DEDENT> @property <NEW_LINE> def renderer(self): <NEW_LINE> <INDENT> return self.get_attribute('renderer') | Schema Theme
A `SchemaTheme` object configures attributes for a
:class:`~stonemason.mason.mapsheet.MapSheet`. | 625990727047854f46340cb1 |
class PCIeSERDESAligner(PCIeSERDESInterface): <NEW_LINE> <INDENT> def __init__(self, lane): <NEW_LINE> <INDENT> self.ratio = lane.ratio <NEW_LINE> self.rx_invert = lane.rx_invert <NEW_LINE> self.rx_align = lane.rx_align <NEW_LINE> self.rx_present = lane.rx_present <NEW_LINE> self.rx_locked = lane.rx_locked <NEW_LINE> self.rx_aligned = lane.rx_aligned <NEW_LINE> self.rx_symbol = Signal(lane.ratio * 9) <NEW_LINE> self.rx_valid = Signal(lane.ratio) <NEW_LINE> self.tx_symbol = lane.tx_symbol <NEW_LINE> self.tx_set_disp = lane.tx_set_disp <NEW_LINE> self.tx_disp = lane.tx_disp <NEW_LINE> self.tx_e_idle = lane.tx_e_idle <NEW_LINE> self.det_enable = lane.det_enable <NEW_LINE> self.det_valid = lane.det_valid <NEW_LINE> self.det_status = lane.det_status <NEW_LINE> self.submodules.slip = SymbolSlip(symbol_size=10, word_size=lane.ratio, comma=(1<<9)|K(28,5)) <NEW_LINE> self.comb += [ self.slip.en.eq(self.rx_align), self.slip.i.eq(Cat( (lane.rx_symbol.part(9 * n, 9), lane.rx_valid[n]) for n in range(lane.ratio) )), self.rx_symbol.eq(Cat( self.slip.o.part(10 * n, 9) for n in range(lane.ratio) )), self.rx_valid.eq(Cat( self.slip.o[10 * n + 9] for n in range(lane.ratio) )), ] | A multiplexer that aligns commas to the first symbol of the word, for SERDESes that only
perform bit alignment and not symbol alignment. | 62599072460517430c432cd4 |
class Lerp( IntervalAction ): <NEW_LINE> <INDENT> def init(self, attrib, start, end, duration): <NEW_LINE> <INDENT> self.attrib = attrib <NEW_LINE> self.duration = duration <NEW_LINE> self.start_p = start <NEW_LINE> self.end_p = end <NEW_LINE> self.delta = end-start <NEW_LINE> <DEDENT> def update(self, t): <NEW_LINE> <INDENT> setattr(self.target, self.attrib, self.start_p + self.delta * t ) <NEW_LINE> <DEDENT> def __reversed__(self): <NEW_LINE> <INDENT> return Lerp(self.attrib, self.end_p, self.start_p, self.duration) | Interpolate between values for some specified attribute | 625990723539df3088ecdb90 |
class Songbook(object): <NEW_LINE> <INDENT> def __init__(self, name=u'', entry=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.entry = entry <NEW_LINE> <DEDENT> def _to_xml(self): <NEW_LINE> <INDENT> elem = etree.Element(u'songbook') <NEW_LINE> if self.entry: <NEW_LINE> <INDENT> elem.set(u'entry', self.entry) <NEW_LINE> <DEDENT> if self.name: <NEW_LINE> <INDENT> elem.set(u'name', self.name) <NEW_LINE> <DEDENT> return elem <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return u'%s #%s' % (self.name, self.entry) | A songbook/collection with an entry/number.
name: The name of the songbook or collection.
entry: A number or string representing the index in this songbook. | 625990728e7ae83300eea98b |
class ElectionCycleModel(pm.Model): <NEW_LINE> <INDENT> def __init__(self, election_model, name, cycle_config, parties, election_polls, eta, adjacent_day_fn, min_polls_per_pollster, test_results=None, real_results=None, house_effects_model=None, chol=None, votes=None, after_polls_chol=None, election_day_chol=None): <NEW_LINE> <INDENT> super(ElectionCycleModel, self).__init__(name) <NEW_LINE> self.config = cycle_config <NEW_LINE> self.house_effects_model = house_effects_model <NEW_LINE> self.forecast_day = election_polls.forecast_day <NEW_LINE> self.election_polls = election_polls <NEW_LINE> self.parties = parties <NEW_LINE> self.num_days = election_polls.num_days <NEW_LINE> self.pollster_ids = election_polls.pollster_ids <NEW_LINE> self.party_ids = election_polls.party_ids <NEW_LINE> self.num_parties = len(self.parties) <NEW_LINE> self.eta = eta <NEW_LINE> if chol is None: <NEW_LINE> <INDENT> self.cholesky_pmatrix = pm.LKJCholeskyCov('cholesky_pmatrix', n=self.num_parties, eta=self.eta, sd_dist=pm.HalfCauchy.dist(0.1, shape=[self.num_parties])) <NEW_LINE> self.cholesky_matrix = pm.Deterministic('cholesky_matrix', pm.expand_packed_triangular(self.num_parties, self.cholesky_pmatrix)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.cholesky_matrix = chol <NEW_LINE> <DEDENT> self.after_polls_cholesky_matrix=after_polls_chol if after_polls_chol is not None else self.cholesky_matrix <NEW_LINE> self.election_day_cholesky_matrix=election_day_chol if election_day_chol is not None else self.cholesky_matrix <NEW_LINE> if votes is None: <NEW_LINE> <INDENT> self.votes = pm.Flat('votes', shape=self.num_parties) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.votes = votes <NEW_LINE> <DEDENT> self.dynamics = ElectionDynamicsModel( name=name + '_polls', votes=self.votes, polls=election_polls, cholesky_matrix=self.cholesky_matrix, after_polls_cholesky_matrix=self.after_polls_cholesky_matrix, election_day_cholesky_matrix=self.election_day_cholesky_matrix, test_results=test_results, house_effects_model=house_effects_model, min_polls_per_pollster=min_polls_per_pollster, adjacent_day_fn=adjacent_day_fn) <NEW_LINE> self.support = pm.Deterministic('support', self.dynamics.support) | A pymc3 model that models the full election cycle. This can
include a fundamentals model as well as a dynamics model. | 62599072091ae35668706532 |
class ModPad(FunPad): <NEW_LINE> <INDENT> def __init__(self, pad_ij: Tuple[int], mod: FunMod): <NEW_LINE> <INDENT> self.mod = mod <NEW_LINE> super(ModPad, self).__init__(pad_ij) <NEW_LINE> <DEDENT> def set_registry_id(self): <NEW_LINE> <INDENT> return 'Mod: ' + str(self.mod) <NEW_LINE> <DEDENT> def default_color(self): <NEW_LINE> <INDENT> if self.mod in mod_color_map: <NEW_LINE> <INDENT> return mod_color_map[self.mod] <NEW_LINE> <DEDENT> return 'white' <NEW_LINE> <DEDENT> def get_modifier(self): <NEW_LINE> <INDENT> return self.mod.get_func() | Modifies chords. | 62599072a8370b77170f1cc5 |
class ObjTypeScan(common.AbstractScanCommand): <NEW_LINE> <INDENT> scanners = [ObjectTypeScanner] <NEW_LINE> def unified_output(self, data): <NEW_LINE> <INDENT> def generator(data): <NEW_LINE> <INDENT> for object_type in data: <NEW_LINE> <INDENT> yield (0, [ Address(object_type.obj_offset), Hex(object_type.TotalNumberOfObjects), Hex(object_type.TotalNumberOfHandles), str(object_type.Key), str(object_type.Name or ''), str(object_type.TypeInfo.PoolType)]) <NEW_LINE> <DEDENT> <DEDENT> return renderers.TreeGrid( [("Offset", Address), ("nObjects", Hex), ("nHandles", Hex), ("Key", str), ("Name", str), ("PoolType", str)], generator(data)) <NEW_LINE> <DEDENT> def render_text(self, outfd, data): <NEW_LINE> <INDENT> self.table_header(outfd, [("Offset", "[addrpad]"), ("nObjects", "[addr]"), ("nHandles", "[addr]"), ("Key", "8"), ("Name", "30"), ("PoolType", "20")]) <NEW_LINE> for object_type in data: <NEW_LINE> <INDENT> self.table_row(outfd, object_type.obj_offset, object_type.TotalNumberOfObjects, object_type.TotalNumberOfHandles, str(object_type.Key), str(object_type.Name or ''), object_type.TypeInfo.PoolType) | Scan for Windows object type objects | 625990723d592f4c4edbc7d9 |