code: string, length 4 to 4.48k
docstring: string, length 1 to 6.45k
_id: string, length 24
class URLOnlyValidator(BasicValidator): <NEW_LINE> <INDENT> ALLOWED_TYPES = { 'html', 'aspx', 'php', 'htm', } <NEW_LINE> def _validate(self, link): <NEW_LINE> <INDENT> if not link: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> pathSplit = [i for i in (urllib .parse .urlparse(link)[2] .split('/')) if i] <NEW_LINE> fileName = pathSplit[-1] <NEW_LINE> fSplit = fileName.split(".") <NEW_LINE> if fSplit[-1] == fileName: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> fileExt = fSplit[-1] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return fileExt in self.ALLOWED_TYPES
Does not permit crawling of text files, documents, etc.; only links to web pages (html, aspx, php, htm) or extensionless URLs are crawled.
6259904423e79379d538d815
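The URLOnlyValidator entry above only accepts links whose last path segment has no extension or one of the allowed page extensions; a minimal standalone sketch of that check (illustrative only, not part of the dataset entry):

from urllib.parse import urlparse

ALLOWED_TYPES = {"html", "aspx", "php", "htm"}

def looks_like_page(link):
    # Keep only non-empty path segments, mirroring the validator's path split.
    segments = [part for part in urlparse(link).path.split("/") if part]
    if not link or not segments:
        return False
    filename = segments[-1]
    ext = filename.rsplit(".", 1)[-1]
    # A segment without a dot has no extension, which the validator accepts.
    return ext == filename or ext in ALLOWED_TYPES

print(looks_like_page("http://example.com/index.html"))  # True
print(looks_like_page("http://example.com/report.pdf"))  # False
print(looks_like_page("http://example.com/about"))       # True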
class Normalize(dataprocessor.DataProcessor): <NEW_LINE> <INDENT> def __init__(self, ord=None): <NEW_LINE> <INDENT> self.ord = ord <NEW_LINE> <DEDENT> def process_frame(self, frame): <NEW_LINE> <INDENT> return frame / (np.linalg.norm(frame, self.ord) + 1e-16)
Normalize each frame using a norm of the given order. Attributes ---------- ord : see numpy.linalg.norm Order of the norm. See Also -------- numpy.linalg.norm
62599044d53ae8145f919775
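A short illustration of the normalization the Normalize entry above applies, assuming frames are 1-D numpy arrays; the small constant guards against division by zero:

import numpy as np

def normalize_frame(frame, ord=None):
    # ord=None is the L2 norm, matching numpy.linalg.norm's default.
    return frame / (np.linalg.norm(frame, ord) + 1e-16)

frame = np.array([3.0, 4.0])
print(normalize_frame(frame))         # [0.6 0.8]  (L2 norm is 5)
print(normalize_frame(frame, ord=1))  # frame / 7  (L1 norm)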
class handshake_begin: <NEW_LINE> <INDENT> def POST(self): <NEW_LINE> <INDENT> clean_sessions() <NEW_LINE> postdata = web.data() <NEW_LINE> request = json.loads(postdata) <NEW_LINE> server_secret, gotpub, challenge, challenge_plain = null_proto.server_handshake_begin(PRIVKEY, request) <NEW_LINE> session = Session() <NEW_LINE> session.secret = server_secret <NEW_LINE> session.pubkey = gotpub <NEW_LINE> session.challenge_plain = challenge_plain <NEW_LINE> pubhash = identity.pubkeyhash(gotpub) <NEW_LINE> with session_lock: <NEW_LINE> <INDENT> pending_sessions[pubhash] = session <NEW_LINE> <DEDENT> return json.dumps(challenge) <NEW_LINE> <DEDENT> def GET(self): <NEW_LINE> <INDENT> return "the server is running"
null protocol handshake begin
625990444e696a045264e7ac
class EnOceanWindowHandle(EnOceanSensor): <NEW_LINE> <INDENT> def value_changed(self, packet): <NEW_LINE> <INDENT> action = (packet.data[1] & 0x70) >> 4 <NEW_LINE> if action == 0x07: <NEW_LINE> <INDENT> self._attr_native_value = STATE_CLOSED <NEW_LINE> <DEDENT> if action in (0x04, 0x06): <NEW_LINE> <INDENT> self._attr_native_value = STATE_OPEN <NEW_LINE> <DEDENT> if action == 0x05: <NEW_LINE> <INDENT> self._attr_native_value = "tilt" <NEW_LINE> <DEDENT> self.schedule_update_ha_state()
Representation of an EnOcean window handle device. EEPs (EnOcean Equipment Profiles): - F6-10-00 (Mechanical handle / Hoppe AG)
62599044be383301e0254b2f
class DataContent(BaseContent): <NEW_LINE> <INDENT> associated = models.OneToOneField( "spider_base.AssignedContent", on_delete=models.CASCADE, null=True ) <NEW_LINE> quota_data = JSONField(default=dict, blank=True) <NEW_LINE> free_data = JSONField(default=dict, blank=True) <NEW_LINE> objects = DataContentManager() <NEW_LINE> def get_size(self, prepared_attachements=None) -> int: <NEW_LINE> <INDENT> s = super().get_size(prepared_attachements) <NEW_LINE> s += len(str(self.quota_data)) <NEW_LINE> return s
Inherit from it with proxy objects when possible; speedier than BaseContent by being prefetched.
6259904424f1403a92686258
class OutputText(gtk.ScrolledWindow): <NEW_LINE> <INDENT> NAME = 'Adium Output' <NEW_LINE> DESCRIPTION = _('A widget to display conversation messages using adium style') <NEW_LINE> AUTHOR = 'Mariano Guerra' <NEW_LINE> WEBSITE = 'www.emesene.org' <NEW_LINE> def __init__(self, config, add_emoticon_cb): <NEW_LINE> <INDENT> gtk.ScrolledWindow.__init__(self) <NEW_LINE> self.config = config <NEW_LINE> self.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC) <NEW_LINE> self.set_shadow_type(gtk.SHADOW_IN) <NEW_LINE> self._texts = [] <NEW_LINE> self.loaded = False <NEW_LINE> picture = utils.path_to_url(os.path.abspath(gui.theme.user)) <NEW_LINE> self.view = OutputView(gui.theme.conv_theme, "", "", "", picture, picture, add_emoticon_cb) <NEW_LINE> self.view.connect('load-finished', self._loading_stop_cb) <NEW_LINE> self.view.connect('console-message', self._error_cb) <NEW_LINE> self.clear() <NEW_LINE> self.view.show() <NEW_LINE> self.add(self.view) <NEW_LINE> <DEDENT> def clear(self, source="", target="", target_display="", source_img="", target_img=""): <NEW_LINE> <INDENT> self._texts = [] <NEW_LINE> self.loaded = False <NEW_LINE> self.view.clear(source, Renderers.msnplus_to_plain_text(target), Renderers.msnplus_to_plain_text(target_display), source_img, target_img) <NEW_LINE> <DEDENT> def _error_cb(self, view, message, line, source_id): <NEW_LINE> <INDENT> message = "Webkit message: %s %s %s" % (message, line, source_id) <NEW_LINE> log.debug(message) <NEW_LINE> <DEDENT> def _loading_stop_cb(self, view, frame): <NEW_LINE> <INDENT> self.loaded = True <NEW_LINE> for text in self._texts: <NEW_LINE> <INDENT> self.append(text) <NEW_LINE> <DEDENT> self._texts = [] <NEW_LINE> <DEDENT> def send_message(self, formatter, contact, message, cedict, cedir, is_first): <NEW_LINE> <INDENT> msg = gui.Message.from_contact(contact, message, is_first, False, message.timestamp) <NEW_LINE> self.view.add_message(msg, message.style, cedict, cedir) <NEW_LINE> <DEDENT> def receive_message(self, formatter, contact, message, cedict, cedir, is_first): <NEW_LINE> <INDENT> msg = gui.Message.from_contact(contact, message, is_first, True, message.timestamp) <NEW_LINE> self.view.add_message(msg, message.style, cedict, cedir) <NEW_LINE> <DEDENT> def information(self, formatter, contact, message): <NEW_LINE> <INDENT> msg = gui.Message.from_information(contact, message) <NEW_LINE> msg.message = Renderers.msnplus_to_plain_text(msg.message) <NEW_LINE> self.view.add_message(msg, None, None, None) <NEW_LINE> <DEDENT> def update_p2p(self, account, _type, *what): <NEW_LINE> <INDENT> if _type == 'emoticon': <NEW_LINE> <INDENT> _creator, _friendly, path = what <NEW_LINE> _id = base64.b64encode(_creator+xml.sax.saxutils.unescape(_friendly)) <NEW_LINE> mystr = "var now=new Date();var x=document.images;for(var i=0;i<x.length;i++){if(x[i].name=='%s'){x[i].src='%s?'+now.getTime();}}" % (_id, path) <NEW_LINE> self.view.execute_script(mystr)
A text box inside a scrolled window that provides methods to get and set the text in the widget
62599044ec188e330fdf9bb2
class RemoveSpace(AbstractSearchSpace): <NEW_LINE> <INDENT> def __init__(self, columns=[], set_sizes=[]): <NEW_LINE> <INDENT> if columns and not set_sizes: <NEW_LINE> <INDENT> raise SpaceException("No subset size specified.") <NEW_LINE> <DEDENT> for size in set_sizes: <NEW_LINE> <INDENT> if size > (len(columns)): <NEW_LINE> <INDENT> raise SpaceException("Subset size %d is bigger then columns count." % size) <NEW_LINE> <DEDENT> <DEDENT> self._columns = columns <NEW_LINE> self._set_sizes = set_sizes <NEW_LINE> <DEDENT> def generate(self, descriptor): <NEW_LINE> <INDENT> if not self._set_sizes: <NEW_LINE> <INDENT> descriptor.removed_columns = [] <NEW_LINE> yield descriptor <NEW_LINE> <DEDENT> for size in self._set_sizes: <NEW_LINE> <INDENT> if size > len(self._columns): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if size == 0: <NEW_LINE> <INDENT> descriptor.removed_columns = [] <NEW_LINE> yield descriptor <NEW_LINE> <DEDENT> generator = SubsetGenerator(self._columns, size) <NEW_LINE> for subset in generator: <NEW_LINE> <INDENT> descriptor.removed_columns = subset <NEW_LINE> yield descriptor <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> args = [] <NEW_LINE> if self._columns: args.append('columns=%s' % ','.join(self._columns)) <NEW_LINE> if self._set_sizes: args.append('sizes=%s' % ','.join([str(s) for s in self._set_sizes])) <NEW_LINE> if args: <NEW_LINE> <INDENT> return 'Remove(%s)' % ', '.join(args) <NEW_LINE> <DEDENT> return ''
Describes the search space of removing columns from the data file.
62599044462c4b4f79dbcd15
class BASE64(PoundSeparatedCommand): <NEW_LINE> <INDENT> pass
Base64 encode/decode in/out of a socket.
6259904407d97122c4217fb8
class FunctionNegativeTripletSelector(object): <NEW_LINE> <INDENT> def __init__(self, margin, negative_selection_fn, cpu=True): <NEW_LINE> <INDENT> self.cpu = cpu <NEW_LINE> self.margin = margin <NEW_LINE> self.negative_selection_fn = negative_selection_fn <NEW_LINE> <DEDENT> def get_triplets(self, embeddings, labels): <NEW_LINE> <INDENT> if self.cpu: <NEW_LINE> <INDENT> embeddings = embeddings.cpu() <NEW_LINE> <DEDENT> distance_matrix = pdist(embeddings) <NEW_LINE> distance_matrix = distance_matrix.cpu() <NEW_LINE> labels = labels.cpu().data.numpy() <NEW_LINE> triplets = [] <NEW_LINE> for label in set(labels): <NEW_LINE> <INDENT> label_mask = (labels == label) <NEW_LINE> label_indices = np.where(label_mask)[0] <NEW_LINE> if len(label_indices) < 2: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> negative_indices = np.where(np.logical_not(label_mask))[0] <NEW_LINE> anchor_positives = list(combinations(label_indices, 2)) <NEW_LINE> anchor_positives = np.array(anchor_positives) <NEW_LINE> ap_distances = distance_matrix[anchor_positives[:, 0], anchor_positives[:, 1]] <NEW_LINE> for anchor_positive, ap_distance in zip(anchor_positives, ap_distances): <NEW_LINE> <INDENT> loss_values = ap_distance - distance_matrix[ torch.LongTensor(np.array([anchor_positive[0]])), torch.LongTensor(negative_indices)] + self.margin <NEW_LINE> loss_values = loss_values.data.cpu().numpy() <NEW_LINE> hard_negative = self.negative_selection_fn(loss_values) <NEW_LINE> if hard_negative is not None: <NEW_LINE> <INDENT> hard_negative = negative_indices[hard_negative] <NEW_LINE> triplets.append([anchor_positive[0], anchor_positive[1], hard_negative]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if len(triplets) == 0: <NEW_LINE> <INDENT> triplets.append([anchor_positive[0], anchor_positive[1], negative_indices[0]]) <NEW_LINE> <DEDENT> triplets = np.array(triplets) <NEW_LINE> return torch.LongTensor(triplets)
For each positive pair, takes the hardest negative sample (the one with the greatest triplet loss value) to create a triplet. Margin should match the margin used in triplet loss. negative_selection_fn should take an array of loss_values for a given anchor-positive pair and all negative samples and return a negative index for that pair.
625990446fece00bbacccccb
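A compact numpy sketch of the hardest-negative rule described in the entry above, using made-up distances rather than the entry's torch tensors: for one anchor-positive pair, the chosen negative is the one with the greatest triplet loss value d(a,p) - d(a,n) + margin:

import numpy as np

margin = 0.2
ap_distance = 0.9                         # distance between anchor and positive
an_distances = np.array([1.5, 0.7, 1.1])  # distances from anchor to each candidate negative

loss_values = ap_distance - an_distances + margin
hard_negative = int(np.argmax(loss_values))  # "hardest" = largest loss value
print(loss_values)    # approximately [-0.4, 0.4, 0.0]
print(hard_negative)  # 1 -> the negative closest to the anchor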
class CmdTime(COMMAND_DEFAULT_CLASS): <NEW_LINE> <INDENT> key = "@time" <NEW_LINE> aliases = "@uptime" <NEW_LINE> locks = "cmd:perm(time) or perm(Player)" <NEW_LINE> help_category = "System" <NEW_LINE> def func(self): <NEW_LINE> <INDENT> table1 = EvTable("|wServer time", "", align="l", width=78) <NEW_LINE> table1.add_row("Current uptime", utils.time_format(gametime.uptime(), 3)) <NEW_LINE> table1.add_row("Total runtime", utils.time_format(gametime.runtime(), 2)) <NEW_LINE> table1.add_row("First start", datetime.datetime.fromtimestamp(gametime.server_epoch())) <NEW_LINE> table1.add_row("Current time", datetime.datetime.now()) <NEW_LINE> table1.reformat_column(0, width=30) <NEW_LINE> table2 = EvTable("|wIn-Game time", "|wReal time x %g" % gametime.TIMEFACTOR, align="l", width=77, border_top=0) <NEW_LINE> epochtxt = "Epoch (%s)" % ("from settings" if settings.TIME_GAME_EPOCH else "server start") <NEW_LINE> table2.add_row(epochtxt, datetime.datetime.fromtimestamp(gametime.game_epoch())) <NEW_LINE> table2.add_row("Total time passed:", utils.time_format(gametime.gametime(), 2)) <NEW_LINE> table2.add_row("Current time ", datetime.datetime.fromtimestamp(gametime.gametime(absolute=True))) <NEW_LINE> table2.reformat_column(0, width=30) <NEW_LINE> self.caller.msg(unicode(table1) + "\n" + unicode(table2))
Show server time statistics. Usage: @time Lists server time statistics such as uptime and the current time stamp.
625990440a366e3fb87ddcfd
class FontDialog(QtGui.QFontDialog): <NEW_LINE> <INDENT> def __init__(self, store, parent): <NEW_LINE> <INDENT> super(FontDialog, self).__init__(parent) <NEW_LINE> self.__hide() <NEW_LINE> font = QtGui.QFont() <NEW_LINE> font.fromString(store.data[store.key]) <NEW_LINE> self.setCurrentFont(font) <NEW_LINE> <DEDENT> def __hide(self): <NEW_LINE> <INDENT> for widget in self.children(): <NEW_LINE> <INDENT> if isinstance(widget, QtGui.QComboBox): <NEW_LINE> <INDENT> widget.hide() <NEW_LINE> <DEDENT> elif isinstance(widget, QtGui.QLabel): <NEW_LINE> <INDENT> if widget.text() == 'Wr&iting System': <NEW_LINE> <INDENT> widget.hide() <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(widget, QtGui.QGroupBox): <NEW_LINE> <INDENT> widget.hide()
Custom QFontDialog which hides the effects and writing-system widgets for defaults
6259904410dbd63aa1c71ef1
class Meta: <NEW_LINE> <INDENT> ordering = ['-created'] <NEW_LINE> verbose_name = "subcategoria" <NEW_LINE> verbose_name_plural = "subcategorias" <NEW_LINE> unique_together = ('category', 'name')
Meta class.
62599044d4950a0f3b1117cd
class StandardOrMethodType: <NEW_LINE> <INDENT> MAX="MAX" <NEW_LINE> PROBOR="PROBOR" <NEW_LINE> BSUM="BSUM" <NEW_LINE> DRS="DRS" <NEW_LINE> ESUM="ESUM" <NEW_LINE> HSUM="HSUM" <NEW_LINE> NILMAX="NILMAX"
Python class for StandardOrMethodType
62599044d53ae8145f919776
class BaseProcedure: <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def procedure(self, stack: Any) -> Any: <NEW_LINE> <INDENT> pass
Interface of procedure object. Used in execute_chain.
625990443617ad0b5ee07452
class GetRunnerRaise(CookbookBase): <NEW_LINE> <INDENT> def get_runner(self, args): <NEW_LINE> <INDENT> raise RuntimeError("get_runner raise")
Class API get_runner raise cookbook.
625990441d351010ab8f4e37
class GclientUtilsUnittest(GclientUtilBase): <NEW_LINE> <INDENT> def testMembersChanged(self): <NEW_LINE> <INDENT> members = [ 'Annotated', 'AutoFlush', 'CheckCallAndFilter', 'CheckCallAndFilterAndHeader', 'Error', 'ExecutionQueue', 'FileRead', 'FileWrite', 'FindFileUpwards', 'FindGclientRoot', 'GetGClientRootAndEntries', 'GetEditor', 'IsDateRevision', 'MakeDateRevision', 'MakeFileAutoFlush', 'MakeFileAnnotated', 'PathDifference', 'ParseCodereviewSettingsContent', 'NumLocalCpus', 'PrintableObject', 'RunEditor', 'GCLIENT_CHILDREN', 'GCLIENT_CHILDREN_LOCK', 'GClientChildren', 'SplitUrlRevision', 'SyntaxErrorToError', 'UpgradeToHttps', 'Wrapper', 'WorkItem', 'codecs', 'lockedmethod', 'logging', 'os', 'Queue', 're', 'rmtree', 'safe_makedirs', 'stat', 'subprocess', 'subprocess2', 'sys', 'tempfile', 'threading', 'time', 'urlparse', ] <NEW_LINE> self.compareMembers(gclient_utils, members)
General gclient_utils.py tests.
62599044c432627299fa428e
class reload_result: <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.BOOL, 'success', None, None, ), ) <NEW_LINE> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.BOOL: <NEW_LINE> <INDENT> self.success = iprot.readBool() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('reload_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.BOOL, 0) <NEW_LINE> oprot.writeBool(self.success) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.success) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - success
62599044507cdc57c63a60b5
class MBIEAgent(BaseAgent): <NEW_LINE> <INDENT> def __init__(self, observation_space, action_space, name="MBIE Agent", params={}, starting_policy=None): <NEW_LINE> <INDENT> BaseAgent.__init__(self, observation_space, action_space, name, params=dict(MBIE_DEFAULTS, **params)) <NEW_LINE> if starting_policy: <NEW_LINE> <INDENT> self.predict_policy = starting_policy <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.predict_policy = DiscreteTabularPolicy(self.observation_space, self.action_space, default_value=1/(1-self.gamma)) <NEW_LINE> <DEDENT> self.backup_lim = int(np.log(1/(self.params['epsilon_one'] * (1 - self.gamma))) / (1 - self.gamma)) <NEW_LINE> self.policy_iterations = 0 <NEW_LINE> self.model = DiscreteTabularModel(observation_space, action_space, default_reward=self.params['max_reward'], limit=self.params['known_threshold']) <NEW_LINE> self.learn_policy = self.predict_policy <NEW_LINE> <DEDENT> def stepwise_update(self, state, reward): <NEW_LINE> <INDENT> if not self.model.is_known(self.prev_state, self.prev_action): <NEW_LINE> <INDENT> self.model.update(self.prev_state, self.prev_action, reward, state) <NEW_LINE> self.vectorized_iterate_policy(self.prev_state, self.prev_action, num_steps=min(self.params['max_stepwise_backups'], self.backup_lim)) <NEW_LINE> <DEDENT> <DEDENT> def episodic_update(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def vectorized_iterate_policy(self, state, action, num_steps): <NEW_LINE> <INDENT> eb = 0 if not self.params['include_eb'] else self.params['beta']/ np.sqrt(self.model.counts[state,action]) <NEW_LINE> for _ in range(num_steps): <NEW_LINE> <INDENT> self.predict_policy.q_table = self.model.rewards + self.gamma*np.dot(self.model.transitions, self.predict_policy.get_max_q_values()) + eb <NEW_LINE> <DEDENT> <DEDENT> def get_bellman_backup_function(self): <NEW_LINE> <INDENT> def update_fn(state, action, policy, model, gamma): <NEW_LINE> <INDENT> value = bellman_policy_backup(state, action, policy, model, gamma) <NEW_LINE> if self.model.get_count(state, action) > 0: <NEW_LINE> <INDENT> value += self.beta / np.sqrt(self.model.get_count(state, action)) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> return update_fn
Implementation of an MBIE (Model-Based Interval Estimation) Agent [Strehl, Li and Littman 2009]
6259904416aa5153ce401806
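A rough numpy sketch of the vectorized backup the MBIE entry above performs, with illustrative shapes and a uniform transition model standing in for the agent's learned model: rewards plus discounted expected next-state values, plus an exploration bonus of beta / sqrt(visit count):

import numpy as np

n_states, n_actions = 3, 2
gamma, beta = 0.95, 1.0

rewards = np.zeros((n_states, n_actions))                               # R(s, a)
transitions = np.full((n_states, n_actions, n_states), 1.0 / n_states)  # T(s' | s, a)
counts = np.ones((n_states, n_actions))                                 # visit counts n(s, a)
q = np.zeros((n_states, n_actions))

for _ in range(100):
    v = q.max(axis=1)               # greedy state values
    bonus = beta / np.sqrt(counts)  # optimism for rarely visited pairs
    q = rewards + gamma * transitions.dot(v) + bonus

print(q)  # converges toward bonus / (1 - gamma) = 20 for this toy model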
class FileType(Field): <NEW_LINE> <INDENT> length = 1 <NEW_LINE> valid_values = ('7',) <NEW_LINE> def __init__(self, file_id='7'): <NEW_LINE> <INDENT> self.value = file_id
Mandatory field. Default value = 7 (7 = Direct Credit type). Field format: N(1)
62599044d7e4931a7ef3d38f
class CRUDFormRequestManager(object): <NEW_LINE> <INDENT> def __init__(self, request, form_class, form_template, doc_id=None, delete=False): <NEW_LINE> <INDENT> if not issubclass(form_class, BaseCRUDForm): <NEW_LINE> <INDENT> raise CRUDActionError("form_class must be a subclass of BaseCRUDForm to complete this action") <NEW_LINE> <DEDENT> if delete and not doc_id: <NEW_LINE> <INDENT> raise CRUDActionError("A doc_id is required to perform the delete action.") <NEW_LINE> <DEDENT> self.request = request <NEW_LINE> self.form_class = form_class <NEW_LINE> self.form_template = form_template <NEW_LINE> self.errors = list() <NEW_LINE> self.doc_id = doc_id <NEW_LINE> self.delete = delete <NEW_LINE> self.success = False <NEW_LINE> <DEDENT> @property <NEW_LINE> def json_response(self): <NEW_LINE> <INDENT> if self.delete: <NEW_LINE> <INDENT> form, result = self._delete_doc() <NEW_LINE> <DEDENT> elif self.request.method == 'POST': <NEW_LINE> <INDENT> form, result = self._update_or_create_doc() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> form = self._get_form() <NEW_LINE> result = [] <NEW_LINE> <DEDENT> form_update = render_to_string(self.form_template, dict(form=form)) if form else "" <NEW_LINE> return json.dumps(dict( success=self.success, deleted=self.delete and self.success, form_update=form_update, rows=result, errors=self.errors )) <NEW_LINE> <DEDENT> def _get_form(self): <NEW_LINE> <INDENT> if self.request.method == 'POST' and not self.success: <NEW_LINE> <INDENT> return self.form_class(self.request.POST, doc_id=self.doc_id) <NEW_LINE> <DEDENT> return self.form_class(doc_id=self.doc_id) <NEW_LINE> <DEDENT> def _update_or_create_doc(self): <NEW_LINE> <INDENT> form = self._get_form() <NEW_LINE> result = [] <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> result = form.save() <NEW_LINE> self.success = True <NEW_LINE> form = self._get_form() <NEW_LINE> <DEDENT> return form, result <NEW_LINE> <DEDENT> def _delete_doc(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> doc = self.form_class.doc_class.get(self.doc_id) <NEW_LINE> doc.delete() <NEW_LINE> self.success = True <NEW_LINE> self.doc_id = None <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.errors.append("Could not delete document with id %s due to error: %s" % (self.doc_id, e)) <NEW_LINE> <DEDENT> return self._get_form(), []
How to handle the form POST/GET in a Django view.
62599044004d5f362081f972
class DFHolder(object): <NEW_LINE> <INDENT> def __init__(self, df_lines, shotswrapper, bswrapper): <NEW_LINE> <INDENT> self.lines = df_lines <NEW_LINE> self.shots = shotswrapper <NEW_LINE> self.boxscores = bswrapper
DFHolder: Class that wraps around a collection of dataframes for easier access. Attributes: - lines: a dataframe from the OU data file - shots: Wrapper of xefg dataframes, accessible by shots.players, shots.teams, and shots.globals - boxscores: Same as shots but for boxscore dataframes
6259904407d97122c4217fb9
class DatapointExtractor(Subject, Observer): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Subject.__init__(self) <NEW_LINE> Observer.__init__(self) <NEW_LINE> <DEDENT> def update(self, message): <NEW_LINE> <INDENT> self.process_xml(message) <NEW_LINE> <DEDENT> def process_xml(self, message): <NEW_LINE> <INDENT> seconds_since_2000 = self.extract_field('TimeStamp', message) <NEW_LINE> demand = self.extract_field('Demand', message) <NEW_LINE> multiplier = self.extract_field('Multiplier', message) <NEW_LINE> divisor = self.extract_field('Divisor', message) <NEW_LINE> if seconds_since_2000 and demand and multiplier and divisor: <NEW_LINE> <INDENT> self.notify({"at": self.convert_to_gmt(seconds_since_2000) +'Z', "value": str(1000.0 * demand * multiplier / divisor)}) <NEW_LINE> <DEDENT> <DEDENT> def extract_field(self, name, xml): <NEW_LINE> <INDENT> m = re.search('<'+name+'>(.*?)<\/'+name+'>', xml) <NEW_LINE> if m: <NEW_LINE> <INDENT> return int(m.group(1), 16) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def convert_to_gmt(self, seconds_since_2000): <NEW_LINE> <INDENT> epoch_offset = calendar.timegm(time.strptime("2000-01-01", "%Y-%m-%d")) <NEW_LINE> return datetime.datetime.utcfromtimestamp(seconds_since_2000+epoch_offset).isoformat()
Receive an XML fragment of the form: <InstantaneousDemand> <DeviceMacId>0x00158d00001ab152</DeviceMacId> <MeterMacId>0x000781000028c07d</MeterMacId> <TimeStamp>0x1918513b</TimeStamp> <Demand>0x0000be</Demand> <Multiplier>0x00000001</Multiplier> <Divisor>0x000003e8</Divisor> <DigitsRight>0x03</DigitsRight> <DigitsLeft>0x06</DigitsLeft> <SuppressLeadingZero>Y</SuppressLeadingZero> </InstantaneousDemand> Note that timestamp is seconds from Jan 1, 2000 rather than Jan 1 1970. From this, extract the timestamp as an ISO 8601 string and the instantaneous usage in Watts and pass as a dictionary on to the observers: {"at": "2013-05-20T11:01:43Z", "value": 22.3}
625990441f5feb6acb163f0d
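A worked example of the conversion described in the DatapointExtractor entry above, using the hex values from its sample XML (the 1970-to-2000 epoch shift and the Watts formula are taken directly from the entry's code):

import calendar
import datetime
import time

demand = int("0x0000be", 16)        # 190
multiplier = int("0x00000001", 16)  # 1
divisor = int("0x000003e8", 16)     # 1000
watts = 1000.0 * demand * multiplier / divisor
print(watts)  # 190.0

# Timestamps count seconds from 2000-01-01, so shift by the 1970 -> 2000 offset
# before converting to an ISO 8601 string, as the entry's convert_to_gmt does.
seconds_since_2000 = int("0x1918513b", 16)
epoch_offset = calendar.timegm(time.strptime("2000-01-01", "%Y-%m-%d"))
print(datetime.datetime.utcfromtimestamp(seconds_since_2000 + epoch_offset).isoformat() + "Z")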
class DeviceDataUpdateCoordinator(DataUpdateCoordinator): <NEW_LINE> <INDENT> def __init__( self, hass: HomeAssistant, logger: logging.Logger, api: AbstractGateApi, *, name: str, update_interval: timedelta, update_method: Callable[[], Awaitable] | None = None, request_refresh_debouncer: Debouncer | None = None, ) -> None: <NEW_LINE> <INDENT> DataUpdateCoordinator.__init__( self, hass, logger, name=name, update_interval=update_interval, update_method=update_method, request_refresh_debouncer=request_refresh_debouncer, ) <NEW_LINE> self.api = api
Manages polling for state changes from the device.
62599044a4f1c619b294f814
class Database(object): <NEW_LINE> <INDENT> def __init__(self, configure, name, echo=False, pool_size=10, pool_recycle=1800, poolclass=None, thread=True): <NEW_LINE> <INDENT> self.configure = configure <NEW_LINE> extend_args = {'pool_size': pool_size} <NEW_LINE> if poolclass == NullPool: <NEW_LINE> <INDENT> extend_args = {"poolclass": NullPool} <NEW_LINE> <DEDENT> self.engine = create_engine(self.get_url(name), echo=echo, pool_recycle=pool_recycle, pool_pre_ping=True, encoding="utf-8", convert_unicode=False, **extend_args) <NEW_LINE> if thread: <NEW_LINE> <INDENT> self.Session = scoped_session(sessionmaker(bind=self.engine, autocommit=False, autoflush=False)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.Session = sessionmaker(bind=self.engine, autocommit=False, autoflush=False) <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> if name == "session": <NEW_LINE> <INDENT> return self.Session() <NEW_LINE> <DEDENT> <DEDENT> def get_url(self, config_name): <NEW_LINE> <INDENT> self.url = self.configure.get(config_name) <NEW_LINE> return self.url
Database Manager Object
625990448e71fb1e983bcde8
class RevolutionAngle(Array): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super().__init__(args=[EVALARGS], shape=[], dtype=float) <NEW_LINE> <DEDENT> def evalf(self, evalargs): <NEW_LINE> <INDENT> raise Exception('RevolutionAngle should not be evaluated') <NEW_LINE> <DEDENT> def _derivative(self, var, seen): <NEW_LINE> <INDENT> return (ones_like if isinstance(var, LocalCoords) and len(var) > 0 else zeros_like)(var) <NEW_LINE> <DEDENT> def _optimized_for_numpy(self): <NEW_LINE> <INDENT> return Zeros(self.shape, float)
Pseudo coordinates of a :class:`nutils.topology.RevolutionTopology`.
62599044462c4b4f79dbcd17
class RecordStream(object): <NEW_LINE> <INDENT> def __init__(self, graph, response): <NEW_LINE> <INDENT> self.graph = graph <NEW_LINE> self.__response = response <NEW_LINE> self.__response_item = self.__response_iterator() <NEW_LINE> self.columns = next(self.__response_item) <NEW_LINE> log.info("stream %r", self.columns) <NEW_LINE> <DEDENT> def __response_iterator(self): <NEW_LINE> <INDENT> producer = None <NEW_LINE> columns = [] <NEW_LINE> record_data = None <NEW_LINE> for key, value in self.__response: <NEW_LINE> <INDENT> key_len = len(key) <NEW_LINE> if key_len > 0: <NEW_LINE> <INDENT> section = key[0] <NEW_LINE> if section == "columns": <NEW_LINE> <INDENT> if key_len > 1: <NEW_LINE> <INDENT> columns.append(value) <NEW_LINE> <DEDENT> <DEDENT> elif section == "data": <NEW_LINE> <INDENT> if key_len == 1: <NEW_LINE> <INDENT> producer = RecordProducer(columns) <NEW_LINE> yield tuple(columns) <NEW_LINE> <DEDENT> elif key_len == 2: <NEW_LINE> <INDENT> if record_data is not None: <NEW_LINE> <INDENT> yield producer.produce(self.graph.hydrate(assembled(record_data))) <NEW_LINE> <DEDENT> record_data = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> record_data.append((key[2:], value)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> if record_data is not None: <NEW_LINE> <INDENT> yield producer.produce(self.graph.hydrate(assembled(record_data))) <NEW_LINE> <DEDENT> self.close() <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> return next(self.__response_item) <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> return self.__next__() <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.__response.close()
An accessor for a sequence of records yielded by a streamed Cypher statement. :: for record in graph.cypher.stream("START n=node(*) RETURN n LIMIT 10") print record[0] Each record returned is cast into a :py:class:`namedtuple` with names derived from the resulting column names. .. note :: Results are available as returned from the server and are decoded incrementally. This means that there is no need to wait for the entire response to be received before processing can occur.
62599044d10714528d69f019
class BusStage: <NEW_LINE> <INDENT> mode = "bus" <NEW_LINE> def __init__( self, boarding, ): <NEW_LINE> <INDENT> self.boarding = boarding <NEW_LINE> self.entry_ts = boarding.timestamp <NEW_LINE> self.entry_stop = BusSchedule().get_stop(boarding.stop_id) <NEW_LINE> self.route = self.route_from_transaction(boarding) <NEW_LINE> self.exit_ts = None <NEW_LINE> self.exit_stop = None <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def route_from_transaction(transaction): <NEW_LINE> <INDENT> route_tup = BusRouteTuple( transaction.route_id, transaction.route_direction, transaction.route_variant, ) <NEW_LINE> return BusSchedule().get_route(route_tup) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"[BUS] [{self.entry_ts}] ({self.entry_stop}) -> [{self.exit_ts }] ({self.exit_stop}) [{self.route}]"
Represents a bus stage
62599044711fe17d825e162a
class TabList(FrontendMessage): <NEW_LINE> <INDENT> def __init__(self, typ): <NEW_LINE> <INDENT> self.type = typ <NEW_LINE> self.toplevels = [] <NEW_LINE> self.folder_children = {} <NEW_LINE> self.expanded_folders = set() <NEW_LINE> self.root_expanded = None <NEW_LINE> <DEDENT> def append(self, info): <NEW_LINE> <INDENT> self.toplevels.append(info) <NEW_LINE> if info.is_folder: <NEW_LINE> <INDENT> self.folder_children[info.id] = [] <NEW_LINE> <DEDENT> <DEDENT> def append_child(self, parent_id, info): <NEW_LINE> <INDENT> self.folder_children[parent_id].append(info) <NEW_LINE> <DEDENT> def expand_folder(self, folder_id): <NEW_LINE> <INDENT> self.expanded_folders.add(folder_id)
Sends the frontend the current list of channels and playlists This is sent at startup and when the changes to the list of channels/playlists is too complex to describe with a TabsChanged message. :param type: ``feed`` or ``playlist`` :param toplevels: the list of ChannelInfo/PlaylistInfo objects without parents :param folder_children: dict mapping channel folder ids to a list of ChannelInfo/PlaylistInfo objects for their children :param expanded_folders: set containing ids of the folders that should be initially expanded.
62599044097d151d1a2c2383
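An illustrative sketch of how the toplevels, folder_children and expanded_folders structures described in the TabList entry above fit together, with a hypothetical Info namedtuple standing in for ChannelInfo/PlaylistInfo:

from collections import namedtuple

Info = namedtuple("Info", ["id", "name", "is_folder"])

toplevels = []
folder_children = {}
expanded_folders = set()

def append(info):
    toplevels.append(info)
    if info.is_folder:
        folder_children[info.id] = []

folder = Info(id=1, name="News", is_folder=True)
feed = Info(id=2, name="Tech feed", is_folder=False)
append(folder)                           # a toplevel folder
folder_children[folder.id].append(feed)  # append_child(parent_id, info)
expanded_folders.add(folder.id)          # expand_folder(folder_id)
print(toplevels, folder_children, expanded_folders)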
class Scored_Pattern(): <NEW_LINE> <INDENT> def __init__(self, pattern, score=3, flags=0): <NEW_LINE> <INDENT> self.score = score <NEW_LINE> self.pattern = pattern <NEW_LINE> self.re_obj = re.compile(pattern, flags)
A pattern with a score attached to it
625990440a366e3fb87ddcff
class Card: <NEW_LINE> <INDENT> uid = None <NEW_LINE> name = None <NEW_LINE> stats = None <NEW_LINE> count = 1 <NEW_LINE> def __init__(self, name_or_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.uid = int(name_or_id) <NEW_LINE> self.name = db.find_card_with_id(self.uid)["name"] <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self.name = name_or_id <NEW_LINE> self.uid = db.find_card(self.name)["id"] <NEW_LINE> <DEDENT> self.stats = db.get_card_stat(self.uid) <NEW_LINE> self.logic = CardLogic.get_logic(self.uid)
Represents a card. Goes through each class defined in card_logic and loads them as needed in order to get the card's value.
6259904445492302aabfd7f5
class Proposal(object): <NEW_LINE> <INDENT> def _generate(self, pop, ref_pop, weights=None): <NEW_LINE> <INDENT> raise NotImplemented("You must define this method in a subclass.") <NEW_LINE> <DEDENT> def generate(self, pop, ref_pop=None, weights=None, fixed=None): <NEW_LINE> <INDENT> if fixed is None: <NEW_LINE> <INDENT> fixed = np.zeros(pop.shape[1], dtype=np.bool) <NEW_LINE> <DEDENT> if ref_pop is None or len(ref_pop) == 0: <NEW_LINE> <INDENT> ref_pop = pop.copy() <NEW_LINE> <DEDENT> proposal = np.ones_like(pop) * np.nan <NEW_LINE> proposal[:, fixed] = pop[:, fixed] <NEW_LINE> proposal[:, ~fixed] = self._generate(pop[:, ~fixed], ref_pop[:, ~fixed], weights=weights) <NEW_LINE> return proposal <NEW_LINE> <DEDENT> def __call__(self, pop, ref_pop=None, weights=None, fixed=None): <NEW_LINE> <INDENT> return self.generate(pop, ref_pop, weights, fixed)
Generate a new proposal population based on the current population and their weights.
62599044d4950a0f3b1117ce
class Arrow(Patch): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return "Arrow()" <NEW_LINE> <DEDENT> _path = Path([ [0.0, 0.1], [0.0, -0.1], [0.8, -0.1], [0.8, -0.3], [1.0, 0.0], [0.8, 0.3], [0.8, 0.1], [0.0, 0.1]], closed=True) <NEW_LINE> @docstring.dedent_interpd <NEW_LINE> def __init__(self, x, y, dx, dy, width=1.0, **kwargs): <NEW_LINE> <INDENT> Patch.__init__(self, **kwargs) <NEW_LINE> L = np.sqrt(dx ** 2 + dy ** 2) or 1 <NEW_LINE> cx = float(dx) / L <NEW_LINE> sx = float(dy) / L <NEW_LINE> trans1 = transforms.Affine2D().scale(L, width) <NEW_LINE> trans2 = transforms.Affine2D.from_values(cx, sx, -sx, cx, 0.0, 0.0) <NEW_LINE> trans3 = transforms.Affine2D().translate(x, y) <NEW_LINE> trans = trans1 + trans2 + trans3 <NEW_LINE> self._patch_transform = trans.frozen() <NEW_LINE> <DEDENT> def get_path(self): <NEW_LINE> <INDENT> return self._path <NEW_LINE> <DEDENT> def get_patch_transform(self): <NEW_LINE> <INDENT> return self._patch_transform
An arrow patch.
6259904430c21e258be99b21
class ShellHelperTest(CmdLineTest): <NEW_LINE> <INDENT> COMMAND: Optional[str] = None <NEW_LINE> EXPECTED: Optional[Union[str, List[str], Set[str]]] = None <NEW_LINE> def _assert(self, out: str) -> None: <NEW_LINE> <INDENT> if isinstance(self.EXPECTED, list): <NEW_LINE> <INDENT> assert out.split() == self.EXPECTED <NEW_LINE> <DEDENT> elif isinstance(self.EXPECTED, set): <NEW_LINE> <INDENT> assert set(out.split()) == self.EXPECTED <NEW_LINE> <DEDENT> <DEDENT> def test_method(self) -> None: <NEW_LINE> <INDENT> args: List[str] = [] <NEW_LINE> cmds = getattr(shell_helper, str(self.COMMAND))(args) <NEW_LINE> self._assert("\n".join(cmds)) <NEW_LINE> <DEDENT> def test_cmdline( self, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str] ) -> None: <NEW_LINE> <INDENT> CmdLineTest.run_module(monkeypatch, "helper_main", ["cobib", f"_{self.COMMAND}"]) <NEW_LINE> self._assert(capsys.readouterr().out) <NEW_LINE> <DEDENT> def test_cmdline_via_main( self, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str] ) -> None: <NEW_LINE> <INDENT> with pytest.raises(SystemExit): <NEW_LINE> <INDENT> CmdLineTest.run_module(monkeypatch, "main", ["cobib", f"_{self.COMMAND}"]) <NEW_LINE> <DEDENT> self._assert(capsys.readouterr().out)
A base class for some common shell helper unit tests.
62599044baa26c4b54d505c3
class TourSerializers(serializers.ModelSerializer): <NEW_LINE> <INDENT> images = TourImageSerializer(many=True) <NEW_LINE> creator = UserSerializers() <NEW_LINE> travel_agent_id = TravelAgentSerializer() <NEW_LINE> average_review = serializers.SerializerMethodField() <NEW_LINE> type_of_tour = TypeOfTourSerializer(many=True) <NEW_LINE> provided_services = ProvidedServicesSerializer(many=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Tour <NEW_LINE> fields = ('creator', 'id', 'title', 'text', 'duration', 'images', 'travel_agent_id', 'average_review', 'tour_rating', 'type_of_tour', 'provided_services') <NEW_LINE> <DEDENT> def get_my_absolute_url(self, obj): <NEW_LINE> <INDENT> return obj.get_absolute_url() <NEW_LINE> <DEDENT> def get_average_review(self, obj): <NEW_LINE> <INDENT> av = TourReview.objects.filter(id=obj.id).aggregate(avg_rating=Avg('review_rating')) <NEW_LINE> if av['avg_rating'] is None: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return av['avg_rating']
Serializer for Tour model
6259904476d4e153a661dc02
class Function(object): <NEW_LINE> <INDENT> def __init__(self, fun, samples=100, range=None): <NEW_LINE> <INDENT> self._fun = fun <NEW_LINE> self._samples = samples <NEW_LINE> self.range = range <NEW_LINE> self._x0 = None <NEW_LINE> self._x1 = None <NEW_LINE> self._dx = None <NEW_LINE> self._i = None <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> self._x0 = self.range[0] <NEW_LINE> self._x1 = self.range[1] <NEW_LINE> self._dx = float(self._x1-self._x0)/(self._samples-1) <NEW_LINE> self._i = 0 <NEW_LINE> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> return self.next() <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> if self._i == self._samples: <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> x = self._x0 + self._i*self._dx <NEW_LINE> self._i += 1 <NEW_LINE> return (x, self._fun(x))
Data generator for python functions Args: fun (function): python function samples (int): number of sampled points range (tuple): range of the data
62599044b830903b9686ee07
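A small self-contained sketch of the sampling idea behind the Function entry above, using numpy.linspace in place of the entry's hand-rolled iterator:

import math
import numpy as np

def sample_function(fun, samples=100, rng=(0.0, 1.0)):
    # linspace reproduces x0 + i * (x1 - x0) / (samples - 1) for i in 0..samples-1
    for x in np.linspace(rng[0], rng[1], samples):
        yield (x, fun(x))

points = list(sample_function(math.sin, samples=5, rng=(0.0, math.pi)))
print(points)  # five (x, sin(x)) pairs from (0, 0) up to (pi, ~0)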
class DdosProtection(A10BaseClass): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.ERROR_MSG = "" <NEW_LINE> self.required=[] <NEW_LINE> self.b_key = "ddos-protection" <NEW_LINE> self.a10_url="/axapi/v3/cgnv6/ddos-protection" <NEW_LINE> self.DeviceProxy = "" <NEW_LINE> self.packets_per_second = {} <NEW_LINE> self.toggle = "" <NEW_LINE> self.logging = {} <NEW_LINE> self.uuid = "" <NEW_LINE> self.sampling_enable = [] <NEW_LINE> for keys, value in kwargs.items(): <NEW_LINE> <INDENT> setattr(self,keys, value)
:param toggle: {"description": "'enable': Enable CGNV6 NAT pool DDoS protection (default); 'disable': Disable CGNV6 NAT pool DDoS protection; ", "format": "enum", "default": "enable", "type": "string", "enum": ["enable", "disable"], "optional": true} :param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"} :param sampling_enable: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"optional": true, "counters1": {"enum": ["all", "entry_added", "entry_deleted", "entry_added_to_hw", "entry_removed_from_hw", "hw_out_of_entries", "entry_match_drop", "entry_match_drop_hw", "entry_list_alloc", "entry_list_free", "entry_list_alloc_failure", "ip_node_alloc", "ip_node_free", "ip_node_alloc_failure", "ip_port_block_alloc", "ip_port_block_free", "ip_port_block_alloc_failure", "ip_other_block_alloc", "ip_other_block_free", "ip_other_block_alloc_failure", "entry_added_shadow", "entry_invalidated"], "type": "string", "description": "'all': all; 'entry_added': entry_added; 'entry_deleted': entry_deleted; 'entry_added_to_hw': entry_added_to_hw; 'entry_removed_from_hw': entry_removed_from_hw; 'hw_out_of_entries': hw_out_of_entries; 'entry_match_drop': entry_match_drop; 'entry_match_drop_hw': entry_match_drop_hw; 'entry_list_alloc': entry_list_alloc; 'entry_list_free': entry_list_free; 'entry_list_alloc_failure': entry_list_alloc_failure; 'ip_node_alloc': ip_node_alloc; 'ip_node_free': ip_node_free; 'ip_node_alloc_failure': ip_node_alloc_failure; 'ip_port_block_alloc': ip_port_block_alloc; 'ip_port_block_free': ip_port_block_free; 'ip_port_block_alloc_failure': ip_port_block_alloc_failure; 'ip_other_block_alloc': ip_other_block_alloc; 'ip_other_block_free': ip_other_block_free; 'ip_other_block_alloc_failure': ip_other_block_alloc_failure; 'entry_added_shadow': entry_added_shadow; 'entry_invalidated': entry_invalidated; ", "format": "enum"}}}]} :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py` Class Description:: Configure CGNV6 DDoS Protection. Class ddos-protection supports CRUD Operations and inherits from `common/A10BaseClass`. This class is the `"PARENT"` class for this module.` URL for this object:: `https://<Hostname|Ip address>//axapi/v3/cgnv6/ddos-protection`.
62599044a8ecb0332587252b
class ListVpnSiteLinkConnectionsResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[VpnSiteLinkConnection]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(ListVpnSiteLinkConnectionsResult, self).__init__(**kwargs) <NEW_LINE> self.value = kwargs.get('value', None) <NEW_LINE> self.next_link = kwargs.get('next_link', None)
Result of the request to list all vpn connections to a virtual wan vpn gateway. It contains a list of Vpn Connections and a URL nextLink to get the next set of results. :param value: List of VpnSiteLinkConnections. :type value: list[~azure.mgmt.network.v2019_09_01.models.VpnSiteLinkConnection] :param next_link: URL to get the next set of operation list results if there are any. :type next_link: str
6259904416aa5153ce401808
class OrientationReader( object ): <NEW_LINE> <INDENT> def __init__( self, f ): <NEW_LINE> <INDENT> if f.endswith('.m1') or f.endswith('.m4') or f.endswith('.m5'): <NEW_LINE> <INDENT> self.orientations = _parse_orientation( f ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg = 'Invalid Orientation format (M1, M4, M5 valid)' <NEW_LINE> log.error( msg ) <NEW_LINE> raise TypeError( msg ) <NEW_LINE> <DEDENT> <DEDENT> def is_forward( self, item ): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.orientations[item] == 'forward': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> msg = 'Item not found! (%s)' % item <NEW_LINE> log.error( msg ) <NEW_LINE> raise KeyError( msg ) <NEW_LINE> <DEDENT> <DEDENT> def is_reverse( self, item ): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.orientations[item] == 'reverse': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> msg = 'Item not found! (%s)' % item <NEW_LINE> log.error( msg ) <NEW_LINE> raise KeyError( msg ) <NEW_LINE> <DEDENT> <DEDENT> def __iter__( self ): <NEW_LINE> <INDENT> return iter(self.orientations) <NEW_LINE> <DEDENT> def __contains__( self, item ): <NEW_LINE> <INDENT> if item in self.orientations: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
A class for parsing the orientation of reads from a Blasr alignment
6259904423e79379d538d818
class LikeComment(APIView): <NEW_LINE> <INDENT> serializer_class = CommentSerializer <NEW_LINE> permission_classes = (IsAuthenticatedOrReadOnly,) <NEW_LINE> renderer_classes = (CommentJSONRenderer,) <NEW_LINE> def put(self, request, **kwargs): <NEW_LINE> <INDENT> slug = self.kwargs['slug'] <NEW_LINE> pk = self.kwargs['pk'] <NEW_LINE> try: <NEW_LINE> <INDENT> article = Article.objects.get(slug=slug) <NEW_LINE> <DEDENT> except Article.DoesNotExist: <NEW_LINE> <INDENT> raise NotFound('An article with this slug does not exist.') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> comment = Comment.objects.get(pk=pk) <NEW_LINE> <DEDENT> except Comment.DoesNotExist: <NEW_LINE> <INDENT> raise NotFound('A comment with this ID does not exist.') <NEW_LINE> <DEDENT> comment.dislikes.remove(request.user) <NEW_LINE> if request.user in comment.likes.all(): <NEW_LINE> <INDENT> comment.likes.remove(request.user) <NEW_LINE> return Response({"message": "You unliked this article."}, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> comment.likes.add(request.user) <NEW_LINE> return Response({"message": "You liked this comment"}, status=status.HTTP_200_OK)
Like a comment
62599044a79ad1619776b39a
class ScalingUpExecuteWebhookTest(AutoscaleFixture): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(ScalingUpExecuteWebhookTest, self).setUp() <NEW_LINE> self.create_group_response = self.autoscale_behaviors.create_scaling_group_given( gc_min_entities=self.gc_min_entities_alt) <NEW_LINE> self.group = self.create_group_response.entity <NEW_LINE> self.resources.add(self.group, self.empty_scaling_group) <NEW_LINE> <DEDENT> @tags(speed='quick', convergence='yes') <NEW_LINE> def test_system_execute_webhook_scale_up_change(self): <NEW_LINE> <INDENT> policy_up = {'change': 1} <NEW_LINE> execute_webhook_in_change_policy = self.autoscale_behaviors.create_policy_webhook( group_id=self.group.id, policy_data=policy_up, execute_webhook=True) <NEW_LINE> self.assertEquals(execute_webhook_in_change_policy[ 'execute_response'], 202) <NEW_LINE> self.check_for_expected_number_of_building_servers( group_id=self.group.id, expected_servers=policy_up['change'] + self.group.groupConfiguration.minEntities) <NEW_LINE> <DEDENT> @tags(speed='quick', convergence='yes') <NEW_LINE> def test_system_execute_webhook_scale_up_change_percent(self): <NEW_LINE> <INDENT> policy_up = {'change_percent': 100} <NEW_LINE> execute_webhook_in_change_percent_policy = self.autoscale_behaviors.create_policy_webhook( group_id=self.group.id, policy_data=policy_up, execute_webhook=True) <NEW_LINE> self.assertEquals(execute_webhook_in_change_percent_policy[ 'execute_response'], 202) <NEW_LINE> servers_from_scale_up = self.autoscale_behaviors.calculate_servers( current=self.group.groupConfiguration.minEntities, percentage=policy_up['change_percent']) <NEW_LINE> self.check_for_expected_number_of_building_servers( group_id=self.group.id, expected_servers=servers_from_scale_up) <NEW_LINE> <DEDENT> @tags(speed='quick', convergence='yes') <NEW_LINE> def test_system_execute_webhook_scale_up_desired_capacity(self): <NEW_LINE> <INDENT> desired_capacity = self.group.groupConfiguration.minEntities + 1 <NEW_LINE> policy_up = {'desired_capacity': desired_capacity} <NEW_LINE> execute_webhook_in_desired_capacity_policy = self.autoscale_behaviors.create_policy_webhook( group_id=self.group.id, policy_data=policy_up, execute_webhook=True) <NEW_LINE> self.assertEquals(execute_webhook_in_desired_capacity_policy[ 'execute_response'], 202) <NEW_LINE> self.check_for_expected_number_of_building_servers( group_id=self.group.id, expected_servers=policy_up['desired_capacity'])
System tests to verify scenarios for executing scaling policies
625990448a349b6b43687565
class IWordProcessor: <NEW_LINE> <INDENT> def connect(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def disconnect(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def is_connected(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def cite(self, keys): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def fetch(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def update_keys(self, keymap): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def update_biblio(self): <NEW_LINE> <INDENT> pass
Interface a WordProcessor object should provide
625990448a43f66fc4bf34ae
class Ray(object): <NEW_LINE> <INDENT> def __init__(self, origin, direction): <NEW_LINE> <INDENT> self.origin = origin <NEW_LINE> self.direction = direction.normalized() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Ray(%s, %s' % (repr(self.origin), repr(self.direction)) <NEW_LINE> <DEDENT> def pointAtParameter(self, t): <NEW_LINE> <INDENT> return self.origin + self.direction.scale(t)
Defines a ray
625990448e05c05ec3f6f7e8
class OAuthSiteTestCase(TestCase): <NEW_LINE> <INDENT> oauth = True <NEW_LINE> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> super(OAuthSiteTestCase, cls).setUpClass() <NEW_LINE> if isinstance(mwoauth, ImportError): <NEW_LINE> <INDENT> raise unittest.SkipTest('mwoauth not installed') <NEW_LINE> <DEDENT> <DEDENT> def _get_oauth_tokens(self): <NEW_LINE> <INDENT> tokens_env = 'OAUTH_TOKENS_' + self.family.upper() <NEW_LINE> tokens = os.environ.get(tokens_env + '_' + self.code.upper(), None) <NEW_LINE> tokens = tokens or os.environ.get(tokens_env, None) <NEW_LINE> return tuple(tokens.split(':')) if tokens is not None else None <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> super(OAuthSiteTestCase, self).setUp() <NEW_LINE> self.site = self.get_site() <NEW_LINE> if not self.site.has_extension('OAuth'): <NEW_LINE> <INDENT> raise unittest.SkipTest('OAuth extension not loaded on test site') <NEW_LINE> <DEDENT> tokens = self._get_oauth_tokens() <NEW_LINE> if tokens is None: <NEW_LINE> <INDENT> raise unittest.SkipTest('OAuth tokens not set') <NEW_LINE> <DEDENT> self.assertEqual(len(tokens), 4) <NEW_LINE> self.consumer_token = tokens[:2] <NEW_LINE> self.access_token = tokens[2:]
Run tests related to OAuth authentication.
62599044097d151d1a2c2385
class EnvironHeaders(ImmutableHeadersMixin, Headers): <NEW_LINE> <INDENT> def __init__(self, environ): <NEW_LINE> <INDENT> self.environ = environ <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.environ is other.environ <NEW_LINE> <DEDENT> def __getitem__(self, key, _get_mode=False): <NEW_LINE> <INDENT> key = key.upper().replace('-', '_') <NEW_LINE> if key in ('CONTENT_TYPE', 'CONTENT_LENGTH'): <NEW_LINE> <INDENT> return _unicodify_value(self.environ[key]) <NEW_LINE> <DEDENT> return _unicodify_value(self.environ['HTTP_' + key]) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(list(iter(self))) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for key, value in iteritems(self.environ): <NEW_LINE> <INDENT> if key.startswith('HTTP_') and key not in ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'): <NEW_LINE> <INDENT> yield (key[5:].replace('_', '-').title(), _unicodify_value(value)) <NEW_LINE> <DEDENT> elif key in ('CONTENT_TYPE', 'CONTENT_LENGTH'): <NEW_LINE> <INDENT> yield (key.replace('_', '-').title(), _unicodify_value(value)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def copy(self): <NEW_LINE> <INDENT> raise TypeError('cannot create %r copies' % self.__class__.__name__)
Read only version of the headers from a WSGI environment. This provides the same interface as `Headers` and is constructed from a WSGI environment. From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP exceptions.
62599044d10714528d69f01a
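A tiny plain-dict sketch of the WSGI-environ-to-header-name mapping that the EnvironHeaders entry above iterates over (no Werkzeug required):

environ = {
    "HTTP_USER_AGENT": "curl/8.0",
    "HTTP_ACCEPT": "*/*",
    "CONTENT_TYPE": "text/plain",
    "CONTENT_LENGTH": "11",
    "PATH_INFO": "/",  # not a header, so it is skipped
}

headers = []
for key, value in environ.items():
    if key.startswith("HTTP_"):
        headers.append((key[5:].replace("_", "-").title(), value))
    elif key in ("CONTENT_TYPE", "CONTENT_LENGTH"):
        headers.append((key.replace("_", "-").title(), value))

print(headers)
# [('User-Agent', 'curl/8.0'), ('Accept', '*/*'), ('Content-Type', 'text/plain'), ('Content-Length', '11')]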
class DeleteSSIDResponse(object): <NEW_LINE> <INDENT> openapi_types = { 'is_error': 'bool', 'failure_reason': 'str', 'success_message': 'str' } <NEW_LINE> attribute_map = { 'is_error': 'isError', 'failure_reason': 'failureReason', 'success_message': 'successMessage' } <NEW_LINE> def __init__(self, is_error=None, failure_reason=None, success_message=None): <NEW_LINE> <INDENT> self._is_error = None <NEW_LINE> self._failure_reason = None <NEW_LINE> self._success_message = None <NEW_LINE> self.discriminator = None <NEW_LINE> if is_error is not None: <NEW_LINE> <INDENT> self.is_error = is_error <NEW_LINE> <DEDENT> if failure_reason is not None: <NEW_LINE> <INDENT> self.failure_reason = failure_reason <NEW_LINE> <DEDENT> if success_message is not None: <NEW_LINE> <INDENT> self.success_message = success_message <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def is_error(self): <NEW_LINE> <INDENT> return self._is_error <NEW_LINE> <DEDENT> @is_error.setter <NEW_LINE> def is_error(self, is_error): <NEW_LINE> <INDENT> self._is_error = is_error <NEW_LINE> <DEDENT> @property <NEW_LINE> def failure_reason(self): <NEW_LINE> <INDENT> return self._failure_reason <NEW_LINE> <DEDENT> @failure_reason.setter <NEW_LINE> def failure_reason(self, failure_reason): <NEW_LINE> <INDENT> self._failure_reason = failure_reason <NEW_LINE> <DEDENT> @property <NEW_LINE> def success_message(self): <NEW_LINE> <INDENT> return self._success_message <NEW_LINE> <DEDENT> @success_message.setter <NEW_LINE> def success_message(self, success_message): <NEW_LINE> <INDENT> self._success_message = success_message <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, DeleteSSIDResponse): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
62599044462c4b4f79dbcd19
class TransformedPDEntry(PDEntry): <NEW_LINE> <INDENT> def __init__(self, comp, original_entry): <NEW_LINE> <INDENT> super(TransformedPDEntry, self).__init__(comp, original_entry.energy) <NEW_LINE> self.original_entry = original_entry <NEW_LINE> self.name = original_entry.name <NEW_LINE> <DEDENT> def __getattr__(self, a): <NEW_LINE> <INDENT> if hasattr(self.original_entry, a): <NEW_LINE> <INDENT> return getattr(self.original_entry, a) <NEW_LINE> <DEDENT> raise AttributeError(a) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> output = ["TransformedPDEntry {}".format(self.composition), " with original composition {}" .format(self.original_entry.composition), ", E = {:.4f}".format(self.original_entry.energy)] <NEW_LINE> return "".join(output) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__repr__() <NEW_LINE> <DEDENT> def as_dict(self): <NEW_LINE> <INDENT> return {"@module": self.__class__.__module__, "@class": self.__class__.__name__, "entry": self.original_entry.as_dict(), "composition": self.composition} <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, d): <NEW_LINE> <INDENT> entry = MontyDecoder().process_decoded(d["entry"]) <NEW_LINE> return cls(d["composition"], entry)
This class represents a TransformedPDEntry, which allows for a PDEntry to be transformed to a different composition coordinate space. It is used in the construction of phase diagrams that do not have elements as the terminal compositions. Args: comp (Composition): Transformed composition as a Composition. original_entry (PDEntry): Original entry that this entry arose from.
6259904407d97122c4217fbc
class CentralRuleD1 (CentralRule) : <NEW_LINE> <INDENT> def __init__ ( self , I = 2 , with_error = False , max_step = -1 ) : <NEW_LINE> <INDENT> CentralRule.__init__ ( self , 1 , I , with_error = with_error , max_step = max_step )
Central rule for the 1st derivative
62599044baa26c4b54d505c5
class Command(Runserver): <NEW_LINE> <INDENT> def get_handler(self, *args, **options): <NEW_LINE> <INDENT> handler = super(Command, self).get_handler(*args, **options) <NEW_LINE> if settings.DEBUG: <NEW_LINE> <INDENT> return MezzStaticFilesHandler(handler) <NEW_LINE> <DEDENT> return handler
Overrides runserver so that we can serve uploaded files during development, and not require every single developer on every single one of their projects to have to set up multiple web server aliases for serving static content. See https://code.djangoproject.com/ticket/15199
62599044b5575c28eb713657
class FailureDetail(_messages.Message): <NEW_LINE> <INDENT> crashed = _messages.BooleanField(1) <NEW_LINE> notInstalled = _messages.BooleanField(2) <NEW_LINE> timedOut = _messages.BooleanField(3)
A FailureDetail object. Fields: crashed: If the failure was severe because the system under test crashed. notInstalled: If an app is not installed and thus no test can be run with the app. This might be caused by trying to run a test on an unsupported platform. timedOut: If the test overran some time limit, and that is why it failed.
6259904421a7993f00c67285
class ResPartnerExtended(models.Model): <NEW_LINE> <INDENT> _inherit = 'res.partner' <NEW_LINE> d_id = fields.Char(string='ID-Card', size=64) <NEW_LINE> is_driver = fields.Boolean(string='Is Driver') <NEW_LINE> insurance = fields.Boolean(string='Insurance')
Model res partner extended.
6259904482261d6c52730853
class VideoCoder(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._sess = tf.Session() <NEW_LINE> self._video_path = tf.placeholder(dtype=tf.string) <NEW_LINE> self._decode_video = decode_video(self._video_path) <NEW_LINE> self._raw_frame = tf.placeholder(dtype=tf.uint8, shape=[None, None, 3]) <NEW_LINE> self._raw_mask = tf.placeholder(dtype=tf.uint8, shape=[None, None, 1]) <NEW_LINE> self._encode_frame = tf.image.encode_jpeg(self._raw_frame, quality=100) <NEW_LINE> self._encode_mask = tf.image.encode_png(self._raw_mask) <NEW_LINE> <DEDENT> def decode_video(self, video_data): <NEW_LINE> <INDENT> video, _, _, seq_length = self._sess.run(self._decode_video, feed_dict={self._video_path: video_data}) <NEW_LINE> raw_height, raw_width = video.shape[1], video.shape[2] <NEW_LINE> assert len(video.shape) == 4 <NEW_LINE> assert video.shape[3] == 3 <NEW_LINE> return video, raw_height, raw_width, seq_length <NEW_LINE> <DEDENT> def encode_frame(self, raw_frame): <NEW_LINE> <INDENT> return self._sess.run(self._encode_frame, feed_dict={self._raw_frame: raw_frame}) <NEW_LINE> <DEDENT> def encode_mask(self, raw_mask): <NEW_LINE> <INDENT> return self._sess.run(self._encode_mask, feed_dict={self._raw_mask: raw_mask})
Helper class providing TensorFlow image coding utilities
62599044596a897236128f3d
class ExcludedUsersUpdateArg(bb.Struct): <NEW_LINE> <INDENT> __slots__ = [ '_users_value', '_users_present', ] <NEW_LINE> _has_required_fields = False <NEW_LINE> def __init__(self, users=None): <NEW_LINE> <INDENT> self._users_value = None <NEW_LINE> self._users_present = False <NEW_LINE> if users is not None: <NEW_LINE> <INDENT> self.users = users <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def users(self): <NEW_LINE> <INDENT> if self._users_present: <NEW_LINE> <INDENT> return self._users_value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @users.setter <NEW_LINE> def users(self, val): <NEW_LINE> <INDENT> if val is None: <NEW_LINE> <INDENT> del self.users <NEW_LINE> return <NEW_LINE> <DEDENT> val = self._users_validator.validate(val) <NEW_LINE> self._users_value = val <NEW_LINE> self._users_present = True <NEW_LINE> <DEDENT> @users.deleter <NEW_LINE> def users(self): <NEW_LINE> <INDENT> self._users_value = None <NEW_LINE> self._users_present = False <NEW_LINE> <DEDENT> def _process_custom_annotations(self, annotation_type, processor): <NEW_LINE> <INDENT> super(ExcludedUsersUpdateArg, self)._process_custom_annotations(annotation_type, processor) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'ExcludedUsersUpdateArg(users={!r})'.format( self._users_value, )
Argument of excluded users update operation. Should include a list of users to add/remove (according to endpoint). Maximum size of the list is 1000 users. :ivar team.ExcludedUsersUpdateArg.users: List of users to be added/removed.
62599044e64d504609df9d5f
class IngredientSearcher(six.with_metaclass(abc.ABCMeta, object)): <NEW_LINE> <INDENT> def __init__(self, dish_ids): <NEW_LINE> <INDENT> self.dish_ids = list(dish_ids) <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get_ingredients(self): <NEW_LINE> <INDENT> raise NotImplementedError("Please implement this function in derived classes")
General class that provides API for querying ingredients databases, APIs, or cached ingredients.
625990448a43f66fc4bf34b0
class WorkaroundForTls12ForCipherSuites: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def requires_legacy_openssl(cls, openssl_cipher_name: str) -> bool: <NEW_LINE> <INDENT> legacy_client = LegacySslClient(ssl_version=OpenSslVersionEnum.TLSV1_2, ssl_verify=OpenSslVerifyEnum.NONE) <NEW_LINE> legacy_client.set_cipher_list("ALL:COMPLEMENTOFALL") <NEW_LINE> legacy_ciphers = legacy_client.get_cipher_list() <NEW_LINE> return openssl_cipher_name in legacy_ciphers
Helper to figure out which version of OpenSSL to use for a given TLS 1.2 cipher suite. The nassl module supports using either a legacy or a modern version of OpenSSL. When using TLS 1.2, specific cipher suites are only supported by one of the two implementations.
62599044e76e3b2f99fd9d29
class Central(object): <NEW_LINE> <INDENT> def __init__(self, mass=1.0, radius=1.0, flux=1.0, q1=None, q2=None, mu1=None, mu2=None): <NEW_LINE> <INDENT> self.mass = mass <NEW_LINE> self.radius = radius <NEW_LINE> self.flux = flux <NEW_LINE> if mu1 is not None and mu2 is not None: <NEW_LINE> <INDENT> if q1 is not None or q2 is not None: <NEW_LINE> <INDENT> raise RuntimeError("You can't use *both* limb-darkening " "parameterizations!") <NEW_LINE> <DEDENT> self.coeffs = (mu1, mu2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.q1 = q1 if q1 is not None else 0.5 <NEW_LINE> self.q2 = q2 if q2 is not None else 0.5 <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def q1(self): <NEW_LINE> <INDENT> return self._q1 <NEW_LINE> <DEDENT> @q1.setter <NEW_LINE> def q1(self, v): <NEW_LINE> <INDENT> if not 0 <= v <= 1: <NEW_LINE> <INDENT> raise ValueError("Invalid limb darkening coefficient") <NEW_LINE> <DEDENT> self._q1 = v <NEW_LINE> <DEDENT> @property <NEW_LINE> def q2(self): <NEW_LINE> <INDENT> return self._q2 <NEW_LINE> <DEDENT> @q2.setter <NEW_LINE> def q2(self, v): <NEW_LINE> <INDENT> if not 0 <= v <= 1: <NEW_LINE> <INDENT> raise ValueError("Invalid limb darkening coefficient") <NEW_LINE> <DEDENT> self._q2 = v <NEW_LINE> <DEDENT> @property <NEW_LINE> def coeffs(self): <NEW_LINE> <INDENT> q1, q2 = self.q1, self.q2 <NEW_LINE> q1 = np.sqrt(np.abs(q1)) <NEW_LINE> return 2*q1*q2, q1*(1-2*q2) <NEW_LINE> <DEDENT> @coeffs.setter <NEW_LINE> def coeffs(self, value): <NEW_LINE> <INDENT> u1, u2 = value <NEW_LINE> u2 = u1+u2 <NEW_LINE> self.q1, self.q2 = u2*u2, 0.5*u1/u2 <NEW_LINE> <DEDENT> @property <NEW_LINE> def density(self): <NEW_LINE> <INDENT> r = self.radius * _Rsun <NEW_LINE> m = self.mass * _Msun <NEW_LINE> return 0.75 * m / (np.pi * r * r * r) <NEW_LINE> <DEDENT> @density.setter <NEW_LINE> def density(self, rho): <NEW_LINE> <INDENT> r = self.radius * _Rsun <NEW_LINE> m = np.pi * rho * r * r * r / 0.75 <NEW_LINE> self.mass = m / _Msun
The "central"---in this context---is the massive central body in a :class:`System`. :param mass: The mass of the body measured in Solar masses. (default: ``1.0``) :param radius: The radius of the body measured in Solar radii. (default: ``1.0``) :param flux: The un-occulted flux measured in whatever units you feel like using. (default: ``1.0``) **Limb darkening** can be specified using ``(mu1, mu2)`` or ``(q1, q2)``. TODO: explain.
6259904463b5f9789fe86489
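As a quick illustration of how the (q1, q2) parameterization relates to the quadratic limb-darkening coefficients (mu1, mu2) computed by the coeffs property above, here is a minimal standalone sketch; the helper function names are illustrative and not part of the class:

import numpy as np

def q_to_mu(q1, q2):
    # Same mapping as Central.coeffs: mu1 = 2*sqrt(q1)*q2, mu2 = sqrt(q1)*(1 - 2*q2)
    s = np.sqrt(q1)
    return 2.0 * s * q2, s * (1.0 - 2.0 * q2)

def mu_to_q(mu1, mu2):
    # Inverse mapping used by the coeffs setter: q1 = (mu1 + mu2)**2, q2 = mu1 / (2*(mu1 + mu2))
    s = mu1 + mu2
    return s * s, 0.5 * mu1 / s

mu1, mu2 = q_to_mu(0.5, 0.3)
assert np.allclose(mu_to_q(mu1, mu2), (0.5, 0.3))  # round-trips whenever q1 > 0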
class DSMREntity(Entity): <NEW_LINE> <INDENT> def __init__(self, name, obis): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._obis = obis <NEW_LINE> self.telegram = {} <NEW_LINE> <DEDENT> def get_dsmr_object_attr(self, attribute): <NEW_LINE> <INDENT> if self._obis not in self.telegram: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> dsmr_object = self.telegram[self._obis] <NEW_LINE> return getattr(dsmr_object, attribute, None) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> if 'Power' in self._name: <NEW_LINE> <INDENT> return ICON_POWER <NEW_LINE> <DEDENT> elif 'Gas' in self._name: <NEW_LINE> <INDENT> return ICON_GAS <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> from dsmr_parser import obis_references as obis <NEW_LINE> value = self.get_dsmr_object_attr('value') <NEW_LINE> if self._obis == obis.ELECTRICITY_ACTIVE_TARIFF: <NEW_LINE> <INDENT> return self.translate_tariff(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return STATE_UNKNOWN <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return self.get_dsmr_object_attr('unit') <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def translate_tariff(value): <NEW_LINE> <INDENT> if value == '0002': <NEW_LINE> <INDENT> return 'normal' <NEW_LINE> <DEDENT> elif value == '0001': <NEW_LINE> <INDENT> return 'low' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return STATE_UNKNOWN
Entity reading values from DSMR telegram.
625990440a366e3fb87ddd03
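Since translate_tariff is a static helper, the tariff mapping above can be exercised in isolation; a minimal sketch, assuming DSMREntity and STATE_UNKNOWN are importable from this module:

assert DSMREntity.translate_tariff('0001') == 'low'
assert DSMREntity.translate_tariff('0002') == 'normal'
assert DSMREntity.translate_tariff('1234') == STATE_UNKNOWN  # any other code is unknown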
class _ConstraintSectionPrint(CommandManager): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(_ConstraintSectionPrint, self).__init__() <NEW_LINE> self.menuetext = "Constraint sectionprint" <NEW_LINE> self.tooltip = "Creates a FEM constraint sectionprint" <NEW_LINE> self.is_active = "with_analysis" <NEW_LINE> self.do_activated = "add_obj_on_gui_set_edit"
The FEM_ConstraintSectionPrint command definition
6259904445492302aabfd7f9
class ProductionRule: <NEW_LINE> <INDENT> def __init__(self, head, body, raw_definition): <NEW_LINE> <INDENT> self.head = head <NEW_LINE> self.body = body <NEW_LINE> self.raw_definition = raw_definition <NEW_LINE> self.head.rules.append(self) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.raw_definition
A production rule in a generative grammar. Attributes: head: A NonterminalSymbol object, being the head of this production rule. body: A list of NonterminalSymbol objects and strings (i.e., terminal symbols). raw_definition: The raw definition of this production rule, as found in the grammar definition file (useful for debugging).
6259904450485f2cf55dc2a4
class DeleteServerCreatedPatientView(DeleteView): <NEW_LINE> <INDENT> form_class = DeleteServerCreatedPatientForm <NEW_LINE> object_class = Patient <NEW_LINE> pk_param = ViewParam.SERVER_PK <NEW_LINE> server_pk_name = "_pk" <NEW_LINE> template_name = TEMPLATE_GENERIC_FORM <NEW_LINE> def get_object(self) -> Any: <NEW_LINE> <INDENT> patient = cast(Patient, super().get_object()) <NEW_LINE> if not patient.user_may_edit(self.request): <NEW_LINE> <INDENT> _ = self.request.gettext <NEW_LINE> raise HTTPBadRequest(_("Not authorized to delete this patient")) <NEW_LINE> <DEDENT> return patient <NEW_LINE> <DEDENT> def get_extra_context(self) -> Dict[str, Any]: <NEW_LINE> <INDENT> _ = self.request.gettext <NEW_LINE> return { MAKO_VAR_TITLE: self.request.icon_text( icon=Icons.DELETE, text=_("Delete patient") ) } <NEW_LINE> <DEDENT> def get_success_url(self) -> str: <NEW_LINE> <INDENT> return self.request.route_url( Routes.VIEW_PATIENT_TASK_SCHEDULES ) <NEW_LINE> <DEDENT> def delete(self) -> None: <NEW_LINE> <INDENT> patient = cast(Patient, self.object) <NEW_LINE> PatientIdNumIndexEntry.unindex_patient( patient, self.request.dbsession ) <NEW_LINE> patient.delete_with_dependants(self.request)
View to delete a patient that had been created on the server.
6259904496565a6dacd2d919
class DialogNodeOutputOptionsElement(): <NEW_LINE> <INDENT> def __init__(self, label, value): <NEW_LINE> <INDENT> self.label = label <NEW_LINE> self.value = value <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_dict(cls, _dict): <NEW_LINE> <INDENT> args = {} <NEW_LINE> valid_keys = ['label', 'value'] <NEW_LINE> bad_keys = set(_dict.keys()) - set(valid_keys) <NEW_LINE> if bad_keys: <NEW_LINE> <INDENT> raise ValueError( 'Unrecognized keys detected in dictionary for class DialogNodeOutputOptionsElement: ' + ', '.join(bad_keys)) <NEW_LINE> <DEDENT> if 'label' in _dict: <NEW_LINE> <INDENT> args['label'] = _dict.get('label') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Required property \'label\' not present in DialogNodeOutputOptionsElement JSON' ) <NEW_LINE> <DEDENT> if 'value' in _dict: <NEW_LINE> <INDENT> args['value'] = DialogNodeOutputOptionsElementValue._from_dict( _dict.get('value')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Required property \'value\' not present in DialogNodeOutputOptionsElement JSON' ) <NEW_LINE> <DEDENT> return cls(**args) <NEW_LINE> <DEDENT> def _to_dict(self): <NEW_LINE> <INDENT> _dict = {} <NEW_LINE> if hasattr(self, 'label') and self.label is not None: <NEW_LINE> <INDENT> _dict['label'] = self.label <NEW_LINE> <DEDENT> if hasattr(self, 'value') and self.value is not None: <NEW_LINE> <INDENT> _dict['value'] = self.value._to_dict() <NEW_LINE> <DEDENT> return _dict <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return json.dumps(self._to_dict(), indent=2) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
DialogNodeOutputOptionsElement. :attr str label: The user-facing label for the option. :attr DialogNodeOutputOptionsElementValue value: An object defining the message input to be sent to the Watson Assistant service if the user selects the corresponding option.
6259904491af0d3eaad3b143
class ConversionFunctionHandler(CurlyBraceBlockHandler): <NEW_LINE> <INDENT> def checkIndentation(self): <NEW_LINE> <INDENT> self.checkStartColumn() <NEW_LINE> self.checkCurlyBraces(self.config.brace_positions_function_declaration) <NEW_LINE> <DEDENT> def additionalIndentLevels(self): <NEW_LINE> <INDENT> i1 = int(self.config.brace_positions_function_declaration == 'next-line-indent') <NEW_LINE> i2 = int(self.config.indent_statements_within_function_bodies) <NEW_LINE> return i1 + i2
Handler for ConversionFunction nodes.
62599044004d5f362081f975
class ColoredSprite(pyglet.sprite.Sprite): <NEW_LINE> <INDENT> def __init__(self, image, mask, color, batch=None, group=None): <NEW_LINE> <INDENT> super(ColoredSprite, self).__init__( self.__generate_image(image, mask, color), batch=batch, group=group) <NEW_LINE> <DEDENT> def __alpha_blend(self, src, dst, alpha): <NEW_LINE> <INDENT> return ((src * alpha) + (dst * (255 - alpha))) // 255 <NEW_LINE> <DEDENT> def __generate_image(self, image, mask, color): <NEW_LINE> <INDENT> mask_data = mask.get_image_data().get_data('A', mask.width) <NEW_LINE> image_data = image.get_image_data().get_data('RGBA', image.width * 4) <NEW_LINE> new_data = "" <NEW_LINE> for index, alpha in enumerate(mask_data): <NEW_LINE> <INDENT> alpha_ord = ord(alpha) <NEW_LINE> if alpha_ord > 0: <NEW_LINE> <INDENT> img_color = (ord(image_data[index*4]), ord(image_data[index*4+1]), ord(image_data[index*4+2])) <NEW_LINE> new_data += chr(self.__alpha_blend(color[0], img_color[0], alpha_ord)) + chr(self.__alpha_blend(color[1], img_color[1], alpha_ord)) + chr(self.__alpha_blend(color[2], img_color[2], alpha_ord)) + image_data[index*4+3] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_data += image_data[index*4] + image_data[index*4+1] + image_data[index*4+2] + image_data[index*4+3] <NEW_LINE> <DEDENT> <DEDENT> return pyglet.image.ImageData(image.width, image.height, "RGBA", new_data, image.width * 4)
Sprite that replaces a color based on a color mask image
625990448a349b6b43687569
class SourcefileSet(): <NEW_LINE> <INDENT> def __init__(self, name, index, runs): <NEW_LINE> <INDENT> self.real_name = name <NEW_LINE> self.name = name or str(index) <NEW_LINE> self.runs = runs
A SourcefileSet contains a list of runs and a name.
62599044498bea3a75a58e3c
class PeopleGroup(models.Model): <NEW_LINE> <INDENT> id: 'models.AutoField[int, int]' <NEW_LINE> objects: 'models.Manager[PeopleGroup]' <NEW_LINE> name: 'models.CharField[str, str]' = models.CharField( 'nombre', max_length=120, unique=True ) <NEW_LINE> members: 'models.ManyToManyField[None, RelatedManager[User]]' = models.ManyToManyField( User, through='PeopleGroupMember' ) <NEW_LINE> order: 'models.IntegerField[int, int]' = models.IntegerField( 'orden', default=1, help_text='Se ordena de forma creciente (menor número sale antes).' ) <NEW_LINE> is_hidden: 'models.BooleanField[bool, bool]' = models.BooleanField( 'ocultar grupo', default=False, help_text=( 'Ocultar los miembros de este colectivo ' 'en las vistas públicas del sitio web' ) ) <NEW_LINE> show_in_meetings: 'models.BooleanField[bool, bool]' = models.BooleanField( 'mostrar en asambleas', default=True, help_text=( 'Mostrar los miembros de este colectivo ' 'en las listas de asistencia de las asambleas' ) ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'grupo de gente importante' <NEW_LINE> verbose_name_plural = 'grupos de gente importante' <NEW_LINE> ordering = ('order', 'name') <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return self.name
Groups of relevant people.
62599044be383301e0254b37
class Checkin(db.Model): <NEW_LINE> <INDENT> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> time = db.Column(db.DateTime) <NEW_LINE> availability = db.Column(db.Boolean, default=False) <NEW_LINE> location_id = db.Column(db.Integer, db.ForeignKey('location.id')) <NEW_LINE> user_id = db.Column(db.Integer, db.ForeignKey('user.id')) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return '<Checkin {}>'. format(self.availability) <NEW_LINE> <DEDENT> def to_dict(self, incl_user=False, incl_location=False): <NEW_LINE> <INDENT> data = { 'id': self.id, 'time': self.time, 'availability': self.availability } <NEW_LINE> if incl_user: <NEW_LINE> <INDENT> data['user'] = self.User.to_dict() <NEW_LINE> <DEDENT> if incl_location: <NEW_LINE> <INDENT> data['location'] = self.Location.to_dict() <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def set_time(self): <NEW_LINE> <INDENT> return datetime.utcnow()
Checkin model
62599044a4f1c619b294f817
class MissingHeaderError(Exception): <NEW_LINE> <INDENT> pass
Header file included that is not pre-processed or available locally
62599044b57a9660fecd2d9b
class NotFoundException(errors.BasicException): <NEW_LINE> <INDENT> pass
configuration file not found
6259904429b78933be26aa52
class PtzMove(object): <NEW_LINE> <INDENT> def __init__(self, cam_id, auto_release_delay=10): <NEW_LINE> <INDENT> self.logger = logging.getLogger(self.__class__.__name__ + "@" + cam_id) <NEW_LINE> self.cam_id = cam_id <NEW_LINE> self.auto_release_delay = auto_release_delay <NEW_LINE> self.locked = False <NEW_LINE> self.delayed_unlock_coro = None <NEW_LINE> <DEDENT> def _lock(self): <NEW_LINE> <INDENT> token = "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8)) <NEW_LINE> self.locked = token <NEW_LINE> self.delayed_unlock_coro = asyncio.ensure_future(self._delayed_unlock()) <NEW_LINE> return token <NEW_LINE> <DEDENT> def _renew_lock(self): <NEW_LINE> <INDENT> if self.delayed_unlock_coro: <NEW_LINE> <INDENT> self.delayed_unlock_coro.cancel() <NEW_LINE> <DEDENT> self.delayed_unlock_coro = asyncio.ensure_future(self._delayed_unlock()) <NEW_LINE> <DEDENT> def _unlock(self): <NEW_LINE> <INDENT> if self.delayed_unlock_coro: <NEW_LINE> <INDENT> self.delayed_unlock_coro.cancel() <NEW_LINE> <DEDENT> self.locked = False <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def _delayed_unlock(self): <NEW_LINE> <INDENT> yield from asyncio.sleep(self.auto_release_delay) <NEW_LINE> self._unlock() <NEW_LINE> self.logger.info("PTZ lock released after %d seconds of inactivity", self.auto_release_delay) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> args = {} <NEW_LINE> for key in ["left", "right", "up", "down", "zin", "zout", "stop"]: <NEW_LINE> <INDENT> args[key] = request.rel_url.query.get(key, 0) <NEW_LINE> <DEDENT> lock_token = request.rel_url.query.get("lock_token", None) <NEW_LINE> if isinstance(args["stop"], str) and args["stop"].isdigit(): <NEW_LINE> <INDENT> args["stop"] = int(args["stop"]) <NEW_LINE> <DEDENT> assert args["stop"] in [0, 1, True, False], "Stop must be either True or False" <NEW_LINE> args["stop"] = bool(args["stop"]) <NEW_LINE> if self.locked: <NEW_LINE> <INDENT> if self.locked != lock_token: <NEW_LINE> <INDENT> payload = {"message": "PTZ is already in use", "status": 403} <NEW_LINE> return aiohttp.web.json_response(payload, status=403) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._renew_lock() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> lock_token = self._lock() <NEW_LINE> <DEDENT> if args["stop"]: <NEW_LINE> <INDENT> payload = {"message": "PTZ move stop, lock released", "status": 200} <NEW_LINE> self._unlock() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> payload = {"message": "PTZ move applied", "status": 200, "lock_token": lock_token} <NEW_LINE> <DEDENT> rcp_service = request.app["rcp_services"][self.cam_id] <NEW_LINE> yield from rcp_service.move_ptz(**args) <NEW_LINE> return aiohttp.web.json_response(payload, status=200)
Run PTZ action using query params
62599044287bf620b6272f07
class TestExport: <NEW_LINE> <INDENT> VALID_EXPORT_FORMATS = {"yaml"} <NEW_LINE> @pytest.mark.parametrize("format", VALID_EXPORT_FORMATS) <NEW_LINE> def test_valid_export_methods_produce_a_result(self, format): <NEW_LINE> <INDENT> data = ZeroAttributeObject() <NEW_LINE> output = data.export(format) <NEW_LINE> assert output is not None <NEW_LINE> <DEDENT> @given(st.text().filter(lambda x: x not in TestExport.VALID_EXPORT_FORMATS)) <NEW_LINE> def test_invalid_export_methods_cause_an_error(self, format): <NEW_LINE> <INDENT> data = ZeroAttributeObject() <NEW_LINE> with pytest.raises(ValueError) as excinfo: <NEW_LINE> <INDENT> data.export(format) <NEW_LINE> <DEDENT> assert format in str(excinfo.value)
Verify behaviour of the export method
6259904494891a1f408ba086
class TestText(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testText(self): <NEW_LINE> <INDENT> pass
Text unit test stubs
6259904407f4c71912bb0752
class Page(object): <NEW_LINE> <INDENT> login_url ='http://www.126.com' <NEW_LINE> def __init__(self,selenium_driver,base_url=login_url): <NEW_LINE> <INDENT> self.base_url=base_url <NEW_LINE> self.driver=selenium_driver <NEW_LINE> self.timeout=30 <NEW_LINE> <DEDENT> def on_page(self): <NEW_LINE> <INDENT> return self.driver.current_url==(self.base_url+self.url) <NEW_LINE> <DEDENT> def _open(self,url): <NEW_LINE> <INDENT> url=self.base_url+url <NEW_LINE> self.driver.get(url) <NEW_LINE> assert self.on_page(),'Did not land on %s'%url <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> self._open(self.url) <NEW_LINE> <DEDENT> def find_element(self,*loc): <NEW_LINE> <INDENT> return self.driver.find_element(*loc)
Base class, used for inheritance by page object classes.
6259904423e79379d538d81e
class EyePactPage(Page): <NEW_LINE> <INDENT> section = SECTIONS[1] <NEW_LINE> @cherrypy.expose <NEW_LINE> def index(self): <NEW_LINE> <INDENT> self.subsection = 'EyePactOver' <NEW_LINE> return self.get_flatpage() <NEW_LINE> <DEDENT> @cherrypy.expose <NEW_LINE> def songs(self, song=''): <NEW_LINE> <INDENT> self.subsection = 'EyePactSongs' <NEW_LINE> if song: <NEW_LINE> <INDENT> return self.build(make_xspf_opn_page(song)) <NEW_LINE> <DEDENT> return self.get_flatpage() <NEW_LINE> <DEDENT> @cherrypy.expose <NEW_LINE> def opnames(self): <NEW_LINE> <INDENT> self.subsection = 'EyePactOpnames' <NEW_LINE> fnaam = str(MAGIOKIS_ROOT / self.section / (self.subsection + ".html")) <NEW_LINE> return self.build(make_xspf_objects(fnaam))
Class for pages in the 'Muziek met Anderen: The Eye Pact' subsection.
62599044004d5f362081f976
class TTSmileySad( TT ): <NEW_LINE> <INDENT> pluginname = ':-(' <NEW_LINE> template = u'<span class="etttag sad">%s</span>' <NEW_LINE> def generate( self, node, igen, *args, **kwargs ): <NEW_LINE> <INDENT> igen.puttext( self.template % '&#9785;' ) <NEW_LINE> <DEDENT> example = u'[<:-(>]'
A simple smiley, a sad one.
6259904450485f2cf55dc2a7
class LazyProperty(object): <NEW_LINE> <INDENT> _type = type(None) <NEW_LINE> _default_name = '(anonymous)' <NEW_LINE> def __init__(self, name=None, default=None, required=False, not_none=False, exclude_if_none=True): <NEW_LINE> <INDENT> if required and default is not None: <NEW_LINE> <INDENT> raise LazyContractError('default specified for required property') <NEW_LINE> <DEDENT> self.name = name or self._default_name <NEW_LINE> self.required = required <NEW_LINE> self.default = default <NEW_LINE> self.not_none = not_none <NEW_LINE> self.exclude_if_none = exclude_if_none <NEW_LINE> <DEDENT> def __get__(self, obj, objtype=None): <NEW_LINE> <INDENT> value = obj.__dict__.get(self.name, self.default) <NEW_LINE> self.validate(value) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __set__(self, obj, value): <NEW_LINE> <INDENT> self.validate(value) <NEW_LINE> obj.__dict__[self.name] = value <NEW_LINE> <DEDENT> def validate(self, obj): <NEW_LINE> <INDENT> if obj is None and self.not_none: <NEW_LINE> <INDENT> raise LazyContractValidationError( LazyContractValidationError.NOT_NONE_FMT.format( type(self).__name__, self.name)) <NEW_LINE> <DEDENT> if not isinstance(obj, self._type) and obj is not None: <NEW_LINE> <INDENT> raise LazyContractValidationError( LazyContractValidationError.ATTR_TYPE_FMT.format( type(self).__name__, self.name, repr(obj), self._type)) <NEW_LINE> <DEDENT> <DEDENT> def serialize(self, obj): <NEW_LINE> <INDENT> return obj <NEW_LINE> <DEDENT> def deserialize(self, obj): <NEW_LINE> <INDENT> return obj if isinstance(obj, self._type) else self._type(obj)
Base class for descriptors used as properties in a LazyContract. Create a sub-class of this to define your own (de-)serialization.
6259904407d97122c4217fc1
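Following the subclassing pattern the docstring describes, a hypothetical subclass only needs to override _type; validation and coercion then come from the base class. A minimal sketch (IntProperty is an invented name, not part of the original module):

class IntProperty(LazyProperty):
    _type = int

prop = IntProperty(name='count', default=0)
assert prop.deserialize('3') == 3   # coerced through _type
prop.validate(3)                    # passes; a non-int would raise LazyContractValidationError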
class RemTitlebar(ShapeNode, MyDispatch): <NEW_LINE> <INDENT> def __init__(self, size, *args, **kwargs): <NEW_LINE> <INDENT> path = ui.Path.rect(0, 0, size.w, cfg.titlebar.height) <NEW_LINE> ShapeNode.__init__(self, path, fill_color=cfg.titlebar.color, *args, **kwargs) <NEW_LINE> self.position = (size.w / 2, size.h - cfg.titlebar.height / 2) <NEW_LINE> text = LabelNode(cfg.titlebar.title, tuple(cfg.titlebar.title_font)) <NEW_LINE> self.add_child(text) <NEW_LINE> self.z_position = 10 <NEW_LINE> side = cfg.titlebar.button_size <NEW_LINE> power = MyImgButton(cfg.titlebar.power_button) <NEW_LINE> power.position = (-size.w / 2 + cfg.titlebar.power_position, 0) <NEW_LINE> power.action = RemAction(lambda: root.change_scene('POWEROFF')) <NEW_LINE> power.size = (side, side) <NEW_LINE> self.add_child(power) <NEW_LINE> n = MyImgButton(cfg.titlebar.back_button) <NEW_LINE> n.position = (-size.w / 2 + cfg.titlebar.back_position, 0) <NEW_LINE> n.action = RemAction(lambda: root.change_page('POWERON')) <NEW_LINE> n.size = (side, side) <NEW_LINE> self.add_child(n) <NEW_LINE> n = RemBatteryIndicator(parent=self) <NEW_LINE> n.position = (-size.w / 2 + cfg.titlebar.battery_position, 0) <NEW_LINE> n.size = (side, side) <NEW_LINE> self.conn = RemConnectionIndicator(parent=self) <NEW_LINE> self.conn.position = (-size.w / 2 + cfg.titlebar.connection_position, 0) <NEW_LINE> self.conn.size = (side, side) <NEW_LINE> self.layout() <NEW_LINE> self.scene.controller.add_listener(self.conn.status_changed) <NEW_LINE> <DEDENT> def layout(self): <NEW_LINE> <INDENT> for n in self.children: <NEW_LINE> <INDENT> if n.position.x < -self.size.x / 2: <NEW_LINE> <INDENT> n.position = Point(n.position.x + self.size.x, n.position.y)
Title bar with power off button
625990443c8af77a43b688cd
class TestAPIProgramTermination(unittest.TestCase): <NEW_LINE> <INDENT> _PTP_TEST_NAME = "PTPJointValid" <NEW_LINE> _WAIT_TIME_FOR_MOTION_DETECTION_SEC = 8.0 <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> rospy.loginfo("SetUp called...") <NEW_LINE> self.test_data = XmlTestdataLoader(_TEST_DATA_FILE_NAME) <NEW_LINE> self.robot_motion_observer = RobotMotionObserver(_GROUP_NAME) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> rospy.loginfo("TearDown called...") <NEW_LINE> if hasattr(self, 'test_data'): <NEW_LINE> <INDENT> del self.test_data <NEW_LINE> <DEDENT> <DEDENT> def _get_robot_move_command(self): <NEW_LINE> <INDENT> ptp_goal = [str(x) for x in self.test_data.get_joints(self._PTP_TEST_NAME, _GROUP_NAME)] <NEW_LINE> movecmd = RosPack().get_path("pilz_robot_programming")+'/test/integrationtests/movecmd.py ptp joint' <NEW_LINE> return movecmd.split(" ") + ptp_goal <NEW_LINE> <DEDENT> def test01_stop_at_program_interrupt(self): <NEW_LINE> <INDENT> proc = subprocess.Popen(self._get_robot_move_command()) <NEW_LINE> self.assertTrue( self.robot_motion_observer.wait_motion_start(wait_time_out=self._WAIT_TIME_FOR_MOTION_DETECTION_SEC)) <NEW_LINE> proc.send_signal(signal.SIGINT) <NEW_LINE> proc.wait() <NEW_LINE> self.assertFalse(self.robot_motion_observer.is_robot_moving()) <NEW_LINE> <DEDENT> def test09_instantiation_after_program_kill(self): <NEW_LINE> <INDENT> proc = subprocess.Popen(self._get_robot_move_command()) <NEW_LINE> self.assertTrue( self.robot_motion_observer.wait_motion_start(wait_time_out=self._WAIT_TIME_FOR_MOTION_DETECTION_SEC)) <NEW_LINE> proc.send_signal(signal.SIGKILL) <NEW_LINE> proc.wait() <NEW_LINE> try: <NEW_LINE> <INDENT> r = Robot(_API_VERSION) <NEW_LINE> <DEDENT> except RobotMultiInstancesError: <NEW_LINE> <INDENT> self.fail('Instantiation after program kill does throw exception.') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> del r
Test the API behaviour when the python program is terminated.
62599044097d151d1a2c238b
class TranslationHandler(handlers.MemoryHandler): <NEW_LINE> <INDENT> def __init__(self, locale=None, target=None): <NEW_LINE> <INDENT> handlers.MemoryHandler.__init__(self, capacity=0, target=target) <NEW_LINE> self.locale = locale <NEW_LINE> <DEDENT> def setFormatter(self, fmt): <NEW_LINE> <INDENT> self.target.setFormatter(fmt) <NEW_LINE> <DEDENT> def emit(self, record): <NEW_LINE> <INDENT> original_msg = record.msg <NEW_LINE> original_args = record.args <NEW_LINE> try: <NEW_LINE> <INDENT> self._translate_and_log_record(record) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> record.msg = original_msg <NEW_LINE> record.args = original_args <NEW_LINE> <DEDENT> <DEDENT> def _translate_and_log_record(self, record): <NEW_LINE> <INDENT> record.msg = translate(record.msg, self.locale) <NEW_LINE> record.args = _translate_args(record.args, self.locale) <NEW_LINE> self.target.emit(record)
Handler that translates records before logging them. The TranslationHandler takes a locale and a target logging.Handler object to forward LogRecord objects to after translating them. This handler depends on Message objects being logged, instead of regular strings. The handler can be configured declaratively in the logging.conf as follows: [handlers] keys = translatedlog, translator [handler_translatedlog] class = handlers.WatchedFileHandler args = ('/var/log/api-localized.log',) formatter = context [handler_translator] target = translatedlog args = ('zh_CN',) If the specified locale is not available in the system, the handler will log in the default locale.
6259904463b5f9789fe8648d
class Site_Reports(models.Model): <NEW_LINE> <INDENT> reportID = models.AutoField(primary_key=True, verbose_name='Report ID') <NEW_LINE> patientCount = models.IntegerField(verbose_name='Patient Count') <NEW_LINE> projExec = models.ForeignKey(Proj_Exec_TimeStmp, db_column='exec_ID', verbose_name='TimeStamp ID') <NEW_LINE> site = models.ForeignKey(Sites, verbose_name='Site Name') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = "SITE_REPORTS" <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return str(self.reportID)
Model for Site_Reports table.
62599044d6c5a102081e343d
class ProcessMedia(celery.Task): <NEW_LINE> <INDENT> def run(self, media_id, feed_url, reprocess_action, reprocess_info=None): <NEW_LINE> <INDENT> reprocess_info = reprocess_info or {} <NEW_LINE> entry, manager = get_entry_and_processing_manager(media_id) <NEW_LINE> try: <NEW_LINE> <INDENT> processor_class = manager.get_processor(reprocess_action, entry) <NEW_LINE> with processor_class(manager, entry) as processor: <NEW_LINE> <INDENT> entry.state = u'processing' <NEW_LINE> entry.save() <NEW_LINE> _log.debug('Processing {0}'.format(entry)) <NEW_LINE> try: <NEW_LINE> <INDENT> processor.process(**reprocess_info) <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> if processor.entry_orig_state == 'processed': <NEW_LINE> <INDENT> _log.error( 'Entry {0} failed to process due to the following' ' error: {1}'.format(entry.id, exc)) <NEW_LINE> _log.info( 'Setting entry.state back to "processed"') <NEW_LINE> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> entry.state = u'processed' <NEW_LINE> entry.save() <NEW_LINE> if mgg.app_config["push_urls"] and feed_url: <NEW_LINE> <INDENT> handle_push_urls.subtask().delay(feed_url) <NEW_LINE> <DEDENT> json_processing_callback(entry) <NEW_LINE> <DEDENT> except BaseProcessingFail as exc: <NEW_LINE> <INDENT> mark_entry_failed(entry.id, exc) <NEW_LINE> json_processing_callback(entry) <NEW_LINE> return <NEW_LINE> <DEDENT> except ImportError as exc: <NEW_LINE> <INDENT> _log.error( 'Entry {0} failed to process due to an import error: {1}' .format( entry.title, exc)) <NEW_LINE> mark_entry_failed(entry.id, exc) <NEW_LINE> json_processing_callback(entry) <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> _log.error('An unhandled exception was raised while' + ' processing {0}'.format( entry)) <NEW_LINE> mark_entry_failed(entry.id, exc) <NEW_LINE> json_processing_callback(entry) <NEW_LINE> raise <NEW_LINE> <DEDENT> <DEDENT> def on_failure(self, exc, task_id, args, kwargs, einfo): <NEW_LINE> <INDENT> entry_id = args[0] <NEW_LINE> mark_entry_failed(entry_id, exc) <NEW_LINE> entry = mgg.database.MediaEntry.query.filter_by(id=entry_id).first() <NEW_LINE> json_processing_callback(entry) <NEW_LINE> mgg.database.reset_after_request() <NEW_LINE> <DEDENT> def after_return(self, *args, **kwargs): <NEW_LINE> <INDENT> if not celery.app.default_app.conf['CELERY_ALWAYS_EAGER']: <NEW_LINE> <INDENT> mgg.database.reset_after_request()
Pass this entry off for processing.
625990440a366e3fb87ddd07
class base_authentication_handler: <NEW_LINE> <INDENT> __author__ = 'built' <NEW_LINE> def authenticate_credential(self, username, password): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def add_credential(self, username, password): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def delete_credential(self, username): <NEW_LINE> <INDENT> raise NotImplementedError()
This is the base_authentication_handler that defines all the required bound methods. :param __author__: Defines who the author is. This is used for accounting purposes.
6259904445492302aabfd7fd
class DNSDataMismatch(DNSError): <NEW_LINE> <INDENT> errno = 4212 <NEW_LINE> format = _('DNS check failed: Expected {%(expected)s} got {%(got)s}')
**4212** Raised when a DNS query didn't return the expected answer within a configured time limit. For example: >>> raise DNSDataMismatch(expected="zone3.test. 86400 IN A 192.0.2.1", got="zone3.test. 86400 IN A 192.168.1.1") Traceback (most recent call last): ... DNSDataMismatch: DNS check failed: Expected {zone3.test. 86400 IN A 192.0.2.1} got {zone3.test. 86400 IN A 192.168.1.1}
6259904410dbd63aa1c71efb
class JSONObject: <NEW_LINE> <INDENT> __slots__ = ('type') <NEW_LINE> def __init__(self, type): <NEW_LINE> <INDENT> self.type = type <NEW_LINE> <DEDENT> def format(self, file, obj, prefix, indent): <NEW_LINE> <INDENT> pass
Base class of additional output formatting types. To register a new formatter use the module-level :py:meth:`json_registry.register` function. .. py:attribute:: type The type or types supported by this formatter. Any object that is a valid second argument to :py:func:`isinstance` is accepted.
6259904450485f2cf55dc2a8
class layer(): <NEW_LINE> <INDENT> def __init__(self,borders,thickness,mesh): <NEW_LINE> <INDENT> self.borders = borders <NEW_LINE> self.thickness = thickness <NEW_LINE> self.mesh = mesh <NEW_LINE> self.domain = sp.array([self.mesh.region[0][0:2],self.mesh.region[1][0:2]]) <NEW_LINE> self.extrusions = [] <NEW_LINE> self.path = [] <NEW_LINE> self.loops = None <NEW_LINE> self.shells = []
Layer object created by the mesh.chop function and operated on by the other layer-processing routines. Thickness is the vertical thickness of the layer. Mesh is the mesh which the layer was created from. Borders is the output of mesh.chop.
62599044b830903b9686ee0b
class BaseError(Exception): <NEW_LINE> <INDENT> def __init__(self, message=None): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.message = message <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.message <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.message
Class that represents base error.
6259904482261d6c52730856
class RandomBot: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> return
It plays completely randomly, without any regard for victory conditions or losing.
62599044a79ad1619776b3a2
@attr.s(frozen=True) <NEW_LINE> class ScheduleItemData: <NEW_LINE> <INDENT> usage_key = attr.ib(type=UsageKey) <NEW_LINE> start = attr.ib(type=Optional[datetime]) <NEW_LINE> effective_start = attr.ib(type=Optional[datetime]) <NEW_LINE> due = attr.ib(type=Optional[datetime])
Scheduling specific data (start/end/due dates) for a single item.
62599044d53ae8145f919781
class ListForms(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = List <NEW_LINE> fields = ('name',)
List forms
625990448e71fb1e983bcdf2
class ScanSceneHook(Hook): <NEW_LINE> <INDENT> def execute(self, **kwargs): <NEW_LINE> <INDENT> items = [] <NEW_LINE> if not mari.projects.current(): <NEW_LINE> <INDENT> raise TankError("You must be in an open Mari project to be able to publish!") <NEW_LINE> <DEDENT> items.append({"type":"work_file", "name":None}) <NEW_LINE> for geo in mari.geo.list(): <NEW_LINE> <INDENT> for channel in geo.channelList(): <NEW_LINE> <INDENT> params = {"geo":geo.name(), "channel":channel.name()} <NEW_LINE> publishable_layers = self.find_publishable_layers_r(channel.layerList()) <NEW_LINE> if not publishable_layers: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> item_name = "%s, %s" % (geo.name(), channel.name()) <NEW_LINE> items.append({"type":"channel", "name":item_name, "other_params":params}) <NEW_LINE> found_layer_names = set() <NEW_LINE> for layer in publishable_layers: <NEW_LINE> <INDENT> layer_name = layer.name() <NEW_LINE> if layer_name in found_layer_names: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> found_layer_names.add(layer_name) <NEW_LINE> item_name = "%s, %s (%s)" % (geo.name(), channel.name(), layer_name) <NEW_LINE> params = {"geo":geo.name(), "channel":channel.name(), "layer":layer_name} <NEW_LINE> items.append({"type":"layer", "name":item_name, "other_params":params}) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return items <NEW_LINE> <DEDENT> def find_publishable_layers_r(self, layers): <NEW_LINE> <INDENT> publishable = [] <NEW_LINE> for layer in layers: <NEW_LINE> <INDENT> if layer.isPaintableLayer() or layer.isProceduralLayer(): <NEW_LINE> <INDENT> publishable.append(layer) <NEW_LINE> <DEDENT> elif layer.isGroupLayer(): <NEW_LINE> <INDENT> grouped_layers = self.find_publishable_layers_r(layer.layerStack().layerList()) <NEW_LINE> publishable.extend(grouped_layers or []) <NEW_LINE> <DEDENT> <DEDENT> return publishable
Hook to scan scene for items to publish
625990440fa83653e46f61fe
class UniqueNameTracker(data_structures.TrackableDataStructure): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(UniqueNameTracker, self).__init__() <NEW_LINE> self._maybe_initialize_trackable() <NEW_LINE> self._name_counts = {} <NEW_LINE> <DEDENT> @property <NEW_LINE> def _values(self): <NEW_LINE> <INDENT> return [dep.ref for dep in self._checkpoint_dependencies] <NEW_LINE> <DEDENT> def track(self, trackable, base_name): <NEW_LINE> <INDENT> if not isinstance(trackable, trackable_lib.Trackable): <NEW_LINE> <INDENT> raise ValueError( ("Expected a trackable value, got %s which does not inherit " "from tf.track.Trackable.") % (trackable,)) <NEW_LINE> <DEDENT> def _format_name(prefix, number): <NEW_LINE> <INDENT> if number > 0: <NEW_LINE> <INDENT> return "%s_%d" % (prefix, number) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return prefix <NEW_LINE> <DEDENT> <DEDENT> count = self._name_counts.get(base_name, 0) <NEW_LINE> candidate = _format_name(base_name, count) <NEW_LINE> while self._lookup_dependency(candidate) is not None: <NEW_LINE> <INDENT> count += 1 <NEW_LINE> candidate = _format_name(base_name, count) <NEW_LINE> <DEDENT> self._name_counts[base_name] = count + 1 <NEW_LINE> self._track_value(trackable, name=candidate) <NEW_LINE> return trackable
Adds dependencies on trackable objects with name hints. Useful for creating dependencies with locally unique names. Example usage: ```python class SlotManager(tf.contrib.checkpoint.Checkpointable): def __init__(self): # Create a dependency named "slotdeps" on the container. self.slotdeps = tf.contrib.checkpoint.UniqueNameTracker() slotdeps = self.slotdeps slots = [] slots.append(slotdeps.track(tf.Variable(3.), "x")) # Named "x" slots.append(slotdeps.track(tf.Variable(4.), "y")) slots.append(slotdeps.track(tf.Variable(5.), "x")) # Named "x_1" ```
62599044097d151d1a2c238d
class ModelVersion: <NEW_LINE> <INDENT> def __init__(self, model_name, version): <NEW_LINE> <INDENT> self.model_name = model_name <NEW_LINE> self.version = version <NEW_LINE> <DEDENT> def bump(self, artifacts_path): <NEW_LINE> <INDENT> new_ver = self.version + 1 <NEW_LINE> dst_directory = os.path.join(self.model_name, 'versions', str(new_ver)) <NEW_LINE> minio_client.upload_directory(artifacts_path, config.ML_BUCKET, dst_directory) <NEW_LINE> minio_client.write_object(config.ML_BUCKET, self.version_path(self.model_name), str(new_ver)) <NEW_LINE> self.version = new_ver <NEW_LINE> <DEDENT> def get(self, artifacts_path): <NEW_LINE> <INDENT> to_download = self.remote_directory <NEW_LINE> minio_client.download_directory(config.ML_BUCKET, to_download, artifacts_path) <NEW_LINE> <DEDENT> @property <NEW_LINE> def remote_directory(self): <NEW_LINE> <INDENT> return os.path.join(self.model_name, 'versions', str(self.version)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def find_or_create(cls, model_name): <NEW_LINE> <INDENT> version_path = cls.version_path(model_name) <NEW_LINE> if minio_client.object_exists(config.ML_BUCKET, version_path): <NEW_LINE> <INDENT> version = minio_client.read_object(config.ML_BUCKET, version_path).decode('utf-8') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> version = '0' <NEW_LINE> minio_client.write_object(config.ML_BUCKET, version_path, version) <NEW_LINE> <DEDENT> return ModelVersion(model_name, int(version)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def version_path(cls, model_name): <NEW_LINE> <INDENT> return os.path.join(model_name, 'current_version')
Naive model versioning implementation
6259904407f4c71912bb0755
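A hedged usage sketch of the versioning flow above; the model name and local paths are made up, and it assumes the minio_client and config helpers referenced by the class are available:

mv = ModelVersion.find_or_create('my-model')   # reads or initializes my-model/current_version
mv.get('/tmp/current_artifacts')               # downloads my-model/versions/<N>/
mv.bump('/tmp/new_artifacts')                  # uploads the directory as version N+1 and advances the pointer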
class SwitchTopology(Topology): <NEW_LINE> <INDENT> _mnist_params: DatasetMNISTParams = DatasetMNISTParams(class_filter=[1, 2], one_hot_labels=False) <NEW_LINE> _noise_params: RandomNoiseParams = RandomNoiseParams(torch.Size((28, 28)), distribution='Normal', amplitude=.3) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super().__init__(device='cuda') <NEW_LINE> noise_node = RandomNoiseNode(params=self._noise_params) <NEW_LINE> self.add_node(noise_node) <NEW_LINE> mnist_node = DatasetMNISTNode(params=self._mnist_params) <NEW_LINE> self.add_node(mnist_node) <NEW_LINE> switch_node = SwitchNode(n_inputs=2, get_index_from_input=True) <NEW_LINE> self.add_node(switch_node) <NEW_LINE> Connector.connect(mnist_node.outputs.label, switch_node.inputs.switch_signal) <NEW_LINE> Connector.connect(mnist_node.outputs.data, switch_node.inputs[0]) <NEW_LINE> Connector.connect(noise_node.outputs.output, switch_node.inputs[1])
Topology for testing the SwitchNode. The topology connects the MNIST label (1 or 2, to pick input), MNIST digits, and noise as inputs to the SwitchNode. The node's output will be a MNIST '1' digit or noise depending on the input label.
6259904491af0d3eaad3b148
class CollectionMetadata(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'collections_metadata' <NEW_LINE> """Collection identifier.""" <NEW_LINE> collection_id = db.Column( db.Integer, db.ForeignKey(Collection.id), primary_key=True, nullable=False, ) <NEW_LINE> infos = db.Column( JSONType().with_variant( postgresql.JSON(none_as_null=True), 'postgresql', ), default=lambda: dict(), nullable=True ) <NEW_LINE> collection = db.relationship(Collection)
Represent a collection's metadata inside the SQL database.
62599044d53ae8145f919782
class Controller(wsgi.Controller): <NEW_LINE> <INDENT> _view_builder_class = flavors_view.ViewBuilder <NEW_LINE> @wsgi.serializers(xml=MinimalFlavorsTemplate) <NEW_LINE> def index(self, req): <NEW_LINE> <INDENT> limited_flavors = self._get_flavors(req) <NEW_LINE> return self._view_builder.index(req, limited_flavors) <NEW_LINE> <DEDENT> @wsgi.serializers(xml=FlavorsTemplate) <NEW_LINE> def detail(self, req): <NEW_LINE> <INDENT> limited_flavors = self._get_flavors(req) <NEW_LINE> req.cache_db_flavors(limited_flavors) <NEW_LINE> return self._view_builder.detail(req, limited_flavors) <NEW_LINE> <DEDENT> @wsgi.serializers(xml=FlavorTemplate) <NEW_LINE> def show(self, req, id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> flavor = flavors.get_instance_type_by_flavor_id(id) <NEW_LINE> req.cache_db_flavor(flavor) <NEW_LINE> <DEDENT> except exception.NotFound: <NEW_LINE> <INDENT> raise webob.exc.HTTPNotFound() <NEW_LINE> <DEDENT> return self._view_builder.show(req, flavor) <NEW_LINE> <DEDENT> def _parse_is_public(self, is_public): <NEW_LINE> <INDENT> if is_public is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif is_public == 'none': <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return strutils.bool_from_string(is_public, strict=True) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> msg = _('Invalid is_public filter [%s]') % is_public <NEW_LINE> raise webob.exc.HTTPBadRequest(explanation=msg) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _get_flavors(self, req): <NEW_LINE> <INDENT> filters = {} <NEW_LINE> context = req.environ['nova.context'] <NEW_LINE> if context.is_admin: <NEW_LINE> <INDENT> filters['is_public'] = self._parse_is_public( req.params.get('is_public', None)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> filters['is_public'] = True <NEW_LINE> filters['disabled'] = False <NEW_LINE> <DEDENT> if 'minRam' in req.params: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> filters['min_memory_mb'] = int(req.params['minRam']) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> msg = _('Invalid minRam filter [%s]') % req.params['minRam'] <NEW_LINE> raise webob.exc.HTTPBadRequest(explanation=msg) <NEW_LINE> <DEDENT> <DEDENT> if 'minDisk' in req.params: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> filters['min_root_gb'] = int(req.params['minDisk']) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> msg = _('Invalid minDisk filter [%s]') % req.params['minDisk'] <NEW_LINE> raise webob.exc.HTTPBadRequest(explanation=msg) <NEW_LINE> <DEDENT> <DEDENT> limited_flavors = flavors.get_all_types(context, filters=filters) <NEW_LINE> flavors_list = limited_flavors.values() <NEW_LINE> sorted_flavors = sorted(flavors_list, key=lambda item: item['flavorid']) <NEW_LINE> limited_flavors = common.limited_by_marker(sorted_flavors, req) <NEW_LINE> return limited_flavors
Flavor controller for the OpenStack API.
625990440a366e3fb87ddd09
class ListUnsubscribeInputSet(InputSet): <NEW_LINE> <INDENT> def set_APIKey(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'APIKey', value) <NEW_LINE> <DEDENT> def set_DeleteMember(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'DeleteMember', value) <NEW_LINE> <DEDENT> def set_EmailAddress(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'EmailAddress', value) <NEW_LINE> <DEDENT> def set_ListId(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'ListId', value) <NEW_LINE> <DEDENT> def set_SendGoodbye(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'SendGoodbye', value) <NEW_LINE> <DEDENT> def set_SendNotify(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'SendNotify', value)
An InputSet with methods appropriate for specifying the inputs to the ListUnsubscribe Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
62599044b57a9660fecd2da0
class VolumeDetachTask(object): <NEW_LINE> <INDENT> def __init__(self, stack, server_id, volume_id): <NEW_LINE> <INDENT> self.clients = stack.clients <NEW_LINE> self.server_id = server_id <NEW_LINE> self.volume_id = volume_id <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'Detaching Volume %s from Instance %s' % (self.volume_id, self.server_id) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '%s(%s -/> %s)' % (type(self).__name__, self.volume_id, self.server_id) <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> logger.debug(str(self)) <NEW_LINE> try: <NEW_LINE> <INDENT> vol = self.clients.cinder().volumes.get(self.volume_id) <NEW_LINE> <DEDENT> except clients.cinderclient.exceptions.NotFound: <NEW_LINE> <INDENT> logger.warning('%s - volume not found' % str(self)) <NEW_LINE> return <NEW_LINE> <DEDENT> server_api = self.clients.nova().volumes <NEW_LINE> try: <NEW_LINE> <INDENT> server_api.delete_server_volume(self.server_id, self.volume_id) <NEW_LINE> <DEDENT> except clients.novaclient.exceptions.NotFound: <NEW_LINE> <INDENT> logger.warning('%s - not found' % str(self)) <NEW_LINE> <DEDENT> yield <NEW_LINE> try: <NEW_LINE> <INDENT> vol.get() <NEW_LINE> while vol.status in ('in-use', 'detaching'): <NEW_LINE> <INDENT> logger.debug('%s - volume still in use' % str(self)) <NEW_LINE> yield <NEW_LINE> try: <NEW_LINE> <INDENT> server_api.delete_server_volume(self.server_id, self.volume_id) <NEW_LINE> <DEDENT> except clients.novaclient.exceptions.NotFound: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> vol.get() <NEW_LINE> <DEDENT> logger.info('%s - status: %s' % (str(self), vol.status)) <NEW_LINE> if vol.status != 'available': <NEW_LINE> <INDENT> raise exception.Error(vol.status) <NEW_LINE> <DEDENT> <DEDENT> except clients.cinderclient.exceptions.NotFound: <NEW_LINE> <INDENT> logger.warning('%s - volume not found' % str(self))
A task for detaching a volume from a Nova server.
62599044b830903b9686ee0c
class RuntimeconfigProjectsConfigsWaitersCreateRequest(_messages.Message): <NEW_LINE> <INDENT> parent = _messages.StringField(1, required=True) <NEW_LINE> requestId = _messages.StringField(2) <NEW_LINE> waiter = _messages.MessageField('Waiter', 3)
A RuntimeconfigProjectsConfigsWaitersCreateRequest object. Fields: parent: The path to the configuration that will own the waiter. The configuration must exist beforehand; the path must by in the format: `projects/[PROJECT_ID]/configs/[CONFIG_NAME]`. requestId: An optional but recommended unique `request_id`. If the server receives two `create()` requests with the same `request_id`, then the second request will be ignored and the first resource created and stored in the backend is returned. Empty `request_id` fields are ignored. It is responsibility of the client to ensure uniqueness of the `request_id` strings. `request_id` strings are limited to 64 characters. waiter: A Waiter resource to be passed as the request body.
6259904494891a1f408ba088
class RSpecParsingException(Exception): <NEW_LINE> <INDENT> pass
Raised when there is a problem parsing the RSpec.
6259904430c21e258be99b2a
class DescribeAddressTemplateGroupsResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.TotalCount = None <NEW_LINE> self.AddressTemplateGroupSet = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.TotalCount = params.get("TotalCount") <NEW_LINE> if params.get("AddressTemplateGroupSet") is not None: <NEW_LINE> <INDENT> self.AddressTemplateGroupSet = [] <NEW_LINE> for item in params.get("AddressTemplateGroupSet"): <NEW_LINE> <INDENT> obj = AddressTemplateGroup() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.AddressTemplateGroupSet.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.RequestId = params.get("RequestId")
DescribeAddressTemplateGroups response structure.
62599044a8ecb03325872535
class InvalidParamsError(Error): <NEW_LINE> <INDENT> def __init__(self, id_=None, code=-32602, message='Invalid method parameter(s).', data=None): <NEW_LINE> <INDENT> Error.__init__(self, id_=id_, code=code, message=message, data=data)
Invalid JSON-RPC request parameters.
62599044d53ae8145f919783
class Solution(object): <NEW_LINE> <INDENT> def addDigits(self, num): <NEW_LINE> <INDENT> while num >= 10: <NEW_LINE> <INDENT> num = self.sumDigits(num) <NEW_LINE> <DEDENT> return num <NEW_LINE> <DEDENT> def sumDigits(self, num): <NEW_LINE> <INDENT> s = 0 <NEW_LINE> while True: <NEW_LINE> <INDENT> s += num % 10 <NEW_LINE> num = num // 10 <NEW_LINE> if num == 0: <NEW_LINE> <INDENT> return s
:type num: int :rtype: int
62599044596a897236128f41
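The addDigits loop above computes the digital root by repeated digit summing; for comparison, the same result can be obtained in constant time with the classic congruence, sketched here as a standalone helper:

def add_digits_constant_time(num):
    # Digital root: 0 maps to 0, any other non-negative integer to 1 + (num - 1) % 9.
    return 0 if num == 0 else 1 + (num - 1) % 9

assert add_digits_constant_time(38) == 2   # 3 + 8 = 11, then 1 + 1 = 2
assert add_digits_constant_time(0) == 0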