code: string, lengths 4 to 4.48k
docstring: string, lengths 1 to 6.45k
_id: string, length 24
class CannotDisseminateFormat(OAI_PMH_Exception): <NEW_LINE> <INDENT> def __init__(self, msg=None): <NEW_LINE> <INDENT> self.code = "cannotDisseminateFormat" <NEW_LINE> self.msg = "The metadata format identified by the value given for the metadataPrefix argument is not supported by the item or by the repository." <NEW_LINE> if (msg is not None): <NEW_LINE> <INDENT> self.msg += " " + msg
cannotDisseminateFormat error.
625990418e05c05ec3f6f7b4
class MyTopo( Topo ): <NEW_LINE> <INDENT> def __init__( self ): <NEW_LINE> <INDENT> Topo.__init__( self ) <NEW_LINE> h1 = self.addHost( 'h1' ) <NEW_LINE> h2 = self.addHost( 'h2' ) <NEW_LINE> h3 = self.addHost( 'h3' ) <NEW_LINE> h4 = self.addHost( 'h4' ) <NEW_LINE> h5 = self.addHost( 'h5' ) <NEW_LINE> h6 = self.addHost( 'h6' ) <NEW_LINE> h7 = self.addHost( 'h7' ) <NEW_LINE> h8 = self.addHost( 'h8' ) <NEW_LINE> h9 = self.addHost( 'h9' ) <NEW_LINE> s1 = self.addSwitch( 's1' ) <NEW_LINE> s2 = self.addSwitch( 's2' ) <NEW_LINE> s3 = self.addSwitch( 's3' ) <NEW_LINE> s4 = self.addSwitch( 's4' ) <NEW_LINE> self.addLink( s2, s1 ) <NEW_LINE> self.addLink( s3, s1 ) <NEW_LINE> self.addLink( s4, s1 ) <NEW_LINE> self.addLink( h1, s2 ) <NEW_LINE> self.addLink( h2, s2 ) <NEW_LINE> self.addLink( h3, s2 ) <NEW_LINE> self.addLink( h4, s3 ) <NEW_LINE> self.addLink( h5, s3 ) <NEW_LINE> self.addLink( h6, s3 ) <NEW_LINE> self.addLink( h7, s4 ) <NEW_LINE> self.addLink( h8, s4 ) <NEW_LINE> self.addLink( h9, s4 )
Topology used in SOFTmon article
6259904130dc7b76659a0ae4
class CreateDirectConnectTunnelResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.DirectConnectTunnelIdSet = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.DirectConnectTunnelIdSet = params.get("DirectConnectTunnelIdSet") <NEW_LINE> self.RequestId = params.get("RequestId")
CreateDirectConnectTunnel response structure.
6259904107f4c71912bb06e4
class Node: <NEW_LINE> <INDENT> def __init__(self, state, parent=None, action=None, path_cost=0): <NEW_LINE> <INDENT> self.state = state <NEW_LINE> self.parent = parent <NEW_LINE> self.action = action <NEW_LINE> self.path_cost = path_cost <NEW_LINE> self.depth = 0 <NEW_LINE> if parent: <NEW_LINE> <INDENT> self.depth = parent.depth + 1 <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Node {}>".format(self.state) <NEW_LINE> <DEDENT> def __lt__(self, node): <NEW_LINE> <INDENT> return self.state < node.state <NEW_LINE> <DEDENT> def expand(self, problem): <NEW_LINE> <INDENT> list = [self.child_node(problem, action) for action in problem.actions(self.state)] <NEW_LINE> return list, len(list) <NEW_LINE> <DEDENT> def child_node(self, problem, action): <NEW_LINE> <INDENT> next = problem.result(self.state, action) <NEW_LINE> return Node(next, self, action, problem.path_cost(self.path_cost, self.state, action, next)) <NEW_LINE> <DEDENT> def solution(self): <NEW_LINE> <INDENT> return [node.action for node in self.path()[1:]] <NEW_LINE> <DEDENT> def path(self): <NEW_LINE> <INDENT> node, path_back = self, [] <NEW_LINE> while node: <NEW_LINE> <INDENT> path_back.append(node) <NEW_LINE> node = node.parent <NEW_LINE> <DEDENT> return list(reversed(path_back)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, Node) and self.state == other.state <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.state)
A node in a search tree. Contains a pointer to the parent (the node that this is a successor of) and to the actual state for this node. Note that if a state is arrived at by two paths, then there are two nodes with the same state. Also includes the action that got us to this state, and the total path_cost (also known as g) to reach the node. Other functions may add an f and h value; see best_first_graph_search and astar_search for an explanation of how the f and h values are handled. You will not need to subclass this class.
625990411f5feb6acb163ea6
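For the search-tree Node record above, a minimal usage sketch. It is not part of the dataset row: the ToyProblem class and all concrete values are assumptions, and it presumes the Node class is in scope once the <NEW_LINE>/<INDENT> tokens are expanded back into normal Python.

class ToyProblem:
    # toy problem over integers: each action adds 1 or 2 to the state
    def actions(self, state):
        return ['+1', '+2']
    def result(self, state, action):
        return state + int(action)
    def path_cost(self, c, state, action, next_state):
        return c + 1

root = Node(0)
children, count = root.expand(ToyProblem())        # two children with states 1 and 2
goal = children[1].child_node(ToyProblem(), '+2')  # state 4, reached via two actions
print(goal.solution())    # ['+2', '+2'] -- the actions along the path from the root
print(goal.path_cost)     # 2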
class IS_DEVICE_INFO_HEARTBEAT(ctypes.Structure): <NEW_LINE> <INDENT> _fields_ = [("reserved_1", wt.BYTE * 24), ("dwRuntimeFirmwareVersion", wt.DWORD), ("reserved_2", wt.BYTE * 8), ("wTemperature", wt.WORD), ("wLinkSpeed_Mb", wt.WORD), ("reserved_3", wt.BYTE * 6), ("wComportOffset", wt.WORD), ("reserved", wt.BYTE * 200)]
:var BYTE[24] reserved_1: :var DWORD dwRuntimeFirmwareVersion: :var BYTE[8] reserved_2: :var WORD wTemperature: :var WORD wLinkSpeed_Mb: :var BYTE[6] reserved_3: :var WORD wComportOffset: :var BYTE[200] reserved:
625990418da39b475be044a1
class OrderableStackedInline(StackedInline): <NEW_LINE> <INDENT> class Media: <NEW_LINE> <INDENT> js = (INLINE_ORDERING_JS,)
Adds necessary media files to regular Django StackedInline
6259904194891a1f408ba050
class TAState: <NEW_LINE> <INDENT> _state: Dict[str, Dict[str, List[ComparableTensor]]] <NEW_LINE> def __init__(self, topology: Topology): <NEW_LINE> <INDENT> self._record_state(topology) <NEW_LINE> <DEDENT> def _record_state(self, topology: Topology): <NEW_LINE> <INDENT> self._state = {} <NEW_LINE> for node in self.get_ta_nodes(topology): <NEW_LINE> <INDENT> node_descriptor = f"{type(node)} {node.name_with_id}" <NEW_LINE> self._state[node_descriptor] = {'memory_blocks': self._clone_block_tensors(node.memory_blocks)} <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def _clone_block_tensors(blocks: MemoryBlocks) -> List[ComparableTensor]: <NEW_LINE> <INDENT> return [None if block.tensor is None else ComparableTensor(block.tensor.clone()) for block in blocks] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_ta_nodes(topology: Topology) -> List[NodeBase]: <NEW_LINE> <INDENT> return [node for node in topology.nodes if TAState._is_ta_node(node)] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _is_ta_node(node: NodeBase) -> bool: <NEW_LINE> <INDENT> return type(node) in [SpatialPoolerFlockNode, TemporalPoolerFlockNode, ExpertFlockNode] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _get_tensor_attributes(node: NodeBase) -> List[str]: <NEW_LINE> <INDENT> return [a for a in dir(node) if type(node.__getattribute__(a)) is torch.Tensor] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _get_tensor_attribute_dict(node: NodeBase) -> Dict[str, torch.Tensor]: <NEW_LINE> <INDENT> attributes = TAState._get_tensor_attributes() <NEW_LINE> return dict(zip(attributes, [node.__getattribute__(attribute).clone() for attribute in attributes])) <NEW_LINE> <DEDENT> def __eq__(self, other: "TAState") -> bool: <NEW_LINE> <INDENT> return self._state == other._state
Records the state of the toy architecture nodes. The state is defined by the tensors pointed to by the memory_blocks attribute. Currently, the state is recorded as a dictionary of nodes, attributes, and cloned tensors. If this is inefficient, cloned tensors could be replaced with checksums.
6259904130c21e258be99ac0
class LinkedList: <NEW_LINE> <INDENT> def __init__(self, max_length): <NEW_LINE> <INDENT> self.head = None <NEW_LINE> self.length = 0 <NEW_LINE> self.max_length = max_length <NEW_LINE> <DEDENT> def sorted_insert_data(self, new_data): <NEW_LINE> <INDENT> new_node = Node(new_data) <NEW_LINE> self.sorted_insert_node(new_node) <NEW_LINE> return new_node <NEW_LINE> <DEDENT> def sorted_insert_node(self, new_node): <NEW_LINE> <INDENT> if self.head is None: <NEW_LINE> <INDENT> new_node.next = self.head <NEW_LINE> self.head = new_node <NEW_LINE> <DEDENT> elif self.head.data >= new_node.data: <NEW_LINE> <INDENT> new_node.next = self.head <NEW_LINE> self.head = new_node <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current = self.head <NEW_LINE> while current.next is not None and current.next.data < new_node.data: <NEW_LINE> <INDENT> current = current.next <NEW_LINE> <DEDENT> new_node.next = current.next <NEW_LINE> current.next = new_node <NEW_LINE> <DEDENT> self.length += 1 <NEW_LINE> if self.length > self.max_length: <NEW_LINE> <INDENT> self.remove(self.head) <NEW_LINE> <DEDENT> return new_node <NEW_LINE> <DEDENT> def remove(self, node): <NEW_LINE> <INDENT> current = self.head <NEW_LINE> prev = None <NEW_LINE> while current is not node and current is not None: <NEW_LINE> <INDENT> prev = current <NEW_LINE> current = current.next <NEW_LINE> <DEDENT> if prev is not None: <NEW_LINE> <INDENT> prev.next = node.next <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.head = node.next <NEW_LINE> <DEDENT> self.length -= 1 <NEW_LINE> <DEDENT> def sort_node(self, node_to_sort): <NEW_LINE> <INDENT> self.remove(node_to_sort) <NEW_LINE> node_to_sort = self.sorted_insert_node(node_to_sort) <NEW_LINE> return node_to_sort <NEW_LINE> <DEDENT> def min(self): <NEW_LINE> <INDENT> return self.head.data <NEW_LINE> <DEDENT> def get_list(self, order="ascend"): <NEW_LINE> <INDENT> data_list = [] <NEW_LINE> current = self.head <NEW_LINE> while current is not None: <NEW_LINE> <INDENT> data_list.append(current.data) <NEW_LINE> current = current.next <NEW_LINE> <DEDENT> return_list = data_list <NEW_LINE> if order == "descend": <NEW_LINE> <INDENT> return_list = [] <NEW_LINE> for i in range(len(data_list))[::-1]: <NEW_LINE> <INDENT> return_list.append(data_list[i]) <NEW_LINE> <DEDENT> <DEDENT> return return_list
LinkedList: an ascending-ordered linked list with a maximum length.
6259904107f4c71912bb06e5
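For the LinkedList record above, a short usage sketch. It is not part of the dataset row; the Node class below is an assumed minimal companion (the row itself does not define one), and the LinkedList class is presumed to be in scope.

class Node:
    # minimal node type compatible with the list above
    def __init__(self, data):
        self.data = data
        self.next = None

ll = LinkedList(max_length=3)
for value in (5, 1, 9, 3):
    ll.sorted_insert_data(value)      # the fourth insert evicts the head (the minimum)
print(ll.get_list())                  # [3, 5, 9]
print(ll.get_list(order="descend"))   # [9, 5, 3]
print(ll.min())                       # 3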
class LTESlice(Slice): <NEW_LINE> <INDENT> default_properties = { 'rbgs': 5, 'ue_scheduler': UE_SLICE_SCHEDULER_RR } <NEW_LINE> def to_str(self): <NEW_LINE> <INDENT> msg = "[LTE] id %s rbgs %s ue_scheduler %s" <NEW_LINE> return msg % (self.slice_id, self.properties['rbgs'], UE_SLICE_SCHEDULERS[self.properties['ue_scheduler']]) <NEW_LINE> <DEDENT> def _parse_properties(self, descriptor=None): <NEW_LINE> <INDENT> properties = {**self.default_properties} <NEW_LINE> if not descriptor: <NEW_LINE> <INDENT> return properties <NEW_LINE> <DEDENT> if 'rbgs' in descriptor: <NEW_LINE> <INDENT> rbgs = descriptor['rbgs'] <NEW_LINE> if isinstance(rbgs, int): <NEW_LINE> <INDENT> properties['rbgs'] = rbgs <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> properties['rbgs'] = int(rbgs) <NEW_LINE> <DEDENT> <DEDENT> if 'ue_scheduler' in descriptor: <NEW_LINE> <INDENT> ue_scheduler = descriptor['ue_scheduler'] <NEW_LINE> if not isinstance(ue_scheduler, int): <NEW_LINE> <INDENT> ue_scheduler = int(ue_scheduler) <NEW_LINE> <DEDENT> if ue_scheduler not in UE_SLICE_SCHEDULERS: <NEW_LINE> <INDENT> raise ValueError("Invalid UE slice scheduler %u" % ue_scheduler) <NEW_LINE> <DEDENT> properties['ue_scheduler'] = ue_scheduler <NEW_LINE> <DEDENT> return properties
EmPOWER LTE Slice Class.
6259904150485f2cf55dc238
class Database(object): <NEW_LINE> <INDENT> fields = ('time', 'latitude_from', 'longitude_from', 'latitude_to', 'longitude_to', 'transit_response', 'driving_response') <NEW_LINE> dtypes = ('real', 'real', 'real', 'real', 'real', 'text', 'text') <NEW_LINE> def __init__(self, path, buffer_size=100): <NEW_LINE> <INDENT> tmp_f = ', '.join(['%s %s'%(f, dt) for f, dt in zip(self.fields, self.dtypes)]) <NEW_LINE> create_cmd = "create table google_responses (%s)"%tmp_f <NEW_LINE> self.insert_cmd = "insert into google_responses values (%s)"%(', '.join(['?']*len(self.fields))) <NEW_LINE> from os.path import isfile <NEW_LINE> create = not isfile(path) <NEW_LINE> self.conn = sqlite3.connect(path) <NEW_LINE> self.curs = self.conn.cursor() <NEW_LINE> if create: <NEW_LINE> <INDENT> self.curs.execute(create_cmd) <NEW_LINE> <DEDENT> self.buffer_size = buffer_size <NEW_LINE> self.data_buffer = [] <NEW_LINE> <DEDENT> def record(self, data): <NEW_LINE> <INDENT> for idx, dtype in enumerate((dt.datetime, float, float, float, float, list, list)): <NEW_LINE> <INDENT> if not isinstance(data[idx], dtype): <NEW_LINE> <INDENT> print('failed!') <NEW_LINE> return None <NEW_LINE> <DEDENT> <DEDENT> data = list(data) <NEW_LINE> for idx, func in zip((0, -1, -2), (dt.datetime.timestamp, json.dumps, json.dumps)): <NEW_LINE> <INDENT> data[idx] = func(data[idx]) <NEW_LINE> <DEDENT> self.data_buffer.append(tuple(data)) <NEW_LINE> if len(self.data_buffer) >= self.buffer_size: <NEW_LINE> <INDENT> self.curs.executemany(self.insert_cmd, self.data_buffer) <NEW_LINE> self.conn.commit() <NEW_LINE> self.data_buffer = []
Take the information from Google Maps and stuff it into a database
62599041097d151d1a2c231b
class htmlparser(HTMLParser): <NEW_LINE> <INDENT> def __init__(self, n=50): <NEW_LINE> <INDENT> HTMLParser.__init__(self) <NEW_LINE> self.start = None <NEW_LINE> self.links = [] <NEW_LINE> self.count = 0 <NEW_LINE> self.n = n <NEW_LINE> <DEDENT> def handle_starttag(self, tag, attribs): <NEW_LINE> <INDENT> if (tag == 'a' and self.count < self.n): <NEW_LINE> <INDENT> for name, value in attribs: <NEW_LINE> <INDENT> if (name == 'href' and ('siteinfo' in value)): <NEW_LINE> <INDENT> self.start = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def handle_data(self, data): <NEW_LINE> <INDENT> if (self.start == True and ('.' in data)): <NEW_LINE> <INDENT> self.count += 1 <NEW_LINE> self.links.append(data) <NEW_LINE> <DEDENT> <DEDENT> def handle_endtag(self, data): <NEW_LINE> <INDENT> if (self.start == True and data == 'a'): <NEW_LINE> <INDENT> self.start = False
HTML parser to parse scraped data from a website.
62599041d99f1b3c44d06950
@export <NEW_LINE> class Algorithm(Enum): <NEW_LINE> <INDENT> Unknown = 0 <NEW_LINE> MD5 = 1 <NEW_LINE> SHA1 = 2 <NEW_LINE> SHA256 = 3 <NEW_LINE> SHA256AC = 4
FileHash Algorithm Enumeration.
625990418e71fb1e983bcd82
class Consumer(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=100, unique=True) <NEW_LINE> key = models.CharField(max_length=255, unique=True) <NEW_LINE> secret = models.CharField(max_length=64) <NEW_LINE> @staticmethod <NEW_LINE> def get_consumer(consumer_key): <NEW_LINE> <INDENT> return Consumer.objects.get(key=consumer_key) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_secret(consumer_key): <NEW_LINE> <INDENT> return str(Consumer.get_consumer(consumer_key).secret) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{} ({})".format(self.name, self.key)
Each LMS which connects is considered a consumer and must have an entry in this table. Two LMSes may not share the same consumer key unless they can be certain that no two users will share the same ID.
62599041b57a9660fecd2d30
class Solution: <NEW_LINE> <INDENT> def maxDepth(self, root): <NEW_LINE> <INDENT> dept = 0 <NEW_LINE> if root is None: <NEW_LINE> <INDENT> return dept <NEW_LINE> <DEDENT> q = [] <NEW_LINE> q.append(root) <NEW_LINE> while len(q) != 0: <NEW_LINE> <INDENT> length = len(q) <NEW_LINE> for i in range(length): <NEW_LINE> <INDENT> r = q.pop(0) <NEW_LINE> if r.left is not None: <NEW_LINE> <INDENT> q.append(r.left) <NEW_LINE> <DEDENT> if r.right is not None: <NEW_LINE> <INDENT> q.append(r.right) <NEW_LINE> <DEDENT> <DEDENT> dept += 1 <NEW_LINE> <DEDENT> return dept
@param root: The root of binary tree. @return: An integer
625990418a43f66fc4bf3446
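For the maxDepth record above, a small sketch of the level-order traversal in action; the TreeNode class is an assumption (LeetCode-style) and is not part of the dataset row.

class TreeNode:
    def __init__(self, val, left=None, right=None):
        self.val, self.left, self.right = val, left, right

#        1
#       / \
#      2   3
#     /
#    4
root = TreeNode(1, TreeNode(2, TreeNode(4)), TreeNode(3))
print(Solution().maxDepth(root))   # 3 -- one while-loop iteration per level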
class FeatureWriter(object): <NEW_LINE> <INDENT> def __init__(self, filename, mode): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.mode = mode <NEW_LINE> self.num_features = 0 <NEW_LINE> self._writer = tf.python_io.TFRecordWriter(filename) <NEW_LINE> <DEDENT> def process_feature(self, feature): <NEW_LINE> <INDENT> self.num_features += 1 <NEW_LINE> if self.num_features % 1e3 == 0: <NEW_LINE> <INDENT> tf.logging.info((self.num_features, self.filename)) <NEW_LINE> <DEDENT> def create_int_feature(values): <NEW_LINE> <INDENT> feature = tf.train.Feature( int64_list=tf.train.Int64List(value=list(values))) <NEW_LINE> return feature <NEW_LINE> <DEDENT> features = collections.OrderedDict() <NEW_LINE> features["input_ids"] = create_int_feature(feature.input_ids) <NEW_LINE> features["example_id"] = create_int_feature([feature.example_id]) <NEW_LINE> features["input_mask"] = create_int_feature(feature.input_mask) <NEW_LINE> features["segment_ids"] = create_int_feature(feature.segment_ids) <NEW_LINE> features["start_bytes"] = create_int_feature(feature.start_bytes) <NEW_LINE> features["end_bytes"] = create_int_feature(feature.end_bytes) <NEW_LINE> if self.mode in ['eval','train']: <NEW_LINE> <INDENT> positions = list(sum(feature.targets, ())) <NEW_LINE> features["positions"] = create_int_feature(positions) <NEW_LINE> features['answer_id'] = create_int_feature(feature.answer_id) <NEW_LINE> <DEDENT> tf_example = tf.train.Example(features=tf.train.Features(feature=features)) <NEW_LINE> self._writer.write(tf_example.SerializeToString()) <NEW_LINE> return <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> tf.logging.info("{}: {} examples found".format(self.filename, str(self.num_features))) <NEW_LINE> self._writer.close()
Writes InputFeature to TF example file.
6259904123e79379d538d7b4
class MinEffortTask(JointVelocityTask): <NEW_LINE> <INDENT> def __init__(self, model, weight=1., constraints=[]): <NEW_LINE> <INDENT> super(MinEffortTask, self).__init__(model=model, weight=weight, constraints=constraints) <NEW_LINE> raise NotImplementedError("This class has not been implemented yet.")
Minimum Effort Task "This class implements a task that tries to bring the robot in a minimum-effort posture." [1] References: - [1] "OpenSoT: A whole-body control library for the compliant humanoid robot COMAN", Rocchi et al., 2015
6259904166673b3332c316ae
class MixedProtocol(StructProtocol): <NEW_LINE> <INDENT> def __init__(self, dataRootPath, cMapPath, prevStepPaths=None, verbose=False): <NEW_LINE> <INDENT> StructProtocol.__init__(self, dataRootPath, cMapPath, prevStepPaths, singleChainfeatsToInclude=FEATURES_TO_INCLUDE_CHAIN, pairfeatsToInclude= FEATURES_TO_INCLUDE_PAIR, verbose= verbose) <NEW_LINE> <DEDENT> def loadSingleChainFeatures(self, prefixOneChainType, chainType): <NEW_LINE> <INDENT> singleChainFeats= super(StructProtocol,self).loadSingleChainFeatures( prefixOneChainType, chainType) <NEW_LINE> chainType= chainType.upper() <NEW_LINE> nAAsInWindow= 1+ len(set([ int(elem.split(".")[-1].split("_")[0]) for elem in singleChainFeats.columns if elem.startswith("resNameWin.")] )) <NEW_LINE> centralRes= nAAsInWindow//2 <NEW_LINE> selectedSeqEntr= set([ 'seqEntropy.%d%s'%(i, chainType) for i in range(centralRes*4 , centralRes*4 +4)]) <NEW_LINE> selectedPssm= set([ 'pssm.%d%s'%(i, chainType) for i in range(centralRes*40,centralRes*40+40)]) <NEW_LINE> selectedWinAA= set([ 'resNameWin.%d_dummy_%s%s'%(centralRes,letter, chainType) for letter in AA_CODE_ELEMENTS]) <NEW_LINE> centralResCols= selectedSeqEntr.union(selectedPssm).union(selectedWinAA) <NEW_LINE> winCols= set([col for col in singleChainFeats.columns if not "ggr" in col and( "Win" in col or "pssm" in col or "seqEntropy" in col)]) <NEW_LINE> allWinButCentralCols= winCols.difference(centralResCols) <NEW_LINE> allButWinData= singleChainFeats[ [col for col in singleChainFeats.columns if not col in allWinButCentralCols] ] <NEW_LINE> winData= singleChainFeats[ list(singleChainFeats.columns[:3])+[col for col in singleChainFeats.columns if col in allWinButCentralCols] ] <NEW_LINE> singleChainFeats= self.addSingleChainAggregation(allButWinData, chainType) <NEW_LINE> mergeOn= [ elem%chainType.upper() for elem in ["chainId%s", "structResId%s", "resName%s"] ] <NEW_LINE> singleChainFeats= pd.merge(singleChainFeats, winData, how='inner', on=mergeOn) <NEW_LINE> return singleChainFeats
This class implements structural voronoi environment codification
6259904107d97122c4217f54
class TestingConfig(BaseConfig): <NEW_LINE> <INDENT> TESTING = True
Testing settings
625990418a349b6b436874fd
class build_ext(_build_ext): <NEW_LINE> <INDENT> def finalize_options(self): <NEW_LINE> <INDENT> _build_ext.finalize_options(self) <NEW_LINE> __builtins__.__NUMPY_SETUP__ = False <NEW_LINE> import numpy <NEW_LINE> self.include_dirs.append(numpy.get_include())
to install numpy
6259904116aa5153ce4017a2
class ClearbitCompanyDomainAlias(models.Model): <NEW_LINE> <INDENT> clearbit_company = models.ForeignKey(ClearbitCompany) <NEW_LINE> domain = models.URLField() <NEW_LINE> clearbit_dl_datetime = models.DateTimeField()
Company domain aliases.
6259904130c21e258be99ac2
class AboutFrame(LabelFrame): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> LabelFrame.__init__(self, parent, text='About instamatic') <NEW_LINE> self.parent = parent <NEW_LINE> frame = Frame(self) <NEW_LINE> Label(frame, text='').grid(row=0, column=0, sticky='W') <NEW_LINE> Label(frame, text='Contact:').grid(row=1, column=0, sticky='W', padx=10) <NEW_LINE> Label(frame, text='Stef Smeets ([email protected])').grid(row=1, column=1, sticky='W') <NEW_LINE> Label(frame, text='').grid(row=5, column=0, sticky='W') <NEW_LINE> Label(frame, text='Source code:').grid(row=10, column=0, sticky='W', padx=10) <NEW_LINE> link = Link_Button(frame, text=instamatic.__url__, action=self.link_github) <NEW_LINE> link.grid(row=10, column=1, sticky='W') <NEW_LINE> Label(frame, text='').grid(row=12, column=0, sticky='W') <NEW_LINE> Label(frame, text='Manual:').grid(row=20, column=0, sticky='W', padx=10) <NEW_LINE> link = Link_Button(frame, text=instamatic.__url__ + '/docs', action=self.link_github) <NEW_LINE> link.grid(row=20, column=1, sticky='W') <NEW_LINE> Label(frame, text='').grid(row=22, column=0, sticky='W') <NEW_LINE> Label(frame, text='Bugs:').grid(row=30, column=0, sticky='W', padx=10) <NEW_LINE> link = Link_Button(frame, text=instamatic.__url__ + '/issues', action=self.link_github) <NEW_LINE> link.grid(row=30, column=1, sticky='W') <NEW_LINE> Label(frame, text='').grid(row=32, column=0, sticky='W') <NEW_LINE> Label(frame, text='If you found this software useful, please cite:').grid(row=40, column=0, sticky='W', columnspan=2, padx=10) <NEW_LINE> txt = Message(frame, text=instamatic.__citation__, width=320, justify=LEFT) <NEW_LINE> txt.grid(row=41, column=1, sticky='W') <NEW_LINE> Label(frame, text='').grid(row=41, column=0, sticky='W', padx=10) <NEW_LINE> frame.pack(side='top', fill='x') <NEW_LINE> <DEDENT> def link_github(self, event=None): <NEW_LINE> <INDENT> import webbrowser <NEW_LINE> webbrowser.open_new(instamatic.__url__) <NEW_LINE> <DEDENT> def link_manual(self, event=None): <NEW_LINE> <INDENT> import webbrowser <NEW_LINE> webbrowser.open_new(instamatic.__url__)
`About` panel for the GUI.
62599041ec188e330fdf9b4f
class OpenVPNError(RuntimeError): <NEW_LINE> <INDENT> pass
Errors from the OpenVPN subprocess
625990416fece00bbacccc67
class StatusType(enum.Enum): <NEW_LINE> <INDENT> UP = 'UP' <NEW_LINE> DOWN = 'DOWN' <NEW_LINE> STARTING = 'STARTING' <NEW_LINE> OUT_OF_SERVICE = 'OUT_OF_SERVICE' <NEW_LINE> UNKNOWN = 'UNKNOWN'
Available status types with eureka, these can be used for any `EurekaClient.register` call to pl
6259904115baa72349463247
class UserSource(base.MarxTest): <NEW_LINE> <INDENT> title = 'Compiling a USER source' <NEW_LINE> figures = OrderedDict([('ds9', {'alternative': 'A point source', 'caption': '`ds9`_ shows that the distribution of source is indeed a point source.'}) ]) <NEW_LINE> @base.Python <NEW_LINE> def step_1(self): <NEW_LINE> <INDENT> marxpath = self.conf.get('marx', 'path') <NEW_LINE> src = os.path.join(marxpath, 'share', 'doc', 'marx', 'examples', 'user-source') <NEW_LINE> for f in ['point.c', 'user.h']: <NEW_LINE> <INDENT> shutil.copy(os.path.join(src, f), os.path.join(self.basepath, f)) <NEW_LINE> <DEDENT> jdmath_h = os.path.join(marxpath, 'include') <NEW_LINE> jdmath_a = os.path.join(marxpath, 'lib', 'libjdmath.a') <NEW_LINE> subprocess.call(['gcc', '-I' + jdmath_h, jdmath_a, '-shared', 'point.c', '-o', 'point.so']) <NEW_LINE> <DEDENT> @base.Marx <NEW_LINE> def step_2(self): <NEW_LINE> <INDENT> return {'SourceType': 'USER', 'UserSourceFile': os.path.join(self.basepath, 'point.so')} <NEW_LINE> <DEDENT> @base.Marx2fits <NEW_LINE> def step_3(self): <NEW_LINE> <INDENT> return '--pixadj=EDSER', 'point', 'point.fits' <NEW_LINE> <DEDENT> @base.Ciao <NEW_LINE> def step_30(self): <NEW_LINE> <INDENT> return ['''ds9 -width 800 -height 500 -log -cmap heat point.fits -pan to 4018 4141 physical -zoom 8 -saveimage {0} -exit'''.format(self.figpath(list(self.figures.keys())[0]))]
Run an example for a USER source. |marx| comes with several examples of user-written sources in C. These can be compiled as shared objects and dynamically linked into |marx| at run time. To test this, we copy one of the source files from the installed |marx| version and compile it with gcc. This particular case is not very useful, because |marx| already has a point source with the same properties built in. The purpose of this test is only to have an automatic check that the dynamic linking works.
6259904163b5f9789fe86421
class Property(_BasicMixin): <NEW_LINE> <INDENT> values = models.ForeignKey('ValueSet', default=ValueSet.BOOLEAN) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = 'brubeck' <NEW_LINE> verbose_name_plural = 'properties' <NEW_LINE> <DEDENT> def allowed_values(self): <NEW_LINE> <INDENT> return Value.objects.filter(value_set=self.values)
Represents a property like "compact" or "Hausdorff"
6259904107d97122c4217f55
class MdbExp(list): <NEW_LINE> <INDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._title <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self._title = None <NEW_LINE> for s in self.normalStanzas: <NEW_LINE> <INDENT> if self._title == None: <NEW_LINE> <INDENT> self._title = s.title <NEW_LINE> <DEDENT> elif self._title != s.title: <NEW_LINE> <INDENT> self._title = None <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return self._title <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def dataType(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._dataType <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self._dataType = None <NEW_LINE> for s in self.normalStanzas: <NEW_LINE> <INDENT> if 'dataType' in s: <NEW_LINE> <INDENT> if self._dataType == None: <NEW_LINE> <INDENT> self._dataType = encodeUtils.dataTypes[s['dataType']] <NEW_LINE> <DEDENT> elif self._dataType.name != s['dataType']: <NEW_LINE> <INDENT> self._dataType = None <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return self._dataType <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def normalStanzas(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._normal <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self._normal = list() <NEW_LINE> for s in self: <NEW_LINE> <INDENT> if 'objStatus' not in s: <NEW_LINE> <INDENT> self._normal.append(s) <NEW_LINE> <DEDENT> <DEDENT> return self._normal <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, id, parent, stanzas): <NEW_LINE> <INDENT> list.__init__(self) <NEW_LINE> self.extend(stanzas) <NEW_LINE> self._id = id <NEW_LINE> self._parent = parent
Describes a single experiment ID. It holds a collection of the experiment's stanzas together with some additional data that should typically be consistent across all the stanzas, and it verifies that the data is in fact consistent.
6259904145492302aabfd791
class Server(socketserver.ThreadingTCPServer): <NEW_LINE> <INDENT> clients = {} <NEW_LINE> allow_reuse_address = True <NEW_LINE> def __init__(self, server_address, RequestHandlerClass, base, bind_and_activate=True): <NEW_LINE> <INDENT> socketserver.ThreadingTCPServer.__init__( self, server_address, RequestHandlerClass, bind_and_activate) <NEW_LINE> self.base = base <NEW_LINE> <DEDENT> def get_request(self): <NEW_LINE> <INDENT> request, client_address = self.socket.accept() <NEW_LINE> self.clients[request] = None <NEW_LINE> print("get_request") <NEW_LINE> return request, client_address <NEW_LINE> <DEDENT> def verify_request(self, request, client_address): <NEW_LINE> <INDENT> print("verify_request", request, client_address) <NEW_LINE> return True <NEW_LINE> <DEDENT> def close_request(self, request): <NEW_LINE> <INDENT> print("close_request") <NEW_LINE> self.clients.pop(request, None) <NEW_LINE> request.close()
class Server
6259904107f4c71912bb06e8
class Passthrough(Component): <NEW_LINE> <INDENT> text_in = File(iotype='in', local_path='tout', legal_types=['xyzzy', 'txt']) <NEW_LINE> binary_in = File(iotype='in', local_path='bout') <NEW_LINE> text_out = File(path='tout', iotype='out') <NEW_LINE> binary_out = File(path='bout', iotype='out', binary=True) <NEW_LINE> def execute(self): <NEW_LINE> <INDENT> self.binary_out.extra_stuff = self.binary_in.extra_stuff
Copies input files (implicitly via local_path) to output.
62599041004d5f362081f940
class InvalidType(Exception): <NEW_LINE> <INDENT> def __init__(self, expect, actual): <NEW_LINE> <INDENT> msg = 'Expect: {0}\nActual: {1}'.format(expect, actual) <NEW_LINE> super(InvalidType, self).__init__(msg) <NEW_LINE> self.expect = expect <NEW_LINE> self.actual = actual
Raised when types of data for forward/backward are invalid.
625990411f5feb6acb163eaa
class Appointment(object): <NEW_LINE> <INDENT> def __init__(self, patient_first_name, patient_last_name, date, time, kind): <NEW_LINE> <INDENT> self.patient_first_name = patient_first_name <NEW_LINE> self.patient_last_name = patient_last_name <NEW_LINE> self.date = date <NEW_LINE> self.time = time <NEW_LINE> self.kind = kind <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.__dict__)
Class holds all the data of an appointment.
6259904124f1403a92686228
class MockConnection(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.mock_cursor = MagicMock() <NEW_LINE> <DEDENT> def cursor(self): <NEW_LINE> <INDENT> return self.mock_cursor <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> print("mock connect close.") <NEW_LINE> <DEDENT> def commit(self): <NEW_LINE> <INDENT> print("mock connect commit.")
Mocked Connection class that mocks the connection class of psycopg2.
62599041596a897236128f0a
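For the MockConnection record above, a brief sketch of how a test might exercise it. The SQL text is an assumption, and the class is presumed to have MagicMock imported where it is defined.

conn = MockConnection()
cursor = conn.cursor()                        # returns the shared MagicMock
cursor.execute("SELECT 1")                    # recorded on the mock, nothing hits a database
cursor.execute.assert_called_once_with("SELECT 1")
conn.commit()                                 # prints "mock connect commit."
conn.close()                                  # prints "mock connect close."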
class Page(models.Model): <NEW_LINE> <INDENT> category = models.ForeignKey(Category) <NEW_LINE> title = models.CharField(max_length=128) <NEW_LINE> url = models.URLField() <NEW_LINE> views = models.IntegerField(default=0) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.title
The category the page belongs to, plus its title, link, and view count.
6259904107d97122c4217f56
class NormalizeContrast(BaseFilter): <NEW_LINE> <INDENT> def __init__(self, region='all', mode=None, cutoff=0): <NEW_LINE> <INDENT> BaseFilter.__init__(self) <NEW_LINE> if mode is not None: <NEW_LINE> <INDENT> region = mode <NEW_LINE> <DEDENT> if region not in ('all', 'bbox', 'mask'): <NEW_LINE> <INDENT> raise RuntimeError( "Not a supported region (options are 'all', 'bbox', and 'mask')") <NEW_LINE> <DEDENT> if type(cutoff) != int or cutoff < 0: <NEW_LINE> <INDENT> raise RuntimeError("'cutoff' must be a positive integer") <NEW_LINE> <DEDENT> self.region = region <NEW_LINE> self.cutoff = cutoff <NEW_LINE> <DEDENT> def process(self, image): <NEW_LINE> <INDENT> BaseFilter.process(self, image) <NEW_LINE> if self.mode != 'gray': <NEW_LINE> <INDENT> raise RuntimeError("NormalizeContrast only supports grayscale images.") <NEW_LINE> <DEDENT> if self.region == 'bbox': <NEW_LINE> <INDENT> bbox = image.split()[1].getbbox() <NEW_LINE> croppedImage = image.crop(bbox) <NEW_LINE> croppedImage.load() <NEW_LINE> alpha = croppedImage.split()[1] <NEW_LINE> croppedImage = ImageOps.autocontrast(croppedImage.split()[0], cutoff=self.cutoff) <NEW_LINE> croppedImage.putalpha(alpha) <NEW_LINE> image.paste(croppedImage, image.bbox) <NEW_LINE> <DEDENT> elif self.region == 'mask': <NEW_LINE> <INDENT> bbox = image.split()[1].getbbox() <NEW_LINE> croppedImage = image.crop(bbox) <NEW_LINE> croppedImage.load() <NEW_LINE> alpha = croppedImage.split()[1] <NEW_LINE> grayImage = ImageChops.constant(croppedImage, 128) <NEW_LINE> compositeImage = Image.composite(croppedImage, grayImage, alpha) <NEW_LINE> compositeImage = ImageOps.autocontrast(compositeImage.split()[0], cutoff=self.cutoff) <NEW_LINE> croppedImage = Image.composite(compositeImage, croppedImage, alpha) <NEW_LINE> croppedImage.putalpha(alpha) <NEW_LINE> image.paste(croppedImage, bbox) <NEW_LINE> <DEDENT> elif self.region == 'all': <NEW_LINE> <INDENT> alpha = image.split()[1] <NEW_LINE> image = ImageOps.autocontrast(image.split()[0], cutoff=self.cutoff) <NEW_LINE> image.putalpha(alpha) <NEW_LINE> <DEDENT> return image
Perform contrast normalization on the image.
6259904107f4c71912bb06e9
class BinaryRule(Rule): <NEW_LINE> <INDENT> def __init__(self, subjects, predicate): <NEW_LINE> <INDENT> super(BinaryRule, self).__init__(subjects, predicate) <NEW_LINE> if len(self.subjects) != 2: <NEW_LINE> <INDENT> raise ValueError('This is not a binary rule.') <NEW_LINE> <DEDENT> self.reference = 1 if self.subjects[1] == '.' else 0 <NEW_LINE> self.other = self.subjects[1-self.reference] <NEW_LINE> <DEDENT> def __call__(self, o1, o2): <NEW_LINE> <INDENT> reference, other = (o1, o2) if self.reference == 0 else (o2, o1) <NEW_LINE> subother = self._symbol2operator(reference, self.other) <NEW_LINE> if isinstance(subother, (type, tuple)): <NEW_LINE> <INDENT> if subother is HomothetyOperator: <NEW_LINE> <INDENT> subother = (HomothetyOperator, ZeroOperator) <NEW_LINE> <DEDENT> if not isinstance(other, subother): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> elif other != subother: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> predicate = self._symbol2operator(reference, self.predicate) <NEW_LINE> if predicate is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if not isinstance(predicate, Operator) and isinstance(predicate, collections.Callable): <NEW_LINE> <INDENT> predicate = predicate(o1, o2) <NEW_LINE> <DEDENT> if predicate is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if isinstance(predicate, (list, tuple)) and len(predicate) == 1: <NEW_LINE> <INDENT> predicate = predicate[0] <NEW_LINE> <DEDENT> if not isinstance(predicate, Operator) and not (isinstance(predicate, (list, tuple)) and all(isinstance(o, Operator) for o in predicate)): <NEW_LINE> <INDENT> raise TypeError("The predicate '{0}' is not an operator.".format( predicate)) <NEW_LINE> <DEDENT> return predicate
Binary rule on operators. An operator rule is a relation that can be expressed by the sentence "'subjects' are 'predicate'". An instance of this class, when called with two input arguments checks if the inputs are subjects to the rule, and returns the predicate if it is the case. Otherwise, it returns None. Parameters ---------- subjects : str It defines the relationship between the two subjects that must be verified for the rule to apply. It is a pair of two expressions. One has to be '.' and stands for the reference subject. It determines if the reference operator is on the right or left hand side of the operator pair. The other expression constrains the other subject, which must be: '.' : the reference operator itself. 'C' : the conjugate of the reference object 'T' : the transpose of the reference object 'H' : the adjoint of the reference object or an Operator subclass. For instance, given a string 'C,.', the rule will apply to the inputs o1 and o2 if o1 is o2.C. For a condition ('.', DiagonalOperator), the rule will apply if o2 is a DiagonalOperator instance. predicate : function or str If the two objects o1, o2, are subjects of the rule, the predicate will be returned. The predicate can be '.', '1' or a callable of two arguments. Example ------- >>> rule = BinaryRule('.,.', '.') >>> o = Operator() >>> rule(o, o) is o True >>> rule(o, IdentityOperator()) is None True
6259904116aa5153ce4017a4
class CMFGENHydLParser(BaseParser): <NEW_LINE> <INDENT> nu_ratio_key = 'L_DEL_U' <NEW_LINE> def load(self, fname): <NEW_LINE> <INDENT> header = parse_header(fname) <NEW_LINE> self.header = header <NEW_LINE> self.max_l = self.get_max_l() <NEW_LINE> self.num_xsect_nus = int(header['Number of values per cross-section']) <NEW_LINE> nu_ratio = 10**float(header[self.nu_ratio_key]) <NEW_LINE> nus = np.power( nu_ratio, np.arange(self.num_xsect_nus) ) <NEW_LINE> skiprows, _ = find_row(fname, self.nu_ratio_key) <NEW_LINE> skiprows += 1 <NEW_LINE> data = [] <NEW_LINE> indexes = [] <NEW_LINE> with open(fname, mode='r') as f: <NEW_LINE> <INDENT> for i in range(skiprows): <NEW_LINE> <INDENT> f.readline() <NEW_LINE> <DEDENT> while True: <NEW_LINE> <INDENT> n, l, log10x_sect = next(self._table_gen(f), None) <NEW_LINE> indexes.append((n, l)) <NEW_LINE> data.append(log10x_sect) <NEW_LINE> if l == self.max_l: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> index = pd.MultiIndex.from_tuples(indexes, names=['n', 'l']) <NEW_LINE> self.base = pd.DataFrame(data, index=index, columns=nus) <NEW_LINE> self.base.columns.name = 'nu / nu_0' <NEW_LINE> <DEDENT> def _table_gen(self, f): <NEW_LINE> <INDENT> line = f.readline() <NEW_LINE> n, l, num_entries = self.parse_table_header_line(line) <NEW_LINE> assert(num_entries == self.num_xsect_nus) <NEW_LINE> log10_xsect = [] <NEW_LINE> while True: <NEW_LINE> <INDENT> line = f.readline() <NEW_LINE> if not line.strip(): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> log10_xsect += [float(entry) for entry in line.split()] <NEW_LINE> <DEDENT> log10_xsect = np.array(log10_xsect) <NEW_LINE> assert(len(log10_xsect) == self.num_xsect_nus) <NEW_LINE> yield n, l, log10_xsect <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def parse_table_header_line(line): <NEW_LINE> <INDENT> return [int(entry) for entry in line.split()] <NEW_LINE> <DEDENT> def get_max_l(self): <NEW_LINE> <INDENT> return int(self.header['Maximum principal quantum number']) - 1
Parser for the CMFGEN hydrogen photoionization cross sections. Attributes ---------- base : pandas.DataFrame, dtype float Photoionization cross section table for hydrogen. Values are the common logarithm (i.e. base 10) of the cross section in units cm^2. Indexed by the principal quantum number n and orbital quantum number l. Columns are the frequencies for the cross sections. Given in units of the threshold frequency for photoionization. header : dict Methods ------- load(fname) Parses the input file and stores the result in the `base` attribute.
62599041d6c5a102081e33dd
class CredentialError(ClientError): <NEW_LINE> <INDENT> pass
Could not connect client using given credentials
625990416fece00bbacccc69
class LyAssignment(LyObject): <NEW_LINE> <INDENT> def __init__(self, assignmentId = None, identifierInit = None, propertyPath = None, embeddedScm = None): <NEW_LINE> <INDENT> LyObject.__init__(self) <NEW_LINE> self.assignmentId = assignmentId <NEW_LINE> self.identifierInit = identifierInit <NEW_LINE> self.propertyPath = propertyPath <NEW_LINE> self.embeddedScm = embeddedScm <NEW_LINE> <DEDENT> def stringOutput(self): <NEW_LINE> <INDENT> if self.embeddedScm is not None: <NEW_LINE> <INDENT> return self.embeddedScm.stringOutput() <NEW_LINE> <DEDENT> elif self.propertyPath is not None: <NEW_LINE> <INDENT> if self.assignmentId is None or self.identifierInit is None: <NEW_LINE> <INDENT> raise Exception() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ''.join([str(self.assignmentId), ' ' , self.propertyPath.stringOutput(), " = ", self.identifierInit.stringOutput()], ' ') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self.assignmentId is None or self.identifierInit is None: <NEW_LINE> <INDENT> raise Exception() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ' '.join([str(self.assignmentId), "=", self.identifierInit.stringOutput(), ' '])
one of three forms of assignment: assignment_id '=' identifier_init assignment_id property_path '=' identifier_init embedded_scm if self.embeddedScm is not None, uses type 3 if self.propertyPath is not None, uses type 2 else uses type 1 or raises an exception. >>> lyii = lily.lilyObjects.LyIdentifierInit(string = "hi") >>> lya = lily.lilyObjects.LyAssignment(assignmentId = "title", identifierInit = lyii) >>> print lya title = "hi" Note that you could also pass assignmentId a LyAssignmentId object, but that's overkill for a lot of things.
6259904115baa72349463249
class ListModeratorRequiredTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> from django.test.client import RequestFactory <NEW_LINE> from postorius.tests.utils import create_mock_list <NEW_LINE> self.request_factory = RequestFactory() <NEW_LINE> list_name = 'foolist.example.org' <NEW_LINE> list_id = 'foolist.example.org' <NEW_LINE> self.mock_list = create_mock_list(dict( fqdn_listname=list_name, list_id=list_id)) <NEW_LINE> <DEDENT> @patch.object(Client, 'get_list') <NEW_LINE> def test_not_authenticated(self, mock_get_list): <NEW_LINE> <INDENT> mock_get_list.return_value = self.mock_list <NEW_LINE> request = self.request_factory.get('/lists/foolist.example.org/' 'settings/') <NEW_LINE> request.user = AnonymousUser() <NEW_LINE> self.assertRaises(PermissionDenied, dummy_function_mod_req, request, list_id='foolist.example.org') <NEW_LINE> <DEDENT> @patch.object(Client, 'get_list') <NEW_LINE> def test_superuser(self, mock_get_list): <NEW_LINE> <INDENT> mock_get_list.return_value = self.mock_list <NEW_LINE> request = self.request_factory.get('/lists/foolist.example.org/' 'settings/') <NEW_LINE> request.user = User.objects.create_superuser('su2', '[email protected]', 'pwd') <NEW_LINE> return_value = dummy_function_mod_req(request, list_id= 'foolist.example.org') <NEW_LINE> self.assertEqual(return_value, True) <NEW_LINE> <DEDENT> @patch.object(Client, 'get_list') <NEW_LINE> def test_non_list_moderator(self, mock_get_list): <NEW_LINE> <INDENT> self.mock_list.moderators = ['[email protected]'] <NEW_LINE> mock_get_list.return_value = self.mock_list <NEW_LINE> request = self.request_factory.get('/lists/foolist.example.org/' 'settings/') <NEW_LINE> request.user = User.objects.create_user('les cl2', '[email protected]', 'pwd') <NEW_LINE> self.assertRaises(PermissionDenied, dummy_function_mod_req, request, list_id='foolist.example.org') <NEW_LINE> <DEDENT> @patch.object(Client, 'get_list') <NEW_LINE> def test_list_owner(self, mock_get_list): <NEW_LINE> <INDENT> self.mock_list.owners = ['[email protected]'] <NEW_LINE> mock_get_list.return_value = self.mock_list <NEW_LINE> request = self.request_factory.get('/lists/foolist.example.org/' 'settings/') <NEW_LINE> request.user = User.objects.create_user('les cl3', '[email protected]', 'pwd') <NEW_LINE> return_value = dummy_function_mod_req(request, list_id= 'foolist.example.org') <NEW_LINE> self.assertEqual(return_value, True) <NEW_LINE> <DEDENT> @patch.object(Client, 'get_list') <NEW_LINE> def test_list_moderator(self, mock_get_list): <NEW_LINE> <INDENT> self.mock_list.moderators = ['[email protected]'] <NEW_LINE> mock_get_list.return_value = self.mock_list <NEW_LINE> request = self.request_factory.get('/lists/foolist.example.org/' 'settings/') <NEW_LINE> request.user = User.objects.create_user('les cl4', '[email protected]', 'pwd') <NEW_LINE> return_value = dummy_function_mod_req(request, list_id= 'foolist.example.org') <NEW_LINE> self.assertEqual(return_value, True)
Tests the list_owner_required auth decorator.
62599041d10714528d69efe8
class Player(object): <NEW_LINE> <INDENT> def __init__(self, name, state): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._state = state <NEW_LINE> <DEDENT> def change_state(self, state): <NEW_LINE> <INDENT> self._state = state <NEW_LINE> <DEDENT> def play(self): <NEW_LINE> <INDENT> print(self._name + self._state.play()) <NEW_LINE> <DEDENT> def train(self): <NEW_LINE> <INDENT> print(self._name + self._state.train())
Context
625990414e696a045264e77d
class PrivateTagsApiTests(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.user = get_user_model().objects.create_user( '[email protected]', 'password123' ) <NEW_LINE> self.client = APIClient() <NEW_LINE> self.client.force_authenticate(self.user) <NEW_LINE> <DEDENT> def test_retrieve_tags(self): <NEW_LINE> <INDENT> Tag.objects.create(user=self.user, name="Vegan") <NEW_LINE> Tag.objects.create(user=self.user, name="Dessert") <NEW_LINE> res = self.client.get(TAGS_URL) <NEW_LINE> tags = Tag.objects.all().order_by('-name') <NEW_LINE> serializer = TagSerializer(tags, many=True) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> self.assertEqual(res.data, serializer.data) <NEW_LINE> <DEDENT> def test_tags_limited_to_user(self): <NEW_LINE> <INDENT> user2 = get_user_model().objects.create_user( '[email protected]', 'testpass' ) <NEW_LINE> Tag.objects.create(user=user2, name="Fruity") <NEW_LINE> tag = Tag.objects.create(user=self.user, name="Comfort Food") <NEW_LINE> res = self.client.get(TAGS_URL) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> self.assertEqual(len(res.data), 1) <NEW_LINE> self.assertEqual(res.data[0]['name'], tag.name) <NEW_LINE> <DEDENT> def test_create_tags_successful(self): <NEW_LINE> <INDENT> payload = {'name':'Test tag'} <NEW_LINE> self.client.post(TAGS_URL, payload) <NEW_LINE> exists = Tag.objects.filter(user= self.user, name=payload['name']).exists() <NEW_LINE> self.assertTrue(exists) <NEW_LINE> <DEDENT> def test_create_tag_invalid(self): <NEW_LINE> <INDENT> payload = {'name': ''} <NEW_LINE> res = self.client.post(TAGS_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_retrieve_tags_assigned_to_recipes(self): <NEW_LINE> <INDENT> tag1 = Tag.objects.create(user=self.user, name='Breakfast') <NEW_LINE> tag2 = Tag.objects.create(user=self.user, name='Lunch') <NEW_LINE> recipe = Recipe.objects.create( title='Coriander eggs on toast', time_minutes=10, price=5.00, user=self.user, ) <NEW_LINE> recipe.tags.add(tag1) <NEW_LINE> res = self.client.get(TAGS_URL, {'assigned_only': 1}) <NEW_LINE> serializer1 = TagSerializer(tag1) <NEW_LINE> serializer2 = TagSerializer(tag2) <NEW_LINE> self.assertIn(serializer1.data, res.data) <NEW_LINE> self.assertNotIn(serializer2.data, res.data) <NEW_LINE> <DEDENT> def test_retrieve_tags_assigned_unique(self): <NEW_LINE> <INDENT> tag = Tag.objects.create(user=self.user, name='Breakfast') <NEW_LINE> Tag.objects.create(user=self.user, name='Lunch') <NEW_LINE> recipe1 = Recipe.objects.create( title='Pancakes', time_minutes=5, price=3.00, user=self.user ) <NEW_LINE> recipe1.tags.add(tag) <NEW_LINE> recipe2 = Recipe.objects.create( title='Porridge', time_minutes=3, price=2.00, user=self.user ) <NEW_LINE> recipe2.tags.add(tag) <NEW_LINE> res = self.client.get(TAGS_URL, {'assigned_only': 1}) <NEW_LINE> self.assertEqual(len(res.data), 1)
Test the authorized user tags API
6259904196565a6dacd2d8e6
class PurgePathCacheRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Paths = None <NEW_LINE> self.FlushType = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Paths = params.get("Paths") <NEW_LINE> self.FlushType = params.get("FlushType")
PurgePathCache request structure.
6259904163b5f9789fe86423
class InvokerHandler(AbstractMessageDriver): <NEW_LINE> <INDENT> DEFAULT_TIMEOUT = None <NEW_LINE> def __init__(self, channel): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.channel = channel <NEW_LINE> self.invoking_thread = MessageThread(target=self.__dealing_invoker, name='invoking') <NEW_LINE> self.retrieving_thread = MessageThread(target=self.__receive_invoker, name='retrieving') <NEW_LINE> self.isrunning = False <NEW_LINE> <DEDENT> def startup(self): <NEW_LINE> <INDENT> print("MessageHandler startup") <NEW_LINE> self.isrunning = True <NEW_LINE> self.invoking_thread.start() <NEW_LINE> self.retrieving_thread.start() <NEW_LINE> <DEDENT> def shutdown(self): <NEW_LINE> <INDENT> print("MessageHandler shutdown") <NEW_LINE> self.isrunning = False <NEW_LINE> self.channel.close() <NEW_LINE> self.invoking_thread.stopAndWait() <NEW_LINE> self.retrieving_thread.stopAndWait() <NEW_LINE> <DEDENT> def invoke(self, invoker): <NEW_LINE> <INDENT> if not self.isrunning: raise Exception() <NEW_LINE> try: <NEW_LINE> <INDENT> self.invoking_thread.push(invoker, self.DEFAULT_TIMEOUT) <NEW_LINE> <DEDENT> except MessageFullError as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> <DEDENT> except StopError as e: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> def retrieve(self): <NEW_LINE> <INDENT> if not self.isrunning: raise Exception() <NEW_LINE> try: <NEW_LINE> <INDENT> message = self.retrieving_thread.pop(self.DEFAULT_TIMEOUT) <NEW_LINE> return message <NEW_LINE> <DEDENT> except MessageEmptyError as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> <DEDENT> except StopError as e: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> def __dealing_invoker(self): <NEW_LINE> <INDENT> while self.isrunning: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> invoker = self.invoking_thread.pop() <NEW_LINE> print('1111') <NEW_LINE> self.channel.send(invoker.message) <NEW_LINE> print('2222') <NEW_LINE> <DEDENT> except StopError: <NEW_LINE> <INDENT> print('warning: message thread is broken') <NEW_LINE> self.isrunning = False <NEW_LINE> break <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> print('stop invoking thread') <NEW_LINE> self.isrunning = False <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __receive_invoker(self): <NEW_LINE> <INDENT> while self.isrunning: <NEW_LINE> <INDENT> try : <NEW_LINE> <INDENT> print('3333') <NEW_LINE> message = self.channel.recv() <NEW_LINE> self.retrieving_thread.push(message) <NEW_LINE> print('4444') <NEW_LINE> <DEDENT> except StopError: <NEW_LINE> <INDENT> print('stop retrieving thread') <NEW_LINE> self.isrunning = False <NEW_LINE> break <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> print('stop retrieving thread') <NEW_LINE> self.isrunning = False <NEW_LINE> break
An RPC message handler with asynchronous support. It automatically sends messages out from this end and automatically receives RPC messages from the peer. Internally it uses two threads with matching message queues so that sending and receiving run in parallel.
62599041287bf620b6272e9e
class AnnotationAdapter(object): <NEW_LINE> <INDENT> ANNOTATION_KEY = None <NEW_LINE> def __init__(self, context): <NEW_LINE> <INDENT> self.context = context <NEW_LINE> annotations = IAnnotations(context) <NEW_LINE> self._data = annotations.get(self.ANNOTATION_KEY, None) <NEW_LINE> <DEDENT> def __setattr__(self, name, value): <NEW_LINE> <INDENT> if name in ('context', '_data', 'ANNOTATION_KEY'): <NEW_LINE> <INDENT> self.__dict__[name] = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self._data is None: <NEW_LINE> <INDENT> self._data = PersistentDict() <NEW_LINE> annotations = IAnnotations(self.context) <NEW_LINE> annotations[self.ANNOTATION_KEY] = self._data <NEW_LINE> <DEDENT> self._data[name] = value <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> return self._data and self._data.get(name, None) or None
Abstract Base Class for an annotation storage. If the annotation wasn't set, it won't be created until the first attempt to set a property on this adapter. So, the context doesn't get polluted with annotations by accident.
62599041379a373c97d9a2e1
class BaseAPIView(APIView): <NEW_LINE> <INDENT> def get_serializer_context(self): <NEW_LINE> <INDENT> return { 'request': self.request, 'view': self, } <NEW_LINE> <DEDENT> def get_serializer_class(self): <NEW_LINE> <INDENT> assert self.serializer_class is not None, ( "'%s' should either include a `serializer_class` attribute, " "or override the `get_serializer_class()` method." % self.__class__.__name__) <NEW_LINE> return self.serializer_class <NEW_LINE> <DEDENT> def get_serializer(self, *args, **kwargs): <NEW_LINE> <INDENT> serializer_class = self.get_serializer_class() <NEW_LINE> kwargs['context'] = self.get_serializer_context() <NEW_LINE> return serializer_class(*args, **kwargs)
Base API View
62599041b57a9660fecd2d33
class Z(Group): <NEW_LINE> <INDENT> def __init__(self, order): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.order = order <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return product(range(self.order), [1])
Cyclic group on {0, 1, ..., n-1}.
625990410fa83653e46f6192
class NonBlockingStreamReader: <NEW_LINE> <INDENT> def __init__(self, stream): <NEW_LINE> <INDENT> self._s = stream <NEW_LINE> self._q = Queue() <NEW_LINE> def _populateQueue(stream, queue): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> line = stream.readline() <NEW_LINE> if line: <NEW_LINE> <INDENT> queue.put(line) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self._t = Thread(target=_populateQueue, args=(self._s, self._q)) <NEW_LINE> self._t.daemon = True <NEW_LINE> self._t.start() <NEW_LINE> <DEDENT> def readline(self, block=False, timeout=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._q.get(block=timeout is not None, timeout=timeout) <NEW_LINE> <DEDENT> except Empty: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._s.close() <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> pass
A non-blocking stream reader. Opens a separate thread which reads lines from the stream whenever data becomes available and stores the data in a queue. Based on: http://eyalarubas.com/python-subproc-nonblock.html Keyword arguments: - stream -- The stream to read from
625990418a43f66fc4bf344a
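For the NonBlockingStreamReader record above, a hedged usage sketch: it wires the reader to a child process so the main loop can poll its output without blocking. The command is only an example, and the Queue/Thread imports needed by the class itself are assumed to be present where it is defined.

import subprocess

proc = subprocess.Popen(
    ["ping", "-c", "3", "localhost"],       # assumed example command
    stdout=subprocess.PIPE, text=True)
reader = NonBlockingStreamReader(proc.stdout)

while proc.poll() is None:
    line = reader.readline(timeout=0.1)     # returns None when nothing arrived in time
    if line:
        print("got:", line.rstrip())
reader.close()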
class MemoryError(StandardError): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __new__(S, *more): <NEW_LINE> <INDENT> pass
Out of memory.
6259904123e79379d538d7b8
class AiReviewPoliticalAsrTaskOutput(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Confidence = None <NEW_LINE> self.Suggestion = None <NEW_LINE> self.SegmentSet = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Confidence = params.get("Confidence") <NEW_LINE> self.Suggestion = params.get("Suggestion") <NEW_LINE> if params.get("SegmentSet") is not None: <NEW_LINE> <INDENT> self.SegmentSet = [] <NEW_LINE> for item in params.get("SegmentSet"): <NEW_LINE> <INDENT> obj = MediaContentReviewAsrTextSegmentItem() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.SegmentSet.append(obj)
Politically sensitive information detected in ASR text.
6259904121bff66bcd723f24
class FadeOutDownTiles(FadeOutUpTiles): <NEW_LINE> <INDENT> def test_func(self, i, j, t): <NEW_LINE> <INDENT> x, y = self.grid * (1 - t) <NEW_LINE> if j == 0: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> return pow(y / j, 6)
Fades out each tile following a downwards path until all the tiles are faded out. Example:: scene.do(FadeOutDownTiles(grid=(16,12), duration=5))
62599041d6c5a102081e33df
class TestMedSig(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_medsig(self): <NEW_LINE> <INDENT> arr = normal(14, 5, 50000) <NEW_LINE> m,s = medsig(arr) <NEW_LINE> npt.assert_almost_equal(m, 14, decimal=1) <NEW_LINE> npt.assert_almost_equal(s, 5, decimal=1)
Test the median and sigma calculation
62599041d4950a0f3b11179d
@LOSSES.register_module() <NEW_LINE> class GaussianFocalLoss(nn.Module): <NEW_LINE> <INDENT> def __init__(self, alpha=2.0, gamma=4.0, reduction='mean', loss_weight=1.0): <NEW_LINE> <INDENT> super(GaussianFocalLoss, self).__init__() <NEW_LINE> self.alpha = alpha <NEW_LINE> self.gamma = gamma <NEW_LINE> self.reduction = reduction <NEW_LINE> self.loss_weight = loss_weight <NEW_LINE> <DEDENT> def forward(self, pred, target, weight=None, avg_factor=None, reduction_override=None): <NEW_LINE> <INDENT> assert reduction_override in (None, 'none', 'mean', 'sum') <NEW_LINE> reduction = ( reduction_override if reduction_override else self.reduction) <NEW_LINE> loss_reg = self.loss_weight * gaussian_focal_loss( pred, target, weight, alpha=self.alpha, gamma=self.gamma, reduction=reduction, avg_factor=avg_factor) <NEW_LINE> return loss_reg
GaussianFocalLoss is a variant of focal loss. More details can be found in the `paper <https://arxiv.org/abs/1808.01244>`_ Code is modified from `kp_utils.py <https://github.com/princeton-vl/CornerNet/blob/master/models/py_utils/kp_utils.py#L152>`_ # noqa: E501 Please notice that the target in GaussianFocalLoss is a gaussian heatmap, not 0/1 binary target. Args: alpha (float): Power of prediction. gamma (float): Power of target for negative samples. reduction (str): Options are "none", "mean" and "sum". loss_weight (float): Loss weight of current loss.
62599041c432627299fa425e
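For the GaussianFocalLoss record above, a sketch of what the element-wise gaussian_focal_loss helper it calls typically computes (CornerNet-style). This is an assumption about that helper, not the library's own implementation; the example tensors are made up.

import torch

def gaussian_focal_loss_elementwise(pred, gaussian_target, alpha=2.0, gamma=4.0):
    # positives are the heatmap peaks (target == 1); all other locations are
    # down-weighted according to how far the gaussian target is from 1
    eps = 1e-12
    pos_weights = gaussian_target.eq(1).float()
    neg_weights = (1 - gaussian_target).pow(gamma)
    pos_loss = -(pred + eps).log() * (1 - pred).pow(alpha) * pos_weights
    neg_loss = -(1 - pred + eps).log() * pred.pow(alpha) * neg_weights
    return pos_loss + neg_loss

pred = torch.tensor([0.9, 0.2])
target = torch.tensor([1.0, 0.3])   # gaussian heatmap values, not 0/1 labels
print(gaussian_focal_loss_elementwise(pred, target))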
class GraphicalButton(Button): <NEW_LINE> <INDENT> def __init__(self, master, filename, command): <NEW_LINE> <INDENT> img = PhotoImage(file=filename) <NEW_LINE> Button.__init__(self, master, image=img, command=command, borderwidth=.001) <NEW_LINE> self.image = img
Creates a graphical button using the filename for the image, and the command as the bound function/method. Filename must include extension.
62599041507cdc57c63a6056
class RegEx(): <NEW_LINE> <INDENT> def __init__(self, needle): <NEW_LINE> <INDENT> self.needle = "%s" % needle <NEW_LINE> <DEDENT> def seekIn(self, haystack): <NEW_LINE> <INDENT> m = re.match(re.compile(self.needle, flags=re.S), "%s" % haystack) <NEW_LINE> if m: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
Regular expression operator.
6259904176d4e153a661dbd1
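A minimal usage sketch for the RegEx helper above; the pattern and haystacks are made-up examples. Note that ``re.match`` anchors at the start of the haystack::

    matcher = RegEx(r"error: .*timeout")
    print(matcher.seekIn("error: connection timeout"))   # True - anchored match succeeds
    print(matcher.seekIn("warning: slow response"))      # False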
class V8: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._v8 = _V8() <NEW_LINE> <DEDENT> def eval(self, src): <NEW_LINE> <INDENT> if not isinstance(src, basestring): <NEW_LINE> <INDENT> raise TypeError('source code not string') <NEW_LINE> <DEDENT> res = self._v8.eval(src) <NEW_LINE> if res == 'undefined': <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return json.loads(res) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise V8Error(res) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def call(self, func, args): <NEW_LINE> <INDENT> if not isinstance(func, basestring): <NEW_LINE> <INDENT> raise TypeError('function name not string') <NEW_LINE> <DEDENT> if not isinstance(args, list): <NEW_LINE> <INDENT> raise TypeError('arguments not list') <NEW_LINE> <DEDENT> args_str = json.dumps(args) <NEW_LINE> res = self._v8.call(func, args_str) <NEW_LINE> if res == 'undefined': <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return json.loads(res) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise V8Error(res) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def enable_debugger(self, port): <NEW_LINE> <INDENT> if not isinstance(port, int): <NEW_LINE> <INDENT> raise TypeError('port not integer') <NEW_LINE> <DEDENT> if not self._v8.enable_debugger(port): <NEW_LINE> <INDENT> raise V8Error('failed to start debug server') <NEW_LINE> <DEDENT> <DEDENT> def disable_debugger(self): <NEW_LINE> <INDENT> self._v8.disable_debugger()
Represents a V8 instance.
625990413c8af77a43b68899
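A hypothetical usage sketch for the V8 wrapper above; it assumes the native ``_V8`` extension backing the class is importable, which this entry does not show::

    v8 = V8()
    print(v8.eval('1 + 2'))                                       # 3, round-tripped through JSON
    print(v8.eval('var add = function(a, b) { return a + b; }'))  # None ('undefined' result)
    print(v8.call('add', [2, 3]))                                 # 5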
@runtime_checkable <NEW_LINE> class CompleterFuncWithTokens(Protocol): <NEW_LINE> <INDENT> def __call__( self, text: str, line: str, begidx: int, endidx: int, *, arg_tokens: Dict[str, List[str]] = {}, ) -> List[str]: <NEW_LINE> <INDENT> ...
Function that supports tab completion with the provided state of the user prompt and accepts a dictionary of prior arguments.
625990418c3a8732951f7813
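A sketch of a completer that satisfies the CompleterFuncWithTokens protocol above; the choice list is invented, and the isinstance check only verifies that the object is callable, not its signature::

    from typing import Dict, List

    def complete_colors(text: str, line: str, begidx: int, endidx: int, *,
                        arg_tokens: Dict[str, List[str]] = {}) -> List[str]:
        # Offer completions that start with what the user has typed so far.
        choices = ['red', 'green', 'blue']
        return [c for c in choices if c.startswith(text)]

    assert isinstance(complete_colors, CompleterFuncWithTokens)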
class ArticleIteratorArgumentParser(object): <NEW_LINE> <INDENT> def __init__(self, article_iterator, category_fetcher): <NEW_LINE> <INDENT> self.article_iterator = article_iterator <NEW_LINE> self.category_fetcher = category_fetcher <NEW_LINE> <DEDENT> def check_argument(self, argument): <NEW_LINE> <INDENT> if argument.find("-limit:") == 0: <NEW_LINE> <INDENT> self.article_iterator.limit = int(argument[7:]) <NEW_LINE> return True <NEW_LINE> <DEDENT> if argument.find("-limit-per-category:") == 0: <NEW_LINE> <INDENT> self.article_iterator.articles_per_category_limit = int(argument[20:]) <NEW_LINE> return True <NEW_LINE> <DEDENT> elif argument.find("-category:") == 0: <NEW_LINE> <INDENT> category_names = argument[10:].split(",") <NEW_LINE> category_names = [self._format_category(n) for n in category_names] <NEW_LINE> self.article_iterator.categories = self.category_fetcher.get_categories_filtered_by_name(category_names) <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def _format_category(self, category_name): <NEW_LINE> <INDENT> name = category_name.strip().replace(u"_", u" ") <NEW_LINE> if name.find(u"Kategorie:") == -1 and name.find(u"Category:") == -1: <NEW_LINE> <INDENT> name = u"Kategorie:{}".format(name) <NEW_LINE> <DEDENT> return name
Parse the command line arguments -limit:, -limit-per-category: and -category: and set them on the ArticleIterator
6259904115baa7234946324c
class RubyClasslike(RubyObject): <NEW_LINE> <INDENT> def get_signature_prefix(self, sig): <NEW_LINE> <INDENT> return self.objtype + ' ' <NEW_LINE> <DEDENT> def get_index_text(self, modname, name_cls): <NEW_LINE> <INDENT> if self.objtype == 'class': <NEW_LINE> <INDENT> if not modname: <NEW_LINE> <INDENT> return _('%s (class)') % name_cls[0] <NEW_LINE> <DEDENT> return _('%s (class in %s)') % (name_cls[0], modname) <NEW_LINE> <DEDENT> elif self.objtype == 'exception': <NEW_LINE> <INDENT> return name_cls[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> <DEDENT> def before_content(self): <NEW_LINE> <INDENT> RubyObject.before_content(self) <NEW_LINE> if self.names: <NEW_LINE> <INDENT> self.env.temp_data['rb:class'] = self.names[0][0] <NEW_LINE> self.clsname_set = True
Description of a class-like object (classes, exceptions).
625990418a43f66fc4bf344c
class StateManager(object): <NEW_LINE> <INDENT> GAME = 0 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.__states = Stack() <NEW_LINE> self.current_state = StateManager.GAME <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> return self.__states.peek() <NEW_LINE> <DEDENT> def set_state(self, state): <NEW_LINE> <INDENT> self.__states.pop() <NEW_LINE> self.__states.push(state) <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.__states.peek().update() <NEW_LINE> <DEDENT> def render(self, screen): <NEW_LINE> <INDENT> self.__states.peek().render(screen) <NEW_LINE> <DEDENT> def key_pressed(self, event, keys): <NEW_LINE> <INDENT> self.__states.peek().key_pressed(event, keys)
This controller manages all game states.
62599041e64d504609df9d2e
class BlobLinguaFixture(PloneTestCaseFixture): <NEW_LINE> <INDENT> defaultBases = (PTC_FIXTURE, ) <NEW_LINE> def setUpZope(self, app, configurationContext): <NEW_LINE> <INDENT> from plone.app import imaging <NEW_LINE> self.loadZCML(package=imaging) <NEW_LINE> from plone.app.blob import tests <NEW_LINE> self.loadZCML(name='testing.zcml', package=tests) <NEW_LINE> from Products import LinguaPlone <NEW_LINE> self.loadZCML(package=LinguaPlone) <NEW_LINE> z2.installProduct(app, 'plone.app.blob') <NEW_LINE> z2.installProduct(app, 'Products.LinguaPlone') <NEW_LINE> <DEDENT> def setUpPloneSite(self, portal): <NEW_LINE> <INDENT> profile = 'plone.app.blob:testing-lingua' <NEW_LINE> self.applyProfile(portal, profile, purge_old=False) <NEW_LINE> types = getToolByName(portal, 'portal_types') <NEW_LINE> assert types.getTypeInfo('BlobelFish') <NEW_LINE> <DEDENT> def tearDownZope(self, app): <NEW_LINE> <INDENT> z2.uninstallProduct(app, 'plone.app.blob') <NEW_LINE> z2.uninstallProduct(app, 'Products.LinguaPlone')
layer for integration tests with LinguaPlone
6259904123e79379d538d7ba
class PFJet(object): <NEW_LINE> <INDENT> def __init__(self, jets, index): <NEW_LINE> <INDENT> read_attributes = [ 'etCorr', 'muMult', 'eta', 'phi', 'nhef', 'pef', 'mef', 'chMult', 'elMult', 'nhMult', 'phMult', 'chef', 'eef', 'nemef', 'cMult', 'nMult', 'cemef' ] <NEW_LINE> for attr in read_attributes: <NEW_LINE> <INDENT> setattr(self, attr, getattr(jets, attr)[index])
Create a simple Python wrapper for L1Analysis::L1AnalysisRecoJetDataFormat.
625990418a349b6b43687503
class ModelMultiValueField(BaseModelMulti): <NEW_LINE> <INDENT> widget_class = ModelMultiValueWidget
A field class which provides a sub-form for a model.
6259904110dbd63aa1c71e93
class Peek(Subconstruct): <NEW_LINE> <INDENT> __slots__ = ["perform_build"] <NEW_LINE> def __init__(self, subcon, perform_build = False): <NEW_LINE> <INDENT> Subconstruct.__init__(self, subcon) <NEW_LINE> self.perform_build = perform_build <NEW_LINE> <DEDENT> def _parse(self, stream, context): <NEW_LINE> <INDENT> pos = stream.tell() <NEW_LINE> try: <NEW_LINE> <INDENT> return self.subcon._parse(stream, context) <NEW_LINE> <DEDENT> except FieldError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> stream.seek(pos) <NEW_LINE> <DEDENT> <DEDENT> def _build(self, obj, stream, context): <NEW_LINE> <INDENT> if self.perform_build: <NEW_LINE> <INDENT> self.subcon._build(obj, stream, context) <NEW_LINE> <DEDENT> <DEDENT> def _sizeof(self, context): <NEW_LINE> <INDENT> return 0
Peeks at the stream: parses without changing the stream position. See also Union. If the end of the stream is reached when peeking, returns None. .. note:: Requires a seekable stream. :param subcon: the subcon to peek at :param perform_build: whether or not to perform building. by default this parameter is set to False, meaning building is a no-op. Example:: Peek(UBInt8("foo"))
62599041b5575c28eb713627
class RandomNoiseTile(FetchedTile): <NEW_LINE> <INDENT> @property <NEW_LINE> def shape(self) -> Mapping[Axes, int]: <NEW_LINE> <INDENT> return {Axes.Y: 1536, Axes.X: 1024} <NEW_LINE> <DEDENT> @property <NEW_LINE> def coordinates(self) -> Mapping[Union[str, Coordinates], CoordinateValue]: <NEW_LINE> <INDENT> return { Coordinates.X: (0.0, 0.0001), Coordinates.Y: (0.0, 0.0001), Coordinates.Z: (0.0, 0.0001), } <NEW_LINE> <DEDENT> @property <NEW_LINE> def format(self) -> ImageFormat: <NEW_LINE> <INDENT> return ImageFormat.TIFF <NEW_LINE> <DEDENT> def tile_data(self) -> np.ndarray: <NEW_LINE> <INDENT> return np.random.randint( 0, 256, size=(self.shape[Axes.Y], self.shape[Axes.X]), dtype=np.uint8)
This is a simple implementation of :class:`.FetchedTile` that regenerates random data for the image.
6259904176d4e153a661dbd2
class ToolEnableAllNavigation(ToolBase): <NEW_LINE> <INDENT> description = 'Enable all axes toolmanager' <NEW_LINE> default_keymap = rcParams['keymap.all_axes'] <NEW_LINE> def trigger(self, sender, event, data=None): <NEW_LINE> <INDENT> if event.inaxes is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for a in self.figure.get_axes(): <NEW_LINE> <INDENT> if (event.x is not None and event.y is not None and a.in_axes(event)): <NEW_LINE> <INDENT> a.set_navigate(True)
Tool to enable all axes for toolmanager interaction
6259904123849d37ff852376
class AsyncRequestError(Exception): <NEW_LINE> <INDENT> def __init__(self, raised='', message='', status_code=0, request=None): <NEW_LINE> <INDENT> self.raised = raised <NEW_LINE> self.message = message <NEW_LINE> self.status_code = status_code <NEW_LINE> self.request = request <NEW_LINE> super().__init__(f"raised={self.raised} message={self.message} " f"request={self.request} status_code={self.status_code}")
A wrapper for all possible exceptions during an HTTP request
62599041b57a9660fecd2d37
class TestIosXrPluginPrompts(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self._conn = Connection( hostname='Router', start=['mock_device_cli --os iosxr --state enable'], os='iosxr', ) <NEW_LINE> self._conn.connect() <NEW_LINE> <DEDENT> def test_confirm(self): <NEW_LINE> <INDENT> self._conn.execute("process restart ifmgr location all") <NEW_LINE> <DEDENT> def test_confirm_y(self): <NEW_LINE> <INDENT> self._conn.execute("clear logging") <NEW_LINE> <DEDENT> def test_y_on_repeat_confirm(self): <NEW_LINE> <INDENT> self._conn.execute("clear logg")
Tests for prompt handling.
625990418e71fb1e983bcd8a
class LongIronButterfly(_Butterfly): <NEW_LINE> <INDENT> def __init__( self, St=None, K1=None, K2=None, K3=None, price1=None, price2=None, price3=None, ): <NEW_LINE> <INDENT> super().__init__( St=St, K1=K1, K2=K2, K3=K3, price1=price1, price2=price2, price3=price3, ) <NEW_LINE> self.add_option(K=K1, price=price1, St=St, kind="put", pos="short") <NEW_LINE> self.add_option(K=K2, price=price2, St=St, kind="put", pos="long") <NEW_LINE> self.add_option(K=K2, price=price2, St=St, kind="call", pos="long") <NEW_LINE> self.add_option(K=K3, price=price3, St=St, kind="call", pos="short")
Combination of 2 puts and 2 calls. Long volatility exposure. - Short K1 (put) - Long 2x K2 (1 put, 1 call) - Short K3 (call)
62599041e76e3b2f99fd9cc7
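A standalone back-of-the-envelope payoff check for the long iron butterfly described above; strikes and premiums are invented numbers, independent of the library's own pricing::

    def long_iron_butterfly_payoff(St, K1=95, K2=100, K3=105,
                                   price1=1.0, price2=3.0, price3=1.0):
        short_put = -(max(K1 - St, 0) - price1)    # short K1 put
        long_put = max(K2 - St, 0) - price2        # long K2 put
        long_call = max(St - K2, 0) - price2       # long K2 call
        short_call = -(max(St - K3, 0) - price3)   # short K3 call
        return short_put + long_put + long_call + short_call

    for St in (90, 100, 110):
        print(St, long_iron_butterfly_payoff(St))  # gains in the wings, max loss at K2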
class One: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__()
One.
6259904130dc7b76659a0aee
class Config(object): <NEW_LINE> <INDENT> __config_dict = {} <NEW_LINE> __config_file = '' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> file_path = Args() <NEW_LINE> self.__config_file = file_path.get_file_path['-c'] <NEW_LINE> <DEDENT> def __get_config(self): <NEW_LINE> <INDENT> with open(self.__config_file) as f: <NEW_LINE> <INDENT> for line in f: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.__config_dict[line.replace(' ', '').split('=')[0]] = float('%.3f' % float(line.replace(' ', '').strip('\n').split('=')[1])) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print('Config file error!') <NEW_LINE> sys.exit(103) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return self.__config_dict <NEW_LINE> <DEDENT> @property <NEW_LINE> def get_config(self): <NEW_LINE> <INDENT> return self.__get_config()
Class for retrieving configuration file information: 1. read the configuration file; 2. check the configuration file format; 3. return the configuration as a dictionary.
62599041b830903b9686edd8
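A standalone sketch of the parsing rule used by Config above - strip spaces, split on '=', keep the value as a float rounded to three decimals; the sample lines are made up::

    sample_lines = ['warn_usage = 0.8\n', 'critical_usage=0.95\n']

    config = {}
    for line in sample_lines:
        key, value = line.replace(' ', '').strip('\n').split('=')
        config[key] = float('%.3f' % float(value))
    print(config)   # {'warn_usage': 0.8, 'critical_usage': 0.95}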
class MAVLink_attitude_quaternion_cov_message(MAVLink_message): <NEW_LINE> <INDENT> id = MAVLINK_MSG_ID_ATTITUDE_QUATERNION_COV <NEW_LINE> name = 'ATTITUDE_QUATERNION_COV' <NEW_LINE> fieldnames = ['time_boot_ms', 'q', 'rollspeed', 'pitchspeed', 'yawspeed', 'covariance'] <NEW_LINE> ordered_fieldnames = [ 'time_boot_ms', 'q', 'rollspeed', 'pitchspeed', 'yawspeed', 'covariance' ] <NEW_LINE> format = '<I4ffff9f' <NEW_LINE> native_format = bytearray('<Ifffff', 'ascii') <NEW_LINE> orders = [0, 1, 2, 3, 4, 5] <NEW_LINE> lengths = [1, 4, 1, 1, 1, 9] <NEW_LINE> array_lengths = [0, 4, 0, 0, 0, 9] <NEW_LINE> crc_extra = 153 <NEW_LINE> def __init__(self, time_boot_ms, q, rollspeed, pitchspeed, yawspeed, covariance): <NEW_LINE> <INDENT> MAVLink_message.__init__(self, MAVLink_attitude_quaternion_cov_message.id, MAVLink_attitude_quaternion_cov_message.name) <NEW_LINE> self._fieldnames = MAVLink_attitude_quaternion_cov_message.fieldnames <NEW_LINE> self.time_boot_ms = time_boot_ms <NEW_LINE> self.q = q <NEW_LINE> self.rollspeed = rollspeed <NEW_LINE> self.pitchspeed = pitchspeed <NEW_LINE> self.yawspeed = yawspeed <NEW_LINE> self.covariance = covariance <NEW_LINE> <DEDENT> def pack(self, mav, force_mavlink1=False): <NEW_LINE> <INDENT> return MAVLink_message.pack(self, mav, 153, struct.pack('<I4ffff9f', self.time_boot_ms, self.q[0], self.q[1], self.q[2], self.q[3], self.rollspeed, self.pitchspeed, self.yawspeed, self.covariance[0], self.covariance[1], self.covariance[2], self.covariance[3], self.covariance[4], self.covariance[5], self.covariance[6], self.covariance[7], self.covariance[8]), force_mavlink1=force_mavlink1)
The attitude in the aeronautical frame (right-handed, Z-down, X-front, Y-right), expressed as quaternion. Quaternion order is w, x, y, z and a zero rotation would be expressed as (1 0 0 0).
62599041596a897236128f0d
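A quick illustration of the '<I4ffff9f' wire layout above using the standard struct module: a little-endian uint32 timestamp, the 4-element quaternion (w, x, y, z), three body rates, then the 9-element covariance. The values are arbitrary test data::

    import struct

    time_boot_ms = 123456
    q = [1.0, 0.0, 0.0, 0.0]                     # zero rotation, per the docstring
    rollspeed, pitchspeed, yawspeed = 0.01, -0.02, 0.0
    covariance = [0.0] * 9

    payload = struct.pack('<I4ffff9f', time_boot_ms, *q,
                          rollspeed, pitchspeed, yawspeed, *covariance)
    print(len(payload))   # 4 + 16 + 12 + 36 = 68 bytes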
class SerpNGRedirectionMiddleware(object): <NEW_LINE> <INDENT> def process_exception(self, request, exception): <NEW_LINE> <INDENT> if isinstance(exception, serpng.lib.exceptions.HttpRedirect): <NEW_LINE> <INDENT> is_permanent = isinstance(exception, serpng.lib.exceptions.Http301) <NEW_LINE> response = redirect(exception.location, permanent=True) <NEW_LINE> if 'Set-Cookie' in exception.headers: <NEW_LINE> <INDENT> response['Set-Cookie'] = exception.headers['Set-Cookie'] <NEW_LINE> serpng.lib.logging_utils.log( log_level="DEBUG", log_msg="Issuing a %s redirect to %s with cookies %s" % ("301" if is_permanent else "302", exception.location, exception.headers['Set-Cookie'])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> serpng.lib.logging_utils.log( log_level="DEBUG", log_msg="Issuing a %s redirect to %s" % ("301" if is_permanent else "302", exception.location)) <NEW_LINE> <DEDENT> return response
Middleware class to handle exceptions that signal that a redirection should take place.
62599041be383301e0254ad5
class Decrypter(object): <NEW_LINE> <INDENT> def __init__(self, privkeyFile): <NEW_LINE> <INDENT> self._privkeyFile = privkeyFile <NEW_LINE> self._backend = default_backend() <NEW_LINE> <DEDENT> def decrypt(self, data): <NEW_LINE> <INDENT> if not isinstance(data, bytes): <NEW_LINE> <INDENT> raise TypeError("encoded data must be in bytes") <NEW_LINE> <DEDENT> privkey = serialization.load_pem_private_key(open(self._privkeyFile, "rb").read(), None, self._backend) <NEW_LINE> key_size = (privkey.key_size + 7) // 8 <NEW_LINE> encrypted_key, data = data[:key_size], data[key_size:] <NEW_LINE> key = privkey.decrypt(encrypted_key, _get_paddings()) <NEW_LINE> iv, authentication_tag, ciphertext = data[:_IV_LENGTH], data[_IV_LENGTH:_IV_LENGTH + _AUTH_TAG_LENGTH], data[_IV_LENGTH + _AUTH_TAG_LENGTH:] <NEW_LINE> decryptor = _get_cipher(key, iv, self._backend, authentication_tag).decryptor() <NEW_LINE> return decryptor.update(ciphertext) + decryptor.finalize()
Decrypter decrypts data produced by the matching encrypter: an RSA-encrypted symmetric key, followed by the IV, authentication tag, and ciphertext, using the private key loaded from the given PEM file.
6259904121bff66bcd723f28
class FloatingIP(model_base.HasStandardAttributes, model_base.BASEV2, model_base.HasId, model_base.HasTenant): <NEW_LINE> <INDENT> floating_ip_address = sa.Column(sa.String(64), nullable=False) <NEW_LINE> floating_network_id = sa.Column(sa.String(36), nullable=False) <NEW_LINE> floating_port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id', ondelete="CASCADE"), nullable=False) <NEW_LINE> port = orm.relationship(models_v2.Port, backref=orm.backref('floating_ips', cascade='all,delete-orphan'), foreign_keys='FloatingIP.floating_port_id') <NEW_LINE> fixed_port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id')) <NEW_LINE> fixed_ip_address = sa.Column(sa.String(64)) <NEW_LINE> router_id = sa.Column(sa.String(36), sa.ForeignKey('routers.id')) <NEW_LINE> last_known_router_id = sa.Column(sa.String(36)) <NEW_LINE> status = sa.Column(sa.String(16)) <NEW_LINE> router = orm.relationship(Router, backref='floating_ips')
Represents a floating IP address. This IP address may or may not be allocated to a tenant, and may or may not be associated with an internal port/ip address/router.
6259904130c21e258be99ac7
class BackupError(IOError): <NEW_LINE> <INDENT> pass
Base backup exception.
62599041d99f1b3c44d0695a
class ExtensionElement(externals.atom.core.XmlElement): <NEW_LINE> <INDENT> def __init__(self, tag=None, namespace=None, attributes=None, children=None, text=None, *args, **kwargs): <NEW_LINE> <INDENT> if namespace: <NEW_LINE> <INDENT> self._qname = '{%s}%s' % (namespace, tag) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._qname = tag <NEW_LINE> <DEDENT> self.children = children or [] <NEW_LINE> self.attributes = attributes or {} <NEW_LINE> self.text = text <NEW_LINE> <DEDENT> _BecomeChildElement = externals.atom.core.XmlElement._become_child
Provided for backwards compatibility to the v1 atom.ExtensionElement.
62599041dc8b845886d54875
class Sentry(Sentry): <NEW_LINE> <INDENT> def __init__(self, application): <NEW_LINE> <INDENT> self.application = application <NEW_LINE> <DEDENT> @property <NEW_LINE> def client(self): <NEW_LINE> <INDENT> from raven_django.models import client <NEW_LINE> return client
Identical to the default WSGI middleware except that the client comes dynamically via ``get_client``. >>> from raven_django.middleware.wsgi import Sentry >>> application = Sentry(application)
62599041d10714528d69efeb
class EtlPipeline(luigi.WrapperTask): <NEW_LINE> <INDENT> year_month = luigi.Parameter() <NEW_LINE> def requires(self): <NEW_LINE> <INDENT> yield IngestPipeline(self.year_month) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> yield ETL(year_month=self.year_month)
This wrapper runs the ETL of each pipeline task. Input - list of the pipeline tasks specified to run.
6259904115baa72349463250
class TestAddConnectionConfiguration(base.NectarTests): <NEW_LINE> <INDENT> def test_defaults(self): <NEW_LINE> <INDENT> config = DownloaderConfig('https') <NEW_LINE> curl_downloader = HTTPSCurlDownloader(config) <NEW_LINE> easy_handle = mock.MagicMock() <NEW_LINE> curl_downloader._add_connection_configuration(easy_handle) <NEW_LINE> self.assertEqual(easy_handle.setopt.call_count, 6) <NEW_LINE> easy_handle.setopt.assert_any_call(pycurl.FOLLOWLOCATION, DEFAULT_FOLLOW_LOCATION) <NEW_LINE> easy_handle.setopt.assert_any_call(pycurl.MAXREDIRS, DEFAULT_MAX_REDIRECTS) <NEW_LINE> easy_handle.setopt.assert_any_call(pycurl.CONNECTTIMEOUT, DEFAULT_CONNECT_TIMEOUT) <NEW_LINE> easy_handle.setopt.assert_any_call(pycurl.LOW_SPEED_LIMIT, DEFAULT_LOW_SPEED_LIMIT) <NEW_LINE> easy_handle.setopt.assert_any_call(pycurl.LOW_SPEED_TIME, DEFAULT_LOW_SPEED_TIME) <NEW_LINE> easy_handle.setopt.assert_any_call(pycurl.NOPROGRESS, DEFAULT_NO_PROGRESS)
This test module tests the HTTPSCurlDownloadBackend._add_connection_configuration method. It asserts that all the appropriate default values are passed to pycurl, no more and no less. It uses Mocks to make these assertions, and we will trust that the features in pycurl are tested by that project.
6259904145492302aabfd799
class ObjectFilterList(filter_interface.FilterObject): <NEW_LINE> <INDENT> def CompileFilter(self, filter_string): <NEW_LINE> <INDENT> if not os.path.isfile(filter_string): <NEW_LINE> <INDENT> raise errors.WrongPlugin(( 'ObjectFilterList requires an YAML file to be passed on, this filter ' 'string is not a file.')) <NEW_LINE> <DEDENT> yaml.add_constructor('!include', IncludeKeyword, Loader=yaml.loader.SafeLoader) <NEW_LINE> results = None <NEW_LINE> with open(filter_string, 'rb') as fh: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> results = yaml.safe_load(fh) <NEW_LINE> <DEDENT> except (yaml.scanner.ScannerError, IOError) as exception: <NEW_LINE> <INDENT> raise errors.WrongPlugin( u'Unable to parse YAML file with error: {0:s}.'.format(exception)) <NEW_LINE> <DEDENT> <DEDENT> self.filters = [] <NEW_LINE> results_type = type(results) <NEW_LINE> if results_type is dict: <NEW_LINE> <INDENT> self._ParseEntry(results) <NEW_LINE> <DEDENT> elif results_type is list: <NEW_LINE> <INDENT> for result in results: <NEW_LINE> <INDENT> if type(result) is not dict: <NEW_LINE> <INDENT> raise errors.WrongPlugin( u'Wrong format of YAML file, entry not a dict ({})'.format( results_type)) <NEW_LINE> <DEDENT> self._ParseEntry(result) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise errors.WrongPlugin( u'Wrong format of YAML file, entry not a dict ({})'.format( results_type)) <NEW_LINE> <DEDENT> self._filter_expression = filter_string <NEW_LINE> <DEDENT> def _ParseEntry(self, entry): <NEW_LINE> <INDENT> for name, meta in entry.items(): <NEW_LINE> <INDENT> if 'filter' not in meta: <NEW_LINE> <INDENT> raise errors.WrongPlugin( u'Entry inside {} does not contain a filter statement.'.format( name)) <NEW_LINE> <DEDENT> matcher = pfilter.GetMatcher(meta.get('filter'), True) <NEW_LINE> if not matcher: <NEW_LINE> <INDENT> raise errors.WrongPlugin( u'Filter entry [{0:s}] malformed for rule: <{1:s}>'.format( meta.get('filter'), name)) <NEW_LINE> <DEDENT> self.filters.append((name, matcher, meta)) <NEW_LINE> <DEDENT> <DEDENT> def Match(self, event_object): <NEW_LINE> <INDENT> if not self.filters: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> for name, matcher, meta in self.filters: <NEW_LINE> <INDENT> self._decision = matcher.Matches(event_object) <NEW_LINE> if self._decision: <NEW_LINE> <INDENT> self._reason = u'[{}] {} {}'.format( name, meta.get('description', 'N/A'), u' - '.join( meta.get('urls', []))) <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> return
A series of Pfilter filters along with metadata.
625990410fa83653e46f6198
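A hedged example of the YAML layout _ParseEntry expects - a mapping (or a list of mappings) from rule name to an entry carrying a mandatory 'filter' statement plus optional 'description' and 'urls'; the rule name and its filter expression below are invented for illustration::

    example_yaml = """
    suspicious_rule:
      description: Example rule, purely illustrative
      urls: ['http://example.com/reference']
      filter: "filename contains 'temp'"
    """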
class Stc(X86InstructionBase): <NEW_LINE> <INDENT> def __init__(self, prefix, mnemonic, operands, architecture_mode): <NEW_LINE> <INDENT> super(Stc, self).__init__(prefix, mnemonic, operands, architecture_mode) <NEW_LINE> <DEDENT> @property <NEW_LINE> def source_operands(self): <NEW_LINE> <INDENT> return [ ] <NEW_LINE> <DEDENT> @property <NEW_LINE> def destination_operands(self): <NEW_LINE> <INDENT> return [ ]
Representation of Stc x86 instruction.
62599041004d5f362081f944
class GetHotspotTests(TestCase, HeaderTestsMixin): <NEW_LINE> <INDENT> def get_callable(self): <NEW_LINE> <INDENT> return get_hotspot <NEW_LINE> <DEDENT> def get_params(self, **kwargs): <NEW_LINE> <INDENT> params = { "token": "12345", "loc_id": "L123456", } <NEW_LINE> params.update(kwargs) <NEW_LINE> return params <NEW_LINE> <DEDENT> def test_url_contains_location_code(self): <NEW_LINE> <INDENT> url = self.api_call()[0] <NEW_LINE> self.assertEqual(HOTSPOT_INFO_URL % "L123456", url) <NEW_LINE> <DEDENT> def test_invalid_location_code_raises_error(self): <NEW_LINE> <INDENT> self.api_raises(ValueError, loc_id="123456")
Tests for the get_hotspot() API call.
625990413eb6a72ae038b920
class NflListView(ListView): <NEW_LINE> <INDENT> paginate_by = 100 <NEW_LINE> model = nfl_player
Renders a list of all players in the database
62599041596a897236128f0e
class time: <NEW_LINE> <INDENT> def _assign_pointintime(self, day): <NEW_LINE> <INDENT> self.day = day <NEW_LINE> self.bdate = self.quote.date[0] + dt.timedelta(days=self.day) <NEW_LINE> self.quote = self.quote[self.quote.date < self.bdate] <NEW_LINE> if self.bdate.month<4: <NEW_LINE> <INDENT> _max_keyratio_year = self.bdate.year - 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _max_keyratio_year = self.bdate.year - 1 <NEW_LINE> <DEDENT> self.keyratios = self.keyratios[self.keyratios['year'] < _max_keyratio_year]
Make the analysis for today a special case of backtesting
6259904176d4e153a661dbd4
class NonDjangoLanguageTests(SimpleTestCase): <NEW_LINE> <INDENT> @override_settings( USE_I18N=True, LANGUAGES=[ ('en-us', 'English'), ('xxx', 'Somelanguage'), ], LANGUAGE_CODE='xxx', LOCALE_PATHS=[os.path.join(here, 'commands', 'locale')], ) <NEW_LINE> def test_non_django_language(self): <NEW_LINE> <INDENT> self.assertEqual(get_language(), 'xxx') <NEW_LINE> self.assertEqual(gettext("year"), "reay") <NEW_LINE> <DEDENT> @override_settings( USE_I18N=True, LANGUAGES=[ ('en-us', 'English'), ('xyz', 'XYZ'), ], ) <NEW_LINE> @translation.override('xyz') <NEW_LINE> def test_plural_non_django_language(self): <NEW_LINE> <INDENT> self.assertEqual(get_language(), 'xyz') <NEW_LINE> self.assertEqual(ngettext('year', 'years', 2), 'years')
A language not present in the default Django languages can still be installed/used by a Django project.
625990413c8af77a43b6889c
class DuplicatePluginError(QwcoreError): <NEW_LINE> <INDENT> pass
Raised when a specific name has multiple plugins
625990414e696a045264e781
class NoSuchField(Exception): <NEW_LINE> <INDENT> def __init__(self, uuid): <NEW_LINE> <INDENT> super(NoSuchField, self).__init__( "No such field: %s" % uuid) <NEW_LINE> self.uuid = uuid
Raised when the field doesn't exist for the service.
625990411d351010ab8f4dde
@dataclass <NEW_LINE> class Property: <NEW_LINE> <INDENT> name: str <NEW_LINE> values: list
Property struct with name and values
6259904123e79379d538d7be
class SubscriptionDetailGet(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.obj = Subscription.objects.create(name='Henrique Bastos', cpf='12345678901', email='[email protected]', phone='21-996186180') <NEW_LINE> self.resp = self.client.get(r('subscriptions:detail', self.obj.pk)) <NEW_LINE> <DEDENT> def test_get(self): <NEW_LINE> <INDENT> self.assertEqual(200, self.resp.status_code) <NEW_LINE> <DEDENT> def test_template(self): <NEW_LINE> <INDENT> self.assertTemplateUsed(self.resp, 'subscriptions/subscription_detail.html') <NEW_LINE> <DEDENT> def test_content(self): <NEW_LINE> <INDENT> subscription = self.resp.context['subscription'] <NEW_LINE> self.assertIsInstance(subscription, Subscription) <NEW_LINE> <DEDENT> def test_html(self): <NEW_LINE> <INDENT> contents = (self.obj.name, self.obj.cpf, self.obj.email, self.obj.phone) <NEW_LINE> with self.subTest(): <NEW_LINE> <INDENT> for expected in contents: <NEW_LINE> <INDENT> self.assertContains(self.resp, expected)
Tests for GET requests to the subscription detail view.
62599041287bf620b6272ea6
class Client(Grid5000): <NEW_LINE> <INDENT> def __init__(self, excluded_sites=None, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.excluded_site = excluded_sites <NEW_LINE> if excluded_sites is None: <NEW_LINE> <INDENT> self.excluded_site = []
Wrapper of the python-grid5000 client. It accepts extra parameters to be set in the configuration file.
6259904126068e7796d4dc07
class PdLambdaPipe(pemi.Pipe): <NEW_LINE> <INDENT> def __init__(self, fun): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.fun = fun <NEW_LINE> self.source( pemi.PdDataSubject, name='main' ) <NEW_LINE> self.target( pemi.PdDataSubject, name='main' ) <NEW_LINE> <DEDENT> def flow(self): <NEW_LINE> <INDENT> self.targets['main'].df = self.fun(self.sources['main'].df)
This pipe is used to build quick Pandas transformations where building a full pipe class may feel like overkill. You would use this pipe if you don't need to test it in isolation (e.g., it only makes sense in a larger context), or you don't need control over the schemas. Args: fun (function): A function that accepts a dataframe as argument (source) and returns a dataframe (target). :Data Sources: **main** (*pemi.PdDataSubject*) - The source dataframe that gets pass to ``fun``. :Data Targets: **main** (*pemi.PdDataSubject*) - The target dataframe that gets populated from the return value of ``fun``.
62599041004d5f362081f945
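A usage sketch based on the docstring above; it assumes pemi and pandas are installed and relies only on the source/target wiring visible in the class::

    import pandas as pd

    double_scores = PdLambdaPipe(lambda df: df.assign(score=df['score'] * 2))
    double_scores.sources['main'].df = pd.DataFrame({'score': [1, 2, 3]})
    double_scores.flow()
    print(double_scores.targets['main'].df)   # score column becomes 2, 4, 6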
class EnvironmentTool(Tool): <NEW_LINE> <INDENT> def __init__(self, configuration): <NEW_LINE> <INDENT> Tool.__init__(self, configuration) <NEW_LINE> self.vars = {} <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self.vars[key] <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> self.vars[key] = value <NEW_LINE> <DEDENT> def __delitem__(self, key): <NEW_LINE> <INDENT> del self.vars[key] <NEW_LINE> <DEDENT> def __contains__(self, key): <NEW_LINE> <INDENT> return key in self.vars <NEW_LINE> <DEDENT> def get(self, key, default=None): <NEW_LINE> <INDENT> return self.vars.get(key, default) <NEW_LINE> <DEDENT> def setDefault(self, key, default=None): <NEW_LINE> <INDENT> return self.vars.setdefault(key, default) <NEW_LINE> <DEDENT> def update(self, *values, **kwargs): <NEW_LINE> <INDENT> self.vars.update(*values, **kwargs) <NEW_LINE> <DEDENT> def expand(self, value): <NEW_LINE> <INDENT> return cake.path.expandVars(value, self.vars) <NEW_LINE> <DEDENT> def append(self, **kwargs): <NEW_LINE> <INDENT> for k, v in kwargs.iteritems(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> old = self.vars[k] <NEW_LINE> if type(old) != type(v): <NEW_LINE> <INDENT> old = _coerceToList(old) <NEW_LINE> v = _coerceToList(v) <NEW_LINE> <DEDENT> self.vars[k] = old + v <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.vars[k] = v <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def prepend(self, **kwargs): <NEW_LINE> <INDENT> for k, v in kwargs.iteritems(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> old = self.vars[k] <NEW_LINE> if type(old) != type(v): <NEW_LINE> <INDENT> old = _coerceToList(old) <NEW_LINE> v = _coerceToList(v) <NEW_LINE> <DEDENT> self.vars[k] = v + old <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.vars[k] = v <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def replace(self, **kwargs): <NEW_LINE> <INDENT> self.vars.update(kwargs)
Tool that provides a dictionary of key/value pairs used for path substitution.
6259904173bcbd0ca4bcb54b
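A standalone sketch of the merging rule behind append() above: when the existing value and the new value differ in type, both are coerced to lists before concatenation. _coerceToList is not shown in the entry, so the stand-in below is an assumption about its behaviour::

    def _coerce_to_list(value):
        return value if isinstance(value, list) else [value]

    env = {'DEFINES': 'NDEBUG'}
    new_value = ['UNICODE', '_UNICODE']
    if type(env['DEFINES']) != type(new_value):
        merged = _coerce_to_list(env['DEFINES']) + _coerce_to_list(new_value)
    else:
        merged = env['DEFINES'] + new_value
    env['DEFINES'] = merged
    print(env['DEFINES'])   # ['NDEBUG', 'UNICODE', '_UNICODE']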
class Data68: <NEW_LINE> <INDENT> hdr_dtype = np.dtype([('PingCounter','H'),('SystemSerial#','H'), ('VesselHeading',"f"),('SoundSpeed',"f"),('TransducerDepth',"f"), ('MaximumBeams','B'),('ValidBeams','B'),('Zresolution',"f"), ('XYresolution',"f"),('SampleRate','f')]) <NEW_LINE> xyz_dtype = np.dtype([('Depth',"f"),('AcrossTrack',"f"),('AlongTrack',"f"), ('BeamDepressionAngle',"f"),('BeamAzimuthAngle',"f"), ('OneWayRange',"f"),('QualityFactor','B'), ('DetectionWindowLength',"f"),('Reflectivity',"f"),('BeamNumber','B')]) <NEW_LINE> def __init__(self,datablock, model, byteswap = False): <NEW_LINE> <INDENT> hdr_dtype = np.dtype([('PingCounter','H'),('SystemSerial#','H'), ('VesselHeading',"H"),('SoundSpeed',"H"),('TransducerDepth',"H"), ('MaximumBeams','B'),('ValidBeams','B'),('Zresolution',"B"), ('XYresolution',"B"),('SampleRate','H')]) <NEW_LINE> hdr_sz = hdr_dtype.itemsize <NEW_LINE> self._model = model <NEW_LINE> self.header = np.frombuffer(datablock[:hdr_sz], dtype = hdr_dtype)[0] <NEW_LINE> self.header = self.header.astype(Data68.hdr_dtype) <NEW_LINE> self.header[2] *= 0.01 <NEW_LINE> self.header[3] *= 0.1 <NEW_LINE> self.header[4] *= 0.01 <NEW_LINE> self.header[7] *= 0.01 <NEW_LINE> self.header[8] *= 0.01 <NEW_LINE> self.header[-1] *= 4 <NEW_LINE> self.depthoffsetmultiplier = np.frombuffer(datablock[-1:], dtype = 'b')[0] * 65536 <NEW_LINE> self.header[4] += self.depthoffsetmultiplier <NEW_LINE> self.read(datablock[hdr_sz:-1]) <NEW_LINE> <DEDENT> def read(self, datablock): <NEW_LINE> <INDENT> decode_depth = "h" <NEW_LINE> if self._model == 300 or self._model == 120: <NEW_LINE> <INDENT> decode_depth = "H" <NEW_LINE> <DEDENT> xyz_dtype = np.dtype([('Depth',decode_depth),('AcrossTrack',"h"),('AlongTrack',"h"), ('BeamDepressionAngle',"h"),('BeamAzimuthAngle',"H"), ('OneWayRange',"H"),('QualityFactor','B'), ('DetectionWindowLength',"B"),('Reflectivity',"b"),('BeamNumber','B')]) <NEW_LINE> self.data = np.frombuffer(datablock, dtype = xyz_dtype) <NEW_LINE> self.data = self.data.astype(Data68.xyz_dtype) <NEW_LINE> self.data['Depth'] *= self.header['Zresolution'] <NEW_LINE> self.data['AcrossTrack'] *= self.header['XYresolution'] <NEW_LINE> self.data['AlongTrack'] *= self.header['XYresolution'] <NEW_LINE> self.data['BeamDepressionAngle'] *= 0.01 <NEW_LINE> self.data['BeamAzimuthAngle'] *= 0.01 <NEW_LINE> self.data['OneWayRange'] /= self.header['SampleRate'] <NEW_LINE> self.data['Reflectivity'] *= 0.5 <NEW_LINE> <DEDENT> def display(self): <NEW_LINE> <INDENT> for n,name in enumerate(self.header.dtype.names): <NEW_LINE> <INDENT> print(name + ' : ' + str(self.header[n])) <NEW_LINE> <DEDENT> print('TransducerDepthOffsetMultiplier : ' + str(self.depthoffsetmultiplier))
XYZ datagram 044h / 68d / 'D'. All values are converted to meters, degrees, or whole units. The header sample rate may not be correct, but is multiplied by 4 to make the one way travel time per beam appear correct. The detection window length per beam is in its raw form...
625990413eb6a72ae038b922
class User_Validator(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def validate(user): <NEW_LINE> <INDENT> error_msg = "" <NEW_LINE> if(len(user.get_nr_telefon())!=10): <NEW_LINE> <INDENT> error_msg += "Nr. tel. invalid.\n" <NEW_LINE> <DEDENT> if (user.get_tip()!="client" and user.get_tip()!="admin"): <NEW_LINE> <INDENT> error_msg += "Tip user invalid.\n" <NEW_LINE> <DEDENT> if len(error_msg) > 0: <NEW_LINE> <INDENT> raise ValueError(error_msg)
Validator for the user class. It must check that: - the phone number length is 10 - the user type is admin or client
62599041a79ad1619776b340
class CreatePostForm(ModelFormWithUser): <NEW_LINE> <INDENT> def clean_blog(self): <NEW_LINE> <INDENT> blog = self.cleaned_data.get('blog', None) <NEW_LINE> if not blog.check_user(self.user): <NEW_LINE> <INDENT> raise forms.ValidationError(_('You not in this blog!')) <NEW_LINE> <DEDENT> return blog <NEW_LINE> <DEDENT> def clean_addition(self): <NEW_LINE> <INDENT> addition = self.cleaned_data.get('addition', None) <NEW_LINE> if self.cleaned_data.get('type') in (Post.TYPE_LINK, Post.TYPE_TRANSLATE) and not addition: <NEW_LINE> <INDENT> raise forms.ValidationError(_('This post type require addition!')) <NEW_LINE> <DEDENT> return addition <NEW_LINE> <DEDENT> def save(self, commit=True): <NEW_LINE> <INDENT> inst = self.instance <NEW_LINE> inst.author = self.user <NEW_LINE> inst.preview, inst.text = utils.cut(inst.text) <NEW_LINE> inst.preview = utils.parse(inst.preview, settings.VALID_TAGS, settings.VALID_ATTRS) <NEW_LINE> inst.text = utils.parse(inst.text, settings.VALID_TAGS, settings.VALID_ATTRS) <NEW_LINE> inst = super(CreatePostForm, self).save(commit) <NEW_LINE> Tag.objects.update_tags(inst, inst.raw_tags) <NEW_LINE> inst.create_comment_root() <NEW_LINE> for mention in utils.find_mentions(inst.text): <NEW_LINE> <INDENT> Notify.new_mention_notify(mention, post=inst) <NEW_LINE> <DEDENT> if settings.PUBSUB: <NEW_LINE> <INDENT> ping_hub(settings.FEED_URL, hub_url=settings.PUSH_HUB) <NEW_LINE> <DEDENT> return inst <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> model = Post <NEW_LINE> fields = ( 'type', 'blog', 'addition', 'title', 'text', 'raw_tags', )
Create new post form
6259904123e79379d538d7bf
class IncompatibleFormatException(Exception): <NEW_LINE> <INDENT> def __init__(self, format_name, reason): <NEW_LINE> <INDENT> self.format_name = format_name <NEW_LINE> message = "Format {} is incompatible with the given settings, {}".format(format_name, reason) <NEW_LINE> super(IncompatibleFormatException, self).__init__(message)
Raised when the selected format and settings are incompatible :param str format_name: name of the incompatible format :param str reason: why the format is incompatible, optional
62599041c432627299fa4262
class CatPropDialog(wx.Dialog): <NEW_LINE> <INDENT> __category = None <NEW_LINE> def __init__(self, parent, hdd, category = None): <NEW_LINE> <INDENT> if category: <NEW_LINE> <INDENT> self.__category = category <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__category = Category("", "", hdd) <NEW_LINE> <DEDENT> wx.Dialog.__init__(self, parent, wx.ID_ANY, "Category Editor") <NEW_LINE> self.szrBaseVert = wx.BoxSizer(wx.VERTICAL) <NEW_LINE> self.szrName = wx.BoxSizer(wx.HORIZONTAL) <NEW_LINE> self.szrPath = wx.BoxSizer(wx.HORIZONTAL) <NEW_LINE> self.lblName = wx.StaticText(self, label = "Name:") <NEW_LINE> self.txtName = wx.TextCtrl(self, value=self.__category.GetName()) <NEW_LINE> self.dirPath = wx.DirPickerCtrl(self) <NEW_LINE> self.dirPath.SetPath(self.__category.GetFullPath()) <NEW_LINE> self.szrName.Add(self.lblName, 0, wx.ALL | wx.ALIGN_CENTER_VERTICAL, 5) <NEW_LINE> self.szrName.Add(self.txtName, 1, wx.ALL, 5) <NEW_LINE> self.szrPath.Add(self.dirPath, 1, wx.ALL, 5) <NEW_LINE> self.szrDialogButtons = self.CreateButtonSizer(wx.OK | wx.CANCEL) <NEW_LINE> self.btnOk = self.FindWindowById(wx.ID_OK) <NEW_LINE> self.btnCancel = self.FindWindowById(wx.ID_CANCEL) <NEW_LINE> if not category: <NEW_LINE> <INDENT> self.btnOk.Disable() <NEW_LINE> <DEDENT> self.szrBaseVert.Add(self.szrName, 0, wx.EXPAND | wx.ALL, 5) <NEW_LINE> self.szrBaseVert.Add(self.szrPath, 0, wx.EXPAND | wx.ALL, 5) <NEW_LINE> self.szrBaseVert.Add(self.szrDialogButtons, 0, wx.EXPAND | wx.ALL, 5) <NEW_LINE> self.SetSizerAndFit(self.szrBaseVert) <NEW_LINE> self.txtName.Bind(wx.EVT_TEXT, self.OnTextNameChanged) <NEW_LINE> self.dirPath.Bind(wx.EVT_DIRPICKER_CHANGED, self.OnDirPathChanged) <NEW_LINE> <DEDENT> def GetCategory(self): <NEW_LINE> <INDENT> return self.__category <NEW_LINE> <DEDENT> def OnTextNameChanged(self, event): <NEW_LINE> <INDENT> name = self.txtName.GetValue() <NEW_LINE> if name.isspace() or not name: <NEW_LINE> <INDENT> self.btnOk.Disable() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__category.SetName(name) <NEW_LINE> self.btnOk.Enable() <NEW_LINE> <DEDENT> <DEDENT> def OnDirPathChanged(self, event): <NEW_LINE> <INDENT> newPath = self.dirPath.GetPath() <NEW_LINE> if newPath.find(self.__category.GetHdd().GetPath()) == -1: <NEW_LINE> <INDENT> self.btnOk.Disable() <NEW_LINE> return <NEW_LINE> <DEDENT> self.__category.SetRelativePath(os.path.relpath(newPath, self.__category.GetHdd().GetPath()))
The Cat Prop Dialog class
62599041d53ae8145f91971f
class QubeProcessor(Processor): <NEW_LINE> <INDENT> def process(self, command, args=None, kw=None): <NEW_LINE> <INDENT> if args is None: <NEW_LINE> <INDENT> args = () <NEW_LINE> <DEDENT> if kw is None: <NEW_LINE> <INDENT> kw = {} <NEW_LINE> <DEDENT> serialised = base64.b64encode( pickle.dumps( {'command': command, 'args': args, 'kw': kw}, pickle.HIGHEST_PROTOCOL ) ) <NEW_LINE> python_statement = ( 'import pickle;' 'import base64;' 'data = base64.b64decode(\'{0}\');' 'data = pickle.loads(data);' 'data[\'command\'](*data[\'args\'], **data[\'kw\'])' ).format(serialised.replace("'", r"\'")) <NEW_LINE> command = [] <NEW_LINE> if self.host is None: <NEW_LINE> <INDENT> command.append('python') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> command.extend(self.host.get_python_prefix()) <NEW_LINE> <DEDENT> command.extend(['-c', '"{0}"'.format(python_statement)]) <NEW_LINE> job = self._create_job(command) <NEW_LINE> job_id = self._submit_job(job) <NEW_LINE> return 'Submitted Qube job: {0}'.format(job_id) <NEW_LINE> <DEDENT> def _create_job(self, command): <NEW_LINE> <INDENT> return { 'prototype': 'cmdline', 'name': 'segue', 'cpus': 1, 'package': { 'cmdline': ' '.join(command) } } <NEW_LINE> <DEDENT> def _submit_job(self, job): <NEW_LINE> <INDENT> import qb <NEW_LINE> return qb.submit(job)
Processor that pickles the command and submits it to Qube as a cmdline job.
62599041d10714528d69efed
class EmailWasher(HTMLWasher): <NEW_LINE> <INDENT> def handle_starttag(self, tag, attrs): <NEW_LINE> <INDENT> if tag.lower() in self.allowed_tag_whitelist: <NEW_LINE> <INDENT> if tag.lower() == 'ol': <NEW_LINE> <INDENT> self.previous_nbs.append(self.nb) <NEW_LINE> self.nb = 0 <NEW_LINE> self.previous_type_lists.append(tag.lower()) <NEW_LINE> <DEDENT> elif tag.lower() == 'ul': <NEW_LINE> <INDENT> self.previous_type_lists.append(tag.lower()) <NEW_LINE> <DEDENT> elif tag.lower() == 'li': <NEW_LINE> <INDENT> if self.previous_type_lists[-1] == 'ol': <NEW_LINE> <INDENT> self.nb += 1 <NEW_LINE> self.result += str(self.nb) + '. ' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.result += '* ' <NEW_LINE> <DEDENT> <DEDENT> elif tag.lower() == 'a': <NEW_LINE> <INDENT> self.previous_type_lists.append(tag.lower()) <NEW_LINE> for (attr, value) in attrs: <NEW_LINE> <INDENT> if attr.lower() == 'href': <NEW_LINE> <INDENT> self.url = value <NEW_LINE> self.result += '<' + value + '>' <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def handle_endtag(self, tag): <NEW_LINE> <INDENT> if tag.lower() in self.allowed_tag_whitelist: <NEW_LINE> <INDENT> if tag.lower() in ['ul', 'ol']: <NEW_LINE> <INDENT> self.previous_type_lists = self.previous_type_lists[:-1] <NEW_LINE> if tag.lower() == 'ol': <NEW_LINE> <INDENT> self.nb = self.previous_nbs[-1] <NEW_LINE> self.previous_nbs = self.previous_nbs[:-1] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def handle_startendtag(self, tag, attrs): <NEW_LINE> <INDENT> self.result += "" <NEW_LINE> <DEDENT> def handle_charref(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.result += unichr(int(name)).encode("utf-8") <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> def handle_entityref(self, name): <NEW_LINE> <INDENT> char_code = htmlentitydefs.name2codepoint.get(name, None) <NEW_LINE> if char_code is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.result += unichr(char_code).encode("utf-8") <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return
Wash comments before they are sent by email
6259904123e79379d538d7c0
class ProgressTestCase(TabTestCase): <NEW_LINE> <INDENT> def check_progress_tab(self): <NEW_LINE> <INDENT> return self.check_tab( tab_class=ProgressTab, dict_tab={'type': ProgressTab.type, 'name': 'same'}, expected_link=self.reverse('progress', args=[text_type(self.course.id)]), expected_tab_id=ProgressTab.type, invalid_dict_tab=None, ) <NEW_LINE> <DEDENT> @patch('student.models.CourseEnrollment.is_enrolled') <NEW_LINE> def test_progress(self, is_enrolled): <NEW_LINE> <INDENT> is_enrolled.return_value = True <NEW_LINE> self.course.hide_progress_tab = False <NEW_LINE> tab = self.check_progress_tab() <NEW_LINE> self.check_can_display_results( tab, for_staff_only=True, for_enrolled_users_only=True ) <NEW_LINE> self.course.hide_progress_tab = True <NEW_LINE> self.check_progress_tab() <NEW_LINE> self.check_can_display_results( tab, for_staff_only=True, for_enrolled_users_only=True, expected_value=False )
Test cases for Progress Tab.
62599041287bf620b6272ea8
class BlobTags(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'blob_tag_set': {'required': True}, } <NEW_LINE> _attribute_map = { 'blob_tag_set': {'key': 'BlobTagSet', 'type': '[BlobTag]', 'xml': {'name': 'TagSet', 'wrapped': True, 'itemsName': 'Tag'}}, } <NEW_LINE> _xml_map = { 'name': 'Tags' } <NEW_LINE> def __init__( self, *, blob_tag_set: List["BlobTag"], **kwargs ): <NEW_LINE> <INDENT> super(BlobTags, self).__init__(**kwargs) <NEW_LINE> self.blob_tag_set = blob_tag_set
Blob tags. All required parameters must be populated in order to send to Azure. :param blob_tag_set: Required. :type blob_tag_set: list[~azure.storage.blob.models.BlobTag]
625990418e71fb1e983bcd90