code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class SageVideo(BaseVideo): <NEW_LINE> <INDENT> def __init__(self, mf, log): <NEW_LINE> <INDENT> BaseVideo.__init__(self) <NEW_LINE> self.watchedStartTime = 0 <NEW_LINE> self.watchedEndTime = 0 <NEW_LINE> self.realWatchedStartTime = 0 <NEW_LINE> self.realWatchedEndTime = 0 <NEW_LINE> airing = mf.get('Airing') <NEW_LINE> if not airing: <NEW_LINE> <INDENT> log.error('Sage MediaFile object has no Airing attribute') <NEW_LINE> return <NEW_LINE> <DEDENT> show = airing.get('Show') <NEW_LINE> if not show: <NEW_LINE> <INDENT> log.error('Sage MediaFile object has no Show attribute') <NEW_LINE> return <NEW_LINE> <DEDENT> self.watched = airing.get('IsWatched') <NEW_LINE> data = airing.get('LatestWatchedTime') <NEW_LINE> if data: <NEW_LINE> <INDENT> self.lastWatched = int(data // 1000) <NEW_LINE> <DEDENT> data = airing.get('AiringDuration') <NEW_LINE> if data: <NEW_LINE> <INDENT> self.duration = int(data) <NEW_LINE> <DEDENT> data = airing.get('WatchedDuration') <NEW_LINE> if data: <NEW_LINE> <INDENT> self.resume = int(data) <NEW_LINE> self.setResumeNorm() <NEW_LINE> <DEDENT> start = airing.get('WatchedStartTime') <NEW_LINE> end = airing.get('WatchedEndTime') <NEW_LINE> if start and end: <NEW_LINE> <INDENT> self.watchedStartTime = int(start) <NEW_LINE> self.watchedEndTime = int(end) <NEW_LINE> <DEDENT> start = airing.get('RealWatchedStartTime') <NEW_LINE> end = airing.get('RealWatchedEndTime') <NEW_LINE> if start and end: <NEW_LINE> <INDENT> self.realWatchedStartTime = int(start) <NEW_LINE> self.realWatchedEndTime = int(end) <NEW_LINE> <DEDENT> if self.realWatchedEndTime > self.lastWatched * 1000: <NEW_LINE> <INDENT> self.lastWatched = self.realWatchedEndTime // 1000 <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> s = self.getLastWatchedStr() + ' / ' + self.getResumeStr() <NEW_LINE> if self.watched: <NEW_LINE> <INDENT> s += ' [watched]' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> s += ' [not watched]' <NEW_LINE> <DEDENT> return s <NEW_LINE> <DEDENT> def 
setResumeNorm(self): <NEW_LINE> <INDENT> val = self.resume <NEW_LINE> if self.watched: <NEW_LINE> <INDENT> if val > 0: <NEW_LINE> <INDENT> val = 0 <NEW_LINE> <DEDENT> <DEDENT> elif val > 0 and val < START_IGNORE: <NEW_LINE> <INDENT> val = 0 <NEW_LINE> <DEDENT> self.resumeNorm = val <NEW_LINE> <DEDENT> def getInfo(self, detail=False): <NEW_LINE> <INDENT> s = str(self) <NEW_LINE> if not detail: <NEW_LINE> <INDENT> return s <NEW_LINE> <DEDENT> if self.watchedStartTime and self.watchedEndTime: <NEW_LINE> <INDENT> s += ('\n\t\t%s to %s [%s] (Watched Time)' % (self.timeToStr(self.watchedStartTime // 1000), self.timeToStr(self.watchedEndTime // 1000), self.durationToStr((self.watchedEndTime - self.watchedStartTime) // 1000))) <NEW_LINE> <DEDENT> if self.realWatchedStartTime and self.realWatchedEndTime: <NEW_LINE> <INDENT> s += ('\n\t\t%s to %s [%s] (Real Watched Time)' % (self.timeToStr(self.realWatchedStartTime // 1000), self.timeToStr(self.realWatchedEndTime // 1000), self.durationToStr((self.realWatchedEndTime - self.realWatchedStartTime) // 1000))) <NEW_LINE> <DEDENT> return s
Class that represents a SageTV video object
6259904fec188e330fdf9d27
class TripletLoss(nn.Module): <NEW_LINE> <INDENT> def __init__(self, margin=0.3, distance='consine', use_gpu=True): <NEW_LINE> <INDENT> super(TripletLoss, self).__init__() <NEW_LINE> if distance not in ['euclidean', 'consine']: <NEW_LINE> <INDENT> raise KeyError("Unsupported distance: {}".format(distance)) <NEW_LINE> <DEDENT> self.distance = distance <NEW_LINE> self.margin = margin <NEW_LINE> self.use_gpu = use_gpu <NEW_LINE> self.ranking_loss = nn.MarginRankingLoss(margin=margin) <NEW_LINE> <DEDENT> def forward(self, inputs, targets): <NEW_LINE> <INDENT> n = inputs.size(0) <NEW_LINE> if self.distance == 'euclidean': <NEW_LINE> <INDENT> dist = torch.pow(inputs, 2).sum(dim=1, keepdim=True).expand(n, n) <NEW_LINE> dist = dist + dist.t() <NEW_LINE> dist.addmm_(1, -2, inputs, inputs.t()) <NEW_LINE> dist = dist.clamp(min=1e-12).sqrt() <NEW_LINE> <DEDENT> elif self.distance == 'consine': <NEW_LINE> <INDENT> fnorm = torch.norm(inputs, p=2, dim=1, keepdim=True) <NEW_LINE> l2norm = inputs.div(fnorm.expand_as(inputs)) <NEW_LINE> dist = - torch.mm(l2norm, l2norm.t()) <NEW_LINE> <DEDENT> if self.use_gpu: targets = targets.cuda() <NEW_LINE> mask = targets.expand(n, n).eq(targets.expand(n, n).t()) <NEW_LINE> dist_ap, dist_an = [], [] <NEW_LINE> for i in range(n): <NEW_LINE> <INDENT> dist_ap.append(dist[i][mask[i]].max().unsqueeze(0)) <NEW_LINE> dist_an.append(dist[i][mask[i] == 0].min().unsqueeze(0)) <NEW_LINE> <DEDENT> dist_ap = torch.cat(dist_ap) <NEW_LINE> dist_an = torch.cat(dist_an) <NEW_LINE> y = torch.ones_like(dist_an) <NEW_LINE> loss = self.ranking_loss(dist_an, dist_ap, y) <NEW_LINE> return loss
Triplet loss with hard positive/negative mining. Reference: Hermans et al. In Defense of the Triplet Loss for Person Re-Identification. arXiv:1703.07737. Code imported from https://github.com/Cysu/open-reid/blob/master/reid/loss/triplet.py. Args: margin (float): margin for triplet.
6259904f7b25080760ed8722
class Parameter(DriverParameter): <NEW_LINE> <INDENT> INTERVAL = 'SampleInterval' <NEW_LINE> INSTRUMENT_SERIES = 'InstrumentSeries'
Device specific parameters for THSPH.
6259904f6fece00bbaccce44
class _BitmapTools(object): <NEW_LINE> <INDENT> CROP_PIXELS = 0 <NEW_LINE> HISTOGRAM = 1 <NEW_LINE> BOUNDING_BOX = 2 <NEW_LINE> def __init__(self, dimensions, pixels): <NEW_LINE> <INDENT> binary = './bitmaptools' <NEW_LINE> assert os.path.exists(binary), 'You must build bitmaptools first!' <NEW_LINE> self._popen = subprocess.Popen([binary], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) <NEW_LINE> packed_dims = struct.pack('iiiiiii', *dimensions) <NEW_LINE> self._popen.stdin.write(packed_dims) <NEW_LINE> if type(pixels) is not bytearray: <NEW_LINE> <INDENT> pixels = bytearray(pixels) <NEW_LINE> <DEDENT> self._popen.stdin.write(pixels) <NEW_LINE> <DEDENT> def _RunCommand(self, *command): <NEW_LINE> <INDENT> assert not self._popen.stdin.closed, ( 'Exactly one command allowed per instance of tools.') <NEW_LINE> packed_command = struct.pack('i' * len(command), *command) <NEW_LINE> self._popen.stdin.write(packed_command) <NEW_LINE> self._popen.stdin.close() <NEW_LINE> length_packed = self._popen.stdout.read(struct.calcsize('i')) <NEW_LINE> if not length_packed: <NEW_LINE> <INDENT> raise Exception(self._popen.stderr.read()) <NEW_LINE> <DEDENT> length = struct.unpack('i', length_packed)[0] <NEW_LINE> return self._popen.stdout.read(length) <NEW_LINE> <DEDENT> def CropPixels(self): <NEW_LINE> <INDENT> return self._RunCommand(_BitmapTools.CROP_PIXELS) <NEW_LINE> <DEDENT> def Histogram(self, ignore_color, tolerance): <NEW_LINE> <INDENT> ignore_color_int = -1 if ignore_color is None else int(ignore_color) <NEW_LINE> response = self._RunCommand(_BitmapTools.HISTOGRAM, ignore_color_int, tolerance) <NEW_LINE> out = array.array('i') <NEW_LINE> out.fromstring(response) <NEW_LINE> assert len(out) == 768, ( 'The ColorHistogram has the wrong number of buckets: %s' % len(out)) <NEW_LINE> return ColorHistogram(out[:256], out[256:512], out[512:], ignore_color) <NEW_LINE> <DEDENT> def BoundingBox(self, color, tolerance): <NEW_LINE> <INDENT> response = 
self._RunCommand(_BitmapTools.BOUNDING_BOX, int(color), tolerance) <NEW_LINE> unpacked = struct.unpack('iiiii', response) <NEW_LINE> box, count = unpacked[:4], unpacked[-1] <NEW_LINE> if box[2] < 0 or box[3] < 0: <NEW_LINE> <INDENT> box = None <NEW_LINE> <DEDENT> return box, count
Wraps a child process of bitmaptools and allows for one command.
6259904f23e79379d538d986
class SecondaryIndexingLatencyTest(SecondaryIndexTest): <NEW_LINE> <INDENT> @with_stats <NEW_LINE> def apply_scanworkload(self): <NEW_LINE> <INDENT> rest_username, rest_password = self.cluster_spec.rest_credentials <NEW_LINE> logger.info('Initiating the scan workload') <NEW_LINE> cmdstr = "cbindexperf -cluster {} -auth=\"{}:{}\" -configfile scripts/config_indexinglatency.json -resultfile result.json".format(self.index_nodes[0], rest_username, rest_password) <NEW_LINE> status = subprocess.call(cmdstr, shell=True) <NEW_LINE> if status != 0: <NEW_LINE> <INDENT> raise Exception('Scan workload could not be applied') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.info('Scan workload applied') <NEW_LINE> <DEDENT> return status <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.load() <NEW_LINE> self.wait_for_persistence() <NEW_LINE> self.compact_bucket() <NEW_LINE> self.hot_load() <NEW_LINE> self.build_secondaryindex() <NEW_LINE> num_samples = 100 <NEW_LINE> samples = [] <NEW_LINE> while num_samples != 0: <NEW_LINE> <INDENT> access_settings = self.test_config.access_settings <NEW_LINE> self.worker_manager.run_workload(access_settings, self.target_iterator) <NEW_LINE> self.worker_manager.wait_for_workers() <NEW_LINE> time_before = time.time() <NEW_LINE> status = self.apply_scanworkload() <NEW_LINE> time_after = time.time() <NEW_LINE> if status == 0: <NEW_LINE> <INDENT> num_samples = num_samples - 1 <NEW_LINE> time_elapsed = (time_after - time_before) / 1000000.0 <NEW_LINE> samples.append(time_elapsed) <NEW_LINE> <DEDENT> <DEDENT> temp = np.array(samples) <NEW_LINE> indexing_latency_percentile_80 = np.percentile(temp, 80) <NEW_LINE> logger.info('Indexing latency (80th percentile): {} ms.'.format(indexing_latency_percentile_80)) <NEW_LINE> if self.test_config.stats_settings.enabled: <NEW_LINE> <INDENT> self.reporter.post_to_sf(indexing_latency_percentile_80)
This test applies scan workload against a 2i server and measures the indexing latency
6259904f7cff6e4e811b6ec5
class CleanSummer(): <NEW_LINE> <INDENT> def clean_text(self): <NEW_LINE> <INDENT> data = self.cleaned_data['text'] <NEW_LINE> max_length = self.summer_max_length <NEW_LINE> cleaned = cleanText(data) <NEW_LINE> if len(cleaned)>max_length: <NEW_LINE> <INDENT> raise ValidationError("This text has length "+str(len(cleaned))+", when the maximum is "+str(max_length)) <NEW_LINE> <DEDENT> return cleaned
Assumes the plugin Summernote is being used as a widget for a field called text, which this class then implements the cleaning method for
6259904f6e29344779b01acd
class _GroupObserver(object): <NEW_LINE> <INDENT> def __init__(self, group): <NEW_LINE> <INDENT> self.group = group <NEW_LINE> self._observed = {} <NEW_LINE> self._unobserved = set() <NEW_LINE> self._init_connections() <NEW_LINE> <DEDENT> def _observe(self, actor): <NEW_LINE> <INDENT> add_handler = actor.connect("actor-added", self._add_actor) <NEW_LINE> remove_handler = actor.connect("actor-removed", self._remove_actor) <NEW_LINE> self._observed[actor] = add_handler, remove_handler <NEW_LINE> for child in actor.get_children(): <NEW_LINE> <INDENT> self._observe(child) <NEW_LINE> <DEDENT> <DEDENT> def _unobserve(self, actor): <NEW_LINE> <INDENT> if actor not in self._observed: <NEW_LINE> <INDENT> _LOG.debug("double unobserve: " + str(actor.get_id())) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._unobserved.add(actor) <NEW_LINE> actor.disconnect(self._observed[actor][0]) <NEW_LINE> actor.disconnect(self._observed[actor][1]) <NEW_LINE> del self._observed[actor] <NEW_LINE> <DEDENT> <DEDENT> def _init_connections(self): <NEW_LINE> <INDENT> self._observe(self.group) <NEW_LINE> <DEDENT> def _add_actor(self, parent, descendant): <NEW_LINE> <INDENT> if isinstance(descendant, Group): <NEW_LINE> <INDENT> self.group.schedule_update() <NEW_LINE> <DEDENT> elif hasattr(descendant, "enable_hilite"): <NEW_LINE> <INDENT> self.group.schedule_update() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for child in descendant.get_children(): <NEW_LINE> <INDENT> self._add_actor(descendant, child) <NEW_LINE> <DEDENT> self._observe(descendant) <NEW_LINE> <DEDENT> <DEDENT> def _remove_actor(self, parent, descendant): <NEW_LINE> <INDENT> if isinstance(descendant, Group): <NEW_LINE> <INDENT> self.group.schedule_update() <NEW_LINE> <DEDENT> elif hasattr(descendant, "enable_hilite"): <NEW_LINE> <INDENT> self.group.schedule_update() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._unobserve(descendant) <NEW_LINE> for child in descendant.get_children(): <NEW_LINE> <INDENT> 
self._remove_actor(descendant, child)
Helper class for Group. This class observes all group descendants. When subgroup change it schedules update in scanning seqence.
6259904f23849d37ff852548
class Comparator: <NEW_LINE> <INDENT> def equals(self, rhs): <NEW_LINE> <INDENT> raise NotImplementedError('method must be implemented by a subclass.') <NEW_LINE> <DEDENT> def __eq__(self, rhs): <NEW_LINE> <INDENT> return self.equals(rhs) <NEW_LINE> <DEDENT> def __ne__(self, rhs): <NEW_LINE> <INDENT> return not self.equals(rhs)
Base class for all Mox comparators. A Comparator can be used as a parameter to a mocked method when the exact value is not known. For example, the code you are testing might build up a long SQL string that is passed to your mock DAO. You're only interested that the IN clause contains the proper primary keys, so you can set your mock up as follows: mock_dao.RunQuery(StrContains('IN (1, 2, 4, 5)')).AndReturn(mock_result) Now whatever query is passed in must contain the string 'IN (1, 2, 4, 5)'. A Comparator may replace one or more parameters, for example: # return at most 10 rows mock_dao.RunQuery(StrContains('SELECT'), 10) or # Return some non-deterministic number of rows mock_dao.RunQuery(StrContains('SELECT'), IsA(int))
6259904f8da39b475be0466f
class InsertChartMixin: <NEW_LINE> <INDENT> def insert_chart( self, slice_name: str, owners: List[int], datasource_id: int, created_by=None, datasource_type: str = "table", description: Optional[str] = None, viz_type: Optional[str] = None, params: Optional[str] = None, cache_timeout: Optional[int] = None, certified_by: Optional[str] = None, certification_details: Optional[str] = None, ) -> Slice: <NEW_LINE> <INDENT> obj_owners = list() <NEW_LINE> for owner in owners: <NEW_LINE> <INDENT> user = db.session.query(security_manager.user_model).get(owner) <NEW_LINE> obj_owners.append(user) <NEW_LINE> <DEDENT> datasource = ConnectorRegistry.get_datasource( datasource_type, datasource_id, db.session ) <NEW_LINE> slice = Slice( cache_timeout=cache_timeout, certified_by=certified_by, certification_details=certification_details, created_by=created_by, datasource_id=datasource.id, datasource_name=datasource.name, datasource_type=datasource.type, description=description, owners=obj_owners, params=params, slice_name=slice_name, viz_type=viz_type, ) <NEW_LINE> db.session.add(slice) <NEW_LINE> db.session.commit() <NEW_LINE> return slice
Implements shared logic for tests to insert charts (slices) in the DB
6259904f30c21e258be99c8f
class Node: <NEW_LINE> <INDENT> def __init__(self, data, parent=None): <NEW_LINE> <INDENT> self.__children = [] <NEW_LINE> self.__data = data <NEW_LINE> self.__parent = parent <NEW_LINE> <DEDENT> def add_child(self, node): <NEW_LINE> <INDENT> if node in self.__children: <NEW_LINE> <INDENT> raise ValueError('Node has already is children') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__children.append(node) <NEW_LINE> node.__parent = self <NEW_LINE> <DEDENT> <DEDENT> def delete_child(self, node): <NEW_LINE> <INDENT> self.__children.remove(node) <NEW_LINE> <DEDENT> def is_root(self): <NEW_LINE> <INDENT> return self.__parent is None <NEW_LINE> <DEDENT> def is_parent(self): <NEW_LINE> <INDENT> return self.__children > 0 <NEW_LINE> <DEDENT> def is_leaf(self): <NEW_LINE> <INDENT> return len(self.__children) == 0 <NEW_LINE> <DEDENT> def at_level(self): <NEW_LINE> <INDENT> if self.__parent is not None: <NEW_LINE> <INDENT> return 1 + self.__parent.at_level() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> <DEDENT> def get_child_list(self): <NEW_LINE> <INDENT> return self.__children <NEW_LINE> <DEDENT> def get_data(self): <NEW_LINE> <INDENT> return self.__data <NEW_LINE> <DEDENT> def set_data(self, data): <NEW_LINE> <INDENT> self.__data = data <NEW_LINE> <DEDENT> def get_parent(self): <NEW_LINE> <INDENT> return self.__parent <NEW_LINE> <DEDENT> def __str__(self, level=0): <NEW_LINE> <INDENT> ret = "\t" * level + "|---" + "{}".format(self.__data) + "\n" <NEW_LINE> for child in self.__children: <NEW_LINE> <INDENT> ret += child.__str__(level + 1) <NEW_LINE> <DEDENT> return ret
Generic node class
6259904fa79ad1619776b501
class TestQpidInvalidTopologyVersion(_QpidBaseTestCase): <NEW_LINE> <INDENT> scenarios = [ ('direct', dict(consumer_cls=qpid_driver.DirectConsumer, consumer_kwargs={}, publisher_cls=qpid_driver.DirectPublisher, publisher_kwargs={})), ('topic', dict(consumer_cls=qpid_driver.TopicConsumer, consumer_kwargs={'exchange_name': 'openstack'}, publisher_cls=qpid_driver.TopicPublisher, publisher_kwargs={'exchange_name': 'openstack'})), ('fanout', dict(consumer_cls=qpid_driver.FanoutConsumer, consumer_kwargs={}, publisher_cls=qpid_driver.FanoutPublisher, publisher_kwargs={})), ] <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super(TestQpidInvalidTopologyVersion, self).setUp() <NEW_LINE> self.config(qpid_topology_version=-1, group='oslo_messaging_qpid') <NEW_LINE> <DEDENT> def test_invalid_topology_version(self): <NEW_LINE> <INDENT> def consumer_callback(msg): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> msgid_or_topic = 'test' <NEW_LINE> exception_msg = ("Invalid value for qpid_topology_version: %d" % self.conf.oslo_messaging_qpid.qpid_topology_version) <NEW_LINE> recvd_exc_msg = '' <NEW_LINE> try: <NEW_LINE> <INDENT> self.consumer_cls(self.conf.oslo_messaging_qpid, self.session_receive, msgid_or_topic, consumer_callback, **self.consumer_kwargs) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> recvd_exc_msg = e.message <NEW_LINE> <DEDENT> self.assertEqual(exception_msg, recvd_exc_msg) <NEW_LINE> recvd_exc_msg = '' <NEW_LINE> try: <NEW_LINE> <INDENT> self.publisher_cls(self.conf.oslo_messaging_qpid, self.session_send, topic=msgid_or_topic, **self.publisher_kwargs) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> recvd_exc_msg = e.message <NEW_LINE> <DEDENT> self.assertEqual(exception_msg, recvd_exc_msg)
Unit test cases to test invalid qpid topology version.
6259904fb57a9660fecd2f07
class InternalEvent(Event): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> def __init__(self, label=None, logical_round=-1, event_time=None, timeout_disallowed=False, prunable=False): <NEW_LINE> <INDENT> super(InternalEvent, self).__init__(prefix='i', label=label, logical_round=logical_round, event_time=event_time, prunable=prunable) <NEW_LINE> self.timeout_disallowed = timeout_disallowed <NEW_LINE> <DEDENT> def whitelisted(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def proceed(self, simulation): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def disallow_timeouts(self): <NEW_LINE> <INDENT> self.timeout_disallowed = True <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_json(json_hash): <NEW_LINE> <INDENT> raise NotImplementedError()
An InternalEvent is one that happens within the controller(s) under simulation. Derivatives of this class verify that the internal event has occurred during replay in its proceed method before it returns.
6259904fa8ecb0332587269d
class LintMessage(object): <NEW_LINE> <INDENT> msg = "" <NEW_LINE> def __init__(self, msg, level=INFO, producer="unknown", msgid=None): <NEW_LINE> <INDENT> self.msg = msg <NEW_LINE> self.level = level <NEW_LINE> self.producer = producer <NEW_LINE> self.msgid = msgid <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return str(self.msg) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.msg)
A base class for all lint messages.
6259904f596a897236128ff4
class PlayerDataMap(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Key = None <NEW_LINE> self.Value = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Key = params.get("Key") <NEW_LINE> self.Value = params.get("Value")
玩家自定义数据
6259904fd7e4931a7ef3d503
class CreateQueueRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.QueueName = None <NEW_LINE> self.MaxMsgHeapNum = None <NEW_LINE> self.PollingWaitSeconds = None <NEW_LINE> self.VisibilityTimeout = None <NEW_LINE> self.MaxMsgSize = None <NEW_LINE> self.MsgRetentionSeconds = None <NEW_LINE> self.RewindSeconds = None <NEW_LINE> self.Transaction = None <NEW_LINE> self.FirstQueryInterval = None <NEW_LINE> self.MaxQueryCount = None <NEW_LINE> self.DeadLetterQueueName = None <NEW_LINE> self.Policy = None <NEW_LINE> self.MaxReceiveCount = None <NEW_LINE> self.MaxTimeToLive = None <NEW_LINE> self.Trace = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.QueueName = params.get("QueueName") <NEW_LINE> self.MaxMsgHeapNum = params.get("MaxMsgHeapNum") <NEW_LINE> self.PollingWaitSeconds = params.get("PollingWaitSeconds") <NEW_LINE> self.VisibilityTimeout = params.get("VisibilityTimeout") <NEW_LINE> self.MaxMsgSize = params.get("MaxMsgSize") <NEW_LINE> self.MsgRetentionSeconds = params.get("MsgRetentionSeconds") <NEW_LINE> self.RewindSeconds = params.get("RewindSeconds") <NEW_LINE> self.Transaction = params.get("Transaction") <NEW_LINE> self.FirstQueryInterval = params.get("FirstQueryInterval") <NEW_LINE> self.MaxQueryCount = params.get("MaxQueryCount") <NEW_LINE> self.DeadLetterQueueName = params.get("DeadLetterQueueName") <NEW_LINE> self.Policy = params.get("Policy") <NEW_LINE> self.MaxReceiveCount = params.get("MaxReceiveCount") <NEW_LINE> self.MaxTimeToLive = params.get("MaxTimeToLive") <NEW_LINE> self.Trace = params.get("Trace")
CreateQueue请求参数结构体
6259904fb830903b9686eec0
class AnalysisDirMetadata(MetadataDict): <NEW_LINE> <INDENT> def __init__(self,filen=None): <NEW_LINE> <INDENT> MetadataDict.__init__(self, attributes = { 'run_name':'run_name', 'run_number': 'run_number', 'source': 'source', 'platform':'platform', 'assay': 'assay', 'processing_software': 'processing_software', 'bcl2fastq_software': 'bcl2fastq_software', 'cellranger_software': 'cellranger_software', 'instrument_name': 'instrument_name', 'instrument_datestamp': 'instrument_datestamp', 'instrument_flow_cell_id': 'instrument_flow_cell_id', 'instrument_run_number': 'instrument_run_number', 'sequencer_model': 'sequencer_model', }, order=('run_name', 'platform', 'run_number', 'source',), filen=filen)
Class for storing metadata about an analysis directory Provides a set of data items representing metadata about the current analysis, which are loaded from and saved to an external file. The metadata items are: run_name: name of the run run_number: run number assigned by local facility source: source of the data (e.g. local facility) platform: sequencing platform e.g. 'miseq' assay: the 'assay' from the IEM SampleSheet e.g. 'Nextera XT' processing_software: dictionary of software packages used in in the processing bcl2fastq_software: info on the Bcl conversion software used (deprecated) cellranger_software: info on the 10xGenomics cellranger software used (deprecated) instrument_name: name/i.d. for the sequencing instrument instrument_datestamp: datestamp from the sequencing instrument instrument_run_number: the run number from the sequencing instrument instrument_flow_cell_id: the flow cell ID from the sequencing instrument sequencer_model: the model of the sequencing instrument
6259904f82261d6c5273090c
class HTMLEmailBackend(EmailBackend): <NEW_LINE> <INDENT> formats = ('subject.txt', 'message.html') <NEW_LINE> def _strip_tags(self, html): <NEW_LINE> <INDENT> s = MLStripper() <NEW_LINE> s.feed(html) <NEW_LINE> return s.get_data() <NEW_LINE> <DEDENT> def send(self, messages, recipients, *args, **kwargs): <NEW_LINE> <INDENT> addresses = self.get_addresses(recipients) <NEW_LINE> if addresses: <NEW_LINE> <INDENT> subject = ' '.join(messages['subject.txt'].splitlines()) <NEW_LINE> body_html = messages['message.html'] <NEW_LINE> body = self._strip_tags(body_html) <NEW_LINE> email = EmailMultiAlternatives( subject, body, settings.DEFAULT_FROM_EMAIL, addresses) <NEW_LINE> email.attach_alternative(body_html, "text/html") <NEW_LINE> email.send() <NEW_LINE> return email
Email delivery backend with html support as alternative content.
6259904f3c8af77a43b68983
class DocumentFragmentValueReadTests(TestCase): <NEW_LINE> <INDENT> def test_value_uses_schema_to_find_default_value(self): <NEW_LINE> <INDENT> fragment = DocumentFragment( document=None, parent=None, value=DefaultValue, item=None, schema={"default": "default value"}) <NEW_LINE> self.assertEqual(fragment.value, "default value") <NEW_LINE> <DEDENT> def test_value_returns_normal_values(self): <NEW_LINE> <INDENT> fragment = DocumentFragment( document=None, parent=None, value="value") <NEW_LINE> self.assertEqual(fragment.value, "value") <NEW_LINE> <DEDENT> def test_value_returns_None_normally(self): <NEW_LINE> <INDENT> fragment = DocumentFragment( document=None, parent=None, value=None) <NEW_LINE> self.assertEqual(fragment.value, None)
Tests related to reading values
6259904f7b25080760ed8723
class CompositeRule(ClientRule): <NEW_LINE> <INDENT> def __init__(self, children, rule_type=RuleType.COMPOSITE.name, rule_id=None): <NEW_LINE> <INDENT> super(CompositeRule, self).__init__(rule_type, rule_id) <NEW_LINE> self._children = tuple(children) <NEW_LINE> <DEDENT> @property <NEW_LINE> def children(self): <NEW_LINE> <INDENT> return tuple(self._children) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "{class_name}(children={children})".format( class_name=self.__class__.__name__, children=self._children, )
Composite rule to match request by several rules simultaneously.
6259904f73bcbd0ca4bcb715
class WindLoad(RotatableLoad): <NEW_LINE> <INDENT> def __init__(self, *, load_name: str, load_no, wind_speed: float, angle: float, symmetrical: bool, abbrev: str = ''): <NEW_LINE> <INDENT> super().__init__(load_name = load_name, load_no = load_no, load_value = wind_speed, angle = angle, symmetrical = symmetrical, abbrev = abbrev) <NEW_LINE> <DEDENT> @property <NEW_LINE> def wind_speed(self) -> float: <NEW_LINE> <INDENT> return self._load_value <NEW_LINE> <DEDENT> @wind_speed.setter <NEW_LINE> def wind_speed(self, wind_speed: float): <NEW_LINE> <INDENT> self._load_value = wind_speed <NEW_LINE> <DEDENT> def scale_speed(self, wind_speed_to, scale): <NEW_LINE> <INDENT> def scale_func(scale_to, scale_from): <NEW_LINE> <INDENT> return (scale_to ** 2) / (scale_from ** 2) <NEW_LINE> <DEDENT> return self.scale_factor(scale_to = wind_speed_to, scale_func = scale_func, scale = scale) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return (f'{type(self).__name__}(' + f'load_name={repr(self.load_name)}, ' + f'load_no={repr(self.load_no)}, ' + f'wind_speed={repr(self.wind_speed)}, ' + f'angle={repr(self.angle)}, ' + f'symmetrical={repr(self.symmetrical)}, ' + f'abbrev={repr(self.abbrev)})') <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return (super(WindLoad, self).__str__() + f', wind_speed: {self.wind_speed}' + f', angle: {self.angle}')
WindLoad is a sub-class of RotatableLoad, however it has an additional method to scale the load based on the windspeed, and the load can be entered as either load or wind_speed.
6259904f29b78933be26ab08
class CorsController(Controller): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def convert_response(req, resp, success_code, response_class): <NEW_LINE> <INDENT> if resp.status_int == success_code: <NEW_LINE> <INDENT> headers = dict() <NEW_LINE> if req.object_name: <NEW_LINE> <INDENT> headers['x-amz-version-id'] = resp.sw_headers[VERSION_ID_HEADER] <NEW_LINE> <DEDENT> return response_class(headers=headers) <NEW_LINE> <DEDENT> return resp <NEW_LINE> <DEDENT> @public <NEW_LINE> @bucket_operation <NEW_LINE> @check_container_existence <NEW_LINE> def GET(self, req): <NEW_LINE> <INDENT> log_s3api_command(req, 'get-bucket-cors') <NEW_LINE> sysmeta = req.get_container_info(self.app).get('sysmeta', {}) <NEW_LINE> body = sysmeta.get('swift3-cors') <NEW_LINE> if not body: <NEW_LINE> <INDENT> raise NoSuchCORSConfiguration <NEW_LINE> <DEDENT> return HTTPOk(body=body, content_type='application/xml') <NEW_LINE> <DEDENT> @public <NEW_LINE> @bucket_operation <NEW_LINE> @check_container_existence <NEW_LINE> def PUT(self, req): <NEW_LINE> <INDENT> log_s3api_command(req, 'put-bucket-cors') <NEW_LINE> xml = req.xml(MAX_CORS_BODY_SIZE) <NEW_LINE> try: <NEW_LINE> <INDENT> data = fromstring(xml, "CorsConfiguration") <NEW_LINE> <DEDENT> except (XMLSyntaxError, DocumentInvalid): <NEW_LINE> <INDENT> raise MalformedXML() <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> LOGGER.error(exc) <NEW_LINE> raise <NEW_LINE> <DEDENT> check_cors_rule(data) <NEW_LINE> req.headers[BUCKET_CORS_HEADER] = xml <NEW_LINE> resp = req._get_response(self.app, 'POST', req.container_name, None) <NEW_LINE> return self.convert_response(req, resp, 204, HTTPOk) <NEW_LINE> <DEDENT> @public <NEW_LINE> @bucket_operation <NEW_LINE> @check_container_existence <NEW_LINE> def DELETE(self, req): <NEW_LINE> <INDENT> log_s3api_command(req, 'delete-bucket-cors') <NEW_LINE> req.headers[BUCKET_CORS_HEADER] = '' <NEW_LINE> resp = req._get_response(self.app, 'POST', req.container_name, None) <NEW_LINE> return 
self.convert_response(req, resp, 202, HTTPNoContent)
Handles the following APIs: - GET Bucket CORS - PUT Bucket CORS - DELETE Bucket CORS
6259904fd53ae8145f9198ee
class TestCardResponse(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testCardResponse(self): <NEW_LINE> <INDENT> pass
CardResponse unit test stubs
6259904f1f037a2d8b9e52b1
class OneToManyDescriptor(LinkDescriptor): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(OneToManyDescriptor, self).__init__( linked_is_p=True, p_value_is_list=False, **kwargs) <NEW_LINE> <DEDENT> def get_reverse(self, cls): <NEW_LINE> <INDENT> return ManyToOneDescriptor( this_key=self._linked_key, linked_cls=cls, linked_key=self._this_key) <NEW_LINE> <DEDENT> def __set__(self, instance, value): <NEW_LINE> <INDENT> assert isinstance(value, list) <NEW_LINE> lm = self.get_manager(instance) <NEW_LINE> lm.clear() <NEW_LINE> for v in value: <NEW_LINE> <INDENT> lm.add(value)
This object has an attribute, represented by this field, that can contain only one member, that refers to linked object.
6259904f3cc13d1c6d466bc5
class AuthenticationError(APIException): <NEW_LINE> <INDENT> status_code = status.HTTP_401_UNAUTHORIZED <NEW_LINE> default_detail = _('身份校验错误') <NEW_LINE> default_code = 'Authentication Failed'
jwt校验错误
6259904f435de62698e9d28a
class SipDialError(OpenTokException): <NEW_LINE> <INDENT> pass
Indicates that there was a SIP dial specific problem: The Session ID passed in is invalid or you attempt to start a SIP call for a session that does not use the OpenTok Media Router.
6259904fd6c5a102081e35aa
class OccupationType(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'occupationtypes' <NEW_LINE> pk_id = db.Column(db.Text(), primary_key=True) <NEW_LINE> name = db.Column(db.Text()) <NEW_LINE> occupation_id = db.Column(db.Text(), db.ForeignKey('occupations.pk_id')) <NEW_LINE> def __init__(self, **data): <NEW_LINE> <INDENT> self.pk_id = str(uuid.uuid4()) <NEW_LINE> self.name = data.get('name', '') <NEW_LINE> self.occupation_id = data.get('occupation_id', None) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return u'<{} - {}>'.format(self.pk_id, self.name) <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> return { "id": self.pk_id, "name": self.name, "occupation": {"id": self.occupation.pk_id, "name": self.occupation.name} if self.occupation is not None else {} }
Model for OccupationType
6259904f8e7ae83300eea51e
class VsphereVirtualDiskVolumeSource(_kuber_definitions.Definition): <NEW_LINE> <INDENT> def __init__( self, fs_type: str = None, storage_policy_id: str = None, storage_policy_name: str = None, volume_path: str = None, ): <NEW_LINE> <INDENT> super(VsphereVirtualDiskVolumeSource, self).__init__( api_version="core/v1", kind="VsphereVirtualDiskVolumeSource" ) <NEW_LINE> self._properties = { "fsType": fs_type if fs_type is not None else "", "storagePolicyID": storage_policy_id if storage_policy_id is not None else "", "storagePolicyName": storage_policy_name if storage_policy_name is not None else "", "volumePath": volume_path if volume_path is not None else "", } <NEW_LINE> self._types = { "fsType": (str, None), "storagePolicyID": (str, None), "storagePolicyName": (str, None), "volumePath": (str, None), } <NEW_LINE> <DEDENT> @property <NEW_LINE> def fs_type(self) -> str: <NEW_LINE> <INDENT> return typing.cast( str, self._properties.get("fsType"), ) <NEW_LINE> <DEDENT> @fs_type.setter <NEW_LINE> def fs_type(self, value: str): <NEW_LINE> <INDENT> self._properties["fsType"] = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def storage_policy_id(self) -> str: <NEW_LINE> <INDENT> return typing.cast( str, self._properties.get("storagePolicyID"), ) <NEW_LINE> <DEDENT> @storage_policy_id.setter <NEW_LINE> def storage_policy_id(self, value: str): <NEW_LINE> <INDENT> self._properties["storagePolicyID"] = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def storage_policy_name(self) -> str: <NEW_LINE> <INDENT> return typing.cast( str, self._properties.get("storagePolicyName"), ) <NEW_LINE> <DEDENT> @storage_policy_name.setter <NEW_LINE> def storage_policy_name(self, value: str): <NEW_LINE> <INDENT> self._properties["storagePolicyName"] = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def volume_path(self) -> str: <NEW_LINE> <INDENT> return typing.cast( str, self._properties.get("volumePath"), ) <NEW_LINE> <DEDENT> @volume_path.setter <NEW_LINE> def volume_path(self, value: str): 
<NEW_LINE> <INDENT> self._properties["volumePath"] = value <NEW_LINE> <DEDENT> def __enter__(self) -> "VsphereVirtualDiskVolumeSource": <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> return False
Represents a vSphere volume resource.
6259904f7d847024c075d860
class UserProfile(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> email = models.EmailField(max_length=255, unique=True) <NEW_LINE> name = models.CharField(max_length=255) <NEW_LINE> is_active = models.BooleanField(default=True) <NEW_LINE> is_staff = models.BooleanField(default=False) <NEW_LINE> objects = UserProfileManager() <NEW_LINE> USERNAME_FIELD = "email" <NEW_LINE> REQUIRED_FIELDS = ['name'] <NEW_LINE> def get_full_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_short_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.email
Represent a user profile inside our system
6259904fa79ad1619776b502
class FlavorProfileResponse(BaseFlavorProfileType): <NEW_LINE> <INDENT> id = wtypes.wsattr(wtypes.UuidType()) <NEW_LINE> name = wtypes.wsattr(wtypes.StringType()) <NEW_LINE> provider_name = wtypes.wsattr(wtypes.StringType()) <NEW_LINE> flavor_data = wtypes.wsattr(wtypes.StringType()) <NEW_LINE> @classmethod <NEW_LINE> def from_data_model(cls, data_model, children=False): <NEW_LINE> <INDENT> flavorprofile = super(FlavorProfileResponse, cls).from_data_model( data_model, children=children) <NEW_LINE> return flavorprofile
Defines which attributes are to be shown on any response.
6259904fb7558d589546496d
class TaskDetails(DetailView): <NEW_LINE> <INDENT> template_name = "task_details.html" <NEW_LINE> context_object_name = 'task' <NEW_LINE> model = Task <NEW_LINE> pk_url_kwarg = "id"
Shows info about task.
6259904fdd821e528d6da367
class feederzip(datafeeder): <NEW_LINE> <INDENT> def __init__(self, gens, preptrain=None): <NEW_LINE> <INDENT> self.gens = gens <NEW_LINE> self.preptrain = preptrain if preptrain is not None else pk.preptrain([]) <NEW_LINE> <DEDENT> def batchstream(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> batchlist = [gen.next() for gen in self.gens] <NEW_LINE> yield self.preptrain(batchlist) <NEW_LINE> <DEDENT> <DEDENT> def restartgenerator(self): <NEW_LINE> <INDENT> for gen in self.gens: <NEW_LINE> <INDENT> if hasattr(gen, "restartgenerator"): <NEW_LINE> <INDENT> gen.restartgenerator() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> return self.batchstream().next() <NEW_LINE> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> for gen in self.gens: <NEW_LINE> <INDENT> if hasattr(gen, 'cleanup'): <NEW_LINE> <INDENT> gen.cleanup()
Zip multiple generators (with or without a restartgenerator method)
62599050287bf620b6273076
class OpenSSLSeeker(Seeker): <NEW_LINE> <INDENT> NAME = "OpenSSL" <NEW_LINE> VERSION_STRING = " part of OpenSSL " <NEW_LINE> def searchLib(self, logger): <NEW_LINE> <INDENT> key_string = self.VERSION_STRING <NEW_LINE> ids = ["SHA1", "SHA-256", "SHA-512", "SSLv3", "TLSv1", "ASN.1", "EVP", "RAND", "RSA", "Big Number"] <NEW_LINE> self._version_strings = [] <NEW_LINE> seen_copyrights = set() <NEW_LINE> match_counter = 0 <NEW_LINE> for bin_str in self._all_strings: <NEW_LINE> <INDENT> if key_string in str(bin_str): <NEW_LINE> <INDENT> copyright_string = str(bin_str) <NEW_LINE> if len([x for x in ids if x in copyright_string]) == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> chopped_copyright_string = copyright_string[copyright_string.find(key_string):] <NEW_LINE> if match_counter >= 1 and chopped_copyright_string in seen_copyrights: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> logger.debug(f"Located a copyright string of {self.NAME} in address 0x{bin_str.ea:x}") <NEW_LINE> match_counter += 1 <NEW_LINE> seen_copyrights.add(chopped_copyright_string) <NEW_LINE> self._version_strings.append(chopped_copyright_string) <NEW_LINE> <DEDENT> <DEDENT> return len(self._version_strings) <NEW_LINE> <DEDENT> def identifyVersions(self, logger): <NEW_LINE> <INDENT> results = [] <NEW_LINE> for work_str in self._version_strings: <NEW_LINE> <INDENT> results.append(self.extractVersion(work_str, start_index=work_str.find(self.NAME) + len(self.NAME) + 1, legal_chars=string.digits + string.ascii_lowercase + '.')) <NEW_LINE> <DEDENT> return results
Seeker (Identifier) for the OpenSSL open source library.
62599050596a897236128ff5
class IngredientSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Ingredient <NEW_LINE> fields = ('name',) <NEW_LINE> read_only_fields = ('id',)
Serializer for ingredients
62599050e76e3b2f99fd9e8c
class LongTable(Tabular): <NEW_LINE> <INDENT> def __init__(self, content): <NEW_LINE> <INDENT> Tabular.__init__(self, content) <NEW_LINE> self._set_tab_type('longtabu') <NEW_LINE> self.repeats = 1 <NEW_LINE> self.caption = 'Table 1' <NEW_LINE> self.label = 'table1' <NEW_LINE> self.loc = 'c' <NEW_LINE> <DEDENT> def set_location(self, loc='c'): <NEW_LINE> <INDENT> if loc not in ['c', 'l', 'r']: <NEW_LINE> <INDENT> raise ValueError('must specify c, l, or r; received {}'.format(loc)) <NEW_LINE> <DEDENT> self.loc = loc <NEW_LINE> <DEDENT> def set_caption(self, caption): <NEW_LINE> <INDENT> self.caption = caption <NEW_LINE> <DEDENT> def set_label(self, label): <NEW_LINE> <INDENT> self.label = label <NEW_LINE> <DEDENT> def set_repeats(self, repeats): <NEW_LINE> <INDENT> self.repeats = repeats <NEW_LINE> <DEDENT> def _build_rows(self): <NEW_LINE> <INDENT> space = ' '*(self.depth)*self.indent <NEW_LINE> rows = [self[i,:].as_tex(self.depth*self.indent) for i in xrange(len(self.content))] <NEW_LINE> if self.loc == 'c': <NEW_LINE> <INDENT> just = 'centering' <NEW_LINE> <DEDENT> elif self.loc == 'r': <NEW_LINE> <INDENT> just = 'raggedleft' <NEW_LINE> <DEDENT> elif self.loc == 'l': <NEW_LINE> <INDENT> just = 'raggedright' <NEW_LINE> <DEDENT> caption = '{}\\{} \\\\'.format(space, just) <NEW_LINE> caption += '\n{}\\captionsetup{{singlelinecheck=false,justification={}}}'.format( space, just) <NEW_LINE> label = ' \\label{{{}}}'.format(self.label) <NEW_LINE> caption += '\n{}\\caption{{{}}} \\\\\n'.format(space, self.caption + label) <NEW_LINE> firsthead = caption <NEW_LINE> headrows = rows[:self.repeats] <NEW_LINE> firsthead += '\n'.join(headrows) + '\n{}\\endfirsthead\n\n'.format(space) <NEW_LINE> head = '{}\\mc{{{}}}{{c}}{{{}}} \\\\\n'.format(space, self.shape[1], '\\tablename\\ \\thetable\\ -- \\emph{Continued from previous page}') <NEW_LINE> head += '\n'.join(headrows) + '\n{}\\endhead\n\n'.format(space) <NEW_LINE> foot = '{}\\mc{{{}}}{{r}}{{{}}} \\\\\n'.format(space, 
self.shape[1], '\\emph{Continued on next page}') <NEW_LINE> foot += '\n{}\\endfoot\n{}\\endlastfoot\n\n'.format(space, space) <NEW_LINE> return firsthead + head + foot + '\n'.join(rows[self.repeats:])
class for LateX longtables
6259905055399d3f056279a7
class FileOpenEvent(EngineEvent): <NEW_LINE> <INDENT> def __init__(self, file_path): <NEW_LINE> <INDENT> super(FileOpenEvent, self).__init__() <NEW_LINE> self._file_path = file_path <NEW_LINE> <DEDENT> @property <NEW_LINE> def file_path(self): <NEW_LINE> <INDENT> return self._file_path <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s: %s" % ("FileOpenEvent", self.file_path)
An object representation of a file-open event.
625990503617ad0b5ee075cd
class DBAlter: <NEW_LINE> <INDENT> pass
This class manages the interactions to modify data in the database
6259905082261d6c5273090d
class ValuePrinter(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, logger): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.logger = logger <NEW_LINE> self.values = dict(self.logger.values) <NEW_LINE> self.printing = True <NEW_LINE> self.screen = curses.initscr() <NEW_LINE> self.win = curses.newwin(10, 50, 0, 0) <NEW_LINE> curses.cbreak() <NEW_LINE> curses.noecho() <NEW_LINE> curses.halfdelay(1) <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.printing = False <NEW_LINE> sleep(1) <NEW_LINE> self.screen.keypad(0) <NEW_LINE> curses.nocbreak() <NEW_LINE> curses.echo() <NEW_LINE> curses.endwin() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while self.printing: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> char = self.screen.getch() <NEW_LINE> if char == ord('q'): <NEW_LINE> <INDENT> self.logger.stop() <NEW_LINE> self.stop() <NEW_LINE> break <NEW_LINE> <DEDENT> self.update_screen() <NEW_LINE> sleep(0.1) <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> self.stop() <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def update_screen(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.values != self.logger.values: <NEW_LINE> <INDENT> self.values = dict(self.logger.values) <NEW_LINE> self.win.addstr(0, 0, 'Omicron Pressure Logger running.') <NEW_LINE> self.win.addstr(1, 10, '(Quit with "q")') <NEW_LINE> self.win.addstr(3, 0, 'Chamber pressures') <NEW_LINE> string = 'Prep pressure: {} mbar'.format(self.values['omicron_prep_pressure']) <NEW_LINE> self.win.addstr(4, 3, string.ljust(50)) <NEW_LINE> string = 'Ana pressure : {} mbar'.format(self.values['omicron_ana_pressure']) <NEW_LINE> self.win.addstr(5, 3, string.ljust(50)) <NEW_LINE> self.win.addstr(6, 0, 'Roughing lines') <NEW_LINE> string = 'Prep : {} mbar'.format(self.values['omicron_roughing_2_prep']) <NEW_LINE> self.win.addstr(7, 3, string.ljust(40)) <NEW_LINE> string = 'Ana : {} mbar'.format(self.values['omicron_roughing_1_ana']) <NEW_LINE> 
self.win.addstr(8, 3, string.ljust(40)) <NEW_LINE> string = 'Diff : {} mbar'.format(self.values['omicron_roughing_3_diff']) <NEW_LINE> self.win.addstr(9, 3, string.ljust(40)) <NEW_LINE> self.win.refresh() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not self.logger.is_alive(): <NEW_LINE> <INDENT> self.stop() <NEW_LINE> print('Logger not running..') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> self.stop()
Print values nicely in terminal
62599050379a373c97d9a4b8
class monitoring_site(object): <NEW_LINE> <INDENT> def __init__(self,url_template): <NEW_LINE> <INDENT> self.url_template = url_template <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'url - %' % self.url_template <NEW_LINE> <DEDENT> def get_page(self,univ_num): <NEW_LINE> <INDENT> return university_page(univ_num,self.url_template % univ_num)
Wrapper for the site's URL template; get_page returns a wrapper for the specific university page to parse.
625990503c8af77a43b68984
class CancelOrStopIntentHandler(AbstractRequestHandler): <NEW_LINE> <INDENT> def can_handle(self, handler_input): <NEW_LINE> <INDENT> return (is_intent_name("AMAZON.CancelIntent")(handler_input) or is_intent_name("AMAZON.StopIntent")(handler_input)) <NEW_LINE> <DEDENT> def handle(self, handler_input): <NEW_LINE> <INDENT> logger.info("In CancelOrStopIntentHandler") <NEW_LINE> speech = STOP_MESSAGE <NEW_LINE> handler_input.response_builder.speak(speech) <NEW_LINE> return handler_input.response_builder.response
Single handler for Cancel and Stop Intent.
6259905010dbd63aa1c7206b
@jsonld.s( type=[ 'prov:SoftwareAgent', 'wfprov:WorkflowEngine', ], context={ 'prov': 'http://purl.org/dc/terms/', 'wfprov': 'http://purl.org/wf4ever/wfprov#', }, frozen=True, slots=True, ) <NEW_LINE> class SoftwareAgent: <NEW_LINE> <INDENT> label = jsonld.ib(context='rdfs:label', kw_only=True) <NEW_LINE> was_started_by = jsonld.ib( context='prov:wasStartedBy', default=None, kw_only=True, ) <NEW_LINE> _id = jsonld.ib(context='@id', kw_only=True) <NEW_LINE> @classmethod <NEW_LINE> def from_commit(cls, commit): <NEW_LINE> <INDENT> author = Person.from_commit(commit) <NEW_LINE> if commit.author != commit.committer: <NEW_LINE> <INDENT> return cls( label=commit.committer.name, id='mailto:{0}'.format(commit.committer.email), was_started_by=author, ) <NEW_LINE> <DEDENT> return author
Represent a person.
625990507b25080760ed8724
class HsMetricsNonDup(HsMetrics): <NEW_LINE> <INDENT> parent_task = luigi.Parameter(default=("ratatosk.lib.tools.picard.MergeSamFiles", ), is_list=True)
Run on non-deduplicated data
625990506fece00bbaccce48
class TestRun: <NEW_LINE> <INDENT> def test_run(self, mocker): <NEW_LINE> <INDENT> mock_serve = mocker.patch('websockets.serve') <NEW_LINE> mock_get_event_loop = mocker.patch('asyncio.get_event_loop') <NEW_LINE> mock_ensure_future = mocker.patch('asyncio.ensure_future') <NEW_LINE> hub = DumplingHub() <NEW_LINE> mocker.patch.object(hub, '_announce_system_status') <NEW_LINE> mocker.patch.object(hub, '_logger') <NEW_LINE> hub.run() <NEW_LINE> assert mock_serve.call_count == 2 <NEW_LINE> assert mock_serve.call_args_list == [ ((hub._grab_dumplings, hub.address, hub.in_port),), ((hub._emit_dumplings, hub.address, hub.out_port),), ] <NEW_LINE> mock_get_event_loop.assert_called_once() <NEW_LINE> mock_get_event_loop.return_value.run_forever.assert_called_once() <NEW_LINE> mock_ensure_future.assert_called_once_with( hub._announce_system_status() ) <NEW_LINE> <DEDENT> def test_websocket_error(self, mocker): <NEW_LINE> <INDENT> mocker.patch('websockets.serve') <NEW_LINE> mocker.patch('asyncio.ensure_future') <NEW_LINE> mock_get_event_loop = mocker.patch('asyncio.get_event_loop') <NEW_LINE> mock_get_event_loop.return_value.run_until_complete.side_effect = ( OSError ) <NEW_LINE> hub = DumplingHub() <NEW_LINE> with pytest.raises(NetDumplingsError): <NEW_LINE> <INDENT> hub.run() <NEW_LINE> <DEDENT> <DEDENT> def test_keyboard_interrupt(self, mocker): <NEW_LINE> <INDENT> mocker.patch('websockets.serve') <NEW_LINE> mocker.patch('asyncio.ensure_future') <NEW_LINE> mock_get_event_loop = mocker.patch('asyncio.get_event_loop') <NEW_LINE> mock_get_event_loop.return_value.run_forever.side_effect = ( KeyboardInterrupt ) <NEW_LINE> hub = DumplingHub() <NEW_LINE> mocker.patch.object(hub, '_announce_system_status') <NEW_LINE> mock_logger = mocker.patch.object(hub, '_logger') <NEW_LINE> hub.run() <NEW_LINE> mock_logger.warning.assert_called_once()
Test the run() method of the DumplingHub.
625990500c0af96317c577a7
class DummyClientComputation(tff.learning.framework.ClientDeltaFn): <NEW_LINE> <INDENT> def __init__(self, model, input_spec, client_weight_fn=None): <NEW_LINE> <INDENT> del client_weight_fn <NEW_LINE> self._model = tff.learning.framework.enhance(model) <NEW_LINE> self._input_spec = input_spec <NEW_LINE> py_typecheck.check_type(self._model, tff.learning.framework.EnhancedModel) <NEW_LINE> self._client_weight_fn = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def variables(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> @tf.function <NEW_LINE> def __call__(self, dataset, initial_weights): <NEW_LINE> <INDENT> del initial_weights <NEW_LINE> model = self._model <NEW_LINE> @tf.function <NEW_LINE> def reduce_fn_num_examples(num_examples_sum, batch): <NEW_LINE> <INDENT> num_examples_in_batch = tf.shape(batch['x'])[0] <NEW_LINE> return num_examples_sum + num_examples_in_batch <NEW_LINE> <DEDENT> @tf.function <NEW_LINE> def reduce_fn_dataset_mean(sum_vector, batch): <NEW_LINE> <INDENT> sum_batch = tf.reshape(tf.reduce_sum(batch['x'], [0]), (-1, 1)) <NEW_LINE> return sum_vector + sum_batch <NEW_LINE> <DEDENT> num_examples_sum = dataset.reduce( initial_state=tf.constant(0), reduce_func=reduce_fn_num_examples) <NEW_LINE> example_vector_sum = dataset.reduce( initial_state=tf.zeros((self._input_spec, 1)), reduce_func=reduce_fn_dataset_mean) <NEW_LINE> weights_delta = [example_vector_sum / tf.cast(num_examples_sum, tf.float32)] <NEW_LINE> aggregated_outputs = model.report_local_outputs() <NEW_LINE> weights_delta, has_non_finite_delta = ( tensor_utils.zero_all_if_any_non_finite(weights_delta)) <NEW_LINE> weights_delta_weight = tf.cast(num_examples_sum, tf.float32) <NEW_LINE> return tff.learning.framework.ClientOutput( weights_delta, weights_delta_weight, aggregated_outputs, collections.OrderedDict( num_examples=num_examples_sum, has_non_finite_delta=has_non_finite_delta, ))
Client TensorFlow logic for example. Designed to mimic the class `ClientFedAvg` from federated_averaging.py
6259905023e79379d538d98a
class Topic(DocumentBase): <NEW_LINE> <INDENT> zones = ['title', 'desc', 'narr'] <NEW_LINE> def __init__(self, fname): <NEW_LINE> <INDENT> if fname.endswith('.gz'): <NEW_LINE> <INDENT> opener = gzip.open <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> opener = open <NEW_LINE> <DEDENT> with opener(fname, mode='rt') as instream: <NEW_LINE> <INDENT> parsed_results = self.parse(instream) <NEW_LINE> <DEDENT> sorted_results = sorted(parsed_results, key=operator.itemgetter(0)) <NEW_LINE> objects = collections.defaultdict(list) <NEW_LINE> for idx, tag, content in sorted_results: <NEW_LINE> <INDENT> objects[tag].append(content) <NEW_LINE> <DEDENT> self.tid = objects['num'][0].strip() <NEW_LINE> self._vtexts = [] <NEW_LINE> self._vtext_zones = [] <NEW_LINE> for idx, tag, content in sorted_results: <NEW_LINE> <INDENT> if tag in self.zones: <NEW_LINE> <INDENT> self._vtexts.append(content) <NEW_LINE> self._vtext_zones.append(tag) <NEW_LINE> <DEDENT> <DEDENT> self.titles = [vt for i, vt in enumerate(self._vtexts) if self._vtext_zones[i] == 'title'] <NEW_LINE> self.descs = [vt for i, vt in enumerate(self._vtexts) if self._vtext_zones[i] == 'desc'] <NEW_LINE> self.narrs = [vt for i, vt in enumerate(self._vtexts) if self._vtext_zones[i] == 'narr'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def uid(self): <NEW_LINE> <INDENT> return self.tid
Represents one query topic. Like the document has zones ``title`` and ``text``, the Topic has zones ``title``, ``desc``, and ``narr``. >>> fname = '../test_data/topic-401-AH.vert' >>> t = Topic(fname) The Topic is uniquely identified by its topic id (``tid``): >>> t.tid '10.2452/401-AH' Which is also its ``uid``: >>> t.uid '10.2452/401-AH' The zones are again represented as VTexts. >>> description = t.descs[0] >>> print(description.to_text()) Najděte dokumenty o růstech cen po zavedení Eura .
62599050cb5e8a47e493cbcd
class RevokeActionTestCase(AdminTestMixin, DjangoCAWithCertTestCase): <NEW_LINE> <INDENT> def test_basic(self): <NEW_LINE> <INDENT> data = { 'action': 'revoke', '_selected_action': [self.cert.pk], } <NEW_LINE> response = self.client.post(self.changelist_url, data) <NEW_LINE> self.assertRedirects(response, self.changelist_url) <NEW_LINE> cert = Certificate.objects.get(serial=self.cert.serial) <NEW_LINE> self.assertTrue(cert.revoked) <NEW_LINE> self.assertIsNone(cert.revoked_reason) <NEW_LINE> response = self.client.post(self.changelist_url, data) <NEW_LINE> self.assertRedirects(response, self.changelist_url)
Test the "revoke" action in the changelist.
62599050435de62698e9d28c
class BigqueryTablesGetRequest(messages.Message): <NEW_LINE> <INDENT> datasetId = messages.StringField(1, required=True) <NEW_LINE> projectId = messages.StringField(2, required=True) <NEW_LINE> tableId = messages.StringField(3, required=True)
A BigqueryTablesGetRequest object. Fields: datasetId: Dataset ID of the requested table projectId: Project ID of the requested table tableId: Table ID of the requested table
625990508da39b475be04673
class AttributedMixin: <NEW_LINE> <INDENT> def __contains__(self, name): <NEW_LINE> <INDENT> return name in self._attributes <NEW_LINE> <DEDENT> def __getitem__(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if name in self._attributes: <NEW_LINE> <INDENT> return self._attributes[name] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def set_attributes(self, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._attributes.update(kwargs) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self._attributes = kwargs <NEW_LINE> <DEDENT> <DEDENT> def attributes(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._attributes.keys() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return []
Obsługa atrybutów.
6259905023849d37ff85254c
class DUP2_X2(NoOperandsInstruction): <NEW_LINE> <INDENT> def execute(self, frame): <NEW_LINE> <INDENT> stack = frame.operand_stack <NEW_LINE> slot1 = stack.pop_slot() <NEW_LINE> slot2 = stack.pop_slot() <NEW_LINE> slot3 = stack.pop_slot() <NEW_LINE> slot4 = stack.pop_slot() <NEW_LINE> stack.push_slot(copy_slot(slot2)) <NEW_LINE> stack.push_slot(copy_slot(slot1)) <NEW_LINE> stack.push_slot(slot4) <NEW_LINE> stack.push_slot(slot3) <NEW_LINE> stack.push_slot(slot2) <NEW_LINE> stack.push_slot(slot1)
bottom -> top [...][d][c][b][a] ____/ __/ | __/ V V [...][b][a][d][c][b][a]
62599050a79ad1619776b503
class FP16Optimizer(_FP16OptimizerMixin, optim.FairseqOptimizer): <NEW_LINE> <INDENT> def __init__(self, args, params, fp32_optimizer, fp32_params): <NEW_LINE> <INDENT> super().__init__(args) <NEW_LINE> self.fp16_params = params <NEW_LINE> self.fp32_optimizer = fp32_optimizer <NEW_LINE> self.fp32_params = fp32_params <NEW_LINE> if getattr(args, 'fp16_scale_window', None) is None: <NEW_LINE> <INDENT> if len(args.update_freq) > 1: <NEW_LINE> <INDENT> raise ValueError( '--fp16-scale-window must be given explicitly when using a ' 'custom --update-freq schedule' ) <NEW_LINE> <DEDENT> data_parallel_size = int(args.distributed_world_size / args.model_parallel_size) <NEW_LINE> scale_window = int(2**14 / data_parallel_size / args.update_freq[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> scale_window = args.fp16_scale_window <NEW_LINE> <DEDENT> if not getattr(args, 'bf16', False): <NEW_LINE> <INDENT> self.scaler = DynamicLossScaler( init_scale=args.fp16_init_scale, scale_window=scale_window, tolerance=args.fp16_scale_tolerance, threshold=args.threshold_loss_scale, min_loss_scale=args.min_loss_scale ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.scaler = None <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def build_optimizer(cls, args, params): <NEW_LINE> <INDENT> flatten = not getattr(args, 'fp16_no_flatten_grads', False) <NEW_LINE> if getattr(args, 'bf16', False): <NEW_LINE> <INDENT> flatten = False <NEW_LINE> <DEDENT> fp32_params = cls.build_fp32_params(params, flatten=flatten) <NEW_LINE> if flatten: <NEW_LINE> <INDENT> fp32_optimizer = optim.build_optimizer(args, [fp32_params]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fp32_optimizer = optim.build_optimizer(args, fp32_params) <NEW_LINE> <DEDENT> if flatten and not fp32_optimizer.supports_flat_params: <NEW_LINE> <INDENT> raise RuntimeError( 'chosen optimizer does not support flat params, ' 'please set --fp16-no-flatten-grads' ) <NEW_LINE> <DEDENT> return cls(args, params, fp32_optimizer, fp32_params) 
<NEW_LINE> <DEDENT> @property <NEW_LINE> def optimizer(self): <NEW_LINE> <INDENT> return self.fp32_optimizer.optimizer <NEW_LINE> <DEDENT> @property <NEW_LINE> def optimizer_config(self): <NEW_LINE> <INDENT> return self.fp32_optimizer.optimizer_config <NEW_LINE> <DEDENT> def get_lr(self): <NEW_LINE> <INDENT> return self.fp32_optimizer.get_lr() <NEW_LINE> <DEDENT> def set_lr(self, lr): <NEW_LINE> <INDENT> self.fp32_optimizer.set_lr(lr)
Wrap an *optimizer* to support FP16 (mixed precision) training.
62599050a8ecb033258726a1
class CorsConfiguration(AWSProperty): <NEW_LINE> <INDENT> props: PropsDictType = { "CorsRules": ([CorsRules], True), }
`CorsConfiguration <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-cors.html>`__
62599050b57a9660fecd2f0b
class UserInfoEmbed(discord.Embed): <NEW_LINE> <INDENT> def __init__(self, ctx: commands.Context, user: discord.User): <NEW_LINE> <INDENT> self.user = user <NEW_LINE> self.prefix = ctx.prefix.replace(ctx.bot.user.mention, f"@{ctx.bot.user.name}") <NEW_LINE> self.set_author( name=str(user), url=f"https://discordapp.com/users/{user.id}", icon_url=user.avatar_url, ) <NEW_LINE> self.set_thumbnail(url=user.avatar_url) <NEW_LINE> self.set_footer( text=f"Utilise {self.prefix}help pour obtenir de l'aide", icon_url=ctx.bot.user.avatar_url, ) <NEW_LINE> self.set_fields() <NEW_LINE> for field in self.fields_list: <NEW_LINE> <INDENT> self.add_field( name=field["name"], value=field["value"], inline=field["inline"] ) <NEW_LINE> <DEDENT> super().__init__( title="Informations :", colour=discord.Colour(user.color.value), timestamp=TZ.localize(datetime.now()), ) <NEW_LINE> <DEDENT> def set_fields(self): <NEW_LINE> <INDENT> self.fields_list = [ {"name": "ID :", "value": self.user.id, "inline": True}, {"name": "Statut :", "value": f"*{self.user.status}*", "inline": True}, { "name": "A créé son compte le :", "value": self.user.created_at.strftime("%A %d/%m/%Y à %H:%M:%S"), "inline": False, }, ]
Représente un discord.Embed avec les informations sur un utilisateur. TODO : add desc
62599050004d5f362081fa30
class lateralNet(BasicModule): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> self.inplanes = 16 <NEW_LINE> super(lateralNet, self).__init__(path) <NEW_LINE> block = BasicBlock <NEW_LINE> self.conv1 = nn.Conv2d(2, 16, 3, padding=1) <NEW_LINE> self.bn1 = nn.BatchNorm2d(16) <NEW_LINE> self.relu1 = nn.ReLU(True) <NEW_LINE> self.layer1 = self._make_layer(block, 16, 1, stride=1) <NEW_LINE> self.layer2 = self._make_layer(block, 32, 2, stride=2) <NEW_LINE> self.layer3 = self._make_layer(block, 64, 2, stride=2) <NEW_LINE> self.layer4 = self._make_layer(block, 128, 2, stride=2) <NEW_LINE> self.layer5 = self._make_layer(block, 128, 2, stride=2) <NEW_LINE> self.conv6 = nn.Conv2d(256, 192, 3, 1, padding=1) <NEW_LINE> self.avgpool = nn.MaxPool2d(3) <NEW_LINE> self.fc2 = nn.Linear(192, 1) <NEW_LINE> self.maxpool1 = nn.MaxPool2d(kernel_size=2, stride=2, padding=1) <NEW_LINE> <DEDENT> def _make_layer(self, block, outplanes, blocks, stride=1): <NEW_LINE> <INDENT> downsample = None <NEW_LINE> if stride != 1 or self.inplanes != outplanes * block.expansion: <NEW_LINE> <INDENT> downsample = nn.Sequential( nn.Conv2d(self.inplanes, outplanes*block.expansion, kernel_size=1, stride=stride, bias=False), nn.BatchNorm2d(outplanes*block.expansion), ) <NEW_LINE> <DEDENT> layers = [] <NEW_LINE> layers.append(block(self.inplanes, outplanes, stride, downsample)) <NEW_LINE> self.inplanes = outplanes * block.expansion <NEW_LINE> for i in range(1, blocks): <NEW_LINE> <INDENT> layers.append(block(self.inplanes, outplanes)) <NEW_LINE> <DEDENT> return nn.Sequential(*layers) <NEW_LINE> <DEDENT> def forward(self, x, incs): <NEW_LINE> <INDENT> h = x <NEW_LINE> h = self.conv1(h) <NEW_LINE> h = self.relu1(self.bn1(h)) <NEW_LINE> h = self.layer1(h) <NEW_LINE> h = self.layer2(h) <NEW_LINE> h = self.layer3(h) <NEW_LINE> h = self.layer4(h) <NEW_LINE> h_laternal = self.maxpool1(h) <NEW_LINE> h = self.layer5(h) <NEW_LINE> _,_,H,W = h_laternal.shape <NEW_LINE> h_up = F.upsample(h, size=(H,W), 
mode='bilinear') <NEW_LINE> h = torch.cat([h_up, h_laternal], dim=1) <NEW_LINE> h = self.conv6(h) <NEW_LINE> h = self.avgpool(h) <NEW_LINE> h = h.view(h.size(0), -1) <NEW_LINE> h = self.fc2(h) <NEW_LINE> if self.train: <NEW_LINE> <INDENT> h += random.random()/5 <NEW_LINE> <DEDENT> return h
in 40x40
6259905076e4537e8c3f0a15
class NewLayerMergedFromVisible (Command): <NEW_LINE> <INDENT> display_name = _("New Layer from Visible") <NEW_LINE> def __init__(self, doc, **kwds): <NEW_LINE> <INDENT> super(NewLayerMergedFromVisible, self).__init__(doc, **kwds) <NEW_LINE> self._old_current_path = doc.layer_stack.current_path <NEW_LINE> self._result_insert_path = None <NEW_LINE> self._result_layer = None <NEW_LINE> self._result_final_path = None <NEW_LINE> self._paths_merged = None <NEW_LINE> <DEDENT> def redo(self): <NEW_LINE> <INDENT> rootstack = self.doc.layer_stack <NEW_LINE> merged = self._result_layer <NEW_LINE> if merged is None: <NEW_LINE> <INDENT> self._result_insert_path = (len(rootstack),) <NEW_LINE> self._paths_merged = [] <NEW_LINE> for path, layer in rootstack.walk(visible=True): <NEW_LINE> <INDENT> if path[0] < self._result_insert_path[0]: <NEW_LINE> <INDENT> self._result_insert_path = (path[0],) <NEW_LINE> <DEDENT> self._paths_merged.append(path) <NEW_LINE> <DEDENT> merged = rootstack.layer_new_merge_visible() <NEW_LINE> self._result_layer = merged <NEW_LINE> <DEDENT> assert self._result_insert_path is not None <NEW_LINE> rootstack.deepinsert(self._result_insert_path, merged) <NEW_LINE> self._result_final_path = rootstack.deepindex(merged) <NEW_LINE> rootstack.current_path = self._result_final_path <NEW_LINE> <DEDENT> def undo(self): <NEW_LINE> <INDENT> rootstack = self.doc.layer_stack <NEW_LINE> rootstack.deeppop(self._result_final_path) <NEW_LINE> rootstack.current_path = self._old_current_path
Create a new layer from the merge of all visible layers Performs a Merge Visible, and inserts the result into the layer stack just before the highest root of any visible layer.
62599050b830903b9686eec2
class RadioButtonGroup: <NEW_LINE> <INDENT> def __init__(self, parent, command, labels, default): <NEW_LINE> <INDENT> self.command = command <NEW_LINE> self.buttons = [] <NEW_LINE> self.button = None <NEW_LINE> for text in labels: <NEW_LINE> <INDENT> if type(text) in (ListType, TupleType): <NEW_LINE> <INDENT> b = QuiskCycleCheckbutton(parent, self.OnButton, text, is_radio=True) <NEW_LINE> for t in text: <NEW_LINE> <INDENT> if t == default and self.button is None: <NEW_LINE> <INDENT> b.SetLabel(t) <NEW_LINE> self.button = b <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> b = QuiskCheckbutton(parent, self.OnButton, text) <NEW_LINE> if text == default and self.button is None: <NEW_LINE> <INDENT> b.SetValue(True) <NEW_LINE> self.button = b <NEW_LINE> <DEDENT> <DEDENT> self.buttons.append(b) <NEW_LINE> <DEDENT> <DEDENT> def ReplaceButton(self, index, button): <NEW_LINE> <INDENT> b = self.buttons[index] <NEW_LINE> b.Destroy() <NEW_LINE> self.buttons[index] = button <NEW_LINE> button.command = self.OnButton <NEW_LINE> <DEDENT> def SetLabel(self, label, do_cmd=False): <NEW_LINE> <INDENT> self.button = None <NEW_LINE> for b in self.buttons: <NEW_LINE> <INDENT> if self.button is not None: <NEW_LINE> <INDENT> b.SetValue(False) <NEW_LINE> <DEDENT> elif isinstance(b, QuiskCycleCheckbutton): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> index = b.labels.index(label) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> b.SetValue(False) <NEW_LINE> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> b.SetIndex(index) <NEW_LINE> self.button = b <NEW_LINE> b.SetValue(True) <NEW_LINE> <DEDENT> <DEDENT> elif b.GetLabel() == label: <NEW_LINE> <INDENT> b.SetValue(True) <NEW_LINE> self.button = b <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> b.SetValue(False) <NEW_LINE> <DEDENT> <DEDENT> if do_cmd and self.command and self.button: <NEW_LINE> <INDENT> event = wx.PyEvent() <NEW_LINE> event.SetEventObject(self.button) <NEW_LINE> self.command(event) <NEW_LINE> 
<DEDENT> <DEDENT> def GetButtons(self): <NEW_LINE> <INDENT> return self.buttons <NEW_LINE> <DEDENT> def OnButton(self, event): <NEW_LINE> <INDENT> win = event.GetEventObject() <NEW_LINE> for b in self.buttons: <NEW_LINE> <INDENT> if b is win: <NEW_LINE> <INDENT> self.button = b <NEW_LINE> b.SetValue(True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> b.SetValue(False) <NEW_LINE> <DEDENT> <DEDENT> if self.command: <NEW_LINE> <INDENT> self.command(event) <NEW_LINE> <DEDENT> <DEDENT> def GetLabel(self): <NEW_LINE> <INDENT> if not self.button: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.button.GetLabel() <NEW_LINE> <DEDENT> def GetSelectedButton(self): <NEW_LINE> <INDENT> return self.button
This class encapsulates a group of radio buttons. This class is not a button! The "labels" is a list of labels for the toggle buttons. An item of labels can be a list/tuple, and the corresponding button will be a cycle button.
62599050e5267d203ee6cd7a
class CipurpfifipversionEnum(Enum): <NEW_LINE> <INDENT> ipv4 = 1 <NEW_LINE> ipv6 = 2 <NEW_LINE> @staticmethod <NEW_LINE> def _meta_info(): <NEW_LINE> <INDENT> from ydk.models.cisco_ios_xe._meta import _CISCO_IP_URPF_MIB as meta <NEW_LINE> return meta._meta_table['CiscoIpUrpfMib.Cipurpfifmontable.Cipurpfifmonentry.CipurpfifipversionEnum']
CipurpfifipversionEnum Specifies the version of IP forwarding on an interface to which the table row URPF counts, rates, and configuration apply. .. data:: ipv4 = 1 .. data:: ipv6 = 2
62599050097d151d1a2c2500
class VirtualRouterListener(AWSProperty): <NEW_LINE> <INDENT> props: PropsDictType = { "PortMapping": (PortMapping, True), }
`VirtualRouterListener <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appmesh-virtualrouter-virtualrouterlistener.html>`__
625990506fece00bbaccce4a
class WeightedNGramDict(object): <NEW_LINE> <INDENT> def __init__(self, corpus, train_sentences): <NEW_LINE> <INDENT> self.weights = {} <NEW_LINE> sub_corpus = bagofwords.BagOfWords() <NEW_LINE> for sentence in train_sentences: <NEW_LINE> <INDENT> sub_corpus.add_words(sentence.lower().split(" ")) <NEW_LINE> <DEDENT> self.weights = corpus.tfidf(sub_corpus) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> output = "WeightedNGramDict\n================\n" <NEW_LINE> for word in self.weights.keys(): <NEW_LINE> <INDENT> output += "%s: %s\n" % (word, self.weights[word]) <NEW_LINE> <DEDENT> return output.strip() <NEW_LINE> <DEDENT> def get_sentence_weight(self, sentence): <NEW_LINE> <INDENT> total_weight = 0 <NEW_LINE> for word in sentence.split(" "): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> total_weight += self.weights[word] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return total_weight
A dictionary where each NGram hold only one value - a weight.
62599050b5575c28eb713711
class StartDateSorter(Sorter): <NEW_LINE> <INDENT> def __init__(self, *, ascending: bool = True, missing_first: bool = False) -> None: <NEW_LINE> <INDENT> self.order = 1 if ascending else -1 <NEW_LINE> self.missing_value = ( date.max if (ascending ^ missing_first) else date.min ).toordinal() <NEW_LINE> <DEDENT> def key(self, task: Task) -> Tuple[int, ...]: <NEW_LINE> <INDENT> if task.start_on: <NEW_LINE> <INDENT> return (self.order * task.start_on.toordinal(),) <NEW_LINE> <DEDENT> return (self.missing_value,) <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return self.__class__.__name__
Sort tasks by start dates. Tasks can be sorted in either ascending or descending order. Tasks without start dates can be put at either the start or the end of the section. :param ascending: Direction of the sort. :param missing_first: Whether to place tasks without a due date at the start or end.
625990507b25080760ed8725
class itkMapContainerULSUL(ITKCommonBasePython.itkObject,pyBasePython.mapsetUL): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> _itkMapContainerPython.itkMapContainerULSUL_swiginit(self,_itkMapContainerPython.new_itkMapContainerULSUL(*args)) <NEW_LINE> <DEDENT> def __New_orig__(): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> def CastToSTLContainer(self): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_CastToSTLContainer(self) <NEW_LINE> <DEDENT> def CastToSTLConstContainer(self): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_CastToSTLConstContainer(self) <NEW_LINE> <DEDENT> def ElementAt(self, *args): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_ElementAt(self, *args) <NEW_LINE> <DEDENT> def CreateElementAt(self, *args): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_CreateElementAt(self, *args) <NEW_LINE> <DEDENT> def GetElement(self, *args): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_GetElement(self, *args) <NEW_LINE> <DEDENT> def SetElement(self, *args): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_SetElement(self, *args) <NEW_LINE> <DEDENT> def InsertElement(self, *args): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_InsertElement(self, *args) <NEW_LINE> <DEDENT> def IndexExists(self, *args): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_IndexExists(self, *args) <NEW_LINE> <DEDENT> def GetElementIfIndexExists(self, *args): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_GetElementIfIndexExists(self, *args) <NEW_LINE> <DEDENT> def CreateIndex(self, *args): 
<NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_CreateIndex(self, *args) <NEW_LINE> <DEDENT> def DeleteIndex(self, *args): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_DeleteIndex(self, *args) <NEW_LINE> <DEDENT> def Size(self): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_Size(self) <NEW_LINE> <DEDENT> def Reserve(self, *args): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_Reserve(self, *args) <NEW_LINE> <DEDENT> def Squeeze(self): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_Squeeze(self) <NEW_LINE> <DEDENT> def Initialize(self): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_Initialize(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkMapContainerPython.delete_itkMapContainerULSUL <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkMapContainerPython.itkMapContainerULSUL_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkMapContainerULSUL.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New)
Proxy of C++ itkMapContainerULSUL class
6259905007d97122c4218134
class Builder: <NEW_LINE> <INDENT> walk = Graph() <NEW_LINE> subway = Graph() <NEW_LINE> bus = Graph() <NEW_LINE> def addStation(self, name): <NEW_LINE> <INDENT> self.walk.addNode(name) <NEW_LINE> self.subway.addNode(name) <NEW_LINE> self.bus.addNode(name) <NEW_LINE> <DEDENT> def canWalk(self, a, b, time): <NEW_LINE> <INDENT> self.walk.join(a, b, time) <NEW_LINE> <DEDENT> def canTakeSubway(self, a, b, time): <NEW_LINE> <INDENT> self.subway.join(a, b, time) <NEW_LINE> <DEDENT> def canTakeBus(self, a, b, time): <NEW_LINE> <INDENT> self.bus.join(a, b, time)
Further abstract Graph for our specific case.
6259905073bcbd0ca4bcb719
class UserChangeForm(forms.ModelForm): <NEW_LINE> <INDENT> password = ReadOnlyPasswordHashField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Thinker <NEW_LINE> fields = ('email', 'password', 'date_of_birth', 'is_active', 'is_admin') <NEW_LINE> <DEDENT> def clean_password(self): <NEW_LINE> <INDENT> return self.initial["password"]
A form for updating users. Includes all the fields on the user, but replaces the password field with admin's password hash display field.
62599050e64d504609df9e16
class SomfyDataUpdateCoordinator(DataUpdateCoordinator): <NEW_LINE> <INDENT> def __init__( self, hass: HomeAssistant, logger: logging.Logger, *, name: str, client: SomfyApi, update_interval: timedelta | None = None, ) -> None: <NEW_LINE> <INDENT> super().__init__( hass, logger, name=name, update_interval=update_interval, ) <NEW_LINE> self.data = {} <NEW_LINE> self.client = client <NEW_LINE> self.site_device = {} <NEW_LINE> self.last_site_index = -1 <NEW_LINE> <DEDENT> async def _async_update_data(self) -> dict[str, Device]: <NEW_LINE> <INDENT> if not self.site_device: <NEW_LINE> <INDENT> sites = await self.hass.async_add_executor_job(self.client.get_sites) <NEW_LINE> if not sites: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> self.site_device = {site.id: [] for site in sites} <NEW_LINE> <DEDENT> site_id = self._site_id <NEW_LINE> try: <NEW_LINE> <INDENT> devices = await self.hass.async_add_executor_job( self.client.get_devices, site_id ) <NEW_LINE> self.site_device[site_id] = devices <NEW_LINE> <DEDENT> except SetupNotFoundException: <NEW_LINE> <INDENT> del self.site_device[site_id] <NEW_LINE> return await self._async_update_data() <NEW_LINE> <DEDENT> except QuotaViolationException: <NEW_LINE> <INDENT> self.logger.warning("Quota violation") <NEW_LINE> <DEDENT> return {dev.id: dev for devices in self.site_device.values() for dev in devices} <NEW_LINE> <DEDENT> @property <NEW_LINE> def _site_id(self): <NEW_LINE> <INDENT> self.last_site_index = (self.last_site_index + 1) % len(self.site_device) <NEW_LINE> return list(self.site_device.keys())[self.last_site_index]
Class to manage fetching Somfy data.
62599050009cb60464d029c9
class GtkUIPlugin(PluginInitBase): <NEW_LINE> <INDENT> def __init__(self, plugin_name): <NEW_LINE> <INDENT> from piaportforward.gtkui import GtkUI as _plugin_cls <NEW_LINE> self._plugin_cls = _plugin_cls <NEW_LINE> super(GtkUIPlugin, self).__init__(plugin_name)
Plugin for the GTK UI
62599050d6c5a102081e35ae
class Buffer2D(BufferBase): <NEW_LINE> <INDENT> def __init__(self, cell_num, *args, **kwargs): <NEW_LINE> <INDENT> if cell_num is None: cell_num = 100 <NEW_LINE> super().__init__(cell_num, *args, **kwargs) <NEW_LINE> if self.origin is None: <NEW_LINE> <INDENT> self.origin = np.zeros(2) <NEW_LINE> <DEDENT> self.dtheta = np.pi / 2.0 / self.cell_num <NEW_LINE> <DEDENT> def _find_cell_id(self, F): <NEW_LINE> <INDENT> dist = np.linalg.norm(F, axis=1) <NEW_LINE> theta = np.arccos(F[:, 0] / dist) <NEW_LINE> cell_ids = theta / self.dtheta <NEW_LINE> cell_ids = np.minimum(cell_ids.astype(int), self.cell_num - 1) <NEW_LINE> return cell_ids <NEW_LINE> <DEDENT> def _get_graph_edges(self, valid_cells): <NEW_LINE> <INDENT> edges = np.array([[i, i + 1] for i in range(len(valid_cells) - 1)]) <NEW_LINE> return edges
2D performance buffer.
62599050498bea3a75a58fb1
class ProtectedExpatParser(expatreader.ExpatParser): <NEW_LINE> <INDENT> def __init__(self, forbid_dtd=True, forbid_entities=True, *args, **kwargs): <NEW_LINE> <INDENT> expatreader.ExpatParser.__init__(self, *args, **kwargs) <NEW_LINE> self.forbid_dtd = forbid_dtd <NEW_LINE> self.forbid_entities = forbid_entities <NEW_LINE> <DEDENT> def start_doctype_decl(self, name, sysid, pubid, has_internal_subset): <NEW_LINE> <INDENT> raise ValueError("Inline DTD forbidden") <NEW_LINE> <DEDENT> def entity_decl(self, entityName, is_parameter_entity, value, base, systemId, publicId, notationName): <NEW_LINE> <INDENT> raise ValueError("<!ENTITY> entity declaration forbidden") <NEW_LINE> <DEDENT> def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name): <NEW_LINE> <INDENT> raise ValueError("<!ENTITY> unparsed entity forbidden") <NEW_LINE> <DEDENT> def external_entity_ref(self, context, base, systemId, publicId): <NEW_LINE> <INDENT> raise ValueError("<!ENTITY> external entity forbidden") <NEW_LINE> <DEDENT> def notation_decl(self, name, base, sysid, pubid): <NEW_LINE> <INDENT> raise ValueError("<!ENTITY> notation forbidden") <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> expatreader.ExpatParser.reset(self) <NEW_LINE> if self.forbid_dtd: <NEW_LINE> <INDENT> self._parser.StartDoctypeDeclHandler = self.start_doctype_decl <NEW_LINE> self._parser.EndDoctypeDeclHandler = None <NEW_LINE> <DEDENT> if self.forbid_entities: <NEW_LINE> <INDENT> self._parser.EntityDeclHandler = self.entity_decl <NEW_LINE> self._parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl <NEW_LINE> self._parser.ExternalEntityRefHandler = self.external_entity_ref <NEW_LINE> self._parser.NotationDeclHandler = self.notation_decl <NEW_LINE> try: <NEW_LINE> <INDENT> self._parser.SkippedEntityHandler = None <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass
An expat parser which disables DTD's and entities by default.
625990508e71fb1e983bcf55
class HTTPSDFRead(SDFRead): <NEW_LINE> <INDENT> _data_struct = HTTPDataStruct <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if None in (PageCacheURL, HTTPArray): <NEW_LINE> <INDENT> raise ImportError("thingking") <NEW_LINE> <DEDENT> super(HTTPSDFRead, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def parse_header(self): <NEW_LINE> <INDENT> ascfile = HTTPArray(self.header) <NEW_LINE> max_header_size = 1024*1024 <NEW_LINE> lines = cStringIO(ascfile[:max_header_size].data[:]) <NEW_LINE> while True: <NEW_LINE> <INDENT> l = lines.readline() <NEW_LINE> if self._eof in l: break <NEW_LINE> self.parse_line(l, lines) <NEW_LINE> <DEDENT> hoff = lines.tell() <NEW_LINE> if self.header != self.filename: <NEW_LINE> <INDENT> hoff = 0 <NEW_LINE> <DEDENT> self.parameters['header_offset'] = hoff
Read an SDF file hosted on the internet. Given an SDF file (see http://bitbucket.org/JohnSalmon/sdf), parse the ASCII header and construct numpy memmap array access. Parameters ---------- filename: string The filename associated with the data to be loaded. header: string, optional If separate from the data file, a file containing the header can be specified. Default: None. Returns ------- self : SDFRead object Dict-like container of parameters and data. References ---------- SDF is described here: J. K. Salmon and M. S. Warren. Self-Describing File (SDF) Library. Zenodo, Jun 2014. URL http://bitbucket.org/JohnSalmon/sdf. Examples -------- >>> sdf = SDFRead("data.sdf", header="data.hdr") >>> print sdf.parameters >>> print sdf['x']
625990508da39b475be04676
class _KMedoids(ClusterMixin, TransformerMixin): <NEW_LINE> <INDENT> def __init__(self, n_clusters=8, n_passes=1, metric='euclidean', random_state=None): <NEW_LINE> <INDENT> self.n_clusters = n_clusters <NEW_LINE> self.n_passes = n_passes <NEW_LINE> self.metric = metric <NEW_LINE> self.random_state = random_state <NEW_LINE> <DEDENT> def fit(self, X, y=None): <NEW_LINE> <INDENT> if self.n_passes < 1: <NEW_LINE> <INDENT> raise ValueError('n_passes must be greater than 0. got %s' % self.n_passes) <NEW_LINE> <DEDENT> if self.n_clusters < 1: <NEW_LINE> <INDENT> raise ValueError('n_passes must be greater than 0. got %s' % self.n_clusters) <NEW_LINE> <DEDENT> dmat = libdistance.pdist(X, metric=self.metric) <NEW_LINE> ids, self.inertia_, _ = _kmedoids.kmedoids( self.n_clusters, dmat, self.n_passes, random_state=self.random_state) <NEW_LINE> self.labels_, mapping = _kmedoids.contigify_ids(ids) <NEW_LINE> smapping = sorted(mapping.items(), key=itemgetter(1)) <NEW_LINE> self.cluster_ids_ = np.array(smapping)[:, 0] <NEW_LINE> self.cluster_centers_ = X[self.cluster_ids_] <NEW_LINE> return self <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> labels, inertia = libdistance.assign_nearest( X, self.cluster_centers_, metric=self.metric) <NEW_LINE> return labels <NEW_LINE> <DEDENT> def fit_predict(self, X, y=None): <NEW_LINE> <INDENT> return self.fit(X, y).labels_
K-Medoids clustering This method finds a set of cluster centers that are themselves data points, attempting to minimize the mean-squared distance from the datapoints to their assigned cluster centers. This algorithm requires computing the full distance matrix between all pairs of data points, requiring O(N^2) memory. The implementation of this method is based on the C clustering library [1]. Parameters ---------- n_clusters : int, optional, default: 8 The number of clusters to be found. n_passes : int, default=1 The number of times clustering is performed. Clustering is performed n_passes times, each time starting from a different (random) initial assignment. metric : {"euclidean", "sqeuclidean", "cityblock", "chebyshev", "canberra", "braycurtis", "hamming", "jaccard", "cityblock", "rmsd"} The distance metric to use. metric = "rmsd" requires that sequences passed to ``fit()`` be ```md.Trajectory```; other distance metrics require ``np.ndarray``s. random_state : integer or numpy.RandomState, optional The generator used to initialize the centers. If an integer is given, it fixes the seed. Defaults to the global numpy random number generator. References ---------- .. [1] de Hoon, Michiel JL, et al. "Open source clustering software." Bioinformatics 20.9 (2004): 1453-1454. See Also -------- MiniBatchKMedoids: Alternative online implementation that does incremental updates of the cluster centers using mini-batches, for more memory efficiency. Attributes ---------- cluster_ids_ : array, [n_clusters] Index of the data point that each cluster label corresponds to. labels_ : array, [n_samples,] The label of each point is an integer in [0, n_clusters). inertia_ : float Sum of distances of samples to their closest cluster center.
62599050d486a94d0ba2d455
class Iter: <NEW_LINE> <INDENT> class Frame: <NEW_LINE> <INDENT> def __init__(self, sx): <NEW_LINE> <INDENT> self.sx = sx <NEW_LINE> self.items = sx.rawchildren <NEW_LINE> self.index = 0 <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> if self.index < len(self.items): <NEW_LINE> <INDENT> result = self.items[self.index] <NEW_LINE> self.index += 1 <NEW_LINE> return result <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __init__(self, sx): <NEW_LINE> <INDENT> self.stack = [] <NEW_LINE> self.push(sx) <NEW_LINE> <DEDENT> def push(self, sx): <NEW_LINE> <INDENT> self.stack.append(Iter.Frame(sx)) <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> if self.stack: <NEW_LINE> <INDENT> return self.stack.pop() <NEW_LINE> <DEDENT> raise StopIteration() <NEW_LINE> <DEDENT> def top(self): <NEW_LINE> <INDENT> if self.stack: <NEW_LINE> <INDENT> return self.stack[-1] <NEW_LINE> <DEDENT> raise StopIteration() <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> frame = self.top() <NEW_LINE> while True: <NEW_LINE> <INDENT> result = frame.next() <NEW_LINE> if result is None: <NEW_LINE> <INDENT> self.pop() <NEW_LINE> return self.next() <NEW_LINE> <DEDENT> if isinstance(result, Content): <NEW_LINE> <INDENT> ancestry = [f.sx for f in self.stack] <NEW_LINE> return result, ancestry <NEW_LINE> <DEDENT> self.push(result) <NEW_LINE> return self.next() <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self
The content iterator - used to iterate the L{Content} children. The iterator provides a I{view} of the children that is free of container elements such as <xsd::all/>, <xsd:choice/> or <xsd:sequence/>. @ivar stack: A stack used to control nesting. @type stack: list
6259905007d97122c4218135
class StaticAssetsIntegration(DevServerIntegration): <NEW_LINE> <INDENT> _response = None <NEW_LINE> _root_folder = None <NEW_LINE> def init(self): <NEW_LINE> <INDENT> self._response = None <NEW_LINE> self._root_folder = os.path.realpath(instantiator.get_class_abslocation(BasicSettings)) <NEW_LINE> <DEDENT> def test_image_serve_ok(self): <NEW_LINE> <INDENT> img_route = "/samples/mvc/static/sample.jpg" <NEW_LINE> def request_logic(server): <NEW_LINE> <INDENT> request = Request(self._get_server_base_url(server, img_route)) <NEW_LINE> self._response = urllib.request.urlopen(request) <NEW_LINE> <DEDENT> def assert_logic(server): <NEW_LINE> <INDENT> self.assertEqual(200, self._response.getcode()) <NEW_LINE> self.assertEqual("image/jpeg", self._response.info()["Content-Type"]) <NEW_LINE> expected_filepath = "%s/samples/mvc/static/sample.jpg" % self._root_folder <NEW_LINE> with open(expected_filepath, "rb") as file_input: <NEW_LINE> <INDENT> expected_content = file_input.read() <NEW_LINE> <DEDENT> self.assertIsNotNone(expected_content) <NEW_LINE> self.assertEqual(expected_content, self._response.read()) <NEW_LINE> <DEDENT> self._run_test_against_dev_server(request_logic, assert_logic) <NEW_LINE> <DEDENT> def test_icon_serve_notfound(self): <NEW_LINE> <INDENT> icon_route = "/favicon.ico" <NEW_LINE> def request_logic(server): <NEW_LINE> <INDENT> request = Request(self._get_server_base_url(server, icon_route)) <NEW_LINE> self._response = urllib.request.urlopen(request) <NEW_LINE> <DEDENT> def assert_logic(server): <NEW_LINE> <INDENT> self.assertEqual(200, self._response.getcode()) <NEW_LINE> self.assertTrue(self._response.info()["Content-Type"] in ["image/vnd.microsoft.icon", "image/x-icon"]) <NEW_LINE> <DEDENT> self._run_test_against_dev_server(request_logic, assert_logic)
This class provides the integration tests for ensuring StaticAssets controller works as expected.
6259905082261d6c5273090f
class V1beta1StatefulSetSpec(object): <NEW_LINE> <INDENT> def __init__(self, replicas=None, selector=None, service_name=None, template=None, volume_claim_templates=None): <NEW_LINE> <INDENT> self.swagger_types = { 'replicas': 'int', 'selector': 'V1LabelSelector', 'service_name': 'str', 'template': 'V1PodTemplateSpec', 'volume_claim_templates': 'list[V1PersistentVolumeClaim]' } <NEW_LINE> self.attribute_map = { 'replicas': 'replicas', 'selector': 'selector', 'service_name': 'serviceName', 'template': 'template', 'volume_claim_templates': 'volumeClaimTemplates' } <NEW_LINE> self._replicas = replicas <NEW_LINE> self._selector = selector <NEW_LINE> self._service_name = service_name <NEW_LINE> self._template = template <NEW_LINE> self._volume_claim_templates = volume_claim_templates <NEW_LINE> <DEDENT> @property <NEW_LINE> def replicas(self): <NEW_LINE> <INDENT> return self._replicas <NEW_LINE> <DEDENT> @replicas.setter <NEW_LINE> def replicas(self, replicas): <NEW_LINE> <INDENT> self._replicas = replicas <NEW_LINE> <DEDENT> @property <NEW_LINE> def selector(self): <NEW_LINE> <INDENT> return self._selector <NEW_LINE> <DEDENT> @selector.setter <NEW_LINE> def selector(self, selector): <NEW_LINE> <INDENT> self._selector = selector <NEW_LINE> <DEDENT> @property <NEW_LINE> def service_name(self): <NEW_LINE> <INDENT> return self._service_name <NEW_LINE> <DEDENT> @service_name.setter <NEW_LINE> def service_name(self, service_name): <NEW_LINE> <INDENT> if service_name is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `service_name`, must not be `None`") <NEW_LINE> <DEDENT> self._service_name = service_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def template(self): <NEW_LINE> <INDENT> return self._template <NEW_LINE> <DEDENT> @template.setter <NEW_LINE> def template(self, template): <NEW_LINE> <INDENT> if template is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `template`, must not be `None`") <NEW_LINE> <DEDENT> self._template = template 
<NEW_LINE> <DEDENT> @property <NEW_LINE> def volume_claim_templates(self): <NEW_LINE> <INDENT> return self._volume_claim_templates <NEW_LINE> <DEDENT> @volume_claim_templates.setter <NEW_LINE> def volume_claim_templates(self, volume_claim_templates): <NEW_LINE> <INDENT> self._volume_claim_templates = volume_claim_templates <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62599050fff4ab517ebcecae
class flooder(threading.Thread): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> grasp.tprint("Flooding", obj2.name, "for ever") <NEW_LINE> while True: <NEW_LINE> <INDENT> time.sleep(60) <NEW_LINE> grasp.flood(asa_nonce, 59000, grasp.tagged_objective(obj2,None)) <NEW_LINE> time.sleep(5)
Thread to flood PrefixManager.Params repeatedly
62599050e76e3b2f99fd9e91
class CommentCreateView(CreateView): <NEW_LINE> <INDENT> model = Comment <NEW_LINE> fields = ['comment'] <NEW_LINE> template_name = 'comment.html' <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> form.instance.author = self.request.user <NEW_LINE> form.instance.article = Article.objects.get(id=self.kwargs['pk']) <NEW_LINE> return super().form_valid(form)
Let the user to comment of an article.
62599050097d151d1a2c2502
class StatTextLineParser(BaseParser): <NEW_LINE> <INDENT> whois_option_list = [ make_option("-k", "--keep-alive", action="store_true", help="use a persistent connection") ] <NEW_LINE> def __init__(self, protocol, *args, **kwargs): <NEW_LINE> <INDENT> self.protocol = protocol <NEW_LINE> BaseParser.__init__(self, *args, **kwargs) <NEW_LINE> for option in self.whois_option_list: <NEW_LINE> <INDENT> self.add_option(option) <NEW_LINE> <DEDENT> <DEDENT> def print_help(self, *args, **kwargs): <NEW_LINE> <INDENT> for line in self.format_option_help().split("\n"): <NEW_LINE> <INDENT> self.protocol.queueLine(line) <NEW_LINE> <DEDENT> <DEDENT> def print_usage(self, *args, **kwargs): <NEW_LINE> <INDENT> self.print_help() <NEW_LINE> <DEDENT> def exit(self, *args, **kwargs): <NEW_LINE> <INDENT> pass
BaseParser subclass that responds to input from whois clients.
6259905007f4c71912bb08c6
class StreamReader(_Reader): <NEW_LINE> <INDENT> def __init__(self, input, peek=None): <NEW_LINE> <INDENT> super().__init__(input) <NEW_LINE> self._peek = peek <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> block, cdata = Block.from_stream(self._input, self._peek) <NEW_LINE> if not block.uncompressed_size: <NEW_LINE> <INDENT> raise EmptyBlock() <NEW_LINE> <DEDENT> self._inflate(block, cdata) <NEW_LINE> self._peek = None <NEW_LINE> return self.buffer <NEW_LINE> <DEDENT> except EOFError: <NEW_LINE> <INDENT> raise StopIteration()
Implements _Reader to handle input data that is not accessible through a buffer interface.
6259905076d4e153a661dcc1
class VirtualNetworkBgpCommunities(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'virtual_network_community': {'required': True}, 'regional_community': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'virtual_network_community': {'key': 'virtualNetworkCommunity', 'type': 'str'}, 'regional_community': {'key': 'regionalCommunity', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(VirtualNetworkBgpCommunities, self).__init__(**kwargs) <NEW_LINE> self.virtual_network_community = kwargs['virtual_network_community'] <NEW_LINE> self.regional_community = None
Bgp Communities sent over ExpressRoute with each route corresponding to a prefix in this VNET. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param virtual_network_community: Required. The BGP community associated with the virtual network. :type virtual_network_community: str :ivar regional_community: The BGP community associated with the region of the virtual network. :vartype regional_community: str
62599050dc8b845886d54a50
class Int8(Uint8): <NEW_LINE> <INDENT> dtype_id = 6 <NEW_LINE> _ctype = _C.c_int8 <NEW_LINE> _ntype = _N.int8
8-bit signed number
6259905029b78933be26ab0b
class ChecksConfig(AppConfig): <NEW_LINE> <INDENT> name = 'specialhandling.checks'
Checks app configuration
625990507cff6e4e811b6ecd
class TextBuilder(object): <NEW_LINE> <INDENT> file_extensions = ["txt"] <NEW_LINE> tesseract_configs = [] <NEW_LINE> cuneiform_args = ["-f", "text"] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def read_file(file_descriptor): <NEW_LINE> <INDENT> return file_descriptor.read().strip() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def write_file(file_descriptor, text): <NEW_LINE> <INDENT> file_descriptor.write(text) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __str__(): <NEW_LINE> <INDENT> return "Raw text"
If passed to image_to_string(), image_to_string() will return a simple string. This string will be the output of the OCR tool, as-is. In other words, the raw text as produced by the tool. Warning: The returned string is encoded in UTF-8
62599050b57a9660fecd2f0e
class DesignateInstanceEntryFactory(driver.DnsInstanceEntryFactory):
    """Defines how instance DNS entries are created for instances."""

    def create_entry(self, instance_id):
        """Build the DNS entry for *instance_id*.

        The hostname is a short, deterministic base32 hash of the instance
        id, prefixed to the configured DNS zone name.

        :param instance_id: unique instance identifier (str or bytes).
        :returns: a driver.DnsEntry of type "A" with no content set.
        """
        zone = DesignateDnsZone(id=DNS_DOMAIN_ID, name=DNS_DOMAIN_NAME)
        # BUG FIX: hashlib requires bytes -- passing a Python 3 str raised
        # TypeError, and b32encode's bytes result leaked into the hostname.
        if isinstance(instance_id, str):
            instance_id = instance_id.encode('utf-8')
        digest = hashlib.md5(instance_id).digest()
        # base32 keeps the name DNS-safe; 11 chars keeps it short while
        # remaining unique enough in practice.
        name = base64.b32encode(digest)[:11].decode('ascii')
        hostname = "%s.%s" % (name, zone.name)
        # Strip a trailing dot left by a fully-qualified zone name.
        if hostname.endswith('.'):
            hostname = hostname[:-1]
        return driver.DnsEntry(name=hostname, content=None, type="A",
                               ttl=DNS_TTL, dns_zone=zone)
Defines how instance DNS entries are created for instances.
625990503eb6a72ae038baed
class NodeIsOwnerOrReadOnly(permissions.BasePermission):
    """Object-level permission: anyone may read a node, but only the
    author of the node's map may modify it.
    """

    def has_object_permission(self, request, view, node):
        # Write requests require the requester to own the node's map;
        # safe (read-only) methods are always allowed.
        if request.method not in permissions.SAFE_METHODS:
            return node.map.author == request.user
        return True
Object-level permission to only allow owners of an object to edit it.
62599050d6c5a102081e35b0
class TargetPoolAggregatedList(messages.Message):
    """A TargetPoolAggregatedList object.

    Messages:
      ItemsValue: A map of scoped target pool lists.

    Fields:
      id: Unique identifier for the resource; defined by the server (output
        only).
      items: A map of scoped target pool lists.
      kind: Type of resource.
      nextPageToken: A token used to continue a truncated list request
        (output only).
      selfLink: Server defined URL for this resource (output only).
    """

    @encoding.MapUnrecognizedFields('additionalProperties')
    class ItemsValue(messages.Message):
        """A map of scoped target pool lists.

        Messages:
          AdditionalProperty: An additional property of an ItemsValue object.
        """

        class AdditionalProperty(messages.Message):
            """An additional property of an ItemsValue object.

            Fields:
              key: Name of the additional property (the scope).
              value: The TargetPoolsScopedList for that scope.
            """

            key = messages.StringField(1)
            value = messages.MessageField('TargetPoolsScopedList', 2)

        additionalProperties = messages.MessageField('AdditionalProperty', 1, repeated=True)

    id = messages.StringField(1)
    items = messages.MessageField('ItemsValue', 2)
    kind = messages.StringField(3, default=u'compute#targetPoolAggregatedList')
    nextPageToken = messages.StringField(4)
    selfLink = messages.StringField(5)
A TargetPoolAggregatedList object. Messages: ItemsValue: A map of scoped target pool lists. Fields: id: Unique identifier for the resource; defined by the server (output only). items: A map of scoped target pool lists. kind: Type of resource. nextPageToken: A token used to continue a truncated list request (output only). selfLink: Server defined URL for this resource (output only).
6259905021a7993f00c673fb
class CategorySerializer(serializers.ModelSerializer):
    """Default serializer exposing a Category's public fields."""

    class Meta:
        # Serialize only these fields of the Category model.
        model = Category
        fields = ('id', 'name', 'description')
Description: Default Serializers
625990508e7ae83300eea524
class User(Model):
    """Registered user information is stored in database.db."""

    id = Column(Integer, primary_key=True)
    username = Column(String(32), index=True)  # indexed for login lookups
    password_hash = Column(String(64))         # passlib hash, never plain text
    email = Column(String(254))                # 254 = max valid address length

    def hash_password(self, password):
        """Hash *password* and store it; the plain text is never kept."""
        self.password_hash = pwd_context.encrypt(password)

    def verify_password(self, password):
        """Return True if *password* matches the stored hash."""
        return pwd_context.verify(password, self.password_hash)
Registered user information is stored in database.db
62599050d53ae8145f9198f5
class JunitXml(object):
    """Build a junit test XML document for a suite of test cases.

    Note: the document is returned as a string via dump(); nothing is
    written to disk here.
    """

    def __init__(self, testsuit_name, test_cases, total_tests=None,
                 total_failures=None):
        self.testsuit_name = testsuit_name
        self.test_cases = test_cases
        self.failing_test_cases = self._get_failing_test_cases()
        # Fall back to counting the supplied cases when totals are omitted.
        self.total_tests = (len(self.test_cases)
                            if total_tests is None else total_tests)
        self.total_failures = (len(self.failing_test_cases)
                               if total_failures is None else total_failures)
        attributes = {
            "name": str(self.testsuit_name),
            "failures": str(self.total_failures),
            "tests": str(self.total_tests),
        }
        self.root = ET.Element("testsuite", attributes)
        self.build_junit_xml()

    def _get_failing_test_cases(self):
        """Return the subset of cases that report a failure."""
        return {case for case in self.test_cases if case.is_failure()}

    def build_junit_xml(self):
        """Append one <testcase> element per case under the suite root."""
        for case in self.test_cases:
            element = ET.SubElement(self.root, "testcase",
                                    {"name": str(case.name)})
            if not case.is_failure():
                continue
            failure = ET.Element("failure")
            failure.text = case.contents
            element.append(failure)

    def dump(self, pretty=True):
        """Serialize the document; pretty-print unless *pretty* is False."""
        out = ET.tostring(self.root)
        if not pretty:
            return out
        return xml.dom.minidom.parseString(out).toprettyxml()
A class which is designed to create a junit test xml file. Note: currently this class is designed to return the junit xml file in a string format (through the dump method).
62599050004d5f362081fa32
class UrlTableTest(unittest.TestCase):
    """url table class test."""

    def test_insert_url(self):
        """Inserting the same URL twice must be rejected the second time."""
        url1 = "http://pycm.baidu.com:8081"
        url_table_obj = url_table.UrlTable()
        url_table_obj.insert_url(url1)
        # Second insert of an already-known URL is expected to fail.
        result = url_table_obj.insert_url(url1)
        assert result is False
url table class test
625990504428ac0f6e6599c6
class PropertiedClassWithDjango(db.PropertiedClass):
    """Metaclass for the combined Django + App Engine model class.

    This metaclass inherits from db.PropertiedClass in the appengine library.
    This metaclass has two additional purposes:
    1) Register each model class created with Django (the parent class will
       take care of registering it with the appengine libraries).
    2) Add the (minimum number) of attributes and methods to make Django
       believe the class is a normal Django model.

    The resulting classes are still not generally useful as Django classes
    and are intended to be used by Django only in limited situations such as
    loading and dumping fixtures.
    """

    def __new__(cls, name, bases, attrs):
        # BaseModel itself gets no Django decoration; only subclasses do.
        if name == 'BaseModel':
            return super(PropertiedClassWithDjango, cls).__new__(cls, name, bases, attrs)
        new_class = super(PropertiedClassWithDjango, cls).__new__(cls, name, bases, attrs)
        # Minimum Django machinery: options, manager and DoesNotExist.
        new_class._meta = ModelOptions(new_class)
        new_class.objects = ModelManager(new_class)
        new_class._default_manager = new_class.objects
        new_class.DoesNotExist = types.ClassType('DoesNotExist', (ObjectDoesNotExist,), {})
        # If Django's registry already has a model with this name, reuse it
        # so the registry never holds two classes for the same model.
        m = get_model(new_class._meta.app_label, name, False)
        if m:
            return m
        register_models(new_class._meta.app_label, new_class)
        # Return the registered class rather than new_class directly.
        return get_model(new_class._meta.app_label, name, False)

    def __init__(cls, name, bases, attrs):
        super(PropertiedClassWithDjango, cls).__init__(name, bases, attrs)
        if name == 'BaseModel':
            return
        # Wrap appengine properties so Django sees them as fields; the
        # attribute name changed between Django versions.
        fields = [PropertyWrapper(p) for p in cls._properties.values()]
        if VERSION >= (0, 97, None):
            cls._meta.local_fields = fields
        else:
            cls._meta.fields = fields
Metaclass for the combined Django + App Engine model class. This metaclass inherits from db.PropertiedClass in the appengine library. This metaclass has two additional purposes: 1) Register each model class created with Django (the parent class will take care of registering it with the appengine libraries). 2) Add the (minimum number) of attributes and methods to make Django believe the class is a normal Django model. The resulting classes are still not generally useful as Django classes and are intended to be used by Django only in limited situations such as loading and dumping fixtures.
6259905026068e7796d4ddd7
class TestScriptHelperEnvironment(unittest.TestCase):
    """Code coverage for interpreter_requires_environment()."""

    def setUp(self):
        # The cached result lives in a module-private attribute; reset it so
        # every test starts from an uncached state.
        self.assertTrue(
            hasattr(script_helper, '__cached_interp_requires_environment'))
        script_helper.__dict__['__cached_interp_requires_environment'] = None

    def tearDown(self):
        # Leave no cached value behind for other tests.
        script_helper.__dict__['__cached_interp_requires_environment'] = None

    @unittest.skipUnless(hasattr(subprocess, 'check_call'),
                         'test needs subprocess.check_call()')
    @mock.patch('subprocess.check_call')
    def test_interpreter_requires_environment_true(self, mock_check_call):
        # A failing check_call means the interpreter NEEDS the environment.
        mock_check_call.side_effect = subprocess.CalledProcessError('', '')
        self.assertTrue(script_helper.interpreter_requires_environment())
        self.assertTrue(script_helper.interpreter_requires_environment())
        # The second call must hit the cache, not spawn another subprocess.
        self.assertEqual(1, mock_check_call.call_count)

    @unittest.skipUnless(hasattr(subprocess, 'check_call'),
                         'test needs subprocess.check_call()')
    @mock.patch('subprocess.check_call')
    def test_interpreter_requires_environment_false(self, mock_check_call):
        # check_call succeeding means no environment is required.
        script_helper.interpreter_requires_environment()
        self.assertFalse(script_helper.interpreter_requires_environment())
        self.assertEqual(1, mock_check_call.call_count)

    @unittest.skipUnless(hasattr(subprocess, 'check_call'),
                         'test needs subprocess.check_call()')
    @mock.patch('subprocess.check_call')
    def test_interpreter_requires_environment_details(self, mock_check_call):
        script_helper.interpreter_requires_environment()
        self.assertFalse(script_helper.interpreter_requires_environment())
        self.assertFalse(script_helper.interpreter_requires_environment())
        self.assertEqual(1, mock_check_call.call_count)
        # Inspect the exact command that was issued: the current interpreter
        # run with -E (ignore PYTHON* environment variables).
        check_call_command = mock_check_call.call_args[0][0]
        self.assertEqual(sys.executable, check_call_command[0])
        self.assertIn('-E', check_call_command)
Code coverage for interpreter_requires_environment().
625990506fece00bbaccce4e
class InstanceGroupManager(_messages.Message):
    """An Instance Group Manager resource.

    Fields:
      baseInstanceName: The base instance name to use for instances in this
        group. Must be 1-58 characters long and comply with RFC1035; a
        hyphen and a random four-character string are appended per instance.
      creationTimestamp: [Output Only] Creation timestamp in RFC3339 format.
      currentActions: [Output Only] Instance actions scheduled for this
        group and the number of instances affected by each.
      description: An optional description of this resource.
      fingerprint: [Output Only] Fingerprint of the resource data; usable
        for optimistic locking on updates.
      id: [Output Only] Server-generated unique identifier.
      instanceGroup: [Output Only] The URL of the Instance Group resource.
      instanceTemplate: The URL of the instance template used to create all
        new instances in this managed instance group.
      kind: [Output Only] Always compute#instanceGroupManager.
      name: Name of the managed instance group (1-63 chars, RFC1035).
      namedPorts: Named ports configured for the complementary Instance
        Groups.
      region: [Output Only] URL of the region (for regional resources).
      selfLink: [Output Only] Server-defined URL for this group.
      targetPools: URLs of TargetPool resources to which all instances in
        the group are added.
      targetSize: The target number of running instances for this group.
      zone: [Output Only] URL of the zone (for zonal resources).
    """

    baseInstanceName = _messages.StringField(1)
    creationTimestamp = _messages.StringField(2)
    currentActions = _messages.MessageField('InstanceGroupManagerActionsSummary', 3)
    description = _messages.StringField(4)
    fingerprint = _messages.BytesField(5)
    id = _messages.IntegerField(6, variant=_messages.Variant.UINT64)
    instanceGroup = _messages.StringField(7)
    instanceTemplate = _messages.StringField(8)
    kind = _messages.StringField(9, default=u'compute#instanceGroupManager')
    name = _messages.StringField(10)
    namedPorts = _messages.MessageField('NamedPort', 11, repeated=True)
    region = _messages.StringField(12)
    selfLink = _messages.StringField(13)
    targetPools = _messages.StringField(14, repeated=True)
    targetSize = _messages.IntegerField(15, variant=_messages.Variant.INT32)
    zone = _messages.StringField(16)
An Instance Group Manager resource. Fields: baseInstanceName: The base instance name to use for instances in this group. The value must be 1-58 characters long. Instances are named by appending a hyphen and a random four-character string to the base instance name. The base instance name must comply with RFC1035. creationTimestamp: [Output Only] The creation timestamp for this managed instance group in RFC3339 text format. currentActions: [Output Only] The list of instance actions and the number of instances in this managed instance group that are scheduled for each of those actions. description: An optional description of this resource. Provide this property when you create the resource. fingerprint: [Output Only] The fingerprint of the resource data. You can use this optional field for optimistic locking when you update the resource. id: [Output Only] A unique identifier for this resource type. The server generates this identifier. instanceGroup: [Output Only] The URL of the Instance Group resource. instanceTemplate: The URL of the instance template that is specified for this managed instance group. The group uses this template to create all new instances in the managed instance group. kind: [Output Only] The resource type, which is always compute#instanceGroupManager for managed instance groups. name: The name of the managed instance group. The name must be 1-63 characters long, and comply with RFC1035. namedPorts: Named ports configured for the Instance Groups complementary to this Instance Group Manager. region: [Output Only] The URL of the region where the managed instance group resides (for regional resources). selfLink: [Output Only] The URL for this managed instance group. The server defines this URL. targetPools: The URLs for all TargetPool resources to which instances in the instanceGroup field are added. The target pools automatically apply to all of the instances in the managed instance group. 
targetSize: The target number of running instances for this managed instance group. Deleting or abandoning instances reduces this number. Resizing the group changes this number. zone: [Output Only] The URL of the zone where the managed instance group is located (for zonal resources).
6259905007f4c71912bb08c9
class ChatServicer(bolthole_pb2_grpc.ChatServicer):
    """Chat service implementation.

    Attributes
    ----------
    __queues__ : list of per-subscriber pending-message queues
    __users__ : dict mapping user id -> User

    Methods
    -------
    GetMessage(request, context) : server-streaming subscription
    SendMessage(request, context) : broadcast a message to all subscribers
    """

    def __init__(self):
        self.__queues__ = []
        self.__users__ = {}

    def GetMessage(self, request, context):
        """Register the caller and stream every broadcast message to it.

        Generator RPC: yields Messages until the client disconnects.
        Rejects missing user data and duplicate ids via gRPC status codes.
        """
        if not request.user.id:
            context.set_details('bad argument user id')
            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
            return None
        if not request.user.name:
            context.set_details('bad argument user name')
            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
            return None
        if self.__users__.get(request.user.id):
            context.set_details('user id already in use')
            context.set_code(grpc.StatusCode.ALREADY_EXISTS)
            return None
        user = User(request.user.id, request.user.name)
        self.__users__[request.user.id] = user
        # Each subscriber gets its own bounded queue of pending messages.
        messages = queue.Queue(maxsize=__MAX_QUEUE_SIZE__)
        self.__queues__.append(messages)
        print("connected new user '%s'" % request.user.id)
        # Block on the queue and relay each broadcast until the RPC ends.
        while context.is_active():
            message = messages.get()
            yield bolthole_pb2.Message(
                id=message.user.id,
                content=message.content,
            )
        print("client disconnected")
        # Drop the user's registration and queue on disconnect.
        del self.__users__[request.user.id]
        self.__queues__.remove(messages)
        return None

    def SendMessage(self, request, context):
        """Validate the sender and fan the message out to every queue."""
        if not request.id:
            context.set_details('bad argument id')
            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
            return None
        if not request.content:
            context.set_details('bad argument content')
            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
            return None
        user = self.__users__.get(request.id)
        if not user:
            context.set_details('unknown user')
            context.set_code(grpc.StatusCode.NOT_FOUND)
            return None
        message = Message(user, request.content)
        # NOTE(review): the loop variable shadows the imported `queue`
        # module inside this method -- harmless here, but worth renaming.
        for queue in self.__queues__:
            queue.put(message)
        return Empty()
Class to handle the Chat service implementation Attributes ---------- __queues__ : list with users chat history queues __users__ : dictionary with users Methods ------- __init__(self) : Constructor GetMessage(self, request, context) : Handles the GetMessage
6259905030dc7b76659a0cc6
class TestVertexShard(unittest.TestCase):
    """Fetch vertex shard information for a specified split_size.

    The returned shards can be combined with the Scan API to page through
    all vertices.
    """

    @staticmethod
    def setup_class(self):
        """Reset the graph backend and load the test fixture."""
        # Cassandra needs an explicit clear; other backends can truncate.
        if _cfg.server_backend == 'cassandra':
            clear_graph()
        else:
            Gremlin().gremlin_post('graph.truncateBackend();')
        InsertData(gremlin='gremlin_hlm.txt').gremlin_graph()

    def test_reqiured_params(self):
        """Shard query with only the required split_size parameter."""
        json = {'split_size': 1048576}
        code, res = Traverser().get_vertex_shard(json, auth=auth)
        print(code, res)
        self.assertEqual(code, 200)
        # Expected single shard covering the whole fixture data set.
        self.assertEqual(res, {'shards': [{'start': 'hzE65Y+y5YCZ', 'end': 'ijI66LW15aeo5aiYAA==', 'length': 0}]})
通过指定的分片大小split_size,获取顶点分片信息(可以与 Scan 配合使用来获取顶点)。
625990508a43f66fc4bf362c
class AdaDelta(StepStrategy):
    """ADADELTA step size strategy.

    For details, see:
    M. D. Zeiler, "ADADELTA: An adaptive learning rate method", arXiv, 2012.
    """

    def __init__(self, rho=0.95, eps=1.0e-6):
        assert eps > 0, 'eps must be positive.'
        assert 0 < rho < 1, 'rho must be strictly between 0 and 1.'
        self.eps = eps
        self.rho = rho

    def updates(self, parms, grads):
        """Build the theano update pairs for one ADADELTA step.

        Returns (shared, new_value) pairs: gradient accumulators first,
        then delta accumulators, then the parameters themselves.
        """
        rho, eps = self.rho, self.eps

        def _zero_accumulator(p):
            # Shared accumulator shaped like the parameter, initialised to 0.
            return theano.shared(np.zeros_like(p.get_value(borrow=True)),
                                 borrow=True)

        acc_gs = [_zero_accumulator(p) for p in parms]
        acc_ds = [_zero_accumulator(p) for p in parms]

        # Decayed accumulation of squared gradients.
        new_acc_gs = [rho * ag + (1 - rho) * g ** 2
                      for g, ag in zip(grads, acc_gs)]
        # Per-parameter step: RMS of past deltas over RMS of gradients.
        deltas = [tt.sqrt((ad + eps) / (nag + eps)) * g
                  for g, nag, ad in zip(grads, new_acc_gs, acc_ds)]
        # Decayed accumulation of squared deltas.
        new_acc_ds = [rho * ad + (1 - rho) * d ** 2
                      for d, ad in zip(deltas, acc_ds)]

        return (list(zip(acc_gs, new_acc_gs))
                + list(zip(acc_ds, new_acc_ds))
                + [(p, p - d) for p, d in zip(parms, deltas)])
ADADELTA step size strategy. For details, see: M. D. Zeiler, "ADADELTA: An adaptive learning rate method", arXiv, 2012.
62599050498bea3a75a58fb5
class Crypt(Transformer):
    """UNIX style crypt.

    If no salt is specified will use first two chars of data, ala pwd style.
    """

    _salt = None

    def __init__(self, salt=None):
        Transformer.__init__(self)
        self._salt = salt

    def realEncode(self, data):
        """Return crypt(3) of *data*, salted explicitly or pwd-style."""
        salt = data[:2] if self._salt is None else self._salt
        return crypt(data, salt)
UNIX style crypt. If no salt is specified will use first two chars of data, ala pwd style.
625990508e71fb1e983bcf59
class ManPageCliTreeGenerator(CliTreeGenerator):
    """man page CLI tree generator."""

    def Run(self, cmd):
        """Run *cmd* and return its captured stdout."""
        return subprocess.check_output(cmd)

    def GetVersion(self):
        """Version label recorded in the generated tree."""
        return 'MAN(1)'

    def AddFlags(self, command, content, is_global=False):
        """Parse flag definitions from man-page *content* into *command*.

        Args:
          command: the CLI tree node being populated.
          content: list of text lines from the flags section.
          is_global: True if the flags apply to all subcommands.
        """

        def _Add(name, description, category):
            # '--name=value' style implies a string flag; bare '--name' is
            # treated as boolean.
            if '=' in name:
                name, value = name.split('=', 1)
                type_ = 'string'
            else:
                value = ''
                type_ = 'bool'
            default = ''
            command[cli_tree.LOOKUP_FLAGS][name] = _Flag(
                name=name,
                description='\n'.join(description),
                type_=type_,
                value=value,
                category=category,
                default=default,
                is_required=False,
                is_global=is_global,
            )

        names = []
        description = []
        category = ''
        for line in content:
            if line.lstrip().startswith('-'):
                # A new flag definition starts; flush the previous one(s).
                for name in names:
                    _Add(name, description, category)
                line = line.lstrip()
                names = line.strip().replace(', -', ', --').split(', -')
                if ' ' in names[-1]:
                    # Inline description text follows the last flag name.
                    names[-1], text = names[-1].split(' ', 1)
                    description = [text.strip()]
                else:
                    description = []
            elif line.startswith('### '):
                # Heading marks a new flag category.
                category = line[4:]
            else:
                description.append(line)
        # Flush the final flag definition.
        for name in names:
            _Add(name, description, category)

    def SubTree(self, path):
        """Generate a CLI subtree from the man page named by *path*."""
        command = _Command(path)
        text = self.Run(['man'] + path)
        collector = _ManPageCollector(text)
        # Walk the man page section by section.
        while True:
            heading, content = collector.Collect()
            if not heading:
                break
            elif heading == 'NAME':
                if content:
                    # "command - capsule description"
                    command[cli_tree.LOOKUP_CAPSULE] = content[0].split('-', 1)[1].strip()
            elif heading == 'FLAGS':
                self.AddFlags(command, content)
            elif heading in ('DESCRIPTION', 'SEE ALSO'):
                text = _NormalizeSpace('\n'.join(content))
                # Repeated headings are concatenated, not overwritten.
                if heading in command[cli_tree.LOOKUP_SECTIONS]:
                    command[cli_tree.LOOKUP_SECTIONS][heading] += '\n\n' + text
                else:
                    command[cli_tree.LOOKUP_SECTIONS][heading] = text
        return command

    def GenerateTree(self):
        """Generate and return the CLI tree for this man page."""
        tree = self.SubTree([self.cli_name])
        tree[cli_tree.LOOKUP_CLI_VERSION] = self.GetVersion()
        tree[cli_tree.LOOKUP_VERSION] = cli_tree.VERSION
        return tree
man page CLI tree generator.
62599050a79ad1619776b506
class GenericValidator(BaseValidator):
    """Validator used for validating all documents, regardless whether
    concrete or abstract, or what version its schema is.
    """

    __slots__ = ('base_schema')

    _diagnostic = (
        'Ensure that each document has a metadata, schema and data section. '
        'Each document must pass the schema defined under: '
        'https://airship-deckhand.readthedocs.io/en/latest/'
        'validation.html#base-schema')

    def __init__(self):
        super(GenericValidator, self).__init__()
        self.base_schema = self._schema_map['v1']['deckhand/Base']

    def validate_metadata(self, metadata):
        """Validate a document's metadata against its declared schema.

        Returns a list of error message strings (empty when valid).
        """
        errors = list()
        schema_name, schema_ver = _get_schema_parts(metadata)
        schema = self._schema_map.get(schema_ver, {}).get(schema_name, {})
        if not schema:
            return ['Invalid metadata schema %s version %s specified.' % (schema_name, schema_ver)]
        LOG.debug("Validating document metadata with schema %s/%s.", schema_name, schema_ver)
        jsonschema.Draft4Validator.check_schema(schema)
        schema_validator = jsonschema.Draft4Validator(schema)
        errors.extend([e.message for e in schema_validator.iter_errors(metadata)])
        return errors

    def validate(self, document, **kwargs):
        """Sanity-check *document* against the Deckhand base schema.

        Raises InvalidDocumentFormat when validation fails; unexpected
        schema-handling errors are wrapped in RuntimeError.
        """
        try:
            jsonschema.Draft4Validator.check_schema(self.base_schema)
            schema_validator = jsonschema.Draft4Validator(self.base_schema)
            error_messages = [
                e.message for e in schema_validator.iter_errors(document)]
            if not error_messages:
                # Base structure is fine; drill into the metadata schema.
                error_messages.extend(
                    self.validate_metadata(document.metadata))
        except Exception as e:
            raise RuntimeError(
                'Unknown error occurred while attempting to use Deckhand '
                'schema. Details: %s' % six.text_type(e))
        else:
            if error_messages:
                LOG.error(
                    'Failed sanity-check validation for document [%s, %s] %s. '
                    'Details: %s', document.schema, document.layer,
                    document.name, error_messages)
                raise errors.InvalidDocumentFormat(
                    error_list=[
                        vm.ValidationMessage(
                            message=message,
                            name=vm.DOCUMENT_SANITY_CHECK_FAILURE,
                            doc_schema=document.schema,
                            doc_name=document.name,
                            doc_layer=document.layer,
                            diagnostic=self._diagnostic)
                        for message in error_messages
                    ],
                    reason='Validation'
                )
Validator used for validating all documents, regardless whether concrete or abstract, or what version its schema is.
625990508e71fb1e983bcf5a
class PolyFitter(Fitter):
    """Class for polynomial curve fitting."""

    def __init__(self, x, y, degree):
        # Fit y = P(x) with a polynomial of the given degree and cache the
        # coefficients, fitted values and printable form under key 0.
        Fitter.__init__(self, x, y)
        coeffs = numpy.polyfit(x, y, degree)
        repr_str = str(numpy.poly1d(coeffs))
        values = numpy.polyval(coeffs, self.x)
        self._fill_cache(0, coeffs, values, repr_str)

    def __str__(self):
        return self.data_cache[0]['repr']

    @property
    def degree(self):
        # numpy.polyfit returns degree + 1 coefficients.
        return len(self.data_cache[0]['obj']) - 1

    def _derivate(self, n):
        """Ensure the n-th derivative is present in the cache.

        Derives from the highest already-cached order below n.
        """
        if n not in self.data_cache:
            m = max((d for d in self.data_cache if d < n))
            coeffs_m = self.data_cache[m]['obj']
            coeffs = numpy.polyder(coeffs_m, n - m)
            repr_str = str(numpy.poly1d(coeffs))
            values = numpy.polyval(coeffs, self.x)
            self._fill_cache(n, coeffs, values, repr_str)

    def get_inflec_points(self):
        """Return inflection points as (x, y, slope) tuples.

        Inflection points are the real roots of the 2nd derivative that lie
        inside the fitted x range and where the 3rd derivative is non-zero.
        """
        coeffs = self.data_cache[0]['obj']
        self._derivate(1)
        coeffs1 = self.data_cache[1]['obj']
        self._derivate(2)
        coeffs2 = self.data_cache[2]['obj']
        self._derivate(3)
        coeffs3 = self.data_cache[3]['obj']
        # NOTE(review): the range test assumes self.x is sorted ascending --
        # confirm against the Fitter base class.
        inflec_points = [float(x_val) for x_val in sorted(numpy.roots(coeffs2)) if x_val.imag == 0 and numpy.polyval(coeffs3, float(x_val)) != 0 and self.x[0] <= float(x_val) <= self.x[-1]]
        return [(x_point, numpy.polyval(coeffs, x_point), numpy.polyval(coeffs1, x_point)) for x_point in inflec_points]
Class for polynomial curve fitting.
6259905076e4537e8c3f0a1b
class CompileBbcode(PageCompiler):
    """Compile bbcode into HTML."""

    name = "bbcode"

    def __init__(self):
        # Without the bbcode package the compiler is unusable; the error is
        # reported lazily in compile().
        if bbcode is None:
            return
        self.parser = bbcode.Parser()
        # [note]...[/note] blocks (used to hide metadata) render to nothing.
        self.parser.add_simple_formatter("note", "")

    def compile(self, source, dest, is_two_file=True, post=None, lang=None):
        """Compile the bbcode file *source* into HTML at *dest*."""
        if bbcode is None:
            req_missing(['bbcode'], 'build this site (compile BBCode)')
        makedirs(os.path.dirname(dest))
        with codecs.open(dest, "w+", "utf8") as out_file:
            with codecs.open(source, "r", "utf8") as in_file:
                data = in_file.read()
            if not is_two_file:
                # One-file posts carry metadata before the first blank line.
                data = re.split('(\n\n|\r\n\r\n)', data, maxsplit=1)[-1]
            output = self.parser.format(data)
            output, shortcode_deps = self.site.apply_shortcodes(
                output, filename=source, with_dependencies=True,
                extra_context=dict(post=post))
            out_file.write(output)
        if post is None:
            if shortcode_deps:
                self.logger.error(
                    "Cannot save dependencies for post {0} (post unknown)",
                    source)
        else:
            post._depfile[dest] += shortcode_deps

    def compile_html(self, source, dest, is_two_file=True):
        """Backwards-compatible wrapper around compile()."""
        try:
            post = self.site.post_per_input_file[source]
        except KeyError:
            post = None
        # BUG FIX: this previously called the *builtin* compile() instead of
        # the compiler's own method, raising a TypeError at runtime.
        return self.compile(source, dest, is_two_file, post, None)

    def create_post(self, path, **kw):
        """Create an empty bbcode post at *path* with the given metadata."""
        content = kw.pop('content', 'Write your post here.')
        onefile = kw.pop('onefile', False)
        kw.pop('is_page', False)
        metadata = OrderedDict()
        metadata.update(self.default_metadata)
        metadata.update(kw)
        makedirs(os.path.dirname(path))
        if not content.endswith('\n'):
            content += '\n'
        with codecs.open(path, "wb+", "utf8") as fd:
            if onefile:
                # Embed the metadata in a [note] comment block so it is
                # stripped from the rendered output.
                fd.write('[note]<!--\n')
                fd.write(write_metadata(metadata))
                fd.write('-->[/note]\n\n')
            fd.write(content)
Compile bbcode into HTML.
6259905099cbb53fe683237a
class TeamsAPI(object):
    """Cisco Spark Teams-API wrapper class.

    Wraps the Cisco Spark Teams-API and exposes the API calls as Python
    method calls that return native Python objects.

    Attributes:
        session(RestSession): The RESTful session object to be used for
            API calls to the Cisco Spark service.
    """

    def __init__(self, session):
        """Initialize the wrapper with an authenticated RestSession."""
        assert isinstance(session, RestSession)
        super(TeamsAPI, self).__init__()
        self.session = session

    @generator_container
    def list(self, max=None):
        """Yield Team objects for the teams the user belongs to.

        `max` limits the page size requested from the API.
        """
        assert max is None or isinstance(max, int)
        params = {}
        if max:
            params[u'max'] = max
        items = self.session.get_items('teams', params=params)
        for item in items:
            yield Team(item)

    def create(self, name):
        """Create a team with display name *name* and return it."""
        assert isinstance(name, basestring)
        post_data = {}
        post_data[u'name'] = utf8(name)
        json_obj = self.session.post('teams', json=post_data)
        return Team(json_obj)

    def get(self, teamId):
        """Return details of the team with id *teamId*."""
        assert isinstance(teamId, basestring)
        json_obj = self.session.get('teams/'+teamId)
        return Team(json_obj)

    def update(self, teamId, **update_attributes):
        """Update attributes of the team with id *teamId*.

        Raises ciscosparkapiException when no attributes are supplied.
        """
        assert isinstance(teamId, basestring)
        if not update_attributes:
            error_message = "At least one **update_attributes keyword " "argument must be specified."
            raise ciscosparkapiException(error_message)
        put_data = {}
        for param, value in update_attributes.items():
            if isinstance(value, basestring):
                value = utf8(value)
            put_data[utf8(param)] = value
        # BUG FIX: updates must be sent with HTTP PUT (the Spark API's
        # update operation); this previously issued a POST.
        json_obj = self.session.put('teams/'+teamId, json=put_data)
        return Team(json_obj)

    def delete(self, teamId):
        """Delete the team with id *teamId*."""
        assert isinstance(teamId, basestring)
        self.session.delete('teams/'+teamId)
Cisco Spark Teams-API wrapper class. Wraps the Cisco Spark Teams-API and exposes the API calls as Python method calls that return native Python objects. Attributes: session(RestSession): The RESTful session object to be used for API calls to the Cisco Spark service.
62599050596a897236128ff9
@abstract
class RedefinableElement(_user_module.RedefinableElementMixin, NamedElement):
    """A RedefinableElement is an element that, when defined in the context
    of a Classifier, can be redefined more specifically or differently in
    the context of another Classifier that specializes (directly or
    indirectly) the context Classifier.

    <p>From package UML::Classification.</p>
    """

    # True when this element may not be further redefined.
    isLeaf = EAttribute(eType=Boolean, derived=False, changeable=True, default_value=False)
    # Derived, read-only collections computed by the _user_module helpers.
    redefinedElement = EReference(
        ordered=False, unique=True, containment=False, derived=True,
        upper=-1, transient=True, derived_class=_user_module.DerivedRedefinedelement)
    redefinitionContext = EReference(
        ordered=False, unique=True, containment=False, derived=True,
        upper=-1, transient=True, derived_class=_user_module.DerivedRedefinitioncontext)

    def __init__(self, isLeaf=None, redefinedElement=None, redefinitionContext=None, **kwargs):
        super(RedefinableElement, self).__init__(**kwargs)
        # Only touch attributes the caller actually supplied so the EMF
        # defaults stay in effect otherwise.
        if isLeaf is not None:
            self.isLeaf = isLeaf
        if redefinedElement:
            self.redefinedElement.extend(redefinedElement)
        if redefinitionContext:
            self.redefinitionContext.extend(redefinitionContext)
A RedefinableElement is an element that, when defined in the context of a Classifier, can be redefined more specifically or differently in the context of another Classifier that specializes (directly or indirectly) the context Classifier. <p>From package UML::Classification.</p>
62599050379a373c97d9a4c0
class DailyActivity(Resource):
    """This resource allows partners to access their users' daily activity
    data.

    https://www.polar.com/accesslink-api/?http#daily-activity
    """

    def create_transaction(self, user_id, access_token):
        """Open a new activity transaction for *user_id*.

        Returns a DailyActivityTransaction wrapper, or None when the API
        reports no new activity data.
        """
        endpoint = "/users/{}/activity-transactions".format(user_id)
        response = self._post(endpoint=endpoint, access_token=access_token)
        if response:
            return DailyActivityTransaction(oauth=self.oauth,
                                            transaction_url=response["resource-uri"],
                                            user_id=user_id,
                                            access_token=access_token)
        return None
This resource allows partners to access their users' daily activity data. https://www.polar.com/accesslink-api/?http#daily-activity
62599050ac7a0e7691f73970
class HTTPException(Exception):
    """Custom exception for non-404 HTTP errors.

    Carries the request context (reference, URL, method name and status
    code) alongside the server's message.
    """

    def __init__(self, reference, http_status_code, url, method_name, message):
        self.reference = reference
        self.url = url
        self.method_name = method_name
        self.http_status_code = http_status_code
        self.msg = message
        # method_name is deliberately left out of args; it only appears in
        # the str() rendering.
        super().__init__(self.reference, self.http_status_code, self.url,
                         self.msg)

    def __str__(self):
        return f"Calling method {self.method_name}() {self.url} returned HTTP {self.http_status_code}. {self.msg}"
Custom Exception non 404 errors
62599050e5267d203ee6cd80