code | docstring | _id
---|---|---|
class MetaWinnerFiniteMemory(MetaWinner): <NEW_LINE> <INDENT> name = "Meta Winner Finite Memory" <NEW_LINE> @init_args <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> team = [s for s in ordinary_strategies if s().classifier['memory_depth'] < float('inf')] <NEW_LINE> super(MetaWinnerFiniteMemory, self).__init__(team=team) | MetaWinner with the team of Finite Memory Players | 62599036507cdc57c63a5eef |
class Sender(object): <NEW_LINE> <INDENT> def __init__(self, FD_, FDD_, FC_, N_, SPEED_, A_NOISE_, A_SIGNAL_): <NEW_LINE> <INDENT> self.source_sequence = [] <NEW_LINE> self.source_signal = [] <NEW_LINE> self.encoded_signal = [] <NEW_LINE> self.noise = [] <NEW_LINE> self.ASK = [] <NEW_LINE> self.noise_ASK = [] <NEW_LINE> self.FD = FD_ <NEW_LINE> self.FDD = FDD_ <NEW_LINE> self.FC = FC_ <NEW_LINE> self.N = N_ <NEW_LINE> self.SPEED = SPEED_ <NEW_LINE> self.duration = 1 / self.SPEED <NEW_LINE> self.time_signal = self.N * self.duration <NEW_LINE> self.Wc = 2 * math.pi * self.FC <NEW_LINE> self.A_NOISE = A_NOISE_ <NEW_LINE> self.A_SIGNAL = A_SIGNAL_ <NEW_LINE> <DEDENT> def _coder_hamming(self, code): <NEW_LINE> <INDENT> encoded_sequence = [((code[0] + code[1]) % 2 + code[3]) % 2, ((code[0] + code[2]) % 2 + code[3]) % 2, code[0], ((code[1] + code[2]) % 2 + code[3]) % 2, code[1], code[2], code[3]] <NEW_LINE> return encoded_sequence <NEW_LINE> <DEDENT> def generate_signal(self): <NEW_LINE> <INDENT> self.source_sequence = [random.randint(0, 2) for x in range(0, self.N)] <NEW_LINE> self.source_signal = [] <NEW_LINE> for x in range(0, self.N): <NEW_LINE> <INDENT> self.source_signal += [self.source_sequence[x] for y in arange(0, self.duration, (1.0 / self.FDD))] <NEW_LINE> <DEDENT> <DEDENT> def encode_signal(self): <NEW_LINE> <INDENT> if len(self.source_sequence) % 4 != 0: <NEW_LINE> <INDENT> for x in xrange(len(self.source_sequence) - 4 * (len(self.source_sequence) / 4)): <NEW_LINE> <INDENT> self.source_sequence += [1] <NEW_LINE> <DEDENT> <DEDENT> for x in xrange(len(self.source_sequence) / 4): <NEW_LINE> <INDENT> self.encoded_signal += self._coder_hamming(self.source_sequence[(x * 4): ((x + 1) * 4)]) <NEW_LINE> <DEDENT> <DEDENT> def genetare_noise(self): <NEW_LINE> <INDENT> for x in xrange(0, len(self.encoded_signal)): <NEW_LINE> <INDENT> self.noise += [self.A_NOISE * random.uniform(-1.0, 1.0) for x in arange(0, self.duration, (1.0 / self.FD))] <NEW_LINE> <DEDENT> <DEDENT> def modulate_signal(self): <NEW_LINE> <INDENT> for x in xrange(0, len(self.encoded_signal)): <NEW_LINE> <INDENT> self.ASK += [self.A_SIGNAL * self.encoded_signal[x] * math.sin(self.Wc * t) for t in arange(0, self.duration, (1.0 / self.FD))] <NEW_LINE> <DEDENT> self.noise_ASK = [(self.noise[x] + self.ASK[x]) / 2 for x in xrange(len(self.encoded_signal) * int(self.duration * self.FD))] | The class simulates a transmitter and implements the attributes and methods
that exist in a real system | 6259903666673b3332c31549 |
class RecursiveTestFlow(flow.GRRFlow): <NEW_LINE> <INDENT> args_type = RecursiveTestFlowArgs <NEW_LINE> @flow.StateHandler(next_state="End") <NEW_LINE> def Start(self): <NEW_LINE> <INDENT> if self.args.depth < 2: <NEW_LINE> <INDENT> for _ in range(2): <NEW_LINE> <INDENT> self.CallFlow("RecursiveTestFlow", depth=self.args.depth+1, next_state="End") | A test flow which starts some subflows. | 625990366fece00bbacccb00 |
class Geometry(object): <NEW_LINE> <INDENT> def __init__(self, geom_type, coords): <NEW_LINE> <INDENT> self.geom_type = geom_type <NEW_LINE> self.coords = coords | A basic geometry class for ``GeoFeedMixin``.
Instances have two public attributes:
.. attribute:: geom_type
"point", "linestring", "linearring", "polygon"
.. attribute:: coords
For **point**, a tuple or list of two floats: ``(X, Y)``.
For **linestring** or **linearring**, a string: ``"X0 Y0 X1 Y1 ..."``.
For **polygon**, a list of strings: ``["X0 Y0 X1 Y1 ..."]``. Only the
first element is used because the Geo classes support only the exterior
ring.
The constructor does not check its argument types.
This class was created for WebHelpers based on the interface expected by
``GeoFeedMixin.add_georss_element()``. The class is untested. Please send
us feedback on whether it works for you. | 625990363eb6a72ae038b7bd |
class RegistrationChart(BaseChart): <NEW_LINE> <INDENT> queryset = get_user_model().objects.all() <NEW_LINE> date_field = 'date_joined' | Dashboard module with user registration charts.
With default values it is best suited for 2-column dashboard layouts. | 625990361d351010ab8f4c70 |
class Comment(db.Model): <NEW_LINE> <INDENT> __tablename__ = "comment" <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> user_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=False) <NEW_LINE> news_id = db.Column(db.Integer, db.ForeignKey("news.id"), nullable=False) <NEW_LINE> content = db.Column(db.Text, nullable=False) <NEW_LINE> create_time = db.Column(db.DateTime, default=datetime.now) <NEW_LINE> update_time = db.Column(db.DateTime, default=datetime.now) <NEW_LINE> user = db.relationship("User", backref="comments") | Comment | 62599036ac7a0e7691f7363e |
class AlgorithmProvider(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.activate = True <NEW_LINE> self.actions = [] <NEW_LINE> self.contextMenuActions = [] <NEW_LINE> <DEDENT> def loadAlgorithms(self): <NEW_LINE> <INDENT> self.algs = [] <NEW_LINE> self._loadAlgorithms() <NEW_LINE> for alg in self.algs: <NEW_LINE> <INDENT> alg.provider = self <NEW_LINE> <DEDENT> <DEDENT> def _loadAlgorithms(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def initializeSettings(self): <NEW_LINE> <INDENT> ProcessingConfig.settingIcons[self.getDescription()] = self.getIcon() <NEW_LINE> name = 'ACTIVATE_' + self.getName().upper().replace(' ', '_') <NEW_LINE> ProcessingConfig.addSetting(Setting(self.getDescription(), name, self.tr('Activate'), self.activate)) <NEW_LINE> <DEDENT> def unload(self): <NEW_LINE> <INDENT> name = 'ACTIVATE_' + self.getName().upper().replace(' ', '_') <NEW_LINE> ProcessingConfig.removeSetting(name) <NEW_LINE> <DEDENT> def getName(self): <NEW_LINE> <INDENT> return 'processing' <NEW_LINE> <DEDENT> def getDescription(self): <NEW_LINE> <INDENT> return self.tr('Generic algorithm provider') <NEW_LINE> <DEDENT> def getIcon(self): <NEW_LINE> <INDENT> return QIcon(os.path.dirname(__file__) + '/../images/alg.png') <NEW_LINE> <DEDENT> def getSupportedOutputRasterLayerExtensions(self): <NEW_LINE> <INDENT> return ['tif'] <NEW_LINE> <DEDENT> def getSupportedOutputVectorLayerExtensions(self): <NEW_LINE> <INDENT> formats = QgsVectorFileWriter.supportedFiltersAndFormats() <NEW_LINE> extensions = ['shp'] <NEW_LINE> for extension in formats.keys(): <NEW_LINE> <INDENT> extension = unicode(extension) <NEW_LINE> extension = extension[extension.find('*.') + 2:] <NEW_LINE> extension = extension[:extension.find(' ')] <NEW_LINE> if extension.lower() != 'shp': <NEW_LINE> <INDENT> extensions.append(extension) <NEW_LINE> <DEDENT> <DEDENT> return extensions <NEW_LINE> <DEDENT> def getSupportedOutputTableExtensions(self): <NEW_LINE> <INDENT> return ['csv'] <NEW_LINE> <DEDENT> def supportsNonFileBasedOutput(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def canBeActivated(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def tr(self, string, context=''): <NEW_LINE> <INDENT> if context == '': <NEW_LINE> <INDENT> context = self.__class__.__name__ <NEW_LINE> <DEDENT> return QCoreApplication.translate(context, string) | This is the base class for algorithms providers.
An algorithm provider is a set of related algorithms, typically
from the same external application or related to a common area
of analysis. | 6259903615baa723494630f0 |
class CheckinRuleCmd(Command): <NEW_LINE> <INDENT> def execute(self): <NEW_LINE> <INDENT> sandbox_dir = "/home/apache/inhance_asset_library" <NEW_LINE> rule_code = "6SIMULATION" <NEW_LINE> rule_code = self.kwargs.get("rule_code") <NEW_LINE> rule = Search.get_by_code("config/ingest_rule", rule_code) <NEW_LINE> cmd = IngestionCmd(rule=rule, session_base_dir=sandbox_dir) <NEW_LINE> cmd.execute() | Command to checkin a rule | 6259903673bcbd0ca4bcb3dd |
class ConvoLayer(nn.Module): <NEW_LINE> <INDENT> def __init__(self, channel_in, channel_out, kernel_size, stride = 1, neg_slope = 0.1): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> padding = (kernel_size-1)//2 <NEW_LINE> self.conv = nn.Conv2d(channel_in, channel_out, kernel_size, stride, padding, bias=False) <NEW_LINE> self.bn = nn.BatchNorm2d(channel_out) <NEW_LINE> self.lrelu = nn.LeakyReLU(neg_slope) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> y = self.conv(x) <NEW_LINE> y = self.bn(y) <NEW_LINE> y = self.lrelu(y) <NEW_LINE> return y | Basic Conv2D layer with few parameters: channels, kernel size
Followed by a batch norm layer and a Leaky ReLU layer.
Leaky ReLU has a negative slope of 0.1 by default | 6259903630dc7b76659a0989 |
class HttpResolver(object): <NEW_LINE> <INDENT> def __init__(self, url_download, dependency, source, cwd): <NEW_LINE> <INDENT> self.url_download = url_download <NEW_LINE> self.dependency = dependency <NEW_LINE> self.source = source <NEW_LINE> self.cwd = cwd <NEW_LINE> <DEDENT> def resolve(self): <NEW_LINE> <INDENT> self.dependency.current_source = self.source <NEW_LINE> source_hash = hashlib.sha1(self.source.encode("utf-8")).hexdigest()[:6] <NEW_LINE> folder_name = "http-" + source_hash <NEW_LINE> folder_path = os.path.join(self.cwd, folder_name) <NEW_LINE> if not os.path.exists(folder_path): <NEW_LINE> <INDENT> os.makedirs(folder_path) <NEW_LINE> <DEDENT> if self.dependency.filename: <NEW_LINE> <INDENT> filename = self.dependency.filename <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> filename = None <NEW_LINE> <DEDENT> file_path = self.url_download.download( cwd=folder_path, source=self.source, filename=filename ) <NEW_LINE> assert os.path.isfile(file_path), "We should have a valid path here!" <NEW_LINE> return file_path | Http Resolver functionality. Downloads a file. | 625990368c3a8732951f76af |
class ComputeMachineTypesListRequest(_messages.Message): <NEW_LINE> <INDENT> filter = _messages.StringField(1) <NEW_LINE> maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32, default=500) <NEW_LINE> pageToken = _messages.StringField(3) <NEW_LINE> project = _messages.StringField(4, required=True) <NEW_LINE> zone = _messages.StringField(5, required=True) | A ComputeMachineTypesListRequest object.
Fields:
filter: Sets a filter expression for filtering listed resources, in the
form filter={expression}. Your {expression} must be in the format:
field_name comparison_string literal_string. The field_name is the name
of the field you want to compare. Only atomic field types are supported
(string, number, boolean). The comparison_string must be either eq
(equals) or ne (not equals). The literal_string is the string value to
filter to. The literal value must be valid for the type of field you are
filtering by (string, number, boolean). For string fields, the literal
value is interpreted as a regular expression using RE2 syntax. The
literal value must match the entire field. For example, to filter for
instances that do not have a name of example-instance, you would use
filter=name ne example-instance. Compute Engine Beta API Only: When
filtering in the Beta API, you can also filter on nested fields. For
example, you could filter on instances that have set the
scheduling.automaticRestart field to true. Use filtering on nested
fields to take advantage of labels to organize and search for results
based on label values. The Beta API also supports filtering on multiple
expressions by providing each separate expression within parentheses.
For example, (scheduling.automaticRestart eq true) (zone eq us-
central1-f). Multiple expressions are treated as AND expressions,
meaning that resources must match all expressions to pass the filters.
maxResults: The maximum number of results per page that should be
returned. If the number of available results is larger than maxResults,
Compute Engine returns a nextPageToken that can be used to get the next
page of results in subsequent list requests.
pageToken: Specifies a page token to use. Set pageToken to the
nextPageToken returned by a previous list request to get the next page
of results.
project: Project ID for this request.
zone: The name of the zone for this request. | 6259903626238365f5fadcab |
class LinearInterpolationLanguageModel(TrigramLanguageModel): <NEW_LINE> <INDENT> def __init__(self, sentences, lambda1, lambda2, lambda3, lambda4, lambda5, k_smoothing=0): <NEW_LINE> <INDENT> TrigramLanguageModel.__init__(self, sentences, k_smoothing) <NEW_LINE> self.lambda1 = lambda1 <NEW_LINE> self.lambda2 = lambda2 <NEW_LINE> self.lambda3 = lambda3 <NEW_LINE> self.lambda4 = lambda4 <NEW_LINE> self.lambda5 = lambda5 <NEW_LINE> <DEDENT> def calculate_linear_probability(self, previous_previous_word, previous_word, word): <NEW_LINE> <INDENT> unigram_prob = self.calculate_unigram_probablities(word) <NEW_LINE> bigram_prob = self.calculate_bigram_probability(previous_word, word) <NEW_LINE> trigram_prob = self.calculate_trigram_probability( previous_previous_word, previous_word, word) <NEW_LINE> if (trigram_prob == None or trigram_prob == None): <NEW_LINE> <INDENT> print('Fall back error due to trigram 0') <NEW_LINE> return self.lambda4 * unigram_prob + self.lambda5 * bigram_prob <NEW_LINE> <DEDENT> return self.lambda1 * unigram_prob + self.lambda2 * bigram_prob + self.lambda3 * trigram_prob <NEW_LINE> <DEDENT> def calculate_trigram_sentence_log_probability(self, sentence): <NEW_LINE> <INDENT> sentence_log_probability = 0 <NEW_LINE> previous_previous_word = None <NEW_LINE> previous_word = None <NEW_LINE> for word in sentence: <NEW_LINE> <INDENT> if previous_previous_word != None and previous_word != None: <NEW_LINE> <INDENT> linear_prob = self.calculate_linear_probability( previous_previous_word, previous_word, word ) <NEW_LINE> if linear_prob == 0: <NEW_LINE> <INDENT> return float('-inf') <NEW_LINE> <DEDENT> sentence_log_probability += math.log(linear_prob, 2) <NEW_LINE> <DEDENT> previous_previous_word = previous_word <NEW_LINE> previous_word = word <NEW_LINE> <DEDENT> return sentence_log_probability | @Param: Sentences with each sentence is a list of words
@Param smoothing: Function that does smoothing | 62599036507cdc57c63a5ef1 |
class MessageLayer(object): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> self._parent = parent <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def reliability_response(request, response): <NEW_LINE> <INDENT> if not (response.type == defines.inv_types['ACK'] or response.type == defines.inv_types['RST']): <NEW_LINE> <INDENT> if request.type == defines.inv_types['CON']: <NEW_LINE> <INDENT> if request.acknowledged: <NEW_LINE> <INDENT> response.type = defines.inv_types['CON'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> request.acknowledged = True <NEW_LINE> response.type = defines.inv_types['ACK'] <NEW_LINE> response.mid = request.mid <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> response.type = defines.inv_types['NON'] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> response.mid = request.mid <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> def matcher_response(self, response): <NEW_LINE> <INDENT> if response.mid is None: <NEW_LINE> <INDENT> response.mid = self._parent.current_mid % (1 << 16) <NEW_LINE> self._parent.current_mid += 1 <NEW_LINE> <DEDENT> host, port = response.destination <NEW_LINE> if host is None: <NEW_LINE> <INDENT> raise AttributeError("Response has no destination address set") <NEW_LINE> <DEDENT> if port is None or port == 0: <NEW_LINE> <INDENT> raise AttributeError("Response has no destination port set") <NEW_LINE> <DEDENT> key = hash(str(host) + str(port) + str(response.mid)) <NEW_LINE> self._parent.sent[key] = (response, time.time()) <NEW_LINE> return response <NEW_LINE> <DEDENT> def handle_message(self, message): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> host, port = message.source <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> key = hash(str(host) + str(port) + str(message.mid)) <NEW_LINE> t = self._parent.sent.get(key) <NEW_LINE> if t is None: <NEW_LINE> <INDENT> log.err(defines.types[message.type] + " received without the corresponding message") <NEW_LINE> return <NEW_LINE> <DEDENT> response, timestamp = t <NEW_LINE> if message.type == defines.inv_types['ACK']: <NEW_LINE> <INDENT> response.acknowledged = True <NEW_LINE> <DEDENT> elif message.type == defines.inv_types['RST']: <NEW_LINE> <INDENT> response.rejected = True <NEW_LINE> <DEDENT> if message.type == defines.inv_types['RST']: <NEW_LINE> <INDENT> for resource in self._parent.relation.keys(): <NEW_LINE> <INDENT> host, port = message.source <NEW_LINE> key = hash(str(host) + str(port) + str(response.token)) <NEW_LINE> observers = self._parent.relation.get(resource) <NEW_LINE> if observers is not None: <NEW_LINE> <INDENT> del observers[key] <NEW_LINE> log.msg("Cancel observing relation") <NEW_LINE> if len(observers) == 0: <NEW_LINE> <INDENT> del self._parent.relation[resource] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> log.msg("Cancel retrasmission to:" + host + ":" + str(port)) <NEW_LINE> try: <NEW_LINE> <INDENT> call_id, retrasmission_count = self._parent.call_id.get(key) <NEW_LINE> if call_id is not None: <NEW_LINE> <INDENT> call_id.cancel() <NEW_LINE> <DEDENT> <DEDENT> except AlreadyCancelled: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self._parent.sent[key] = (response, time.time()) | Handles message functionality: Acknowledgment, Reset. | 625990366e29344779b017a9 |
class RandomFlatten(Flatten): <NEW_LINE> <INDENT> def _propose(self, user, categories): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> friend = self._resolve_friend(random.randint(0, self.user2cat.dictionary.index.shape[0]-1)) <NEW_LINE> if friend not in user.friends: <NEW_LINE> <INDENT> user.friends.add(friend) <NEW_LINE> return | Proposes the updated list of followees by randomly adding new ones until old top categories are phased out | 625990368da39b475be04347 |
class Sampler(object): <NEW_LINE> <INDENT> def __init__(self, data_source): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> raise NotImplementedError | Base class for all Samplers.
Every Sampler subclass has to provide an :meth:`__iter__` method, providing a
way to iterate over indices of dataset elements, and a :meth:`__len__` method
that returns the length of the returned iterators.
.. note:: The :meth:`__len__` method isn't strictly required by
:class:`~torch.utils.data.DataLoader`, but is expected in any
calculation involving the length of a :class:`~torch.utils.data.DataLoader`. | 62599036d18da76e235b79fb |
class OpenedLoanManager(models.Manager): <NEW_LINE> <INDENT> def get_queryset(self): <NEW_LINE> <INDENT> return super().get_queryset().filter(loanreturn__isnull=True) | Loan records that have not yet been returned | 62599036c432627299fa4150 |
class ModelTypeOutputOnly(object): <NEW_LINE> <INDENT> swagger_types = { 'external_station_id': 'int' } <NEW_LINE> attribute_map = { 'external_station_id': '_external_station_id' } <NEW_LINE> def __init__(self, external_station_id=None): <NEW_LINE> <INDENT> self._external_station_id = None <NEW_LINE> self.discriminator = None <NEW_LINE> if external_station_id is not None: <NEW_LINE> <INDENT> self.external_station_id = external_station_id <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def external_station_id(self): <NEW_LINE> <INDENT> return self._external_station_id <NEW_LINE> <DEDENT> @external_station_id.setter <NEW_LINE> def external_station_id(self, external_station_id): <NEW_LINE> <INDENT> self._external_station_id = external_station_id <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ModelTypeOutputOnly): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62599036b57a9660fecd2bd4 |
class AttentionalWeights(nn.Module): <NEW_LINE> <INDENT> def __init__(self, feature_dim, num_classes=70): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.num_classes = num_classes <NEW_LINE> self.feat_dim = feature_dim <NEW_LINE> self.att_fc = nn.Sequential( nn.Linear(128, 64), nn.ReLU(), nn.Linear(64, feature_dim * num_classes), nn.ReLU() ) <NEW_LINE> <DEDENT> def forward(self, attention): <NEW_LINE> <INDENT> return self.att_fc(attention).view(-1, self.feat_dim, self.num_classes) | Compute weights based on spatio-linguistic attention. | 6259903626238365f5fadcad |
class SessionOutResult: <NEW_LINE> <INDENT> pass | Class that provides functionality for a session's result by means of properties | 62599036796e427e5384f8d4 |
class KeyboardPlayer(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.name = "Keyboard Player" <NEW_LINE> self.uzh_shortname = "kplayer" <NEW_LINE> <DEDENT> def get_input(self, text): <NEW_LINE> <INDENT> reply = input(text + "( u - up, h - left, k - down, l - right)") <NEW_LINE> if reply[0] == 'u': <NEW_LINE> <INDENT> return MOVE_UP <NEW_LINE> <DEDENT> elif reply[0] == 'h': <NEW_LINE> <INDENT> return MOVE_LEFT <NEW_LINE> <DEDENT> elif reply[0] == 'l': <NEW_LINE> <INDENT> return MOVE_RIGHT <NEW_LINE> <DEDENT> elif reply[0] == 'k': <NEW_LINE> <INDENT> return MOVE_DOWN <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return MOVE_NONE <NEW_LINE> <DEDENT> <DEDENT> def move_sheep(self): <NEW_LINE> <INDENT> return self.get_input("Move Sheep") <NEW_LINE> <DEDENT> def move_wolf(self): <NEW_LINE> <INDENT> return self.get_input("Move Wolf") | Keyboard Kingsheep player (doesn't move) | 625990369b70327d1c57fede |
class gamification_goal_type_data(osv.Model): <NEW_LINE> <INDENT> _inherit = 'gamification.goal.type' <NEW_LINE> def number_following(self, cr, uid, xml_id="mail.thread", context=None): <NEW_LINE> <INDENT> ref_obj = self.pool.get(xml_id) <NEW_LINE> user = self.pool.get('res.users').browse(cr, uid, uid, context=context) <NEW_LINE> return ref_obj.search(cr, uid, [('message_follower_ids', '=', user.partner_id.id)], count=True, context=context) | Goal type data
Methods for more complex goals that are not possible with the 'sum' and 'count' modes.
Each method should return the value that will be set in the 'current' field
of a user's goal. The return type must be a float or integer. | 625990368a349b6b4368739a |
class CallMethodNode(Node): <NEW_LINE> <INDENT> def __init__(self, object_name, method_name, args=None, kwargs=None, asvar=False): <NEW_LINE> <INDENT> self.object_name_resolver = object_name <NEW_LINE> self.method_name_resolver = method_name <NEW_LINE> self.args_resolvers = args or [] <NEW_LINE> self.kwargs_resolvers = kwargs or {} <NEW_LINE> self.asvar = asvar <NEW_LINE> <DEDENT> def render(self, context): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> object_obj = self.object_name_resolver.resolve(context) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> object_obj = context.get(self.object_name_resolver, None) <NEW_LINE> <DEDENT> method_name = self.method_name_resolver.resolve(context) or str(self.method_name_resolver) <NEW_LINE> if not object_obj or not method_name: <NEW_LINE> <INDENT> raise TemplateSyntaxError("{{%% callmethod object_name.method_name %%}} cannot make sense of the resolved values for object_name.method_name '{object_name}.{method_name}'".format(object_name=self.object_name_resolver, method_name=self.method_name_resolver)) <NEW_LINE> <DEDENT> args = [] <NEW_LINE> for arg_resolver in self.args_resolvers: <NEW_LINE> <INDENT> arg = arg_resolver.resolve(context) <NEW_LINE> args.append(arg) <NEW_LINE> <DEDENT> kwargs = {} <NEW_LINE> for k_raw, v_resolver in self.kwargs_resolvers.items(): <NEW_LINE> <INDENT> k = smart_text(k_raw,'ascii') <NEW_LINE> v = v_resolver.resolve(context) <NEW_LINE> kwargs[k]=v <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> output = getattr(object_obj, method_name)(*args, **kwargs) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print("\033[91m{err_type} from {{%% callmethod <{obj_name}>.{method_name}() %%}}: {err_msg}\033[0m".format(err_type=e.__class__.__name__, obj_name=object_obj, method_name=method_name, err_msg=e)) <NEW_LINE> output = "" <NEW_LINE> <DEDENT> if self.asvar: <NEW_LINE> <INDENT> context[self.asvar] = output <NEW_LINE> return "" <NEW_LINE> <DEDENT> return output | Renders the relevant value of a {% callmethod %} template tag | 625990360a366e3fb87ddb3f |
class CommaList(StrList): <NEW_LINE> <INDENT> SEPARATOR = ',' | Comma-separated list | 6259903616aa5153ce401646 |
class BboxDeviceScanner(DeviceScanner): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> self.host = config[CONF_HOST] <NEW_LINE> """Initialize the scanner.""" <NEW_LINE> self.last_results: List[Device] = [] <NEW_LINE> self.success_init = self._update_info() <NEW_LINE> _LOGGER.info("Scanner initialized") <NEW_LINE> <DEDENT> def scan_devices(self): <NEW_LINE> <INDENT> self._update_info() <NEW_LINE> return [device.mac for device in self.last_results] <NEW_LINE> <DEDENT> def get_device_name(self, device): <NEW_LINE> <INDENT> filter_named = [ result.name for result in self.last_results if result.mac == device ] <NEW_LINE> if filter_named: <NEW_LINE> <INDENT> return filter_named[0] <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> @Throttle(MIN_TIME_BETWEEN_SCANS) <NEW_LINE> def _update_info(self): <NEW_LINE> <INDENT> _LOGGER.info("Scanning...") <NEW_LINE> import pybbox <NEW_LINE> box = pybbox.Bbox(ip=self.host) <NEW_LINE> result = box.get_all_connected_devices() <NEW_LINE> now = dt_util.now() <NEW_LINE> last_results = [] <NEW_LINE> for device in result: <NEW_LINE> <INDENT> if device["active"] != 1: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> last_results.append( Device( device["macaddress"], device["hostname"], device["ipaddress"], now ) ) <NEW_LINE> <DEDENT> self.last_results = last_results <NEW_LINE> _LOGGER.info("Scan successful") <NEW_LINE> return True | This class scans for devices connected to the bbox. | 625990368da39b475be04349 |
class Predictor_KNN(Predictor): <NEW_LINE> <INDENT> def predict(self, fromPoint,toPoint): <NEW_LINE> <INDENT> return self.predict_KNN(fromPoint, toPoint) | A utility class which inherits a complete KNN predictor from Predictor
Overrides the predict function with the Predictor.predict_KNN function | 625990364e696a045264e6cf |
class MudMeta(MudObject): <NEW_LINE> <INDENT> pass | Objects that are about/describe other object(s). | 6259903673bcbd0ca4bcb3e2 |
class What(object): <NEW_LINE> <INDENT> Out = 0 <NEW_LINE> In = 1 <NEW_LINE> Both = 2 <NEW_LINE> OutE = 3 <NEW_LINE> InE = 4 <NEW_LINE> BothE = 5 <NEW_LINE> OutV = 6 <NEW_LINE> InV = 7 <NEW_LINE> Eval = 8 <NEW_LINE> Coalesce = 9 <NEW_LINE> If = 10 <NEW_LINE> IfNull = 11 <NEW_LINE> Expand = 12 <NEW_LINE> First = 13 <NEW_LINE> Last = 14 <NEW_LINE> Count = 15 <NEW_LINE> Min = 16 <NEW_LINE> Max = 17 <NEW_LINE> Avg = 18 <NEW_LINE> Mode = 19 <NEW_LINE> Median = 20 <NEW_LINE> Percentile = 21 <NEW_LINE> Variance = 22 <NEW_LINE> StdDev = 23 <NEW_LINE> Sum = 24 <NEW_LINE> Date = 25 <NEW_LINE> SysDate = 26 <NEW_LINE> Format = 27 <NEW_LINE> Dijkstra = 28 <NEW_LINE> ShortestPath = 29 <NEW_LINE> Distance = 30 <NEW_LINE> Distinct = 31 <NEW_LINE> UnionAll = 32 <NEW_LINE> Intersect = 33 <NEW_LINE> Difference = 34 <NEW_LINE> SymmetricDifference = 35 <NEW_LINE> Set = 36 <NEW_LINE> List = 37 <NEW_LINE> Map = 38 <NEW_LINE> TraversedElement = 39 <NEW_LINE> TraversedEdge = 40 <NEW_LINE> TraversedVertex = 41 <NEW_LINE> Any = 42 <NEW_LINE> All = 43 <NEW_LINE> Subscript = 44 <NEW_LINE> Append = 45 <NEW_LINE> AsBoolean = 46 <NEW_LINE> AsDate = 47 <NEW_LINE> AsDatetime = 48 <NEW_LINE> AsDecimal = 49 <NEW_LINE> AsFloat = 50 <NEW_LINE> AsInteger = 51 <NEW_LINE> AsList = 52 <NEW_LINE> AsLong = 53 <NEW_LINE> AsMap = 54 <NEW_LINE> AsSet = 55 <NEW_LINE> AsString = 56 <NEW_LINE> CharAt = 57 <NEW_LINE> Convert = 58 <NEW_LINE> Exclude = 59 <NEW_LINE> FormatMethod = 60 <NEW_LINE> Hash = 61 <NEW_LINE> Include = 62 <NEW_LINE> IndexOf = 63 <NEW_LINE> JavaType = 64 <NEW_LINE> Keys = 65 <NEW_LINE> Left = 66 <NEW_LINE> Length = 67 <NEW_LINE> Normalize = 68 <NEW_LINE> Prefix = 69 <NEW_LINE> Remove = 70 <NEW_LINE> RemoveAll = 71 <NEW_LINE> Replace = 72 <NEW_LINE> Right = 73 <NEW_LINE> Size = 74 <NEW_LINE> SubString = 75 <NEW_LINE> Trim = 76 <NEW_LINE> ToJSON = 77 <NEW_LINE> ToLowerCase = 78 <NEW_LINE> ToUpperCase = 79 <NEW_LINE> Type = 80 <NEW_LINE> Values = 81 <NEW_LINE> WhatFilter = 82 <NEW_LINE> WhatCustom = 83 <NEW_LINE> WhatLet = 84 <NEW_LINE> AtThis = 85 <NEW_LINE> AtRid = 86 <NEW_LINE> AtClass = 87 <NEW_LINE> AtVersion = 88 <NEW_LINE> AtSize = 89 <NEW_LINE> AtType = 90 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.name_override = None <NEW_LINE> <DEDENT> def as_(self, name_override): <NEW_LINE> <INDENT> self.name_override = name_override <NEW_LINE> return self | Specify 'what' a Query retrieves. | 625990365e10d32532ce41b0 |
class CORSResource(ModelResource): <NEW_LINE> <INDENT> def create_response(self, *args, **kwargs): <NEW_LINE> <INDENT> response = super(CORSResource, self).create_response(*args, **kwargs) <NEW_LINE> response['Access-Control-Allow-Origin'] = '*' <NEW_LINE> response['Access-Control-Allow-Headers'] = 'Content-Type' <NEW_LINE> return response <NEW_LINE> <DEDENT> def method_check(self, request, allowed=None): <NEW_LINE> <INDENT> if allowed is None: <NEW_LINE> <INDENT> allowed = [] <NEW_LINE> <DEDENT> request_method = request.method.lower() <NEW_LINE> allows = ','.join(map(str.upper, allowed)) <NEW_LINE> if request_method == 'options': <NEW_LINE> <INDENT> response = http.HttpResponse(allows) <NEW_LINE> response['Access-Control-Allow-Origin'] = '*' <NEW_LINE> response['Access-Control-Allow-Headers'] = 'Content-Type' <NEW_LINE> response['Allow'] = allows <NEW_LINE> raise ImmediateHttpResponse(response=response) <NEW_LINE> <DEDENT> if request_method not in allowed: <NEW_LINE> <INDENT> response = tasty_http.HttpMethodNotAllowed(allows) <NEW_LINE> response['Allow'] = allows <NEW_LINE> raise ImmediateHttpResponse(response=response) <NEW_LINE> <DEDENT> return request_method | Adds CORS headers to resources that subclass this. | 62599036c432627299fa4152 |
class GetMemberInDepartmentSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> userId = serializers.IntegerField(source='department_member.user.id', read_only=True) <NEW_LINE> fullName = serializers.CharField(source='department_member') <NEW_LINE> sex = serializers.SerializerMethodField() <NEW_LINE> birthDay = serializers.SerializerMethodField() <NEW_LINE> idNumber = serializers.CharField(source='department_member.id_number') <NEW_LINE> address = serializers.CharField(source='department_member.address') <NEW_LINE> avatarUrl = serializers.SerializerMethodField() <NEW_LINE> position = serializers.CharField() <NEW_LINE> isLeader = serializers.BooleanField(source='is_leader') <NEW_LINE> department = serializers.CharField() <NEW_LINE> totalTime = serializers.SerializerMethodField() <NEW_LINE> totalTag = serializers.SerializerMethodField() <NEW_LINE> totalTagFinished = serializers.SerializerMethodField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = DepartmentMember <NEW_LINE> fields = ('userId', 'fullName', 'sex', 'birthDay', 'idNumber', 'address', 'avatarUrl', 'position', 'isLeader', 'department', 'totalTime', 'totalTag', 'totalTagFinished') <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_birthDay(obj): <NEW_LINE> <INDENT> return arrow.get(obj.department_member.birth_day).replace(tzinfo=settings.TIME_ZONE).datetime <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_avatarUrl(obj): <NEW_LINE> <INDENT> return build_absolute_url(obj.department_member.get_avatar_url()) if obj.department_member else None <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_totalTime(obj): <NEW_LINE> <INDENT> start = arrow.now().to(settings.TIME_ZONE).floor('month').datetime <NEW_LINE> end = arrow.now().to(settings.TIME_ZONE).ceil('month').datetime <NEW_LINE> wt = WorkTime.objects.filter(user=obj, removed=False, date__range=(start, end)).aggregate(value=Sum('time_total'))['value'] <NEW_LINE> return wt <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_totalTag(obj): <NEW_LINE> <INDENT> start = arrow.now().to(settings.TIME_ZONE).floor('month').datetime <NEW_LINE> end = arrow.now().to(settings.TIME_ZONE).ceil('month').datetime <NEW_LINE> t = Tag.objects.filter(user=obj, removed=False, created_at__range=(start, end)).count() <NEW_LINE> return t <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_totalTagFinished(obj): <NEW_LINE> <INDENT> start = arrow.now().to(settings.TIME_ZONE).floor('month').datetime <NEW_LINE> end = arrow.now().to(settings.TIME_ZONE).ceil('month').datetime <NEW_LINE> tf = Tag.objects.filter(user=obj, removed=False, state='CO', created_at__range=(start, end)).count() <NEW_LINE> return tf <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_sex(obj): <NEW_LINE> <INDENT> if obj.department_member.sex == 'M': <NEW_LINE> <INDENT> return 'Nam' <NEW_LINE> <DEDENT> elif obj.department_member.sex == 'F': <NEW_LINE> <INDENT> return 'Nữ' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> <DEDENT> def update(self, instance, validated_data): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> pass | Get profile member in department | 625990361f5feb6acb163d4d |
class SqlSensor(BaseSensorOperator): <NEW_LINE> <INDENT> template_fields = ('sql',) <NEW_LINE> template_ext = ('.hql', '.sql',) <NEW_LINE> __mapper_args__ = { 'polymorphic_identity': 'SqlSensor' } <NEW_LINE> @apply_defaults <NEW_LINE> def __init__(self, conn_id, sql, *args, **kwargs): <NEW_LINE> <INDENT> super(SqlSensor, self).__init__(*args, **kwargs) <NEW_LINE> self.sql = sql <NEW_LINE> self.conn_id = conn_id <NEW_LINE> session = settings.Session() <NEW_LINE> db = session.query(DB).filter(DB.conn_id==conn_id).all() <NEW_LINE> if not db: <NEW_LINE> <INDENT> raise Exception("conn_id doesn't exist in the repository") <NEW_LINE> <DEDENT> self.hook = db[0].get_hook() <NEW_LINE> session.commit() <NEW_LINE> session.close() <NEW_LINE> <DEDENT> def poke(self): <NEW_LINE> <INDENT> logging.info('Poking: ' + self.sql) <NEW_LINE> records = self.hook.get_records(self.sql) <NEW_LINE> if not records: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if str(records[0][0]) in ('0', '',): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> print(records[0][0]) | Runs a sql statement until a criteria is met. It will keep trying until
sql returns no rows, or if the first cell is in (0, '0', ''). | 6259903607d97122c4217df7 |
class DesignAttributes: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.annotations = [] <NEW_LINE> self.attributes = dict() <NEW_LINE> self.metadata = Metadata() <NEW_LINE> <DEDENT> def add_annotation(self, annotation): <NEW_LINE> <INDENT> self.annotations.append(annotation) <NEW_LINE> <DEDENT> def add_attribute(self, key, value): <NEW_LINE> <INDENT> self.attributes[key] = value <NEW_LINE> <DEDENT> def set_metadata(self, metadata): <NEW_LINE> <INDENT> self.metadata = metadata <NEW_LINE> <DEDENT> def scale(self, factor): <NEW_LINE> <INDENT> for ann in self.annotations: <NEW_LINE> <INDENT> ann.scale(factor) <NEW_LINE> <DEDENT> <DEDENT> def json(self): <NEW_LINE> <INDENT> return { "annotations" : [a.json() for a in self.annotations], "metadata" : self.metadata.json(), "attributes" : stringify_attributes(self.attributes), } | The DesignAttributes class corresponds to the design_attributes
object in the Open JSON format | 6259903630c21e258be99968 |
class Library(models.Model): <NEW_LINE> <INDENT> uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) <NEW_LINE> name = models.CharField(_("The name of the library/institute"), max_length=128) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = "libraries" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name | Library or institute that may hold one or more catalogues | 62599036d53ae8145f9195c0 |
class Feed(object): <NEW_LINE> <INDENT> def __init__(self, name, label, description, display_priority): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.label = label <NEW_LINE> self.description = description <NEW_LINE> self.display_priority = display_priority <NEW_LINE> <DEDENT> def fetch(self): <NEW_LINE> <INDENT> pass | Base class for Feeds. Do not instantiate directly. | 6259903616aa5153ce401648 |
class Server(BaseModel): <NEW_LINE> <INDENT> __tablename__ = 'redis_server' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> name = db.Column(db.String(64), unique=True) <NEW_LINE> description = db.Column(db.String(512)) <NEW_LINE> host = db.Column(db.String(15)) <NEW_LINE> port = db.Column(db.Integer, default=6379) <NEW_LINE> password = db.Column(db.String(16)) <NEW_LINE> @property <NEW_LINE> def redis(self): <NEW_LINE> <INDENT> return StrictRedis(host=self.host, port=self.port, password=self.password) <NEW_LINE> <DEDENT> def ping(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.redis.ping() <NEW_LINE> <DEDENT> except RedisError: <NEW_LINE> <INDENT> raise RedisConnectError(400, 'redis server %s can not connected' % self.host) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def status(self): <NEW_LINE> <INDENT> status = 'error' <NEW_LINE> try: <NEW_LINE> <INDENT> if self.ping(): <NEW_LINE> <INDENT> status = 'ok' <NEW_LINE> <DEDENT> <DEDENT> except RedisConnectError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return status <NEW_LINE> <DEDENT> def get_metrics(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.redis.info() <NEW_LINE> <DEDENT> except RedisError: <NEW_LINE> <INDENT> raise RedisConnectError(400, 'redis server %s can not connected' % self.host) | Redis server model
| 6259903615baa723494630f6 |
class Station(Producer): <NEW_LINE> <INDENT> key_schema = avro.load(f"{Path(__file__).parents[0]}/schemas/arrival_key.json") <NEW_LINE> value_schema = avro.load(f"{Path(__file__).parents[0]}/schemas/arrival_value.json") <NEW_LINE> def __init__(self, station_id, name, color, direction_a=None, direction_b=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> station_name = ( self.name.lower() .replace("/", "_and_") .replace(" ", "_") .replace("-", "_") .replace("'", "") ) <NEW_LINE> topic_name = f"station.arrivals.{station_name}" <NEW_LINE> super().__init__( topic_name, key_schema=Station.key_schema, value_schema=Station.value_schema, num_partitions=1, num_replicas=1, ) <NEW_LINE> self.station_id = int(station_id) <NEW_LINE> self.color = color <NEW_LINE> self.dir_a = direction_a <NEW_LINE> self.dir_b = direction_b <NEW_LINE> self.a_train = None <NEW_LINE> self.b_train = None <NEW_LINE> self.turnstile = Turnstile(self) <NEW_LINE> <DEDENT> def run(self, train, direction, prev_station_id, prev_direction): <NEW_LINE> <INDENT> self.producer.produce( topic=self.topic_name, key_schema=Station.key_schema, value_schema=Station.value_schema, key={"timestamp": self.time_millis()}, value={ "station_id" : self.station_id, "train_id" : train.train_id, "direction" : direction, "line" : self.color.name, "train_status" : str(train.status), "prev_station_id" : prev_station_id, "prev_direction" : prev_direction } ) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Station | {:^5} | {:<30} | Direction A: | {:^5} | departing to {:<30} | Direction B: | {:^5} | departing to {:<30} | ".format( self.station_id, self.name, self.a_train.train_id if self.a_train is not None else "---", self.dir_a.name if self.dir_a is not None else "---", self.b_train.train_id if self.b_train is not None else "---", self.dir_b.name if self.dir_b is not None else "---", ) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self) <NEW_LINE> <DEDENT> def arrive_a(self, train, prev_station_id, prev_direction): <NEW_LINE> <INDENT> self.a_train = train <NEW_LINE> self.run(train, "a", prev_station_id, prev_direction) <NEW_LINE> <DEDENT> def arrive_b(self, train, prev_station_id, prev_direction): <NEW_LINE> <INDENT> self.b_train = train <NEW_LINE> self.run(train, "b", prev_station_id, prev_direction) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.turnstile.close() <NEW_LINE> super(Station, self).close() | Defines a single station | 6259903673bcbd0ca4bcb3e4 |
class Properties(object): <NEW_LINE> <INDENT> properties = {} <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.application_id = os.environ['APPLICATION_ID'] <NEW_LINE> shard_index = self.application_id.find('~') <NEW_LINE> if shard_index != -1: <NEW_LINE> <INDENT> self.application_id = self.application_id[shard_index+1:] <NEW_LINE> <DEDENT> if self.application_id == 'None': <NEW_LINE> <INDENT> raise RuntimeError('Please declare application id in app.yaml.') <NEW_LINE> <DEDENT> <DEDENT> def get(self): <NEW_LINE> <INDENT> path = os.path.dirname(os.path.abspath(__file__)) <NEW_LINE> filename = '{0}.{1}'.format(self.application_id, 'json') <NEW_LINE> path = os.path.abspath(os.path.join(path, '../', 'properties', filename)) <NEW_LINE> if not os.path.exists(path): <NEW_LINE> <INDENT> raise RuntimeError(path + " doesn't exists.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> properties = '' <NEW_LINE> with open(path) as property_file: <NEW_LINE> <INDENT> for line in property_file: <NEW_LINE> <INDENT> properties += line <NEW_LINE> <DEDENT> <DEDENT> results = json.loads(properties) <NEW_LINE> return results | Class object of creating and getting list of
properties for each App Engine project
To use:
PROPERTIES = Properties().get() | 625990365e10d32532ce41b1 |
class AssetList(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Total = None <NEW_LINE> self.List = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Total = params.get("Total") <NEW_LINE> if params.get("List") is not None: <NEW_LINE> <INDENT> self.List = [] <NEW_LINE> for item in params.get("List"): <NEW_LINE> <INDENT> obj = Asset() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.List.append(obj) | Asset list
| 6259903694891a1f408b9fa7 |
class RoundController(ConsoleController): <NEW_LINE> <INDENT> def __init__(self, round): <NEW_LINE> <INDENT> self.round = round <NEW_LINE> screen = RoundScreen(self.round) <NEW_LINE> ConsoleController.__init__(self, screen, commands={ENDL:self.performATurn}) <NEW_LINE> self.performPlayerTurn = self.performPlayerTurn() <NEW_LINE> self.performPlayerTurn.next() <NEW_LINE> <DEDENT> def performATurn(self, event): <NEW_LINE> <INDENT> self.performPlayerTurn.next() <NEW_LINE> <DEDENT> def performPlayerTurn(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> for player in self.round.players: <NEW_LINE> <INDENT> self.screen.currentPlayer = player <NEW_LINE> yield <NEW_LINE> self.runPlayerTurn(player) <NEW_LINE> if self.round.over: <NEW_LINE> <INDENT> self.stopRunning() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def runPlayerTurn(self, player): <NEW_LINE> <INDENT> if player.canPlay(): <NEW_LINE> <INDENT> self.runController(DrawController(player, self.round.gameDeck)) <NEW_LINE> self.runController(PlayerTurnController(player, self.round.players, self.round.matchPileManager, self.round.gameDeck)) | Represents the Round Controller | 6259903607d97122c4217df8 |
class TestBackupFoglampProcess(FoglampProcess): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> pass | # FIXME: | 625990361d351010ab8f4c77 |
class IMCache(MutableMapping): <NEW_LINE> <INDENT> MAXLEN = 1000 <NEW_LINE> def __init__(self, maxlen=MAXLEN, *a, **k): <NEW_LINE> <INDENT> self.filepath = 'IN MEMORY' <NEW_LINE> self.maxlen = maxlen <NEW_LINE> self.d = dict(*a, **k) <NEW_LINE> while len(self) > maxlen: <NEW_LINE> <INDENT> self.popitem() <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.d) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.d) <NEW_LINE> <DEDENT> def __getitem__(self, k): <NEW_LINE> <INDENT> return self.d[k] <NEW_LINE> <DEDENT> def __setitem__(self, k, v): <NEW_LINE> <INDENT> if k not in self and len(self) == self.maxlen: <NEW_LINE> <INDENT> self.popitem() <NEW_LINE> <DEDENT> self.d[k] = v <NEW_LINE> <DEDENT> def __contains__(self, key): <NEW_LINE> <INDENT> return key in self.d <NEW_LINE> <DEDENT> def __delitem__(self, k): <NEW_LINE> <INDENT> del self.d[k] <NEW_LINE> <DEDENT> def __bool__(self): <NEW_LINE> <INDENT> return len(self) != 0 <NEW_LINE> <DEDENT> __nonzero__ = __bool__ <NEW_LINE> def __call__(self, k): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.__getitem__(k) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return None | Read and write to a dict-like cache. | 6259903607d97122c4217df9 |
class Vertex(object): <NEW_LINE> <INDENT> __slots__ = ('index', 'vert', 'norm') <NEW_LINE> def __init__(self, vertex, normal=None, index=None): <NEW_LINE> <INDENT> self.index = index <NEW_LINE> self.vert = Vector3(vertex) <NEW_LINE> self.norm = None <NEW_LINE> if normal != None: self.norm = Vector3(normal) | Contains the coordinates of the vertices and the associated normal vectors | 62599036cad5886f8bdc592a |
class TestGevent(OpenTracingTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.tracer = MockTracer(GeventScopeManager()) <NEW_LINE> self.client = Client(RequestHandler(self.tracer)) <NEW_LINE> <DEDENT> def test_two_callbacks(self): <NEW_LINE> <INDENT> response_greenlet1 = gevent.spawn(self.client.send_task, 'message1') <NEW_LINE> response_greenlet2 = gevent.spawn(self.client.send_task, 'message2') <NEW_LINE> gevent.joinall([response_greenlet1, response_greenlet2]) <NEW_LINE> self.assertEquals('message1::response', response_greenlet1.get()) <NEW_LINE> self.assertEquals('message2::response', response_greenlet2.get()) <NEW_LINE> spans = self.tracer.finished_spans() <NEW_LINE> self.assertEquals(len(spans), 2) <NEW_LINE> for span in spans: <NEW_LINE> <INDENT> self.assertEquals(span.tags.get(tags.SPAN_KIND, None), tags.SPAN_KIND_RPC_CLIENT) <NEW_LINE> <DEDENT> self.assertNotSameTrace(spans[0], spans[1]) <NEW_LINE> self.assertIsNone(spans[0].parent_id) <NEW_LINE> self.assertIsNone(spans[1].parent_id) <NEW_LINE> <DEDENT> def test_parent_not_picked(self): <NEW_LINE> <INDENT> with self.tracer.start_active_span('parent'): <NEW_LINE> <INDENT> response = self.client.send_sync('no_parent') <NEW_LINE> self.assertEquals('no_parent::response', response) <NEW_LINE> <DEDENT> spans = self.tracer.finished_spans() <NEW_LINE> self.assertEquals(len(spans), 2) <NEW_LINE> child_span = get_one_by_operation_name(spans, 'send') <NEW_LINE> self.assertIsNotNone(child_span) <NEW_LINE> parent_span = get_one_by_operation_name(spans, 'parent') <NEW_LINE> self.assertIsNotNone(parent_span) <NEW_LINE> self.assertIsNotChildOf(child_span, parent_span) <NEW_LINE> <DEDENT> def test_bad_solution_to_set_parent(self): <NEW_LINE> <INDENT> with self.tracer.start_active_span('parent') as scope: <NEW_LINE> <INDENT> client = Client(RequestHandler(self.tracer, scope.span.context)) <NEW_LINE> response = client.send_sync('correct_parent') <NEW_LINE> self.assertEquals('correct_parent::response', response) <NEW_LINE> <DEDENT> response = client.send_sync('wrong_parent') <NEW_LINE> self.assertEquals('wrong_parent::response', response) <NEW_LINE> spans = self.tracer.finished_spans() <NEW_LINE> self.assertEquals(len(spans), 3) <NEW_LINE> spans = sorted(spans, key=lambda x: x.start_time) <NEW_LINE> parent_span = get_one_by_operation_name(spans, 'parent') <NEW_LINE> self.assertIsNotNone(parent_span) <NEW_LINE> self.assertIsChildOf(spans[1], parent_span) <NEW_LINE> self.assertIsChildOf(spans[2], parent_span) | There is only one instance of 'RequestHandler' per 'Client'. Methods of
'RequestHandler' are executed in different greenlets, and no Span
propagation among them is done automatically.
Therefore we cannot use the current active span and activate spans.
So one issue here is setting the correct parent span. | 6259903626068e7796d4daa6 |
class CrossCatClient(object): <NEW_LINE> <INDENT> def __init__(self, engine): <NEW_LINE> <INDENT> self.engine = engine <NEW_LINE> <DEDENT> def __getattribute__(self, name): <NEW_LINE> <INDENT> engine = object.__getattribute__(self, 'engine') <NEW_LINE> attr = None <NEW_LINE> if hasattr(engine, name): <NEW_LINE> <INDENT> attr = getattr(engine, name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> attr = object.__getattribute__(self, name) <NEW_LINE> <DEDENT> return attr | A client interface that gives a single interface to the various engines.
Depending on the client_type, dispatches to the appropriate engine constructor. | 6259903663f4b57ef0086622 |
class HtmlBuilder(object): <NEW_LINE> <INDENT> def categories(self, services): <NEW_LINE> <INDENT> div = Tag('div') <NEW_LINE> for annotation_types in sorted(services.categories): <NEW_LINE> <INDENT> p = Tag('p') <NEW_LINE> if not annotation_types: <NEW_LINE> <INDENT> p.add(Text('None')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for annotation_type in annotation_types: <NEW_LINE> <INDENT> p.add(Href(annotation_type, annotation_type)) <NEW_LINE> p.add(Tag('br')) <NEW_LINE> <DEDENT> <DEDENT> block = Tag('blockquote') <NEW_LINE> for service in services.categories[annotation_types]: <NEW_LINE> <INDENT> block.add_all([Text(service.identifier), Tag('br')]) <NEW_LINE> <DEDENT> p.add(block) <NEW_LINE> div.add(p) <NEW_LINE> <DEDENT> return Markup(str(div)) <NEW_LINE> <DEDENT> def chain(self, chain): <NEW_LINE> <INDENT> dt = Tag('dt', dtrs=Text(chain.identifier)) <NEW_LINE> dd = Tag('dd') <NEW_LINE> dl = Tag('dl', attrs={'class': 'bordered'}, dtrs=[dt, dd]) <NEW_LINE> for service in chain.services: <NEW_LINE> <INDENT> dd.add_all([Text(service.identifier), Tag('br')]) <NEW_LINE> <DEDENT> return Markup(str(dl)) <NEW_LINE> <DEDENT> def result(self, result): <NEW_LINE> <INDENT> text = result['payload']['text']['@value'] <NEW_LINE> json_str = dump(result['payload']) <NEW_LINE> views = result['payload']['views'] <NEW_LINE> buttons = [tab_button('Text'), tab_button('LIF')] <NEW_LINE> contents = [tab_text('Text', text), tab_text('LIF', json_str)] <NEW_LINE> ViewIdentifier.count = 0 <NEW_LINE> for view in views: <NEW_LINE> <INDENT> view_identifier = view.get('id', ViewIdentifier.new()) <NEW_LINE> annotation_types = view['metadata']['contains'].keys() <NEW_LINE> annotation_types = [os.path.basename(at) for at in annotation_types] <NEW_LINE> buttons.append(tab_button(view_identifier)) <NEW_LINE> contents.append(tab_content(view_identifier, annotation_types, view, text)) <NEW_LINE> <DEDENT> main_div = Tag('div') <NEW_LINE> main_div.add(div({'class': 'tab'}, buttons)) <NEW_LINE> main_div.add_all(contents) <NEW_LINE> return Markup(str(main_div)) | Utility class to help create HTML code for the LAPPS-Flask site. | 6259903615baa723494630f8 |
class TestWayTwoVersionCheck(unittest.TestCase): <NEW_LINE> <INDENT> def test_version(self): <NEW_LINE> <INDENT> self.assertIsNotNone(waytwo.__version__) | WayTwo Package | 6259903607d97122c4217dfa |
class Contact: <NEW_LINE> <INDENT> contact_list = [] <NEW_LINE> def __init__(self,first_name,last_name,number,email): <NEW_LINE> <INDENT> self.first_name = first_name <NEW_LINE> self.last_name = last_name <NEW_LINE> self.phone_number = number <NEW_LINE> self.email = email | Class that generates new instances of contacts. | 625990361d351010ab8f4c79 |
class LLAB_ReferenceSpecification( namedtuple('LLAB_ReferenceSpecification', ('L_L', 'Ch_L', 'h_L', 's_L', 'C_L', 'HC', 'A_L', 'B_L'))): <NEW_LINE> <INDENT> pass | Defines the *LLAB(l:c)* colour appearance model reference specification.
This specification has field names consistent with **Mark D. Fairchild**
reference.
Parameters
----------
L_L : numeric
Correlate of *Lightness* :math:`L_L`.
Ch_L : numeric
Correlate of *chroma* :math:`Ch_L`.
h_L : numeric
*Hue* angle :math:`h_L` in degrees.
s_L : numeric
Correlate of *saturation* :math:`s_L`.
C_L : numeric
Correlate of *colourfulness* :math:`C_L`.
HC : numeric
*Hue* :math:`h` composition :math:`H^C`.
A_L : numeric
Opponent signal :math:`A_L`.
B_L : numeric
Opponent signal :math:`B_L`. | 62599036596a897236128dfc |
class WebPageElementSchema(SchemaObject): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.schema = 'WebPageElement' | Schema Mixin for WebPageElement
Usage: place after django model in class definition, schema will return the schema.org url for the object
A web page element, like a table or an image. | 625990361f5feb6acb163d51 |
class ImageReference(SubResource): <NEW_LINE> <INDENT> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'publisher': {'key': 'publisher', 'type': 'str'}, 'offer': {'key': 'offer', 'type': 'str'}, 'sku': {'key': 'sku', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, } <NEW_LINE> def __init__(self, id=None, publisher=None, offer=None, sku=None, version=None): <NEW_LINE> <INDENT> super(ImageReference, self).__init__(id=id) <NEW_LINE> self.publisher = publisher <NEW_LINE> self.offer = offer <NEW_LINE> self.sku = sku <NEW_LINE> self.version = version | The image reference.
:param id: Resource Id
:type id: str
:param publisher: The image publisher.
:type publisher: str
:param offer: The image offer.
:type offer: str
:param sku: The image SKU.
:type sku: str
:param version: The image version. The allowed formats are
Major.Minor.Build or 'latest'. Major, Minor and Build are decimal numbers.
Specify 'latest' to use the latest version of the image.
:type version: str | 62599036ec188e330fdf99f6 |
class MSSQLDriver(object): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> self.config = config <NEW_LINE> <DEDENT> def get_db(self): <NEW_LINE> <INDENT> conn = self.config <NEW_LINE> db_charset = "CHARSET={};".format(conn["charset"]) if "charset" in conn else "" <NEW_LINE> host_address = conn.get("host", "(local)") <NEW_LINE> port = conn.get("port", "1433") <NEW_LINE> if not conn["user"]: <NEW_LINE> <INDENT> str_conn = ("DRIVER={{SQL Server}};SERVER={};" "PORT={};DATABASE={};Trusted_Connection=yes;{}").format( host_address, port, conn["database"], db_charset) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> str_conn = ("DRIVER={{SQL Server}};SERVER={};" "PORT={};DATABASE={};UID={};PWD={};{}").format( host_address, port, conn["database"], get_env(conn["user"]), get_env(conn["pass"]), db_charset) <NEW_LINE> <DEDENT> db = odbc.connect(str_conn) <NEW_LINE> if "initializing" in conn: <NEW_LINE> <INDENT> for sql in conn["initializing"]: <NEW_LINE> <INDENT> db.cursor().execute(sql) <NEW_LINE> <DEDENT> <DEDENT> return db <NEW_LINE> <DEDENT> def cursor(self, db): <NEW_LINE> <INDENT> return db.cursor() | Driver for MS SQL connections via ODBC | 625990378c3a8732951f76b6 |
class Ambito(models.Model): <NEW_LINE> <INDENT> nombre = models.CharField(verbose_name='Nombre de Ámbito', max_length=100) <NEW_LINE> funcionario = models.ForeignKey( Funcionario, verbose_name="Encargado", on_delete=models.CASCADE, blank=True, null=True, related_name='ambito_funcionario' ) <NEW_LINE> subrogante = models.ForeignKey( Funcionario, verbose_name="Subrogante", on_delete=models.CASCADE, blank=True, null=True, related_name='ambito_subrogante' ) <NEW_LINE> descripcion = RichTextField(verbose_name='Descripción') <NEW_LINE> sigla = models.CharField( verbose_name="Sigla", max_length=3, null=True, blank=True ) <NEW_LINE> numero = models.PositiveSmallIntegerField(verbose_name='Número de Ámbito') <NEW_LINE> created = models.DateTimeField( auto_now_add=True, verbose_name='Fecha de creación' ) <NEW_LINE> updated = models.DateTimeField( auto_now=True, verbose_name='Fecha de edición' ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Ámbito' <NEW_LINE> verbose_name_plural = 'Ámbitos' <NEW_LINE> ordering = ['numero'] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.nombre | Class for creating Ambitos (scopes).
It contains the following attributes:
nombre = name of the scope.
funcionario = official in charge of the scope.
subrogante = deputy official for the scope.
descripcion = description of the scope according to the manual.
sigla = acronym by which the scope is identified.
numero = order number of the scope in the manual. | 62599037596a897236128dfe |
class BookListView(generic.ListView): <NEW_LINE> <INDENT> model = Book <NEW_LINE> paginate_by = 3 | Generic class-based view for a list of books. | 62599037baa26c4b54d50408 |
class ScoreExport: <NEW_LINE> <INDENT> def __init__(self,scores_df,model_id,data_id,ranked=False): <NEW_LINE> <INDENT> self.scores_df = scores_df <NEW_LINE> self.export_dir = ("model/%s/%s/") % (model_id, data_id) <NEW_LINE> self.id_file = 'ids' + ('.desc' if ranked else '') <NEW_LINE> if not os.path.exists(self.export_dir): <NEW_LINE> <INDENT> os.makedirs(self.export_dir) <NEW_LINE> <DEDENT> <DEDENT> def __export_columns(self,file_name,columns,idx=False,fmt='%10.5f'): <NEW_LINE> <INDENT> self.scores_df.to_csv(self.export_dir+file_name,index=idx, header=False,float_format=fmt,columns=columns) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.__export_columns(self.id_file,[],idx=True,fmt='%10.0f') <NEW_LINE> return self <NEW_LINE> <DEDENT> def predictions(self,column): <NEW_LINE> <INDENT> self.__export_columns('predicions',[column]) <NEW_LINE> <DEDENT> def probabilities(self,column): <NEW_LINE> <INDENT> self.__export_columns("%s.proba" % column,[column]) <NEW_LINE> <DEDENT> def __exit__(self, type, value, traceback): <NEW_LINE> <INDENT> print("...\n%i customers scores exported to '%s'." % (len(self.scores_df),self.export_dir)) | Persists model results for given model and data ids. | 6259903715baa723494630fa |
class Column(ExpressionOperatorMixin): <NEW_LINE> <INDENT> table = None <NEW_LINE> def __init__(self, name, type_=Unknown, primary_key=False, nullable=True, auto_increment=False, default=_marker): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.type_ = type_ if isinstance(type_, Type) else type_() <NEW_LINE> self.primary_key = primary_key <NEW_LINE> self.nullable = False if primary_key else nullable <NEW_LINE> self.auto_increment = auto_increment <NEW_LINE> self.default = default <NEW_LINE> self.alias = name <NEW_LINE> <DEDENT> def is_type(self, type_): <NEW_LINE> <INDENT> return isinstance(self.type_, type_) <NEW_LINE> <DEDENT> def resolve_against(self, table): <NEW_LINE> <INDENT> if table.c.is_ambiguous(self): <NEW_LINE> <INDENT> return _QualifiedColumn(self) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def aliased(self, alias): <NEW_LINE> <INDENT> return _AliasedColumn(self, alias) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> result = '%s(%r,' % ( type(self).__name__, self.name, ) <NEW_LINE> if self.table: <NEW_LINE> <INDENT> result += 'table=%r' % (self.table,) <NEW_LINE> <DEDENT> result += ')' <NEW_LINE> return result <NEW_LINE> <DEDENT> def __compile_visit__(self, compiler): <NEW_LINE> <INDENT> compiler.visit_column(self) <NEW_LINE> <DEDENT> def __compile_visit_for_create__(self, compiler): <NEW_LINE> <INDENT> compiler.visit_column(self) <NEW_LINE> compiler.emit_keyword( compiler.dialect.datatype_for_column(self) ) <NEW_LINE> if self.primary_key: <NEW_LINE> <INDENT> compiler.emit_column_constraint('PRIMARY KEY') <NEW_LINE> <DEDENT> if self.default is not _marker: <NEW_LINE> <INDENT> compiler.emit_keyword('DEFAULT') <NEW_LINE> compiler.visit_immediate_expression(self.default) <NEW_LINE> <DEDENT> if self.auto_increment: <NEW_LINE> <INDENT> compiler.emit_column_autoincrement() <NEW_LINE> <DEDENT> if not self.nullable: <NEW_LINE> <INDENT> compiler.emit_column_constraint('NOT NULL') <NEW_LINE> <DEDENT> compiler.emit( compiler.dialect.extra_constraints_for_column(self) ) | Defines a column in a table. | 6259903721bff66bcd723dc8 |
class LOOP(object): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> depth = _swig_property(_x64dbgapi64.LOOP_depth_get, _x64dbgapi64.LOOP_depth_set) <NEW_LINE> start = _swig_property(_x64dbgapi64.LOOP_start_get, _x64dbgapi64.LOOP_start_set) <NEW_LINE> end = _swig_property(_x64dbgapi64.LOOP_end_get, _x64dbgapi64.LOOP_end_set) <NEW_LINE> instrcount = _swig_property(_x64dbgapi64.LOOP_instrcount_get, _x64dbgapi64.LOOP_instrcount_set) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> this = _x64dbgapi64.new_LOOP() <NEW_LINE> try: self.this.append(this) <NEW_LINE> except: self.this = this <NEW_LINE> <DEDENT> __swig_destroy__ = _x64dbgapi64.delete_LOOP <NEW_LINE> __del__ = lambda self : None; | Proxy of C++ LOOP class | 6259903716aa5153ce40164c |
class ServiceEndpointPolicyListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'next_link': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': '[ServiceEndpointPolicy]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["ServiceEndpointPolicy"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ServiceEndpointPolicyListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = None | Response for ListServiceEndpointPolicies API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of ServiceEndpointPolicy resources.
:type value: list[~azure.mgmt.network.v2018_07_01.models.ServiceEndpointPolicy]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str | 62599037d164cc61758220d4 |
class Solution: <NEW_LINE> <INDENT> def wordBreak(self, s, dic): <NEW_LINE> <INDENT> k = len(s) <NEW_LINE> if k == 0: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if len(dic) == 0: <NEW_LINE> <INDENT> return len(s) == 0 <NEW_LINE> <DEDENT> maxLength = max(len(word) for word in dic) <NEW_LINE> if s in dic: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> dp = [False] * (k+ 1) <NEW_LINE> dp[0] = True <NEW_LINE> for i in range(k): <NEW_LINE> <INDENT> for j in range(i + 1 , i + maxLength + 1 ): <NEW_LINE> <INDENT> if j >= k + 1: <NEW_LINE> <INDENT> break ; <NEW_LINE> <DEDENT> if dp[i] == False: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if s[i: j] in dic: <NEW_LINE> <INDENT> dp[j] = True <NEW_LINE> <DEDENT> print(dp) <NEW_LINE> <DEDENT> <DEDENT> return dp[-1] | @param: s: A string
@param: dict: A dictionary of words
@return: A boolean | 6259903750485f2cf55dc0e0 |
class CurrencyCode (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin): <NEW_LINE> <INDENT> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'CurrencyCode') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('http://ddex.net/xml/20100121/iso4217a.xsd', 3, 4) <NEW_LINE> _Documentation = 'An ISO4217 three-letter code representing a Currency.' | An ISO4217 three-letter code representing a Currency. | 625990371d351010ab8f4c7d |
class ThreeLayerConvNet(object): <NEW_LINE> <INDENT> def __init__(self, input_dim=(3, 32, 32), num_filters=32, filter_size=7, hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0, dtype=np.float32): <NEW_LINE> <INDENT> self.params = {} <NEW_LINE> self.reg = reg <NEW_LINE> self.dtype = dtype <NEW_LINE> C, H, W = input_dim <NEW_LINE> self.params['W1'] = weight_scale * np.random.randn(num_filters, C, filter_size, filter_size) <NEW_LINE> self.params['b1'] = np.zeros(num_filters) <NEW_LINE> self.params['W2'] = weight_scale * np.random.randn(num_filters * H * W / 4, hidden_dim) <NEW_LINE> self.params['b2'] = np.zeros(hidden_dim) <NEW_LINE> self.params['W3'] = weight_scale * np.random.randn(hidden_dim, num_classes) <NEW_LINE> self.params['b3'] = np.zeros(num_classes) <NEW_LINE> for k, v in self.params.iteritems(): <NEW_LINE> <INDENT> self.params[k] = v.astype(dtype) <NEW_LINE> <DEDENT> <DEDENT> def loss(self, X, y=None): <NEW_LINE> <INDENT> W1, b1 = self.params['W1'], self.params['b1'] <NEW_LINE> W2, b2 = self.params['W2'], self.params['b2'] <NEW_LINE> W3, b3 = self.params['W3'], self.params['b3'] <NEW_LINE> filter_size = W1.shape[2] <NEW_LINE> conv_param = {'stride': 1, 'pad': (filter_size - 1) / 2} <NEW_LINE> pool_param = {'pool_height': 2, 'pool_width': 2, 'stride': 2} <NEW_LINE> scores = None <NEW_LINE> out1, cache1 = conv_relu_pool_forward(X, W1, b1, conv_param, pool_param) <NEW_LINE> out2, cache2 = affine_relu_forward(out1, W2, b2) <NEW_LINE> scores, cache3 = affine_forward(out2, W3, b3) <NEW_LINE> if y is None: <NEW_LINE> <INDENT> return scores <NEW_LINE> <DEDENT> loss, grads = 0, {} <NEW_LINE> loss, dscores = softmax_loss(scores, y) <NEW_LINE> loss += 0.5 * self.reg * (np.sum(W1 * W1) + np.sum(W2 * W2) + np.sum(W3 * W3)) <NEW_LINE> dout2, grads['W3'], grads['b3'] = affine_backward(dscores, cache3) <NEW_LINE> grads['W3'] += self.reg * W3 <NEW_LINE> dout1, grads['W2'], grads['b2'] = affine_relu_backward(dout2, cache2) <NEW_LINE> grads['W2'] += self.reg * W2 <NEW_LINE> dx, grads['W1'], grads['b1'] = conv_relu_pool_backward(dout1, cache1) <NEW_LINE> grads['W1'] += self.reg * W1 <NEW_LINE> return loss, grads | A three-layer convolutional network with the following architecture:
conv - relu - 2x2 max pool - affine - relu - affine - softmax
The network operates on minibatches of data that have shape (N, C, H, W)
consisting of N images, each with height H and width W and with C input
channels. | 62599037596a897236128e00 |
class ListRerunUpdateCartJS(DetailUpdateCartJS): <NEW_LINE> <INDENT> def return_url(self): <NEW_LINE> <INDENT> return self.live_server_url + reverse('menu:menu') | Rerun in List view. | 6259903730c21e258be9996f |
class TestObjectlinkApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = swagger_client.apis.objectlink_api.ObjectlinkApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_find(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_link_artifacts(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_unlink_artifacts(self): <NEW_LINE> <INDENT> pass | ObjectlinkApi unit test stubs | 6259903730c21e258be99970 |
class CompanyListView(UserListView): <NEW_LINE> <INDENT> template_name = 'company_list.html' <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> queryset = User.objects.order_by('username').select_related('profile').filter(profile__is_company=True) <NEW_LINE> return queryset | View for retrieving the list of companies | 62599037711fe17d825e154d
class StemmerNotFoundError(Exception): <NEW_LINE> <INDENT> pass | Raised if stemmer is not found. | 6259903776d4e153a661db23 |
class ISession(form.Schema, IImageScaleTraversable): <NEW_LINE> <INDENT> title = schema.TextLine(title=_(u'Session Name')) <NEW_LINE> description = schema.Text( title=_(u'Session summary'), description=_(u'Short description of session topics'), ) <NEW_LINE> start = schema.Datetime( title=_(u'Session starts'), defaultFactory=default_start, ) <NEW_LINE> end = schema.Datetime( title=_(u'Session ends'), defaultFactory=default_end ) <NEW_LINE> accessible = schema.Choice( title=_(u'Accessible?'), values=(_(u'Yes'), _(u'No')), default='Yes', ) <NEW_LINE> form.widget(accessible="z3c.form.browser.radio.RadioFieldWidget") <NEW_LINE> tracks = schema.List( title=_(u'Tracks for this session'), value_type=schema.Choice(title=u'dummy', source=trackVocabulary), ) <NEW_LINE> searchable('details') <NEW_LINE> details = RichText(title=_(u'Details')) <NEW_LINE> presenters = RelationList( title=u"Presenters", default=[], value_type=RelationChoice( title=_(u"Presenter"), source=ObjPathSourceBinder( object_provides='collective.dexterity_class.presenter.IPresenter' ) ), required=False, ) <NEW_LINE> @invariant <NEW_LINE> def checkDates(data): <NEW_LINE> <INDENT> if data.start is not None and data.end is not None: <NEW_LINE> <INDENT> if data.start > data.end: <NEW_LINE> <INDENT> raise StartBeforeEnd(_(u"The start date must be before the end date.")) | Conference Session | 62599037be383301e0254979 |
class LoginFailedError(Exception): <NEW_LINE> <INDENT> pass | Raised when login to Snoonotes API fails. | 625990370a366e3fb87ddb48 |
class Command(BaseCommand): <NEW_LINE> <INDENT> def handle(self, *args, **options): <NEW_LINE> <INDENT> from oldproject.models import TypePolygon as Oldtypepolygon, TypePolygonTranslation as oldtrans <NEW_LINE> from forestry.models import TypePolygon <NEW_LINE> logger.info("Start transfering.....") <NEW_LINE> logger.info("Clear table.....") <NEW_LINE> TypePolygon.objects.all().delete() <NEW_LINE> for f in Oldtypepolygon.objects.all(): <NEW_LINE> <INDENT> nf = TypePolygon() <NEW_LINE> nf.old_id=f.pk <NEW_LINE> nf.is_pub = f.is_pub <NEW_LINE> nf.fill_color = f.fill_color <NEW_LINE> nf.border_color = f.border_color <NEW_LINE> for ft in oldtrans.objects.filter(type_polygon_id = f.pk): <NEW_LINE> <INDENT> if ft.lang=='ru': <NEW_LINE> <INDENT> nf.name_ru = ft.name <NEW_LINE> nf.name = ft.name <NEW_LINE> <DEDENT> elif ft.lang=='en': <NEW_LINE> <INDENT> nf.name_en = ft.name <NEW_LINE> <DEDENT> elif ft.lang=='uk': <NEW_LINE> <INDENT> nf.name_uk = ft.name <NEW_LINE> <DEDENT> <DEDENT> nf.save() <NEW_LINE> <DEDENT> logger.info("Finish transfering.....") | Transfer of forest districts from the old database to the new one
| 6259903723e79379d538d66f |
class LearningAgent(Agent): <NEW_LINE> <INDENT> def __init__(self, env, learning=False, epsilon=1.0, alpha=0.5, a = 0.9): <NEW_LINE> <INDENT> super(LearningAgent, self).__init__(env) <NEW_LINE> self.planner = RoutePlanner(self.env, self) <NEW_LINE> self.valid_actions = self.env.valid_actions <NEW_LINE> self.learning = learning <NEW_LINE> self.Q = dict() <NEW_LINE> self.epsilon = epsilon <NEW_LINE> self.alpha = alpha <NEW_LINE> self.a = a <NEW_LINE> <DEDENT> def reset(self, destination=None, testing=False): <NEW_LINE> <INDENT> self.planner.route_to(destination) <NEW_LINE> self.epsilon = self.epsilon*self.a <NEW_LINE> if testing: <NEW_LINE> <INDENT> self.epsilon = 0 <NEW_LINE> self.alpha = 0 <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def build_state(self): <NEW_LINE> <INDENT> waypoint = self.planner.next_waypoint() <NEW_LINE> inputs = self.env.sense(self) <NEW_LINE> deadline = self.env.get_deadline(self) <NEW_LINE> state = (waypoint, inputs['light'], inputs['left'], inputs['oncoming']) <NEW_LINE> if self.learning: <NEW_LINE> <INDENT> if not self.Q.has_key(state): <NEW_LINE> <INDENT> self.Q[state] = dict() <NEW_LINE> for act in self.valid_actions: <NEW_LINE> <INDENT> self.Q[state][act] = 0.0 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return state <NEW_LINE> <DEDENT> def get_maxQ(self, state): <NEW_LINE> <INDENT> maxQ = None <NEW_LINE> actions = self.Q[state].keys() <NEW_LINE> for act in actions: <NEW_LINE> <INDENT> if maxQ == None or self.Q[state][act] > maxQ: <NEW_LINE> <INDENT> maxQ = self.Q[state][act] <NEW_LINE> <DEDENT> <DEDENT> return maxQ <NEW_LINE> <DEDENT> def createQ(self, state): <NEW_LINE> <INDENT> if self.learning: <NEW_LINE> <INDENT> if not self.Q.has_key(state): <NEW_LINE> <INDENT> self.Q[state] = dict() <NEW_LINE> for act in self.valid_actions: <NEW_LINE> <INDENT> self.Q[state][act] = 0.0 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return <NEW_LINE> <DEDENT> def choose_action(self, state): <NEW_LINE> <INDENT> self.state = state <NEW_LINE> self.next_waypoint = self.planner.next_waypoint() <NEW_LINE> action = None <NEW_LINE> if self.learning: <NEW_LINE> <INDENT> flip = random.random() <NEW_LINE> if flip < self.epsilon: <NEW_LINE> <INDENT> action = random.choice(self.valid_actions) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> maxQ = self.get_maxQ(state) <NEW_LINE> best_actions = [act for act in self.valid_actions if self.Q[state][act] == maxQ] <NEW_LINE> action = random.choice(best_actions) <NEW_LINE> <DEDENT> <DEDENT> return action <NEW_LINE> <DEDENT> def learn(self, state, action, reward): <NEW_LINE> <INDENT> self.Q[state][action] = self.Q[state][action] + self.alpha*(reward-self.Q[state][action]) <NEW_LINE> return <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> state = self.build_state() <NEW_LINE> self.createQ(state) <NEW_LINE> action = self.choose_action(state) <NEW_LINE> reward = self.env.act(self, action) <NEW_LINE> self.learn(state, action, reward) <NEW_LINE> return | An agent that learns to drive in the Smartcab world.
This is the object you will be modifying. | 62599037287bf620b6272d4c |
class Mapviewer(ndb.Model): <NEW_LINE> <INDENT> loginuser = ndb.StringProperty(indexed=True) <NEW_LINE> realname = ndb.StringProperty(indexed=False) <NEW_LINE> added = ndb.DateTimeProperty(auto_now=True) | Models a user in a non-chfd domain that can access this application | 62599037d10714528d69ef3c
class OnlineClustering(): <NEW_LINE> <INDENT> def __init__(self, uri, distance_matrix, threshold=0.5, generator_method='string'): <NEW_LINE> <INDENT> self.uri = uri <NEW_LINE> self.threshold = threshold <NEW_LINE> self.distance_matrix = distance_matrix <NEW_LINE> self.clusters = [] <NEW_LINE> self.generator_method = generator_method <NEW_LINE> if self.generator_method == 'string': <NEW_LINE> <INDENT> from pyannote.core.util import string_generator <NEW_LINE> self.generator = string_generator() <NEW_LINE> <DEDENT> elif self.generator_method == 'int': <NEW_LINE> <INDENT> from pyannote.core.util import int_generator <NEW_LINE> self.generator = int_generator() <NEW_LINE> <DEDENT> <DEDENT> def getLabels(self): <NEW_LINE> <INDENT> return [cluster.label for cluster in self.clusters] <NEW_LINE> <DEDENT> def getAnnotations(self): <NEW_LINE> <INDENT> annotation = Annotation(uri=self.uri, modality='speaker') <NEW_LINE> for cluster in self.clusters: <NEW_LINE> <INDENT> for seg in cluster.segments: <NEW_LINE> <INDENT> annotation[seg] = cluster.label <NEW_LINE> <DEDENT> <DEDENT> return annotation <NEW_LINE> <DEDENT> def addCluster(self,data): <NEW_LINE> <INDENT> label = next(self.generator) <NEW_LINE> cluster = Cluster(label) <NEW_LINE> cluster.updateCluster(data) <NEW_LINE> self.clusters.append(cluster) <NEW_LINE> return <NEW_LINE> <DEDENT> def computeDistances(self, data): <NEW_LINE> <INDENT> distances = [] <NEW_LINE> for cluster in self.clusters: <NEW_LINE> <INDENT> i = cluster.indices <NEW_LINE> j = data['indice'] <NEW_LINE> indexs = list(itertools.product(i,j)) <NEW_LINE> distances.append(np.mean([self.distance_matrix[i] for i in indexs])) <NEW_LINE> <DEDENT> return distances <NEW_LINE> <DEDENT> def computeDistances2(self, data): <NEW_LINE> <INDENT> return [cluster.distance2(data) for cluster in self.clusters] <NEW_LINE> <DEDENT> def upadateCluster(self,data): <NEW_LINE> <INDENT> if len(self.clusters) == 0: <NEW_LINE> <INDENT> self.addCluster(data) <NEW_LINE> return <NEW_LINE> <DEDENT> distances = self.computeDistances(data) <NEW_LINE> if min(distances) > self.threshold: <NEW_LINE> <INDENT> self.addCluster(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> indice = distances.index(min(distances)) <NEW_LINE> to_update_cluster = self.clusters[indice] <NEW_LINE> to_update_cluster.updateCluster(data) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def upadateCluster2(self,data): <NEW_LINE> <INDENT> if len(self.clusters) == 0: <NEW_LINE> <INDENT> self.addCluster(data) <NEW_LINE> return <NEW_LINE> <DEDENT> distances = self.computeDistances2(data) <NEW_LINE> if min(distances) > self.threshold: <NEW_LINE> <INDENT> self.addCluster(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> indice = distances.index(min(distances)) <NEW_LINE> to_update_cluster = self.clusters[indice] <NEW_LINE> to_update_cluster.updateCluster(data) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def modelDistance(self, model): <NEW_LINE> <INDENT> distances = [] <NEW_LINE> for cluster in self.clusters: <NEW_LINE> <INDENT> distances.append(cluster.distanceModel(model)) <NEW_LINE> <DEDENT> return distances <NEW_LINE> <DEDENT> def modelClusterDistance(self, model): <NEW_LINE> <INDENT> distances = [] <NEW_LINE> for cluster in self.clusters: <NEW_LINE> <INDENT> distances.append(cluster.distanceModelCluster(model)) <NEW_LINE> <DEDENT> return distances <NEW_LINE> <DEDENT> def empty(self): <NEW_LINE> <INDENT> if len(self.clusters)==0: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | online clustering class
compares each new incoming segment with the existing clusters, then decides
whether to create a new cluster or add the segment to an existing cluster.
When the distance between the new segment and every existing
cluster is larger than a predetermined threshold,
a new cluster is created; otherwise the segment
is added to the closest cluster.
Parameters
----------
uri: name
threshold: float, optional
distance threshold; when the distance exceeds
the threshold, a new cluster is created
generator_method: str, optional
generator of names
value should be "string" or "int" | 6259903782261d6c52730776
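A minimal usage sketch for the clustering class above, assuming a precomputed pairwise distance matrix and segment records carrying an 'indice' list of row indices into that matrix; the file names and the record iterable are hypothetical, and the remaining keys of each record depend on the Cluster class, which is not shown:

    import numpy as np

    distance_matrix = np.load("segment_distances.npy")            # hypothetical precomputed matrix
    clustering = OnlineClustering("file.wav", distance_matrix, threshold=0.5)

    for segment_record in segment_records:                        # each record includes an 'indice' list
        clustering.upadateCluster(segment_record)                  # method name as defined above

    annotation = clustering.getAnnotations()                       # pyannote Annotation of the clusters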
class CaliperCaliQueryTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_caliquery_args(self): <NEW_LINE> <INDENT> target_cmd = [ './ci_test_aggregate' ] <NEW_LINE> query_cmd = [ '../../src/tools/cali-query/cali-query', '--aggregate', 'count(),sum(time.inclusive.duration)', '--aggregate-key=loop.id', '-s', 'loop.id=A', '--json' ] <NEW_LINE> caliper_config = { 'CALI_CONFIG_PROFILE' : 'serial-trace', 'CALI_RECORDER_FILENAME' : 'stdout', 'CALI_LOG_VERBOSITY' : '0', } <NEW_LINE> obj = json.loads( cat.run_test_with_query(target_cmd, query_cmd, caliper_config) ) <NEW_LINE> self.assertEqual(obj[0]["path"], "A") <NEW_LINE> self.assertEqual(obj[0]["count"], 19) <NEW_LINE> self.assertTrue("sum#time.inclusive.duration" in obj[0]) <NEW_LINE> <DEDENT> def test_caliquery_list_services(self): <NEW_LINE> <INDENT> target_cmd = [ '../../src/tools/cali-query/cali-query', '--help=services' ] <NEW_LINE> env = { 'CALI_LOG_VERBOSITY' : '0', } <NEW_LINE> service_targets = [ 'aggregate', 'event', 'recorder', 'report', 'timestamp', 'trace' ] <NEW_LINE> report_out,_ = cat.run_test(target_cmd, env) <NEW_LINE> res = report_out.decode().split(',') <NEW_LINE> for target in service_targets: <NEW_LINE> <INDENT> if not target in res: <NEW_LINE> <INDENT> self.fail('%s not found in log' % target) | cali-query test cases | 62599037d99f1b3c44d06808 |
class PartClassPin(object): <NEW_LINE> <INDENT> well_name = None <NEW_LINE> def __init__(self, names, numbers, type=PinType.UNKNOWN, well=None): <NEW_LINE> <INDENT> self.names = names <NEW_LINE> self.numbers = numbers <NEW_LINE> self.type = type <NEW_LINE> self.well_name = well <NEW_LINE> Plugin.init(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.names[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def number(self): <NEW_LINE> <INDENT> return self.numbers[0] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Pin %s" % (self.name) <NEW_LINE> <DEDENT> __repr__ = __str__ | Pin of a Part, but no particular Part instance.
Contains general information about the pin (but it could be for any
part of that type), nothing related to a specific part instance. | 625990378da39b475be04353 |
class Site(ABC): <NEW_LINE> <INDENT> web: Web <NEW_LINE> def find_table(self, loc: int = 0) -> str: <NEW_LINE> <INDENT> return self.web.soup.select('table')[loc] <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def parse_rows(self, table: Soup) -> List[Any]: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def polls(self, table: int = 0) -> List[Any]: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def stats(self, loc: int = 0): <NEW_LINE> <INDENT> pass | Site Abstract Base Class.
Defines the structure for the objects based on this class and defines the interfaces
that should be implemented in order to work properly.
Variables:
web: Web -- The web object stores the information needed to process
the data.
Methods:
find_table: -> str -- Parses the Web object for table elements and
returns the first one that it finds unless an integer representing
the required table is passed.
[abstractmethod]
parse_rows: -> Union[List[LeaderBoard], List[Poll]] -- Parses a BeautifulSoup
table element and returns the text found in the td elements as
namedtuples.
[abstractmethod]
polls: -> Union[List[LeaderBoard], List[Poll]] -- Does the parsing of the table
and rows for you. It takes the table index number if given, otherwise
parses table 0.
[abstractmethod]
stats: -- Formats the results from polls into a more user friendly
representation. | 6259903773bcbd0ca4bcb3ec |
class App(Cmd): <NEW_LINE> <INDENT> def __init__(self, ddp_endpoint, print_raw): <NEW_LINE> <INDENT> Cmd.__init__(self) <NEW_LINE> self.print_raw = print_raw <NEW_LINE> self.ddpclient = DDPClient( 'ws://' + ddp_endpoint + '/websocket', self.print_raw) <NEW_LINE> self.ddpclient.connect() <NEW_LINE> if sys.stdin.isatty(): <NEW_LINE> <INDENT> self.prompt = ddp_endpoint + '> ' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.prompt = '' <NEW_LINE> <DEDENT> self.unique_id = 0 <NEW_LINE> <DEDENT> def do_call(self, params): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> method_name, params = parse_command(params) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> log('Error parsing parameter list - try `help call`') <NEW_LINE> return <NEW_LINE> <DEDENT> self.ddpclient.send({ "msg": "method", "method": method_name, "params": params, "id": self.next_id(), }) <NEW_LINE> <DEDENT> def do_sub(self, params): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> sub_name, params = parse_command(params) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> log('Error parsing parameter list - try `help sub`') <NEW_LINE> return <NEW_LINE> <DEDENT> self.ddpclient.send({ "msg": "sub", "name": sub_name, "params": params, "id": self.next_id(), }) <NEW_LINE> <DEDENT> def do_EOF(self, line): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def do_help(self, line): <NEW_LINE> <INDENT> msgs = { 'call': ( 'call <method name> <json array of parameters>\n' ' Calls a remote method\n' ' Example: call vote ["foo.meteor.com"]'), 'sub': ( 'sub <subscription name> [<json array of parameters>]\n' ' Subscribes to a remote dataset\n' ' Examples: `sub allApps` or `sub myApp ' '["foo.meteor.com"]`'), } <NEW_LINE> line = line.strip() <NEW_LINE> if line and line in msgs: <NEW_LINE> <INDENT> return log('\n' + msgs[line]) <NEW_LINE> <DEDENT> for msg in msgs.values(): <NEW_LINE> <INDENT> log('\n' + msg) <NEW_LINE> <DEDENT> <DEDENT> def emptyline(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def next_id(self): <NEW_LINE> <INDENT> self.unique_id = self.unique_id + 1 <NEW_LINE> return str(self.unique_id) | Main input loop. | 62599037cad5886f8bdc592e |
class Concat_Multiword(object): <NEW_LINE> <INDENT> def __init__(self, object): <NEW_LINE> <INDENT> self.arr = object <NEW_LINE> self.has_multiword = any(np.intersect1d(self.arr, ref_lists().multiword_signifiers)) <NEW_LINE> self.signifiers = set(self.arr).intersection(ref_lists().multiword_signifiers) <NEW_LINE> self.ordinal = ref_lists().ordinal_times <NEW_LINE> <DEDENT> def __run__(self): <NEW_LINE> <INDENT> if self.has_multiword == True: <NEW_LINE> <INDENT> for i, j in enumerate(self.arr): <NEW_LINE> <INDENT> if j in self.signifiers: <NEW_LINE> <INDENT> if self.arr[i-1] in self.ordinal: <NEW_LINE> <INDENT> self.arr[i-2:i+1] = [" ".join(self.arr[i-2:i+1])] <NEW_LINE> <DEDENT> elif j in ("journey", "walk") and self.arr[i-2] in ("day", "days"): <NEW_LINE> <INDENT> if self.arr[i-3] in ("Sabbath", "sabbath"): <NEW_LINE> <INDENT> self.arr[i-3:i+1] = ["sabbath day's journey"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.arr[i-2:i+1] = ["days' journey"] <NEW_LINE> <DEDENT> <DEDENT> elif j in ("cubit", "cubits"): <NEW_LINE> <INDENT> if self.arr[i-1] == 'long': <NEW_LINE> <INDENT> self.arr[i-1:i+1] = ["long cubits"] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return self.arr | Method finding and concatenating tokenized multi-word measure words.
attributes:
__init__
__run__ | 62599037d164cc61758220d8 |
class RoleDetailView(RoleDescriptionMixin, TemplateView): <NEW_LINE> <INDENT> template_name = 'saas/profile/roles/role.html' <NEW_LINE> def get_template_names(self): <NEW_LINE> <INDENT> candidates = [] <NEW_LINE> role = self.kwargs.get('role', None) <NEW_LINE> if role: <NEW_LINE> <INDENT> candidates = ['saas/profile/roles/%s.html' % role] <NEW_LINE> <DEDENT> candidates += super(RoleDetailView, self).get_template_names() <NEW_LINE> return candidates <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(RoleDetailView, self).get_context_data(**kwargs) <NEW_LINE> role = self.kwargs.get('role', None) <NEW_LINE> context.update({'role_descr': self.role_description}) <NEW_LINE> urls = { 'api_candidates': reverse('saas_api_search_users'), 'organization': { 'api_roles': reverse( 'saas_api_roles_by_descr', args=( self.organization, role)), }} <NEW_LINE> update_context_urls(context, urls) <NEW_LINE> return context | List of users with a specific role for an organization.
Template:
To edit the layout of this page, create a local ``saas/profile/roles/role.html`` (`example <https://github.com/djaodjin/djaodjin-saas/tree/master/saas/templates/saas/profile/roles.html>`__).
You should insure the page will call back the
:ref:`/api/profile/:organization/roles/:role/ <api_role>`
API end point to fetch the set of users with the specified role.
Template context:
- ``role_descr`` Description of the role that defines
the permissions of users on an organization
- ``organization`` The organization object users have permissions to.
- ``request`` The HTTP request object | 6259903750485f2cf55dc0e4 |
class AbstractPaymentEventType(models.Model): <NEW_LINE> <INDENT> name = models.CharField(_("Name"), max_length=128, unique=True) <NEW_LINE> code = AutoSlugField(_("Code"), max_length=128, unique=True, populate_from='name') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> app_label = 'order' <NEW_LINE> verbose_name = _("Payment Event Type") <NEW_LINE> verbose_name_plural = _("Payment Event Types") <NEW_LINE> ordering = ('name', ) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name | Payment event types are things like 'Paid', 'Failed', 'Refunded'.
These are effectively the transaction types. | 625990379b70327d1c57feea |
class MainViewModel(QtCore.QObject): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> QtCore.QObject.__init__(self) <NEW_LINE> self.__air_on = False <NEW_LINE> self.__chiller_on = False <NEW_LINE> self.__exhaust_on = False <NEW_LINE> self.__is_working = False <NEW_LINE> self.__flow_rate = -255 <NEW_LINE> <DEDENT> @QtCore.Slot() <NEW_LINE> def exit(self): <NEW_LINE> <INDENT> self.onExitClicked.emit() <NEW_LINE> <DEDENT> @QtCore.Slot() <NEW_LINE> def shutdown(self): <NEW_LINE> <INDENT> self.onShutdownClicked.emit() <NEW_LINE> <DEDENT> @QtCore.Slot() <NEW_LINE> def toggle_exhaust(self): <NEW_LINE> <INDENT> self.exhaust = self.__exhaust_on = not self.exhaust <NEW_LINE> <DEDENT> def _get_exhaust_on(self) -> bool: <NEW_LINE> <INDENT> return self.__exhaust_on <NEW_LINE> <DEDENT> def __set_exhaust_on(self, value: bool): <NEW_LINE> <INDENT> self.__exhaust_on = value <NEW_LINE> self.onExhaustChanged.emit() <NEW_LINE> <DEDENT> def __get_is_working(self) -> bool: <NEW_LINE> <INDENT> return self.__is_working <NEW_LINE> <DEDENT> def __set_is_working(self, value: bool): <NEW_LINE> <INDENT> self.__is_working = value <NEW_LINE> self.onWorkingChanged.emit() <NEW_LINE> <DEDENT> @QtCore.Slot() <NEW_LINE> def toggle_chiller(self): <NEW_LINE> <INDENT> self.chiller = not self.chiller <NEW_LINE> <DEDENT> def __get_chiller_on(self) -> bool: <NEW_LINE> <INDENT> return self.__chiller_on <NEW_LINE> <DEDENT> def __set_chiller_on(self, value: bool): <NEW_LINE> <INDENT> self.__chiller_on = value <NEW_LINE> self.onChillerChanged.emit() <NEW_LINE> <DEDENT> @QtCore.Slot() <NEW_LINE> def toggle_air(self): <NEW_LINE> <INDENT> self.__set_air_on(not self.__get_air_on()) <NEW_LINE> <DEDENT> def __get_air_on(self) -> bool: <NEW_LINE> <INDENT> return self.__air_on <NEW_LINE> <DEDENT> def __set_air_on(self, value: bool): <NEW_LINE> <INDENT> self.__air_on = value <NEW_LINE> self.onAirChanged.emit() <NEW_LINE> <DEDENT> onWorkingChanged = QtCore.Signal() <NEW_LINE> onChillerChanged = QtCore.Signal() <NEW_LINE> onExhaustChanged = QtCore.Signal() <NEW_LINE> onAirChanged = QtCore.Signal() <NEW_LINE> onExitClicked = QtCore.Signal() <NEW_LINE> onShutdownClicked = QtCore.Signal() <NEW_LINE> onTemperatureChanged = QtCore.Signal(str, float) <NEW_LINE> onFlowRateChanged = QtCore.Signal(str, float) <NEW_LINE> chiller = QtCore.Property(bool, __get_chiller_on, __set_chiller_on, notify=onChillerChanged) <NEW_LINE> exhaust = QtCore.Property(bool, _get_exhaust_on, __set_exhaust_on, notify=onExhaustChanged) <NEW_LINE> air = QtCore.Property(bool, __get_air_on, __set_air_on, notify=onAirChanged) <NEW_LINE> working = QtCore.Property(bool, __get_is_working, __set_is_working, notify=onWorkingChanged) | View model for the main window | 625990376fece00bbacccb10 |
class Unicorn: <NEW_LINE> <INDENT> def __init__(self, name, color="White"): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.color = color <NEW_LINE> <DEDENT> def say(self, whatever): <NEW_LINE> <INDENT> return "**;* {} *;**".format(whatever) | This is a unicorn object and it has properties. | 62599037287bf620b6272d4d |
class ImageOsList(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Windows = None <NEW_LINE> self.Linux = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Windows = params.get("Windows") <NEW_LINE> self.Linux = params.get("Linux") | Supported operating systems are divided into two categories, Windows and Linux.
| 625990370a366e3fb87ddb4a |
class Output(object): <NEW_LINE> <INDENT> def write(self, s): <NEW_LINE> <INDENT> open(cfg.DEBUG._FILE, 'a+').write(s) <NEW_LINE> sys.__stdout__.write(s) | If debug mode is enabled, everything the program writes to sys.stdout is also logged to a file | 6259903726068e7796d4daad
class InstanceActionViewSet(AuthReadOnlyViewSet): <NEW_LINE> <INDENT> queryset = InstanceAction.valid_actions.all() <NEW_LINE> serializer_class = InstanceActionSerializer | API endpoint that allows instance actions to be viewed | 62599037a8ecb03325872384 |
class CsvHandler(FileHandler): <NEW_LINE> <INDENT> delimiter = Option(str, default=csv.excel.delimiter, required=False) <NEW_LINE> quotechar = Option(str, default=csv.excel.quotechar, required=False) <NEW_LINE> escapechar = Option(str, default=csv.excel.escapechar, required=False) <NEW_LINE> doublequote = Option(str, default=csv.excel.doublequote, required=False) <NEW_LINE> skipinitialspace = Option(str, default=csv.excel.skipinitialspace, required=False) <NEW_LINE> lineterminator = Option(str, default=csv.excel.lineterminator, required=False) <NEW_LINE> quoting = Option(int, default=csv.excel.quoting, required=False) <NEW_LINE> headers = RenamedOption("fields") <NEW_LINE> fields = Option(tuple_or_const, required=False) <NEW_LINE> def get_dialect_kwargs(self): <NEW_LINE> <INDENT> return { "delimiter": self.delimiter, "quotechar": self.quotechar, "escapechar": self.escapechar, "doublequote": self.doublequote, "skipinitialspace": self.skipinitialspace, "lineterminator": self.lineterminator, "quoting": self.quoting, } | .. attribute:: delimiter
The CSV delimiter.
.. attribute:: quotechar
The CSV quote character.
.. attribute:: fields
The list of column names, if the CSV does not contain it as its first line. | 625990378e05c05ec3f6f70d |
class StationViewSet(mixins.CreateModelMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): <NEW_LINE> <INDENT> queryset = Station.objects.all() <NEW_LINE> serializer_class = StationSerializer <NEW_LINE> filter_backends = (OrderingFilter, DjangoFilterBackend,) <NEW_LINE> filter_class = StationFilter <NEW_LINE> ordering_fields = '__all__' | The viewset class for stations
retrieve:
Return the given station.
list:
Return a list of all the stations.
update:
Update the station.
create:
Create a new station.
Attributes
----------
station_id : int
the unique identifier of the station
short_name : str
the short name of the station
name : str
the name of the station
lat : double
the latitude of the location of the station
lon : double
the longitude of the location of the station
region: int
the region of the station
capacity : int
the capacity of bikes at the station
electric_bike_surcharge_waiver: boolean
whether the electric bike surcharge can be waivered
eightd_has_key_dispenser: boolean
whether the station has key dispenser
has_kiosk: boolean
whether there is a kiosk at the station | 62599037ec188e330fdf99fc |
class Trainer(cntk_py.Trainer): <NEW_LINE> <INDENT> def __init__(self, model, loss_function, eval_function, parameter_learners): <NEW_LINE> <INDENT> model = sanitize_function(model) <NEW_LINE> loss_function = sanitize_function(loss_function) <NEW_LINE> eval_function = sanitize_function(eval_function) <NEW_LINE> super(Trainer, self).__init__(model, loss_function, eval_function, parameter_learners) <NEW_LINE> <DEDENT> def train_minibatch(self, arguments, device=None): <NEW_LINE> <INDENT> if not device: <NEW_LINE> <INDENT> device=DeviceDescriptor.use_default_device() <NEW_LINE> <DEDENT> arguments = sanitize_var_map(self.model().arguments(), arguments) <NEW_LINE> return super(Trainer, self).train_minibatch(arguments, device) <NEW_LINE> <DEDENT> def test_minibatch(self, arguments, device=None): <NEW_LINE> <INDENT> if not device: <NEW_LINE> <INDENT> device=DeviceDescriptor.use_default_device() <NEW_LINE> <DEDENT> arguments = sanitize_var_map(self.model().arguments(), arguments, add_batch_axis=True) <NEW_LINE> return super(Trainer, self).test_minibatch(arguments, device) <NEW_LINE> <DEDENT> def save_checkpoint(self, filename): <NEW_LINE> <INDENT> super(Trainer, self).save_checkpoint(filename) <NEW_LINE> <DEDENT> def restore_from_checkpoint(self, filename): <NEW_LINE> <INDENT> super(Trainer, self).restore_from_checkpoint(filename) <NEW_LINE> <DEDENT> @typemap <NEW_LINE> def model(self): <NEW_LINE> <INDENT> return super(Trainer, self).model() <NEW_LINE> <DEDENT> @typemap <NEW_LINE> def loss_function(self): <NEW_LINE> <INDENT> return super(Trainer, self).loss_function() <NEW_LINE> <DEDENT> @typemap <NEW_LINE> def evaluation_function(self): <NEW_LINE> <INDENT> return super(Trainer, self).evaluation_function() <NEW_LINE> <DEDENT> @typemap <NEW_LINE> def parameter_learners(self): <NEW_LINE> <INDENT> return super(Trainer, self).parameter_learners() <NEW_LINE> <DEDENT> def previous_minibatch_loss_average(self): <NEW_LINE> <INDENT> return super(Trainer, self).previous_minibatch_loss_average() <NEW_LINE> <DEDENT> def previous_minibatch_evaluation_average(self): <NEW_LINE> <INDENT> return super(Trainer, self).previous_minibatch_evaluation_average() <NEW_LINE> <DEDENT> def previous_minibatch_sample_count(self): <NEW_LINE> <INDENT> return super(Trainer, self).previous_minibatch_sample_count() | Trainer to train the specified `model` with the specified `training_loss`
as the training criterion, the specified `evaluation_function` as the
criterion for evaluating the trained model's quality, and using the
specified set of `parameter_learners` for updating the model's parameters
using computed gradients.
Args:
model (`:class:cntk.ops.Function`): root node of the function to train
loss_function (`:class:cntk.ops.Function`): loss function
eval_function (`:class:cntk.ops.Function`): evaluation function
parameter_learners (`list`): list of learners from `:cntk:cntk.learner` | 625990371d351010ab8f4c81 |
class Share: <NEW_LINE> <INDENT> def __init__(self, name, value, yield_): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.value = float(value) <NEW_LINE> self.yield_ = float(yield_) <NEW_LINE> self.profit = self.value * self.yield_ / 100 | This class is used to store the information of a share | 62599037baa26c4b54d5040e
class PastesSerializer(serializers.HyperlinkedModelSerializer): <NEW_LINE> <INDENT> author = UserProfileSerializer(read_only=True) <NEW_LINE> shared_user = UserProfileSerializer(source='allowed_user', read_only=True, many=True) <NEW_LINE> expired = serializers.ReadOnlyField(source='is_expired') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Pastes <NEW_LINE> exclude = 'allowed_user', 'expire_date', 'updated', <NEW_LINE> extra_kwargs = { 'url': { 'lookup_field': "shortcode", "view_name": "snippet:pastes-detail", } } <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> auth_user = self.context.get('auth_user') <NEW_LINE> if auth_user.is_authenticated: <NEW_LINE> <INDENT> validated_data.update({ 'author': auth_user.profile }) <NEW_LINE> <DEDENT> return super().create(validated_data) <NEW_LINE> <DEDENT> def validate(self, data): <NEW_LINE> <INDENT> auth_user = self.context.get('auth_user') <NEW_LINE> if not auth_user.is_authenticated and data['privacy'] != Pastes.PUBLIC: <NEW_LINE> <INDENT> raise serializers.ValidationError("Only allowed privacy is Public") <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def to_representation(self, instance): <NEW_LINE> <INDENT> context = super().to_representation(instance) <NEW_LINE> if instance.privacy != Pastes.SHARED: <NEW_LINE> <INDENT> del context['shared_user'] <NEW_LINE> <DEDENT> context.update({ 'expiration': instance.get_expiration_display(), 'privacy': instance.get_privacy_display(), }) <NEW_LINE> return context | serializer class related to snippet.Pastes model | 62599037ac7a0e7691f7364e |
class TemplateExportByNameResources(object): <NEW_LINE> <INDENT> openapi_types = { 'kind': 'TemplateKind', 'name': 'str' } <NEW_LINE> attribute_map = { 'kind': 'kind', 'name': 'name' } <NEW_LINE> def __init__(self, kind=None, name=None): <NEW_LINE> <INDENT> self._kind = None <NEW_LINE> self._name = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.kind = kind <NEW_LINE> self.name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def kind(self): <NEW_LINE> <INDENT> return self._kind <NEW_LINE> <DEDENT> @kind.setter <NEW_LINE> def kind(self, kind): <NEW_LINE> <INDENT> if kind is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `kind`, must not be `None`") <NEW_LINE> <DEDENT> self._kind = kind <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> if name is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `name`, must not be `None`") <NEW_LINE> <DEDENT> self._name = name <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, TemplateExportByNameResources): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually. | 6259903730c21e258be99974 |
class Cache(): <NEW_LINE> <INDENT> pass | An abstract Cache class for use in the repartition function
Derived classes must implement the following methods:
def insert(self, read_block, dry_run):
raise Exception('Implement in sub-class')
def mem_usage(self):
raise Exception('Implement in sub-class') | 62599037287bf620b6272d4f |
class TestSeeker(unittest.TestCase): <NEW_LINE> <INDENT> def test_parsePom(self): <NEW_LINE> <INDENT> props = IzProperties(pom) <NEW_LINE> self.assertEquals(props['izpack.version'], '5.0.0-rc2') <NEW_LINE> self.assertEquals(props['project.build.sourceEncoding'], 'UTF-8') <NEW_LINE> <DEDENT> def test_parseIzProperties(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_substituteStrings(self): <NEW_LINE> <INDENT> props = IzProperties(pom) <NEW_LINE> s1 = "${izpack.version}-A.B.C" <NEW_LINE> r1 = props.substitute(s1) <NEW_LINE> self.assertEquals("5.0.0-rc2-A.B.C", r1) <NEW_LINE> s2 = "${project.version}-A.B.C" <NEW_LINE> r2 = props.substitute(s2) <NEW_LINE> self.assertEquals("5.0.0-rc2-SNAPSHOT-A.B.C", r2) <NEW_LINE> s3 = "${undefined.prop}-A.B.C" <NEW_LINE> r3 = props.substitute(s3) <NEW_LINE> self.assertEquals("${undefined.prop}-A.B.C", r3) <NEW_LINE> s4 = "${mistyped.prop-A.B.C" <NEW_LINE> r4 = props.substitute(s4) <NEW_LINE> self.assertEquals("${mistyped.prop-A.B.C", r4) | Basic testing of izproperties class. | 62599037e76e3b2f99fd9b73 |
class PersistenceDataFrameIO(abc.ABC): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def read_dataframe(self) -> pd.DataFrame: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def save_dataframe(self, dataframe: pd.DataFrame): <NEW_LINE> <INDENT> pass | Interface that is able to read tabular data from a persistent source as a dataframe, and then save it back. | 625990370a366e3fb87ddb4c |
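A minimal sketch of a concrete implementation of the interface above, assuming a CSV file as the persistent source; the class name and constructor are illustrative:

    import pandas as pd

    class CsvDataFrameIO(PersistenceDataFrameIO):
        def __init__(self, path: str):
            self.path = path                                   # location of the CSV file

        def read_dataframe(self) -> pd.DataFrame:
            return pd.read_csv(self.path)                      # load the table into a dataframe

        def save_dataframe(self, dataframe: pd.DataFrame):
            dataframe.to_csv(self.path, index=False)           # write the dataframe back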
class Request: <NEW_LINE> <INDENT> def __init__(self, *, headers, querystring, body): <NEW_LINE> <INDENT> self.headers=Headers(headers) <NEW_LINE> self.querystring=QueryString(querystring) <NEW_LINE> self.body=Body(body) | An HTTP Request, part of a step | 62599037a8ecb03325872386 |
class NLUPipeline(Pipeline): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def from_yml(cls): <NEW_LINE> <INDENT> return cls | NLU pipeline | 6259903773bcbd0ca4bcb3ef
class Riff: <NEW_LINE> <INDENT> def __init__(self, measures=None, filename=None): <NEW_LINE> <INDENT> if measures is None: <NEW_LINE> <INDENT> if filename is None: <NEW_LINE> <INDENT> raise RiffError("Filename not defined!") <NEW_LINE> <DEDENT> self.measures = self.load_riff(filename) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.measures = measures <NEW_LINE> <DEDENT> self.riff_dict = {} <NEW_LINE> self.runtime = 0 <NEW_LINE> for item in self.measures: <NEW_LINE> <INDENT> for key in item.note_dict: <NEW_LINE> <INDENT> note = item.note_dict[key] <NEW_LINE> self.riff_dict[key * item.position_length + self.runtime] = (note.frequency, item.to_seconds(note)) <NEW_LINE> <DEDENT> self.runtime += float(item.max_position + 1) * item.position_length <NEW_LINE> <DEDENT> <DEDENT> def load_riff(self, filename): <NEW_LINE> <INDENT> in_list = [] <NEW_LINE> measures = [] <NEW_LINE> with open(filename, 'rb+') as file_obj: <NEW_LINE> <INDENT> in_list = pickle.load(file_obj) <NEW_LINE> <DEDENT> for item in in_list: <NEW_LINE> <INDENT> sscale = item['key'] <NEW_LINE> scale = Scale(sscale['base_frequency'], sscale['semitones'], sscale['octaves'], sscale['chromatic']) <NEW_LINE> measure = Measure(item['time_signature'], item['tempo'], key=scale) <NEW_LINE> for pos in item['notes']: <NEW_LINE> <INDENT> snote = item['notes'][pos] <NEW_LINE> note = Note(snote["rel_pitch"], scale, snote['abs_duration']) <NEW_LINE> measure.add(note, pos) <NEW_LINE> <DEDENT> measures.append(measure) <NEW_LINE> <DEDENT> return measures <NEW_LINE> <DEDENT> def write_riff(self, filename): <NEW_LINE> <INDENT> out_list = [] <NEW_LINE> for item in self.measures: <NEW_LINE> <INDENT> out_list.append(item.serial()) <NEW_LINE> <DEDENT> with open(filename, 'wb+') as file_obj: <NEW_LINE> <INDENT> pickle.dump(out_list, file_obj) <NEW_LINE> <DEDENT> <DEDENT> def create_sample(self): <NEW_LINE> <INDENT> big = BigSample(self.runtime) <NEW_LINE> rate = big.rate <NEW_LINE> for key in self.riff_dict: <NEW_LINE> <INDENT> freq = self.riff_dict[key][0] <NEW_LINE> length = self.riff_dict[key][1] <NEW_LINE> samp = SmallSample(freq, length, rate=big.rate).sample <NEW_LINE> big.add_inplace2(samp, key * rate) <NEW_LINE> <DEDENT> return big | A riff is a collection of one or more measures.
It only cares about instances of Hz * Seconds.
It is readable from and writable to a .riff file.
This means it can be instantiated via either a list of
Measure objects or a filename.
riff_dict is the key data structure. It is composed of
time : (frequency, duration) pairs.
Ex:
.25 : (256, .5) would mean
256 Hz for .5 seconds, starting at .25 seconds from the beginning of the sample | 62599037b5575c28eb71357d |
class Layer2(object): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if "layer1" in kwargs: <NEW_LINE> <INDENT> self.layer1 = kwargs["layer1"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.layer1 = Layer1(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def create_vault(self, name): <NEW_LINE> <INDENT> self.layer1.create_vault(name) <NEW_LINE> return self.get_vault(name) <NEW_LINE> <DEDENT> def delete_vault(self, name): <NEW_LINE> <INDENT> return self.layer1.delete_vault(name) <NEW_LINE> <DEDENT> def get_vault(self, name): <NEW_LINE> <INDENT> response_data = self.layer1.describe_vault(name) <NEW_LINE> return Vault(self.layer1, response_data) <NEW_LINE> <DEDENT> def list_vaults(self): <NEW_LINE> <INDENT> vaults = [] <NEW_LINE> marker = None <NEW_LINE> while True: <NEW_LINE> <INDENT> response_data = self.layer1.list_vaults(marker=marker, limit=1000) <NEW_LINE> vaults.extend([Vault(self.layer1, rd) for rd in response_data['VaultList']]) <NEW_LINE> marker = response_data.get('Marker') <NEW_LINE> if not marker: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return vaults | Provides a more pythonic and friendly interface to Glacier based on Layer1 | 62599037711fe17d825e154f |
class TGetCatalogsResp(object): <NEW_LINE> <INDENT> def __init__(self, status=None, operationHandle=None,): <NEW_LINE> <INDENT> self.status = status <NEW_LINE> self.operationHandle = operationHandle <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.status = TStatus() <NEW_LINE> self.status.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.operationHandle = TOperationHandle() <NEW_LINE> self.operationHandle.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('TGetCatalogsResp') <NEW_LINE> if self.status is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('status', TType.STRUCT, 1) <NEW_LINE> self.status.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.operationHandle is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2) <NEW_LINE> self.operationHandle.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> if self.status is None: <NEW_LINE> <INDENT> raise TProtocolException(message='Required field status is unset!') <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- status
- operationHandle | 62599037596a897236128e06 |
class Only(Directive): <NEW_LINE> <INDENT> has_content = True <NEW_LINE> required_arguments = 1 <NEW_LINE> optional_arguments = 0 <NEW_LINE> final_argument_whitespace = True <NEW_LINE> option_spec = {} <NEW_LINE> def run(self): <NEW_LINE> <INDENT> node = addnodes.only() <NEW_LINE> node.document = self.state.document <NEW_LINE> set_source_info(self, node) <NEW_LINE> node['expr'] = self.arguments[0] <NEW_LINE> surrounding_title_styles = self.state.memo.title_styles <NEW_LINE> surrounding_section_level = self.state.memo.section_level <NEW_LINE> self.state.memo.title_styles = [] <NEW_LINE> self.state.memo.section_level = 0 <NEW_LINE> try: <NEW_LINE> <INDENT> result = self.state.nested_parse(self.content, self.content_offset, node, match_titles=1) <NEW_LINE> title_styles = self.state.memo.title_styles <NEW_LINE> if (not surrounding_title_styles or not title_styles or title_styles[0] not in surrounding_title_styles or not self.state.parent): <NEW_LINE> <INDENT> return [node] <NEW_LINE> <DEDENT> current_depth = 0 <NEW_LINE> parent = self.state.parent <NEW_LINE> while parent: <NEW_LINE> <INDENT> current_depth += 1 <NEW_LINE> parent = parent.parent <NEW_LINE> <DEDENT> current_depth -= 2 <NEW_LINE> title_style = title_styles[0] <NEW_LINE> nested_depth = len(surrounding_title_styles) <NEW_LINE> if title_style in surrounding_title_styles: <NEW_LINE> <INDENT> nested_depth = surrounding_title_styles.index(title_style) <NEW_LINE> <DEDENT> n_sects_to_raise = current_depth - nested_depth + 1 <NEW_LINE> parent = self.state.parent <NEW_LINE> for i in xrange(n_sects_to_raise): <NEW_LINE> <INDENT> if parent.parent: <NEW_LINE> <INDENT> parent = parent.parent <NEW_LINE> <DEDENT> <DEDENT> parent.append(node) <NEW_LINE> return [] <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.state.memo.title_styles = surrounding_title_styles <NEW_LINE> self.state.memo.section_level = surrounding_section_level | Directive to only include text if the given tag(s) are enabled. | 62599037d4950a0f3b1116f3 |
class WikiPageAttachment(BasePolarion): <NEW_LINE> <INDENT> _cls_suds_map = {"author": {"field_name": "author", "cls": User}, "file_name": "fileName", "wiki_page_attachment_id": "id", "length": "length", "title": "title", "updated": "updated", "url": "url", "uri": "_uri", "_unresolved": "_unresolved"} <NEW_LINE> _obj_client = "tracker_client" <NEW_LINE> _obj_struct = "tns3:WikiPageAttachment" | Object to handle the Polarion WSDL tns3:WikiPageAttachment class
Attributes:
author (User)
file_name (string)
wiki_page_attachment_id (string)
length (long)
title (string)
updated (dateTime)
url (string) | 62599037e76e3b2f99fd9b75 |
class FilenameDescriptor(BasePremapDescriptor): <NEW_LINE> <INDENT> def doTransform(self, value, arg): <NEW_LINE> <INDENT> if hasattr(value, '_already_recoded_filename'): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif isinstance(value, str) or isinstance(value, unicode): <NEW_LINE> <INDENT> value = util.filename.toLocale( value, self.getCharset(arg), self.getFileCharset(arg) ) <NEW_LINE> class RecodedFilename(type(value)): <NEW_LINE> <INDENT> _already_recoded_filename = True <NEW_LINE> <DEDENT> value = RecodedFilename(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError( "Supplied value must be string or unicode, not %r" % type(value).__name__ ) <NEW_LINE> <DEDENT> return value | Filename storage | 6259903715baa72349463103 |
class ProcedureList: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._map = {} <NEW_LINE> self.dummy = ProcedureMetaData(None) <NEW_LINE> <DEDENT> def ref(self, key): <NEW_LINE> <INDENT> if not key in self._map: <NEW_LINE> <INDENT> self._map[key] = ProcedureMetaData(key) <NEW_LINE> <DEDENT> return self._map[key] <NEW_LINE> <DEDENT> def load_proclist(self, calls): <NEW_LINE> <INDENT> for (segname, segcalls) in calls.items(): <NEW_LINE> <INDENT> for (procidx, procname) in segcalls.items(): <NEW_LINE> <INDENT> meta = self.ref((segname, procidx)) <NEW_LINE> if isinstance(procname, tuple): <NEW_LINE> <INDENT> (meta.name,meta.ins,meta.outs) = procname <NEW_LINE> if meta.ins is not None and meta.outs is not None: <NEW_LINE> <INDENT> meta.delta = meta.outs - meta.ins <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> meta.name = procname <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def load_deltas(self, deltas): <NEW_LINE> <INDENT> for (key, data) in deltas.items(): <NEW_LINE> <INDENT> meta = self.ref(key) <NEW_LINE> (meta.delta, meta.ins, meta.outs, meta.num_locals) = data <NEW_LINE> <DEDENT> <DEDENT> def load_hierarchy(self, hierarchy): <NEW_LINE> <INDENT> for (key, data) in hierarchy.items(): <NEW_LINE> <INDENT> meta = self.ref(key) <NEW_LINE> (meta.parent) = self.ref(data) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if key in self._map: <NEW_LINE> <INDENT> return self._map[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.dummy | Store known metadata about procedures in a format accessible
by analysis tools. | 62599037507cdc57c63a5f02 |
class ImportVisitor(ast.NodeVisitor): <NEW_LINE> <INDENT> def __init__(self, filename, options): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.options = options or {} <NEW_LINE> self.imports = [] <NEW_LINE> self.application_import_names = set( self.options.get("application_import_names", []) ) <NEW_LINE> <DEDENT> def visit_Import(self, node): <NEW_LINE> <INDENT> if node.col_offset != 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.imports.append(node) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> def visit_ImportFrom(self, node): <NEW_LINE> <INDENT> if node.col_offset != 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.imports.append(node) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> def node_sort_key(self, node): <NEW_LINE> <INDENT> if isinstance(node, ast.Import): <NEW_LINE> <INDENT> names = [nm.name for nm in node.names] <NEW_LINE> <DEDENT> elif isinstance(node, ast.ImportFrom): <NEW_LINE> <INDENT> names = [node.module] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError(type(node)) <NEW_LINE> <DEDENT> import_type = self._import_type(node, names[0]) <NEW_LINE> for name in names[1:]: <NEW_LINE> <INDENT> name_type = self._import_type(node, name) <NEW_LINE> if import_type != name_type: <NEW_LINE> <INDENT> import_type = IMPORT_MIXED <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> imported_names = [[nm.name, nm.asname] for nm in node.names] <NEW_LINE> is_star_import = not any(nm == "*" for nm, asnm in imported_names) <NEW_LINE> from_level = getattr(node, "level", -1) <NEW_LINE> n = ( import_type, names, from_level, is_star_import, imported_names, ) <NEW_LINE> if n[0] == IMPORT_FUTURE: <NEW_LINE> <INDENT> group = (n[0], None, None, None, n[4]) <NEW_LINE> <DEDENT> elif n[0] == IMPORT_STDLIB: <NEW_LINE> <INDENT> group = (n[0], n[2], n[1], n[3], n[4]) <NEW_LINE> <DEDENT> elif n[0] == IMPORT_STDLIB: <NEW_LINE> <INDENT> group = (n[0], n[2], n[1], n[3], n[4]) <NEW_LINE> <DEDENT> elif n[0] == IMPORT_3RD_PARTY: <NEW_LINE> <INDENT> group = (n[0], n[1], n[2], n[3], n[4]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> group = n <NEW_LINE> <DEDENT> return group <NEW_LINE> <DEDENT> def _import_type(self, node, name): <NEW_LINE> <INDENT> if isinstance(name, int): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> pkg = root_package_name(name) <NEW_LINE> if pkg == "__future__": <NEW_LINE> <INDENT> return IMPORT_FUTURE <NEW_LINE> <DEDENT> elif pkg in STDLIB_NAMES: <NEW_LINE> <INDENT> return IMPORT_STDLIB <NEW_LINE> <DEDENT> elif ( pkg in self.application_import_names or (isinstance(node, ast.ImportFrom) and node.level > 0) ): <NEW_LINE> <INDENT> return IMPORT_APP <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return IMPORT_3RD_PARTY | This class visits all the import nodes at the root of tree and generates
sort keys for each import node.
In practice this means that they are sorted according to something like
this tuple.
(stdlib, site_packages, names) | 62599037c432627299fa4161 |
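A short usage sketch for the visitor above, assuming a module's source is parsed with ast and the collected top-level imports are then ordered by node_sort_key; the file name and option values are illustrative:

    import ast

    source = open("example.py").read()                          # hypothetical input module
    tree = ast.parse(source, filename="example.py")

    visitor = ImportVisitor("example.py", {"application_import_names": ["myapp"]})
    visitor.visit(tree)                                          # collects top-level Import/ImportFrom nodes

    ordered = sorted(visitor.imports, key=visitor.node_sort_key)
    for node in ordered:
        print(node.lineno, visitor.node_sort_key(node)[0])       # line number and import group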