code: string (lengths 4 to 4.48k)
docstring: string (lengths 1 to 6.45k)
_id: string (length 24)
class ControlFrame(NamedTuple): <NEW_LINE> <INDENT> label_types: Tuple[ValType, ...] <NEW_LINE> end_types: Tuple[ValType, ...] <NEW_LINE> height: int <NEW_LINE> is_unreachable: bool <NEW_LINE> def mark_unreachable(self) -> 'ControlFrame': <NEW_LINE> <INDENT> return type(self)( self.label_types, self.end_types, self.height, True, )
Represents the equivalent of a label during expression validation.
625990311d351010ab8f4bd8
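A minimal, self-contained sketch of how the ControlFrame entry above behaves, with ValType stubbed as a plain string (the real type comes from the validator's type module, which is an assumption here): mark_unreachable returns a fresh frame rather than mutating the existing one.

```python
from typing import NamedTuple, Tuple

ValType = str  # stand-in; the real ValType lives in the validator's type module

class ControlFrame(NamedTuple):
    label_types: Tuple[ValType, ...]
    end_types: Tuple[ValType, ...]
    height: int
    is_unreachable: bool

    def mark_unreachable(self) -> 'ControlFrame':
        # NamedTuples are immutable, so build a new frame with the flag set.
        return type(self)(self.label_types, self.end_types, self.height, True)

frame = ControlFrame(('i32',), ('i32',), 0, False)
dead = frame.mark_unreachable()
assert dead.is_unreachable and not frame.is_unreachable  # original is untouched
```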
class HeatmapMaxDetBlock(nn.Layer): <NEW_LINE> <INDENT> def __init__(self, tune=True, data_format="channels_last", **kwargs): <NEW_LINE> <INDENT> super(HeatmapMaxDetBlock, self).__init__(**kwargs) <NEW_LINE> self.tune = tune <NEW_LINE> self.data_format = data_format <NEW_LINE> <DEDENT> def call(self, x, training=None): <NEW_LINE> <INDENT> vector_dim = 2 <NEW_LINE> x_shape = x.get_shape().as_list() <NEW_LINE> batch = x_shape[0] <NEW_LINE> if is_channels_first(self.data_format): <NEW_LINE> <INDENT> channels = x_shape[1] <NEW_LINE> in_size = x_shape[2:] <NEW_LINE> heatmap_vector = tf.reshape(x, shape=(batch, channels, -1)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> channels = x_shape[3] <NEW_LINE> in_size = x_shape[1:3] <NEW_LINE> heatmap_vector = tf.reshape(x, shape=(batch, -1, channels)) <NEW_LINE> heatmap_vector = tf.transpose(heatmap_vector, perm=(0, 2, 1)) <NEW_LINE> <DEDENT> indices = tf.cast(tf.expand_dims(tf.cast(tf.math.argmax(heatmap_vector, axis=vector_dim), np.int32), axis=vector_dim), np.float32) <NEW_LINE> scores = tf.math.reduce_max(heatmap_vector, axis=vector_dim, keepdims=True) <NEW_LINE> scores_mask = tf.cast(tf.math.greater(scores, 0.0), np.float32) <NEW_LINE> pts_x = (indices % in_size[1]) * scores_mask <NEW_LINE> pts_y = (indices // in_size[1]) * scores_mask <NEW_LINE> pts = tf.concat([pts_x, pts_y, scores], axis=vector_dim) <NEW_LINE> if self.tune: <NEW_LINE> <INDENT> pts = pts.numpy() <NEW_LINE> for b in range(batch): <NEW_LINE> <INDENT> for k in range(channels): <NEW_LINE> <INDENT> hm = x[b, k, :, :] if is_channels_first(self.data_format) else x[b, :, :, k] <NEW_LINE> px = int(pts[b, k, 0]) <NEW_LINE> py = int(pts[b, k, 1]) <NEW_LINE> if (0 < px < in_size[1] - 1) and (0 < py < in_size[0] - 1): <NEW_LINE> <INDENT> pts[b, k, 0] += np.sign(hm[py, px + 1] - hm[py, px - 1]) * 0.25 <NEW_LINE> pts[b, k, 1] += np.sign(hm[py + 1, px] - hm[py - 1, px]) * 0.25 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> pts = tf.convert_to_tensor(pts) <NEW_LINE> <DEDENT> return pts
Heatmap maximum detector block (for human pose estimation task). Parameters: ---------- tune : bool, default True Whether to tune point positions. data_format : str, default 'channels_last' The ordering of the dimensions in tensors.
62599031a8ecb033258722dd
class GameNumPyLight(BaseGameNumPy): <NEW_LINE> <INDENT> def _step(self): <NEW_LINE> <INDENT> con = convolve(self.cells, self.WEIGHTS, mode='wrap') <NEW_LINE> self.cells.fill(0) <NEW_LINE> self.cells[(con == 3) | (con == 12) | (con == 13)] = 1 <NEW_LINE> <DEDENT> def fate(self, row, col): <NEW_LINE> <INDENT> line = self.cells.take(row, axis=0, mode='wrap') <NEW_LINE> cell = line.take(col, mode='wrap') <NEW_LINE> return Fate.Survive if cell == 1 else Fate.StayDead <NEW_LINE> <DEDENT> def age(self, row, col): <NEW_LINE> <INDENT> return 1000
Light version of the NumPy/SciPy-based implementation of the Game of Life.
6259903150485f2cf55dc03e
class ExperimentUnit(AuthUserDetail, CreateUpdateTime): <NEW_LINE> <INDENT> slug = models.SlugField(max_length=250, unique=True, blank=True) <NEW_LINE> exp_unit_code = models.CharField(max_length=20, unique=True, verbose_name='Experiment unit code') <NEW_LINE> experimentunitcategory = models.ForeignKey( ExperimentUnitCategory, on_delete=models.PROTECT, verbose_name='Unit categories' ) <NEW_LINE> common_name = models.CharField(max_length=250) <NEW_LINE> latin_name = models.CharField(max_length=250, blank=True, null=True) <NEW_LINE> objects = ExperimentUnitManager() <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.common_name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.common_name <NEW_LINE> <DEDENT> def get_api_url(self): <NEW_LINE> <INDENT> return reverse('research_api:experiment_unit_detail', kwargs={'slug': self.slug}) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ['-time_created', '-last_update'] <NEW_LINE> verbose_name_plural = 'Experiment Units' <NEW_LINE> <DEDENT> @property <NEW_LINE> def research_experiment_unit_relation(self): <NEW_LINE> <INDENT> instance = self <NEW_LINE> qs = ResearchExperimentUnit.objects.filter_by_model_type(instance) <NEW_LINE> return qs
Experiment unit model. Creates experiment unit entity.
6259903173bcbd0ca4bcb352
class TorchServable(BaseServable): <NEW_LINE> <INDENT> def _build(self): <NEW_LINE> <INDENT> self.model = torch.load(self.dlhub['files']['model'], map_location='cpu') <NEW_LINE> self.model.eval() <NEW_LINE> self.input_type = self.servable['methods']['run']['input'] <NEW_LINE> self.is_multiinput = self.input_type['type'] == 'tuple' <NEW_LINE> if self.is_multiinput: <NEW_LINE> <INDENT> logger.info('Loading a multi-input model') <NEW_LINE> self.input_type = [x['item_type']['type'] for x in self.input_type['element_types']] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.info('Loading a single-input model') <NEW_LINE> self.input_type = self.input_type['item_type']['type'] <NEW_LINE> <DEDENT> self.is_multioutput = self.servable['methods']['run']['output']['type'] == 'tuple' <NEW_LINE> <DEDENT> def _run(self, inputs, **parameters): <NEW_LINE> <INDENT> if self.is_multiinput: <NEW_LINE> <INDENT> inputs = [ torch.tensor(i).to(getattr(torch, dt)) for i, dt in zip(inputs, self.input_type) ] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> inputs = [torch.tensor(inputs).to(getattr(torch, self.input_type))] <NEW_LINE> <DEDENT> outputs = self.model(*inputs, **parameters) <NEW_LINE> if self.is_multioutput: <NEW_LINE> <INDENT> return [o.detach().numpy() for o in outputs] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return outputs.detach().numpy()
Servable for Torch models
62599031e76e3b2f99fd9acd
class DynamicDocument(mongoengine.DynamicDocument): <NEW_LINE> <INDENT> meta = {'abstract': True, 'queryset_class': BaseQuerySet} <NEW_LINE> test = 1
Abstract Dynamic document with extra helpers in the queryset class
6259903123e79379d538d5cb
class ZCatalogIndexes(IFAwareObjectManager, Folder, Persistent, Implicit): <NEW_LINE> <INDENT> _product_interfaces = (IPluggableIndex, ) <NEW_LINE> meta_type = "ZCatalogIndex" <NEW_LINE> manage_options = () <NEW_LINE> security = ClassSecurityInfo() <NEW_LINE> security.declareObjectProtected(manage_zcatalog_indexes) <NEW_LINE> security.setPermissionDefault(manage_zcatalog_indexes, ('Manager', )) <NEW_LINE> security.declareProtected(manage_zcatalog_indexes, 'addIndexForm') <NEW_LINE> addIndexForm= DTMLFile('dtml/addIndexForm', globals()) <NEW_LINE> def manage_main(self, REQUEST, RESPONSE): <NEW_LINE> <INDENT> RESPONSE.redirect('../manage_catalogIndexes') <NEW_LINE> <DEDENT> manage_workspace = manage_main <NEW_LINE> def _setOb(self, id, object): <NEW_LINE> <INDENT> indexes = aq_parent(self)._catalog.indexes <NEW_LINE> indexes[id] = object <NEW_LINE> aq_base(aq_parent(self))._indexes = indexes <NEW_LINE> <DEDENT> def _delOb(self, id): <NEW_LINE> <INDENT> indexes = aq_parent(self)._catalog.indexes <NEW_LINE> del indexes[id] <NEW_LINE> aq_base(aq_parent(self))._indexes = indexes <NEW_LINE> <DEDENT> def _getOb(self, id, default=_marker): <NEW_LINE> <INDENT> indexes = aq_parent(self)._catalog.indexes <NEW_LINE> if default is _marker: <NEW_LINE> <INDENT> return indexes.get(id) <NEW_LINE> <DEDENT> return indexes.get(id, default) <NEW_LINE> <DEDENT> security.declareProtected(manage_zcatalog_indexes, 'objectIds') <NEW_LINE> def objectIds(self, spec=None): <NEW_LINE> <INDENT> indexes = aq_parent(self)._catalog.indexes <NEW_LINE> if spec is not None: <NEW_LINE> <INDENT> if isinstance(spec, str): <NEW_LINE> <INDENT> spec = [spec] <NEW_LINE> <DEDENT> result = [] <NEW_LINE> for ob in indexes.keys(): <NEW_LINE> <INDENT> o = indexes.get(ob) <NEW_LINE> meta = getattr(o, 'meta_type', None) <NEW_LINE> if meta is not None and meta in spec: <NEW_LINE> <INDENT> result.append(ob) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> return indexes.keys() <NEW_LINE> <DEDENT> def _setObject(self, id, object, roles=None, user=None, set_owner=1): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __bobo_traverse__(self, REQUEST, name): <NEW_LINE> <INDENT> indexes = aq_parent(self)._catalog.indexes <NEW_LINE> o = indexes.get(name, None) <NEW_LINE> if o is not None: <NEW_LINE> <INDENT> if getattr(o, 'manage_workspace', None) is None: <NEW_LINE> <INDENT> o = OldCatalogWrapperObject(o) <NEW_LINE> <DEDENT> return o.__of__(self) <NEW_LINE> <DEDENT> return getattr(self, name)
A mapping object, responding to getattr requests by looking up the requested indexes in an object manager.
62599031711fe17d825e14fb
@dataclass(frozen=True) <NEW_LINE> class Carrier: <NEW_LINE> <INDENT> carrier_id: str <NEW_LINE> name: str
Class holding data about a carrier.
62599031ac7a0e7691f735a9
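A short sketch of what frozen=True buys the Carrier record above: instances reject attribute assignment and are hashable, so they can be used as dictionary keys. The field values below are illustrative, not taken from the dataset.

```python
import dataclasses

@dataclasses.dataclass(frozen=True)
class Carrier:
    carrier_id: str
    name: str

c = Carrier(carrier_id="C-001", name="Acme Lines")  # illustrative values
try:
    c.name = "Other"  # frozen dataclasses raise on attribute assignment
except dataclasses.FrozenInstanceError:
    pass
routes = {c: ["RIX", "AMS"]}  # hashable, so usable as a dict key
```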
class PreSubmitEnterprise(models.Model): <NEW_LINE> <INDENT> content_id = models.CharField(max_length=50, primary_key=True, default=lambda: str(uuid.uuid4()), verbose_name="初审报告唯一ID") <NEW_LINE> project_id = models.ForeignKey(ProjectSingle) <NEW_LINE> original = models.ForeignKey(ProjectEnterpriseOrigin, blank=False, null=True, verbose_name=u"项目来源") <NEW_LINE> maturity = models.ForeignKey(ProjectEnterpriseMaturity, blank=False, null=True, verbose_name=u"项目技术成熟度") <NEW_LINE> enterpriseTeacher = models.OneToOneField(Teacher_Enterprise, blank=False, null=False, verbose_name=u"企业导师") <NEW_LINE> background = models.TextField(blank=False, null=True, verbose_name=u"创业团队介绍") <NEW_LINE> innovation = models.TextField(blank=False, null=True, verbose_name=u"项目的基本情况及创新内容") <NEW_LINE> industry = models.TextField(blank=False, null=True, verbose_name=u"行业及市场前景") <NEW_LINE> product = models.TextField(blank=False, null=True, verbose_name=u"产品制造") <NEW_LINE> funds_plan = models.TextField(blank=False, null=True, verbose_name=u"项目投资预算及融资计划") <NEW_LINE> operating_mode = models.TextField(blank=False, null=True, verbose_name=u"项目运营模式") <NEW_LINE> risk_management = models.TextField(blank=False, null=True, verbose_name=u"项目风险预测及应对措施") <NEW_LINE> financial_pred = models.TextField(blank=False, null=True, verbose_name=u"财务预测") <NEW_LINE> inspector_comments = models.TextField(blank=True, null=True, verbose_name="指导教师意见") <NEW_LINE> instutite_comments = models.TextField(blank=True, null=True, verbose_name="学部学院评审意见") <NEW_LINE> school_comments = models.TextField(blank=True, null=True, verbose_name="学校评审意见") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = "创业项目申报书" <NEW_LINE> verbose_name_plural = "创业项目申报书" <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.project_id.title
Inherited table, which uses ProjectSingle to show pre-submit content for an Enterprise project
625990318a43f66fc4bf3249
class UndoWrongStateError(UndoError): <NEW_LINE> <INDENT> pass
Exception related to the current state of the undo/redo stack.
625990318c3a8732951f761a
class DescriptorSetHeader(usb_descriptors.DescriptorContainer): <NEW_LINE> <INDENT> pass
Microsoft OS 2.0 descriptor set header.
62599031d18da76e235b79af
class ExperienceReplay(object): <NEW_LINE> <INDENT> memory = 1000 <NEW_LINE> def __init__(self, model, experiences=[]): <NEW_LINE> <INDENT> self.experiences = experiences <NEW_LINE> self.model = model <NEW_LINE> <DEDENT> def collect(self, strategy, epochs=10, verbose=False): <NEW_LINE> <INDENT> max_score = 0 <NEW_LINE> for i in range(epochs): <NEW_LINE> <INDENT> score, experience_set = play(strategy, verbose, False) <NEW_LINE> self.experiences += experience_set <NEW_LINE> max_score = max(max_score, score) <NEW_LINE> if len(self.experiences) > self.memory: <NEW_LINE> <INDENT> for j in range(len(self.experiences) - self.memory): <NEW_LINE> <INDENT> self.experiences.pop(0) <NEW_LINE> <DEDENT> <DEDENT> print("Game {0} completed: Experiences - {1}; Score - {2}" .format(i, len(experience_set), score)) <NEW_LINE> self.train() <NEW_LINE> <DEDENT> return self.experiences, max_score <NEW_LINE> <DEDENT> def experiences_to_batches(self): <NEW_LINE> <INDENT> experiences = self.experiences <NEW_LINE> batch_size = len(experiences) <NEW_LINE> state_batch = np.zeros((batch_size, 16), dtype=np.float) <NEW_LINE> next_state_batch = np.zeros((batch_size, 16), dtype=np.float) <NEW_LINE> actions = np.zeros((batch_size,), dtype=np.int) <NEW_LINE> reward_batch = np.zeros((batch_size,), dtype=np.float) <NEW_LINE> bad_action_batch = np.zeros((batch_size,), dtype=np.bool) <NEW_LINE> available_actions_batch = np.zeros((batch_size, 4), dtype=np.bool) <NEW_LINE> merged = np.zeros((batch_size,), dtype=np.float) <NEW_LINE> for i, experience in enumerate(experiences): <NEW_LINE> <INDENT> state_batch[i, :] = (experience.state.flatten()) <NEW_LINE> next_state_batch[i, :] = (experience.next_state.flatten()) <NEW_LINE> actions[i] = experience.action <NEW_LINE> reward_batch[i] = experience.reward <NEW_LINE> bad_action_batch[i] = experience.game_over or experience.not_available <NEW_LINE> available_actions_batch[i, experience.next_state_available_actions] = True <NEW_LINE> merged[i] = (np.count_nonzero(experience.state) - np.count_nonzero(experience.next_state) + 1) <NEW_LINE> <DEDENT> targets = compute_targets(reward_batch, state_batch, next_state_batch, actions, merged, self.model) <NEW_LINE> return state_batch, targets, actions <NEW_LINE> <DEDENT> def train(self): <NEW_LINE> <INDENT> (train_x, train_y, action) = self.experiences_to_batches() <NEW_LINE> self.model.train_on_batch(np.divide(train_x, NORMALIZING_FACTOR), np.divide(train_y, NORMALIZING_FACTOR)) <NEW_LINE> <DEDENT> def get_model(self): <NEW_LINE> <INDENT> return self.model
Class to encapsulate functions acting on a batch of experiences.
62599031d164cc6175822034
class Chromecast(object): <NEW_LINE> <INDENT> def __init__(self, host): <NEW_LINE> <INDENT> self.logger = logging.getLogger(__name__) <NEW_LINE> self.host = host <NEW_LINE> self.logger.info("Querying device status") <NEW_LINE> self.device = get_device_status(self.host) <NEW_LINE> if not self.device: <NEW_LINE> <INDENT> raise ChromecastConnectionError( "Could not connect to {}".format(self.host)) <NEW_LINE> <DEDENT> self.status = None <NEW_LINE> self.media_status = None <NEW_LINE> self.socket_client = socket_client.SocketClient(host) <NEW_LINE> self.socket_client.receiver_controller.register_status_listener(self) <NEW_LINE> self.set_volume = self.socket_client.receiver_controller.set_volume <NEW_LINE> self.play_media = self.socket_client.media_controller.play_media <NEW_LINE> self.register_handler = self.socket_client.register_handler <NEW_LINE> self.socket_client.start() <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_idle(self): <NEW_LINE> <INDENT> return self.status is None or self.status.app_id == APP_BACKDROP <NEW_LINE> <DEDENT> @property <NEW_LINE> def app_id(self): <NEW_LINE> <INDENT> return self.status.app_id if self.status else None <NEW_LINE> <DEDENT> @property <NEW_LINE> def app_display_name(self): <NEW_LINE> <INDENT> return self.status.display_name if self.status else None <NEW_LINE> <DEDENT> @property <NEW_LINE> def media_controller(self): <NEW_LINE> <INDENT> return self.socket_client.media_controller <NEW_LINE> <DEDENT> def new_cast_status(self, status): <NEW_LINE> <INDENT> self.status = status <NEW_LINE> <DEDENT> def start_app(self, app_id): <NEW_LINE> <INDENT> self.logger.info("Starting app {}".format(app_id)) <NEW_LINE> self.socket_client.receiver_controller.launch_app(app_id) <NEW_LINE> <DEDENT> def quit_app(self): <NEW_LINE> <INDENT> self.logger.info("Quiting current app") <NEW_LINE> self.socket_client.receiver_controller.stop_app() <NEW_LINE> <DEDENT> def reboot(self): <NEW_LINE> <INDENT> reboot(self.host) <NEW_LINE> <DEDENT> def volume_up(self): <NEW_LINE> <INDENT> volume = self.status.volume_level <NEW_LINE> return self.set_volume(volume + 0.1) <NEW_LINE> <DEDENT> def volume_down(self): <NEW_LINE> <INDENT> volume = self.status.volume_level <NEW_LINE> return self.set_volume(volume - 0.1) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.socket_client.stop.set() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Chromecast({}, {}, {}, {}, api={}.{})".format( self.host, self.device.friendly_name, self.device.model_name, self.device.manufacturer, self.device.api_version[0], self.device.api_version[1])
Class to interface with a Chromecast.
625990316fece00bbaccca71
@implementer(IOrdering) <NEW_LINE> @adapter(IOrderableFolder) <NEW_LINE> class UnorderedOrdering(object): <NEW_LINE> <INDENT> def __init__(self, context): <NEW_LINE> <INDENT> self.context = context <NEW_LINE> <DEDENT> def notifyAdded(self, obj_id): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def notifyRemoved(self, obj_id): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def idsInOrder(self): <NEW_LINE> <INDENT> return aq_base(self.context).objectIds(ordered=False) <NEW_LINE> <DEDENT> def getObjectPosition(self, obj_id): <NEW_LINE> <INDENT> return None
This implementation provides no ordering.
625990315e10d32532ce4164
class TestSetDiscountEffectProps(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return SetDiscountEffectProps( name = '0', value = 1.337, scope = '0' ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return SetDiscountEffectProps( name = '0', value = 1.337, ) <NEW_LINE> <DEDENT> <DEDENT> def testSetDiscountEffectProps(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True)
SetDiscountEffectProps unit test stubs
6259903163f4b57ef00865d4
class PoemSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> genre = serializers.SlugRelatedField(queryset=Genre.objects.all(), slug_field='name') <NEW_LINE> user = UserSerializer(read_only=True) <NEW_LINE> categories = serializers.SlugRelatedField(queryset=Category.objects.all(), slug_field='name', many=True) <NEW_LINE> timesince = serializers.SerializerMethodField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Poem <NEW_LINE> fields = ('id', 'user', 'title', 'summary', 'content', 'is_published', 'genre', 'categories', 'timesince', 'created') <NEW_LINE> read_only_fields = ('created', ) <NEW_LINE> <DEDENT> def get_timesince(self, obj): <NEW_LINE> <INDENT> return f'{timesince(obj.created)} ago'
Serializer for creating a poem
625990319b70327d1c57fe47
class _LegacyRebatchDataset(dataset_ops.UnaryDataset): <NEW_LINE> <INDENT> def __init__(self, input_dataset, num_replicas): <NEW_LINE> <INDENT> def recalculate_batch_size(type_spec): <NEW_LINE> <INDENT> output_shape = type_spec._to_legacy_output_shapes() <NEW_LINE> if not isinstance(output_shape, tensor_shape.TensorShape): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if output_shape.rank is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if len(output_shape) < 1: <NEW_LINE> <INDENT> raise ValueError("Expected a dataset whose elements have rank >= 1 " "but found a dataset whose elements are scalars. " "You can fix the issue by adding the `batch` " "transformation to the dataset.") <NEW_LINE> <DEDENT> output_dims = [d.value for d in output_shape.dims] <NEW_LINE> if output_dims[0] is not None and output_dims[0] % num_replicas == 0: <NEW_LINE> <INDENT> return output_dims[0] // num_replicas <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def rebatch(type_spec): <NEW_LINE> <INDENT> batch_size = recalculate_batch_size(type_spec) <NEW_LINE> return type_spec._unbatch()._batch(batch_size) <NEW_LINE> <DEDENT> self._element_spec = nest.map_structure( rebatch, dataset_ops.get_structure(input_dataset)) <NEW_LINE> input_dataset = dataset_ops.normalize_to_dense(input_dataset) <NEW_LINE> variant_tensor = ged_ops.rebatch_dataset( input_dataset._variant_tensor, num_replicas=num_replicas, **self._flat_structure) <NEW_LINE> super(_LegacyRebatchDataset, self).__init__(input_dataset, variant_tensor) <NEW_LINE> <DEDENT> @property <NEW_LINE> def element_spec(self): <NEW_LINE> <INDENT> return self._element_spec
A `Dataset` that divides its input batches into `num_replicas` sub-batches. For each batch in the input dataset, _LegacyRebatchDataset will produce `num_replicas` smaller batches whose sizes add up to the original batch size. For example: ```python ds = tf.data.Dataset.range(8) ds = ds.batch(4) ds = _LegacyRebatchDataset(ds, num_replicas=3) for elem in ds: print(elem) >> [0, 1], [2, 3], [], [4, 5], [6, 7], [] ```
6259903191af0d3eaad3aef0
class QtWindowLayout(QLayout): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(QtWindowLayout, self).__init__(*args, **kwargs) <NEW_LINE> self._layout_item = None <NEW_LINE> <DEDENT> def addItem(self, item): <NEW_LINE> <INDENT> self._layout_item = item <NEW_LINE> self.update() <NEW_LINE> <DEDENT> def count(self): <NEW_LINE> <INDENT> return 0 if self._layout_item is None else 1 <NEW_LINE> <DEDENT> def itemAt(self, idx): <NEW_LINE> <INDENT> if idx == 0: <NEW_LINE> <INDENT> return self._layout_item <NEW_LINE> <DEDENT> <DEDENT> def takeAt(self, idx): <NEW_LINE> <INDENT> if idx == 0: <NEW_LINE> <INDENT> res = self._layout_item <NEW_LINE> self._layout_item = None <NEW_LINE> return res <NEW_LINE> <DEDENT> <DEDENT> def sizeHint(self): <NEW_LINE> <INDENT> return QSize(600, 100) <NEW_LINE> <DEDENT> def setGeometry(self, rect): <NEW_LINE> <INDENT> super(QtWindowLayout, self).setGeometry(rect) <NEW_LINE> item = self._layout_item <NEW_LINE> if item is not None: <NEW_LINE> <INDENT> item.widget().setGeometry(rect)
A QLayout subclass which can have at most one layout item. This layout item is expanded to fit the allowable space, regardless of its size policy settings. This is similar to how central widgets behave in a QMainWindow. The class is designed for use by QtWindow/QtDialog, other uses are at the user's own risk.
62599032ac7a0e7691f735ab
class LatestDiscussions(DiscussionFeed): <NEW_LINE> <INDENT> def items(self): <NEW_LINE> <INDENT> content_type = ContentType.objects.get_for_model(Entry) <NEW_LINE> return comments.get_model().objects.filter( content_type=content_type, is_public=True).order_by( '-submit_date')[:FEEDS_MAX_ITEMS] <NEW_LINE> <DEDENT> def link(self): <NEW_LINE> <INDENT> return reverse('zinnia_entry_archive_index') <NEW_LINE> <DEDENT> def get_title(self, obj): <NEW_LINE> <INDENT> return _('Latest discussions') <NEW_LINE> <DEDENT> def description(self): <NEW_LINE> <INDENT> return _('The latest discussions for the site %s') % self.site.name
Feed for the latest discussions.
62599032ec188e330fdf9957
class HDAExtended(HDADetailed): <NEW_LINE> <INDENT> tool_version: str = Field( ..., title="Tool Version", description="The version of the tool that produced this dataset.", ) <NEW_LINE> parent_id: Optional[EncodedDatabaseIdField] = Field( None, title="Parent ID", description="TODO", ) <NEW_LINE> designation: Optional[str] = Field( None, title="Designation", description="TODO", )
History Dataset Association extended information.
62599032d10714528d69eeed
class CustomerModelAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> pass
Customer admin class.
6259903296565a6dacd2d7ef
@add_common_get_method <NEW_LINE> class WorksService(ServiceBase): <NEW_LINE> <INDENT> path = 'works' <NEW_LINE> allowed_params = ['fields', 'filter_ids', 'filter_season', 'filter_title', 'page', 'per_page', 'sort_id', 'sort_season', 'sort_watchers_count'] <NEW_LINE> payload_type = 'work'
:reference: https://annict.wikihub.io/wiki/api/works
625990328a349b6b43687301
class GetXlsxStylesResponse(object): <NEW_LINE> <INDENT> swagger_types = { 'successful': 'bool', 'cell_styles': 'list[DocxCellStyle]' } <NEW_LINE> attribute_map = { 'successful': 'Successful', 'cell_styles': 'CellStyles' } <NEW_LINE> def __init__(self, successful=None, cell_styles=None): <NEW_LINE> <INDENT> self._successful = None <NEW_LINE> self._cell_styles = None <NEW_LINE> self.discriminator = None <NEW_LINE> if successful is not None: <NEW_LINE> <INDENT> self.successful = successful <NEW_LINE> <DEDENT> if cell_styles is not None: <NEW_LINE> <INDENT> self.cell_styles = cell_styles <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def successful(self): <NEW_LINE> <INDENT> return self._successful <NEW_LINE> <DEDENT> @successful.setter <NEW_LINE> def successful(self, successful): <NEW_LINE> <INDENT> self._successful = successful <NEW_LINE> <DEDENT> @property <NEW_LINE> def cell_styles(self): <NEW_LINE> <INDENT> return self._cell_styles <NEW_LINE> <DEDENT> @cell_styles.setter <NEW_LINE> def cell_styles(self, cell_styles): <NEW_LINE> <INDENT> self._cell_styles = cell_styles <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(GetXlsxStylesResponse, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, GetXlsxStylesResponse): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62599032d53ae8145f919527
class ReflexAgent(Agent): <NEW_LINE> <INDENT> def getAction(self, gameState): <NEW_LINE> <INDENT> legalMoves = gameState.getLegalActions() <NEW_LINE> scores = [self.evaluationFunction(gameState, action) for action in legalMoves] <NEW_LINE> bestScore = max(scores) <NEW_LINE> bestIndices = [index for index in range(len(scores)) if scores[index] == bestScore] <NEW_LINE> chosenIndex = random.choice(bestIndices) <NEW_LINE> "Add more of your code here if you want to" <NEW_LINE> return legalMoves[chosenIndex] <NEW_LINE> <DEDENT> def evaluationFunction(self, currentGameState, action): <NEW_LINE> <INDENT> successorGameState = currentGameState.generatePacmanSuccessor(action) <NEW_LINE> newPos = successorGameState.getPacmanPosition() <NEW_LINE> newFood = successorGameState.getFood() <NEW_LINE> newGhostStates = successorGameState.getGhostStates() <NEW_LINE> newScaredTimes = [ghostState.scaredTimer for ghostState in newGhostStates] <NEW_LINE> "*** YOUR CODE HERE ***" <NEW_LINE> curPos = currentGameState.getPacmanPosition() <NEW_LINE> score = 0 <NEW_LINE> if curPos == newPos: <NEW_LINE> <INDENT> score -= 100 <NEW_LINE> <DEDENT> if len(newFood.asList()) != 0: <NEW_LINE> <INDENT> score += 10 * max([1.0 / manhattanDistance(newPos, x) for x in newFood.asList()]) <NEW_LINE> <DEDENT> score += sum([manhattanDistance(newPos, x.getPosition()) for x in newGhostStates]) <NEW_LINE> return score + successorGameState.getScore()
A reflex agent chooses an action at each choice point by examining its alternatives via a state evaluation function. The code below is provided as a guide. You are welcome to change it in any way you see fit, so long as you don't touch our method headers.
625990321d351010ab8f4bdc
class AASTexHeader(LatexHeader): <NEW_LINE> <INDENT> header_start = r'\tablehead' <NEW_LINE> splitter_class = AASTexHeaderSplitter <NEW_LINE> def start_line(self, lines): <NEW_LINE> <INDENT> return find_latex_line(lines, r'\tablehead') <NEW_LINE> <DEDENT> def write(self, lines): <NEW_LINE> <INDENT> if not 'col_align' in self.latex: <NEW_LINE> <INDENT> self.latex['col_align'] = len(self.cols) * 'c' <NEW_LINE> <DEDENT> if 'tablealign' in self.latex: <NEW_LINE> <INDENT> align = '[' + self.latex['tablealign'] + ']' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> align = '' <NEW_LINE> <DEDENT> lines.append(r'\begin{' + self.latex['tabletype'] + r'}{' + self.latex['col_align'] + r'}' + align) <NEW_LINE> add_dictval_to_list(self.latex, 'preamble', lines) <NEW_LINE> if 'caption' in self.latex: <NEW_LINE> <INDENT> lines.append(r'\tablecaption{' + self.latex['caption'] + '}') <NEW_LINE> <DEDENT> tablehead = ' & '.join([r'\colhead{' + x.name + '}' for x in self.cols]) <NEW_LINE> if 'units' in self.latex: <NEW_LINE> <INDENT> tablehead += r'\\ ' + (self.splitter.join([self.latex[ 'units'].get(x.name, ' ') for x in self.cols])) <NEW_LINE> <DEDENT> lines.append(r'\tablehead{' + tablehead + '}')
In a `deluxetable <http://fits.gsfc.nasa.gov/standard30/deluxetable.sty>`_ some header keywords differ from standard LaTeX. This header is modified to take that into account.
62599032cad5886f8bdc58dd
class IGeoTrackViewLayer(Interface): <NEW_LINE> <INDENT> pass
A layer specific to collective.geo.trackview
625990326fece00bbaccca73
class Case_Construct(BlockBase): <NEW_LINE> <INDENT> subclass_names = [] <NEW_LINE> use_names = ['Select_Case_Stmt', 'Case_Stmt', 'End_Select_Stmt', 'Execution_Part_Construct'] <NEW_LINE> @staticmethod <NEW_LINE> def match(reader): <NEW_LINE> <INDENT> return BlockBase.match(Select_Case_Stmt, [Case_Stmt, Execution_Part_Construct, Case_Stmt], End_Select_Stmt, reader, match_names = True, enable_case_construct_hook = True ) <NEW_LINE> <DEDENT> def tofortran(self, tab='', isfix=None): <NEW_LINE> <INDENT> l = [] <NEW_LINE> start = self.content[0] <NEW_LINE> end = self.content[-1] <NEW_LINE> l.append(start.tofortran(tab=tab,isfix=isfix)) <NEW_LINE> for item in self.content[1:-1]: <NEW_LINE> <INDENT> if isinstance(item, Case_Stmt): <NEW_LINE> <INDENT> l.append(item.tofortran(tab=tab,isfix=isfix)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> l.append(item.tofortran(tab=tab+' ',isfix=isfix)) <NEW_LINE> <DEDENT> <DEDENT> l.append(end.tofortran(tab=tab,isfix=isfix)) <NEW_LINE> return '\n'.join(l)
<case-construct> = <select-case-stmt> [ <case-stmt> <block> == [<execution-part-construct>].. ].. <end-select-stmt>
625990325166f23b2e24449b
class EventCategory(SimpleTranslationMixin, models.Model): <NEW_LINE> <INDENT> position = models.PositiveIntegerField( verbose_name=_('Position'), null=True, blank=True, ) <NEW_LINE> slug = models.CharField( max_length=32, verbose_name=_('Slug'), ) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.get_translation().title
Events are grouped in categories. For translateable fields see ``EventCategoryTitle``. :position: Use this if you want to change the ordering of categories.
62599032b57a9660fecd2b49
class State(Base): <NEW_LINE> <INDENT> __tablename__ = 'states' <NEW_LINE> id = Column(Integer, primary_key=True, autoincrement=True, nullable=False) <NEW_LINE> name = Column(String(128), nullable=False)
State model mapped to the 'states' table; relies on the project's declarative Base.
6259903207d97122c4217d6d
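A hedged sketch of the declarative Base the State model above assumes; the real project's Base and engine configuration may live elsewhere, and the SQLite in-memory engine here is purely illustrative.

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()  # assumed; real projects often keep this in a shared base module

class State(Base):
    __tablename__ = 'states'
    id = Column(Integer, primary_key=True, autoincrement=True, nullable=False)
    name = Column(String(128), nullable=False)

engine = create_engine("sqlite:///:memory:")  # illustrative in-memory database
Base.metadata.create_all(engine)              # creates the 'states' table
with Session(engine) as session:
    session.add(State(name="California"))
    session.commit()
```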
class TestForexPair(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testForexPair(self): <NEW_LINE> <INDENT> pass
ForexPair unit test stubs
625990321d351010ab8f4bde
class Enforcer(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.default_rule = CONF.policy_default_rule <NEW_LINE> self.policy_path = self._find_policy_file() <NEW_LINE> self.policy_file_mtime = None <NEW_LINE> self.policy_file_contents = None <NEW_LINE> <DEDENT> def set_rules(self, rules): <NEW_LINE> <INDENT> rules_obj = policy.Rules(rules, self.default_rule) <NEW_LINE> policy.set_rules(rules_obj) <NEW_LINE> <DEDENT> def load_rules(self): <NEW_LINE> <INDENT> if self.policy_path: <NEW_LINE> <INDENT> rules = self._read_policy_file() <NEW_LINE> rule_type = "" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rules = DEFAULT_RULES <NEW_LINE> rule_type = "default " <NEW_LINE> <DEDENT> text_rules = dict((k, str(v)) for k, v in rules.items()) <NEW_LINE> LOG.debug(_('Loaded %(rule_type)spolicy rules: %(text_rules)s') % locals()) <NEW_LINE> self.set_rules(rules) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _find_policy_file(): <NEW_LINE> <INDENT> policy_file = CONF.find_file(CONF.policy_file) <NEW_LINE> if policy_file: <NEW_LINE> <INDENT> return policy_file <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> LOG.warn(_('Unable to find policy file')) <NEW_LINE> return None <NEW_LINE> <DEDENT> <DEDENT> def _read_policy_file(self): <NEW_LINE> <INDENT> mtime = os.path.getmtime(self.policy_path) <NEW_LINE> if not self.policy_file_contents or mtime != self.policy_file_mtime: <NEW_LINE> <INDENT> LOG.debug(_("Loading policy from %s") % self.policy_path) <NEW_LINE> with open(self.policy_path) as fap: <NEW_LINE> <INDENT> raw_contents = fap.read() <NEW_LINE> rules_dict = json.loads(raw_contents) <NEW_LINE> self.policy_file_contents = dict( (k, policy.parse_rule(v)) for k, v in rules_dict.items()) <NEW_LINE> <DEDENT> self.policy_file_mtime = mtime <NEW_LINE> <DEDENT> return self.policy_file_contents <NEW_LINE> <DEDENT> def _check(self, context, rule, target, *args, **kwargs): <NEW_LINE> <INDENT> self.load_rules() <NEW_LINE> credentials = { 'roles': context.roles, 'user': context.user, 'tenant': context.tenant, } <NEW_LINE> return policy.check(rule, target, credentials, *args, **kwargs) <NEW_LINE> <DEDENT> def enforce(self, context, action, target): <NEW_LINE> <INDENT> LOG.debug("== policy.enforce satisfied ==") <NEW_LINE> return self._check(context, action, target, exception.Forbidden, action=action) <NEW_LINE> <DEDENT> def check(self, context, action, target): <NEW_LINE> <INDENT> return self._check(context, action, target) <NEW_LINE> <DEDENT> def check_is_admin(self, context): <NEW_LINE> <INDENT> target = context.to_dict() <NEW_LINE> return self.check(context, 'context_is_admin', target)
Responsible for loading and enforcing rules
62599032b830903b9686ecdd
class geo_app(): <NEW_LINE> <INDENT> gmaps = googlemaps.Client(key=maps_token) <NEW_LINE> def __init__(self, filename="../data/database.csv"): <NEW_LINE> <INDENT> self.data = pd.read_csv(filename) <NEW_LINE> self.name = "DisAtBot - Report database" <NEW_LINE> <DEDENT> def append_data(self, lat, lon): <NEW_LINE> <INDENT> n = len(self.data) <NEW_LINE> lat_long = pd.DataFrame([[n, lat, lon]], columns=['ID', 'Lat', 'Long']) <NEW_LINE> self.data = self.data.append(lat_long, ignore_index=True) <NEW_LINE> self.data.to_csv("../data/database.csv") <NEW_LINE> return <NEW_LINE> <DEDENT> def latlong_to_coords(self, filename=None, tags=None): <NEW_LINE> <INDENT> self.data['Coordinates'] = [ Point(xy) for xy in zip(self.data.Long, self.data.Lat)] <NEW_LINE> return <NEW_LINE> <DEDENT> def get_geo(self): <NEW_LINE> <INDENT> return(list(self.data['Coordinates'])) <NEW_LINE> <DEDENT> def get_ID(self): <NEW_LINE> <INDENT> return self.data.ID <NEW_LINE> <DEDENT> def get_gdf(self): <NEW_LINE> <INDENT> crs = {'init': 'epsg:4326'} <NEW_LINE> return GeoDataFrame(self.get_ID(), crs=crs, geometry=self.get_geo()) <NEW_LINE> <DEDENT> def visualize(self): <NEW_LINE> <INDENT> geovis = self.get_gdf() <NEW_LINE> display(geovis.to_json())
Geolocation application class to interact with VIObot, making him able to access and append data from the report database.
625990328c3a8732951f761f
class TestStateNeedsDirectorReview(UWOshOIETestCase): <NEW_LINE> <INDENT> def afterSetUp(self): <NEW_LINE> <INDENT> self.acl_users = self.portal.acl_users <NEW_LINE> self.portal_workflow = self.portal.portal_workflow <NEW_LINE> self.portal_registration = self.portal.portal_registration <NEW_LINE> self.mockMailHost() <NEW_LINE> self.createUsers() <NEW_LINE> <DEDENT> def createNeedsDirectorReviewApplication(self): <NEW_LINE> <INDENT> self.login(self._default_user) <NEW_LINE> self.portal.invokeFactory(type_name="OIEStudentApplication", id="testapplication") <NEW_LINE> app = self.portal['testapplication'] <NEW_LINE> self.fill_out_application(app) <NEW_LINE> self.portal_workflow.doActionFor(app, 'submit') <NEW_LINE> self.logout() <NEW_LINE> self.login('front_line_advisor') <NEW_LINE> self.portal_workflow.doActionFor(app, 'waitForPrintedMaterials') <NEW_LINE> app.setWithdrawalRefund(True) <NEW_LINE> app.setApplicationFeeOK(True) <NEW_LINE> app.setUWSystemStatementOK(True) <NEW_LINE> app.setUWOshkoshStatementOK(True) <NEW_LINE> app.setTranscriptsOK(True) <NEW_LINE> self.portal_workflow.doActionFor(app, 'sendForDirectorReview') <NEW_LINE> self.logout() <NEW_LINE> return app <NEW_LINE> <DEDENT> def test_should_be_able_to_addComment(self): <NEW_LINE> <INDENT> app = self.createNeedsDirectorReviewApplication() <NEW_LINE> self.login(self._default_user) <NEW_LINE> self.portal_workflow.doActionFor(app, 'addComment') <NEW_LINE> self.assertEquals('needsDirectorReview', self.getState(app)) <NEW_LINE> <DEDENT> def test_should_be_able_to_decline(self): <NEW_LINE> <INDENT> app = self.createNeedsDirectorReviewApplication() <NEW_LINE> self.login('director') <NEW_LINE> self.portal_workflow.doActionFor(app, 'decline') <NEW_LINE> self.assertEquals('declined', self.getState(app)) <NEW_LINE> <DEDENT> def test_should_be_able_to_sendForProgramManagerReview(self): <NEW_LINE> <INDENT> app = self.createNeedsDirectorReviewApplication() <NEW_LINE> self.login('director') <NEW_LINE> self.portal_workflow.doActionFor(app, 'sendForProgramManagerReview') <NEW_LINE> self.assertEquals('needsProgramManagerReview', self.getState(app)) <NEW_LINE> <DEDENT> def test_should_be_able_to_withdraw(self): <NEW_LINE> <INDENT> app = self.createNeedsDirectorReviewApplication() <NEW_LINE> self.login(self._default_user) <NEW_LINE> self.portal_workflow.doActionFor(app, 'withdraw') <NEW_LINE> self.assertEquals('withdrawn', self.getState(app))
Tests for the needsDirectorReview workflow state of the OIE student application.
6259903296565a6dacd2d7f1
class CNNText_plus(CNNText): <NEW_LINE> <INDENT> def __init__(self,num_classes,embed=None,bert_model =bert_model_real, input_dims =config.hidden_size,kernel_nums=(30, 40, 50),kernel_sizes=(1, 3, 5),dpot =0.5): <NEW_LINE> <INDENT> super(CNNText_plus, self).__init__(embed =embed, num_classes=num_classes, bert_model=bert_model, input_dims= input_dims, kernel_nums=kernel_nums, kernel_sizes=kernel_sizes, dropout=dpot) <NEW_LINE> self.cuda()
Based on the CNN model for text classification from 'Yoon Kim. 2014. Convolutional Neural Networks for Sentence Classification.' Changes the initialization parameters of the CNNText model, turning it into a bert+CNNText model.
625990321f5feb6acb163cb8
class TestKiraSensor(unittest.TestCase): <NEW_LINE> <INDENT> DEVICES = [] <NEW_LINE> def add_devices(self, devices): <NEW_LINE> <INDENT> for device in devices: <NEW_LINE> <INDENT> self.DEVICES.append(device) <NEW_LINE> <DEDENT> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> self.hass = get_test_home_assistant() <NEW_LINE> self.mock_kira = MagicMock() <NEW_LINE> self.hass.data[kira.DOMAIN] = {kira.CONF_REMOTE: {}} <NEW_LINE> self.hass.data[kira.DOMAIN][kira.CONF_REMOTE]['kira'] = self.mock_kira <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.hass.stop() <NEW_LINE> <DEDENT> def test_service_call(self): <NEW_LINE> <INDENT> kira.setup_platform(self.hass, TEST_CONFIG, self.add_devices, DISCOVERY_INFO) <NEW_LINE> assert len(self.DEVICES) == 1 <NEW_LINE> remote = self.DEVICES[0] <NEW_LINE> assert remote.name == 'kira' <NEW_LINE> command = ["FAKE_COMMAND"] <NEW_LINE> device = "FAKE_DEVICE" <NEW_LINE> commandTuple = (command[0], device) <NEW_LINE> remote.send_command(device=device, command=command) <NEW_LINE> self.mock_kira.sendCode.assert_called_with(commandTuple)
Tests the Kira Sensor platform.
62599032d18da76e235b79b2
class TopicContinuumIterator(GenIterator): <NEW_LINE> <INDENT> def __init__(self, topic_continuum): <NEW_LINE> <INDENT> self.__objs = [] <NEW_LINE> self.__topic_continuum = topic_continuum <NEW_LINE> for vcs_id in topic_continuum.get_vcs_commit_ids(): <NEW_LINE> <INDENT> self.__objs.append([vcs_id, topic_continuum.get_topic_set( vcs_id.get_commit())]) <NEW_LINE> <DEDENT> GenIterator.__init__(self, self.__objs.__iter__()) <NEW_LINE> <DEDENT> def has_child(self): <NEW_LINE> <INDENT> return self._current[1].get_topic_set().get_master_topic() != None
This class provides an iterator interface for all the subelements of a topic continuum.
625990321d351010ab8f4be0
class HelperTests(UnitTest): <NEW_LINE> <INDENT> def test_logErrorsInThreads(self): <NEW_LINE> <INDENT> self.pool, self.doThreadWork = deterministicPool() <NEW_LINE> def divideByZero(): <NEW_LINE> <INDENT> return 1 / 0 <NEW_LINE> <DEDENT> self.pool.callInThread(divideByZero) <NEW_LINE> self.doThreadWork() <NEW_LINE> self.assertEqual(len(self.flushLoggedErrors(ZeroDivisionError)), 1)
Tests for error cases of helpers used in this module.
6259903226238365f5fadc1a
class Ship(Sprite): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> super(Ship, self).__init__(filename) <NEW_LINE> self.bullets = [] <NEW_LINE> for i in range(0, 10): <NEW_LINE> <INDENT> self.bullets.append(Bullet('shot.png')) <NEW_LINE> <DEDENT> self.cool_down = 0.15 <NEW_LINE> self.cool_down_t = 0 <NEW_LINE> self.right = Image('ship_right.png') <NEW_LINE> self.left = Image('ship_left.png') <NEW_LINE> self.bounding_box = Box(25, 10, self.width-50, self.height-18) <NEW_LINE> self.destroyed = False <NEW_LINE> self.total_score = 0 <NEW_LINE> <DEDENT> def fix_position(self): <NEW_LINE> <INDENT> if self.x < 0: <NEW_LINE> <INDENT> self.x = 0 <NEW_LINE> <DEDENT> if self.x+self.width > window.width-1: <NEW_LINE> <INDENT> self.x = window.width-self.width-1 <NEW_LINE> <DEDENT> if self.y < 0: <NEW_LINE> <INDENT> self.y = 0 <NEW_LINE> <DEDENT> if self.y+self.height > window.height-1: <NEW_LINE> <INDENT> self.y = window.height-self.height-1 <NEW_LINE> <DEDENT> <DEDENT> def manage_shot(self): <NEW_LINE> <INDENT> if self.cool_down_t > 0: <NEW_LINE> <INDENT> self.cool_down_t -= window.delta_time <NEW_LINE> <DEDENT> if self.cool_down_t > 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not window.get_key(Key.SPACE): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for bullet in self.bullets: <NEW_LINE> <INDENT> if bullet.enabled: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> bullet.x = self.x + self.width/2 - bullet.width/2 <NEW_LINE> bullet.y = self.y + self.height/2 <NEW_LINE> bullet.enabled = True <NEW_LINE> self.cool_down_t = self.cool_down <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> def update(self): <NEW_LINE> <INDENT> if self.destroyed: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> speed = 200 <NEW_LINE> image = self <NEW_LINE> if window.get_key(Key.RIGHT): <NEW_LINE> <INDENT> self.x += speed * window.delta_time <NEW_LINE> image = self.right <NEW_LINE> <DEDENT> if window.get_key(Key.LEFT): <NEW_LINE> <INDENT> self.x -= speed * window.delta_time <NEW_LINE> image = self.left <NEW_LINE> <DEDENT> if window.get_key(Key.UP): <NEW_LINE> <INDENT> self.y += speed * window.delta_time <NEW_LINE> <DEDENT> if window.get_key(Key.DOWN): <NEW_LINE> <INDENT> self.y -= speed * window.delta_time <NEW_LINE> <DEDENT> self.manage_shot() <NEW_LINE> self.fix_position() <NEW_LINE> self.draw(image) <NEW_LINE> self.bounding_box.draw(self.x, self.y) <NEW_LINE> if self.check_collisions(): <NEW_LINE> <INDENT> self.destroyed = True <NEW_LINE> <DEDENT> <DEDENT> def check_collisions(self): <NEW_LINE> <INDENT> for enemy in self.enemies: <NEW_LINE> <INDENT> if not enemy.enabled: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if enemy.bounding_box.intersects(self.bounding_box): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False
The Player class
6259903266673b3332c314b9
class SmartItem(base.ItemBase): <NEW_LINE> <INDENT> def __init__(self, key, value, host): <NEW_LINE> <INDENT> super(SmartItem, self).__init__(key, value, host) <NEW_LINE> self._data = {} <NEW_LINE> self._generate() <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> def _generate(self): <NEW_LINE> <INDENT> self._data['key'] = self.key <NEW_LINE> self._data['value'] = self.value <NEW_LINE> self._data['host'] = self.host <NEW_LINE> self._data['clock'] = self.clock
Enqueued item.
625990323eb6a72ae038b72f
class AIController(Controller): <NEW_LINE> <INDENT> def __init__(self,mode="AlphaBeta",max_depth=5): <NEW_LINE> <INDENT> super().__init__(is_ai = True) <NEW_LINE> self.__engine = SearchEngine(mode = mode, max_depth = max_depth) <NEW_LINE> self.average_time = 0 <NEW_LINE> self.average_nodes = 0 <NEW_LINE> self.moves = 0 <NEW_LINE> <DEDENT> def play_move(self,state): <NEW_LINE> <INDENT> self.__engine.set_state(state) <NEW_LINE> result = self.__engine.getNextState() <NEW_LINE> time_elapsed = self.__engine.get_time_elapsed() <NEW_LINE> num_nodes = self.__engine.get_num_explored() <NEW_LINE> if self.moves == 0: <NEW_LINE> <INDENT> self.average_time = time_elapsed <NEW_LINE> self.average_nodes = num_nodes <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.average_time = ( (self.average_time * self.moves) + time_elapsed ) / (self.moves+1) <NEW_LINE> self.average_nodes = ( (self.average_nodes * self.moves) + num_nodes ) / (self.moves+1) <NEW_LINE> <DEDENT> self.moves += 1 <NEW_LINE> return result <NEW_LINE> <DEDENT> def get_engine(self): <NEW_LINE> <INDENT> return self.__engine
Utilizes AlphaBeta pruning to determine the next state
625990329b70327d1c57fe4d
class Magic(BaseField): <NEW_LINE> <INDENT> def __init__(self, expected_sequence, **kwargs): <NEW_LINE> <INDENT> BaseField.__init__(self, **kwargs) <NEW_LINE> self.expected_sequence = expected_sequence <NEW_LINE> self.bytes_required = len(self.expected_sequence) <NEW_LINE> <DEDENT> def getval(self): <NEW_LINE> <INDENT> return self.expected_sequence <NEW_LINE> <DEDENT> def setval(self, *args): <NEW_LINE> <INDENT> raise SuitcaseProgrammingError("One does not simply modify Magic") <NEW_LINE> <DEDENT> def pack(self, stream): <NEW_LINE> <INDENT> stream.write(self.expected_sequence) <NEW_LINE> <DEDENT> def unpack(self, data, **kwargs): <NEW_LINE> <INDENT> if not data == self.expected_sequence: <NEW_LINE> <INDENT> raise SuitcaseParseError( "Expected sequence %r for magic field but got %r on " "message %r" % (self.expected_sequence, data, self._parent)) <NEW_LINE> <DEDENT> return b'' <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Magic(%r)" % (self.expected_sequence,)
Represent Byte Magic (fixed, expected sequence of bytes)
625990320a366e3fb87ddaaf
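A standalone sketch of the idea behind the Magic field above, written without the suitcase API: packing emits a fixed byte sequence and unpacking verifies it, failing loudly on a mismatch. The magic value is illustrative.

```python
import io

EXPECTED = b"\x89PNG"  # illustrative magic bytes

def pack_magic(stream) -> None:
    # Always write the fixed, expected sequence.
    stream.write(EXPECTED)

def unpack_magic(data: bytes) -> bytes:
    # Verify the prefix and hand back the remaining payload.
    if data[:len(EXPECTED)] != EXPECTED:
        raise ValueError(f"expected magic {EXPECTED!r}, got {data[:len(EXPECTED)]!r}")
    return data[len(EXPECTED):]

buf = io.BytesIO()
pack_magic(buf)
assert unpack_magic(buf.getvalue() + b"payload") == b"payload"
```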
class ChannelAttn(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_channels, reduction_rate=16): <NEW_LINE> <INDENT> super(ChannelAttn, self).__init__() <NEW_LINE> assert in_channels%reduction_rate == 0 <NEW_LINE> self.conv1 = ConvBlock(in_channels, in_channels // reduction_rate, 1) <NEW_LINE> self.conv2 = ConvBlock(in_channels // reduction_rate, in_channels, 1) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = F.avg_pool2d(x, x.size()[2:]) <NEW_LINE> x = self.conv1(x) <NEW_LINE> x = self.conv2(x) <NEW_LINE> return x
Channel Attention (Sec. 3.1.I.2)
62599032ec188e330fdf995d
class IsAuthenticatedAndOwner(BasePermission): <NEW_LINE> <INDENT> def has_object_permission(self, request, view, obj): <NEW_LINE> <INDENT> return bool(request.user and request.user.is_authenticated and (obj.user == request.user))
Allows access only to authenticated users who own the object.
62599032d6c5a102081e31ef
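A minimal, dependency-free sketch of the permission check above; SimpleNamespace objects stand in for the DRF request and model instance, and the plain function mirrors has_object_permission rather than being the DRF API itself.

```python
from types import SimpleNamespace

def has_object_permission(request, view, obj):
    # Mirrors IsAuthenticatedAndOwner: user must be authenticated AND own the object.
    return bool(request.user and request.user.is_authenticated
                and obj.user == request.user)

alice = SimpleNamespace(username="alice", is_authenticated=True)
bob = SimpleNamespace(username="bob", is_authenticated=True)
request = SimpleNamespace(user=alice)

assert has_object_permission(request, None, SimpleNamespace(user=alice))
assert not has_object_permission(request, None, SimpleNamespace(user=bob))
```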
class InventoryResponse(Response): <NEW_LINE> <INDENT> def __init__(self, address, command, status, tags): <NEW_LINE> <INDENT> super().__init__(address, command, status, None) <NEW_LINE> self.tags = tags <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<%s %s tags>" % (self.__class__.__name__, len(self.tags))
Handles decoding data properly, and also potentially errors or multiple returns.
6259903276d4e153a661dad5
class PlottingView(QWidget): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(PlottingView, self).__init__() <NEW_LINE> self.setWindowIcon(QIcon('viraclogo.png')) <NEW_LINE> self.center() <NEW_LINE> self.grid = QGridLayout() <NEW_LINE> <DEDENT> def add_widget(self, widget, row, colomn): <NEW_LINE> <INDENT> self.grid.addWidget(widget, row, colomn) <NEW_LINE> <DEDENT> def center(self): <NEW_LINE> <INDENT> frame_geometry = self.frameGeometry() <NEW_LINE> centre_position = QDesktopWidget().availableGeometry().center() <NEW_LINE> frame_geometry.moveCenter(centre_position) <NEW_LINE> self.move(frame_geometry.topLeft())
Base class for all views
62599032d10714528d69eef0
class BooleanToolParameter(ToolParameter): <NEW_LINE> <INDENT> def __init__(self, tool, input_source): <NEW_LINE> <INDENT> input_source = ensure_input_source(input_source) <NEW_LINE> ToolParameter.__init__(self, tool, input_source) <NEW_LINE> self.truevalue = input_source.get('truevalue', 'true') <NEW_LINE> self.falsevalue = input_source.get('falsevalue', 'false') <NEW_LINE> self.checked = input_source.get_bool('checked', False) <NEW_LINE> <DEDENT> def from_json(self, value, trans=None, other_values={}): <NEW_LINE> <INDENT> return self.to_python(value) <NEW_LINE> <DEDENT> def to_python(self, value, app=None): <NEW_LINE> <INDENT> return (value in [True, 'True', 'true']) <NEW_LINE> <DEDENT> def to_json(self, value, app, use_security): <NEW_LINE> <INDENT> if self.to_python(value, app): <NEW_LINE> <INDENT> return 'true' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'false' <NEW_LINE> <DEDENT> <DEDENT> def get_initial_value(self, trans, other_values): <NEW_LINE> <INDENT> return self.checked <NEW_LINE> <DEDENT> def to_param_dict_string(self, value, other_values={}): <NEW_LINE> <INDENT> if self.to_python(value): <NEW_LINE> <INDENT> return self.truevalue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.falsevalue <NEW_LINE> <DEDENT> <DEDENT> def to_dict(self, trans, other_values={}): <NEW_LINE> <INDENT> d = super(BooleanToolParameter, self).to_dict(trans) <NEW_LINE> d['truevalue'] = self.truevalue <NEW_LINE> d['falsevalue'] = self.falsevalue <NEW_LINE> return d <NEW_LINE> <DEDENT> @property <NEW_LINE> def legal_values(self): <NEW_LINE> <INDENT> return [self.truevalue, self.falsevalue]
Parameter that takes one of two values. >>> from galaxy.util.bunch import Bunch >>> trans = Bunch(app=None, history=Bunch()) >>> p = BooleanToolParameter(None, XML('<param name="_name" type="boolean" checked="yes" truevalue="_truevalue" falsevalue="_falsevalue" />')) >>> print(p.name) _name >>> sorted(p.to_dict(trans).items()) [('argument', None), ('falsevalue', '_falsevalue'), ('help', ''), ('hidden', False), ('is_dynamic', False), ('label', ''), ('model_class', 'BooleanToolParameter'), ('name', '_name'), ('optional', False), ('refresh_on_change', False), ('truevalue', '_truevalue'), ('type', 'boolean'), ('value', 'true')] >>> print(p.from_json('true')) True >>> print(p.to_param_dict_string(True)) _truevalue >>> print(p.from_json('false')) False >>> print(p.to_param_dict_string(False)) _falsevalue
625990328c3a8732951f7622
class DNAME(dns.rdtypes.nsbase.UncompressedNS): <NEW_LINE> <INDENT> pass
DNAME record
6259903207d97122c4217d71
class ProofRequest(IndyServiceRep): <NEW_LINE> <INDENT> _fields = ( ("data", dict), ("wql_filters", dict, None), )
A message representing an Indy proof request
62599032d164cc617582203c
class TargetLessTestCase(TestCaseWithFactory): <NEW_LINE> <INDENT> layer = DatabaseFunctionalLayer <NEW_LINE> def test_project_group_structural_subscription(self): <NEW_LINE> <INDENT> subscriber = self.factory.makePerson() <NEW_LINE> product = self.factory.makeProduct() <NEW_LINE> self.factory.makeBug(target=product) <NEW_LINE> with person_logged_in(product.owner): <NEW_LINE> <INDENT> project_group = self.factory.makeProject(owner=product.owner) <NEW_LINE> product.project = project_group <NEW_LINE> <DEDENT> with person_logged_in(subscriber): <NEW_LINE> <INDENT> project_group.addBugSubscription(subscriber, subscriber) <NEW_LINE> <DEDENT> params = BugTaskSearchParams( user=None, structural_subscriber=subscriber) <NEW_LINE> bugtask_set = getUtility(IBugTaskSet) <NEW_LINE> found_bugtasks = bugtask_set.search(params) <NEW_LINE> self.assertEqual(1, found_bugtasks.count())
Test that do not call setTarget() in the BugTaskSearchParams.
62599032be8e80087fbc0146
class PipeToLoggerMixin(): <NEW_LINE> <INDENT> from logging import DEBUG, INFO <NEW_LINE> DEFAULT_LINE_TIMEOUT = 10 * 60 <NEW_LINE> DEFAULT_STDOUT = "INFO" <NEW_LINE> DEFAULT_STDERR = "DEBUG" <NEW_LINE> def pipe(self, out_level=None, err_level=None, prefix=None, line_timeout=None, **kw): <NEW_LINE> <INDENT> class LogPipe(object): <NEW_LINE> <INDENT> def __rand__(_, cmd): <NEW_LINE> <INDENT> popen = cmd if hasattr(cmd, "iter_lines") else cmd.popen() <NEW_LINE> for typ, lines in popen.iter_lines(line_timeout=line_timeout, mode=BY_TYPE, **kw): <NEW_LINE> <INDENT> if not lines: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> level = levels[typ] <NEW_LINE> for line in lines.splitlines(): <NEW_LINE> <INDENT> if prefix: <NEW_LINE> <INDENT> line = "%s: %s" % (prefix, line) <NEW_LINE> <DEDENT> self.log(level, line) <NEW_LINE> <DEDENT> <DEDENT> return popen.returncode <NEW_LINE> <DEDENT> <DEDENT> levels = {1: getattr(self, self.DEFAULT_STDOUT), 2: getattr(self, self.DEFAULT_STDERR)} <NEW_LINE> if line_timeout is None: <NEW_LINE> <INDENT> line_timeout = self.DEFAULT_LINE_TIMEOUT <NEW_LINE> <DEDENT> if out_level is not None: <NEW_LINE> <INDENT> levels[1] = out_level <NEW_LINE> <DEDENT> if err_level is not None: <NEW_LINE> <INDENT> levels[2] = err_level <NEW_LINE> <DEDENT> return LogPipe() <NEW_LINE> <DEDENT> def pipe_info(self, prefix=None, **kw): <NEW_LINE> <INDENT> return self.pipe(self.INFO, self.INFO, prefix=prefix, **kw) <NEW_LINE> <DEDENT> def pipe_debug(self, prefix=None, **kw): <NEW_LINE> <INDENT> return self.pipe(self.DEBUG, self.DEBUG, prefix=prefix, **kw) <NEW_LINE> <DEDENT> def __rand__(self, cmd): <NEW_LINE> <INDENT> return cmd & self.pipe(getattr(self, self.DEFAULT_STDOUT), getattr(self, self.DEFAULT_STDERR))
This mixin allows piping plumbum commands' output into a logger. The logger must implement a ``log(level, msg)`` method, as in ``logging.Logger`` Example:: class MyLogger(logging.Logger, PipeToLoggerMixin): pass logger = MyLogger("example.app") Here we send the output of an install.sh script into our log:: local['./install.sh'] & logger We can choose the log-level for each stream:: local['./install.sh'] & logger.pipe(out_level=logging.DEBUG, err_level=logging.DEBUG) Or use a convenience method for it:: local['./install.sh'] & logger.pipe_debug() A prefix can be added to each line:: local['./install.sh'] & logger.pipe(prefix="install.sh: ") If the command fails, an exception is raised as usual. This can be modified:: local['install.sh'] & logger.pipe_debug(retcode=None) An exception is also raised if too much time (``DEFAULT_LINE_TIMEOUT``) passed between lines in the stream, This can also be modified:: local['install.sh'] & logger.pipe(line_timeout=10) If we happen to use logbook:: class MyLogger(logbook.Logger, PipeToLoggerMixin): from logbook import DEBUG, INFO # hook up with logbook's levels
62599032b830903b9686ecdf
@ui.register_ui(field_username=ui.TextField(By.NAME, 'username'), field_password=ui.TextField(By.NAME, 'password')) <NEW_LINE> class FormLogin(_ui.Form): <NEW_LINE> <INDENT> pass
Form to login user.
6259903230c21e258be998d7
@zope.interface.implementer(interfaces.IAuthenticator) <NEW_LINE> @zope.interface.provider(interfaces.IPluginFactory) <NEW_LINE> class Authenticator(dns_route53.Authenticator): <NEW_LINE> <INDENT> hidden = True <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> warnings.warn("The 'authenticator' module was renamed 'dns_route53'", DeprecationWarning) <NEW_LINE> super(Authenticator, self).__init__(*args, **kwargs)
Shim around `~certbot_dns_route53.dns_route53.Authenticator` for backwards compatibility.
625990323eb6a72ae038b731
class TravellingSalesmanProblem(Annealer): <NEW_LINE> <INDENT> def __init__(self, state, module, graph, coordinates): <NEW_LINE> <INDENT> self.graph = graph <NEW_LINE> self.total_pr_entropy = sum([entropy1(graph.node[node][PAGE_RANK]) for node in graph]) <NEW_LINE> self.module = [Module(module_id, mod, graph) for (module_id, mod) in enumerate(state)] <NEW_LINE> d = 0 <NEW_LINE> for mod in module: <NEW_LINE> <INDENT> for elem in range(len(mod)): <NEW_LINE> <INDENT> mod[elem] = int(mod[elem]) <NEW_LINE> <DEDENT> <DEDENT> for mod in module: <NEW_LINE> <INDENT> m = coordinates.loc[mod,] <NEW_LINE> d += np.mean(pairwise_distances(m, metric='euclidean')) <NEW_LINE> <DEDENT> self.d = d <NEW_LINE> super(TravellingSalesmanProblem, self).__init__(state) <NEW_LINE> <DEDENT> def move(self): <NEW_LINE> <INDENT> a = random.randint(0, len(self.state) - 1) <NEW_LINE> b = random.randint(0, len(self.state) - 1) <NEW_LINE> self.state[a][0], self.state[b][0] = self.state[b][0], self.state[a][0] <NEW_LINE> <DEDENT> def energy(self): <NEW_LINE> <INDENT> total_qout = 0 <NEW_LINE> total_qout_entropy = 0 <NEW_LINE> total_both_entropy = 0 <NEW_LINE> for mod in self.module: <NEW_LINE> <INDENT> q_out = mod.q_out <NEW_LINE> total_qout += q_out <NEW_LINE> total_qout_entropy += entropy1(q_out) <NEW_LINE> total_both_entropy += entropy1(mod.q_plus_p) <NEW_LINE> <DEDENT> term1 = entropy1(total_qout) <NEW_LINE> term2 = -2 * total_qout_entropy <NEW_LINE> term3 = -self.total_pr_entropy <NEW_LINE> term4 = total_both_entropy <NEW_LINE> term5 = self.d <NEW_LINE> return term1 + term2 + term3 + term4 + term5
Test annealer with a travelling salesman problem.
625990320a366e3fb87ddab1
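A minimal usage sketch for the annealer above, assuming the ``Annealer`` base class is simanneal's (which provides ``anneal()`` and a ``steps`` schedule attribute); ``partition``, ``graph`` and ``coordinates`` are hypothetical inputs prepared by the caller::

    # `partition` is a list of node-id lists, `graph` a networkx graph whose nodes
    # carry PAGE_RANK attributes, `coordinates` a pandas DataFrame indexed by node id
    tsp = TravellingSalesmanProblem(partition, partition, graph, coordinates)
    tsp.steps = 10000                       # length of the annealing schedule (simanneal convention)
    best_state, best_energy = tsp.anneal()  # lowest-energy partition found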
class BaseError(Exception): <NEW_LINE> <INDENT> def __init__(self, message: typing.Optional[str] = None) -> None: <NEW_LINE> <INDENT> self.msg = message <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return repr(self.msg)
For generic errors not covered by other exceptions
62599032e76e3b2f99fd9ad7
class TestsPerfilesConsumo(TestCase): <NEW_LINE> <INDENT> def test_0_perfiles_estimados_2017(self): <NEW_LINE> <INDENT> from esiosdata.perfilesconsumopvpc import get_data_perfiles_estimados_2017, get_data_perfiles_finales_mes <NEW_LINE> perfiles_2017 = get_data_perfiles_estimados_2017(force_download=False) <NEW_LINE> print(perfiles_2017) <NEW_LINE> self.assertIs(perfiles_2017.empty, False) <NEW_LINE> self.assertEqual(round(perfiles_2017.sum().sum(), 3), 4.) <NEW_LINE> perfiles_2017_02 = get_data_perfiles_finales_mes(2017, 2) <NEW_LINE> print(perfiles_2017_02.head()) <NEW_LINE> self.assertIs(perfiles_2017_02.empty, False) <NEW_LINE> perfiles_2017_bis = get_data_perfiles_estimados_2017(force_download=True) <NEW_LINE> perfiles_2017_bis2 = get_data_perfiles_estimados_2017(force_download=False) <NEW_LINE> assert pd.DataFrame(perfiles_2017 == perfiles_2017_bis).all().all() <NEW_LINE> assert pd.DataFrame(perfiles_2017_bis2 == perfiles_2017_bis).all().all() <NEW_LINE> <DEDENT> def test_perfiles_finales(self): <NEW_LINE> <INDENT> from esiosdata.perfilesconsumopvpc import get_data_perfiles_finales_mes <NEW_LINE> perfiles_finales_2016_11 = get_data_perfiles_finales_mes(2016, 11) <NEW_LINE> print(perfiles_finales_2016_11) <NEW_LINE> self.assertIs(perfiles_finales_2016_11.empty, False) <NEW_LINE> <DEDENT> def test_perfiles_estimacion_consumo_horario(self): <NEW_LINE> <INDENT> from esiosdata.perfilesconsumopvpc import perfiles_consumo_en_intervalo <NEW_LINE> ts_0, ts_f = '2016-10-29', '2017-01-24' <NEW_LINE> consumo_total_interv_kwh = 836.916 <NEW_LINE> print('Consumo horario estimado para el intervalo {} -> {}, con E={:.3f} kWh' .format(ts_0, ts_f, consumo_total_interv_kwh)) <NEW_LINE> perfs_interv = perfiles_consumo_en_intervalo(ts_0, ts_f) <NEW_LINE> print(perfs_interv.head()) <NEW_LINE> print(perfs_interv.tail()) <NEW_LINE> perfs_interv = perfiles_consumo_en_intervalo(ts_0, '2016-10-30') <NEW_LINE> print(perfs_interv.head()) <NEW_LINE> print(perfs_interv.tail()) <NEW_LINE> suma_perfiles_interv = perfs_interv['COEF. PERFIL A'].sum() <NEW_LINE> consumo_estimado = pd.Series(perfs_interv['COEF. PERFIL A'] * consumo_total_interv_kwh / suma_perfiles_interv) <NEW_LINE> print(consumo_estimado) <NEW_LINE> self.assertIs(consumo_estimado.empty, False)
Tests for the calculation of the consumption profiles.
6259903291af0d3eaad3aef8
class AuthTokenSerializer(serializers.Serializer): <NEW_LINE> <INDENT> def create(self, validated_data): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def update(self, instance, validated_data): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> username = serializers.CharField() <NEW_LINE> password = serializers.CharField( style={'input_type': 'password'}, trim_whitespace=False, ) <NEW_LINE> def validate(self, attrs): <NEW_LINE> <INDENT> username = attrs.get('username') <NEW_LINE> password = attrs.get('password') <NEW_LINE> user = authenticate( request=self.context.get('request'), username=username, password=password ) <NEW_LINE> if not user: <NEW_LINE> <INDENT> msg = 'Usuário não autenticado, login e/ou senha incorretos' <NEW_LINE> raise serializers.ValidationError(msg, code='authentication') <NEW_LINE> <DEDENT> attrs['user'] = user <NEW_LINE> return attrs
Serializer for user authentication objects.
6259903230c21e258be998d8
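A sketch of how the serializer above is typically driven from a DRF view; the credentials and the ``request`` object are placeholders supplied by the caller::

    serializer = AuthTokenSerializer(
        data={'username': 'alice', 'password': 's3cret'},  # example credentials
        context={'request': request},                      # forwarded to authenticate()
    )
    serializer.is_valid(raise_exception=True)  # raises the ValidationError defined above on bad credentials
    user = serializer.validated_data['user']   # added by validate()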
class NowProlog(IpythonCommandMagic): <NEW_LINE> <INDENT> def add_arguments(self): <NEW_LINE> <INDENT> super(NowProlog, self).add_arguments() <NEW_LINE> add_arg = self.add_argument <NEW_LINE> add_arg("--result", type=str, help="""The variable in which the result will be stored""") <NEW_LINE> add_arg("trials", nargs=argparse.REMAINDER, help="export trial facts") <NEW_LINE> <DEDENT> def execute(self, func, line, cell, magic_cls): <NEW_LINE> <INDENT> formatter = DollarFormatter() <NEW_LINE> cell = formatter.vformat(cell, args=[], kwargs=magic_cls.shell.user_ns.copy()) <NEW_LINE> _, args = self.arguments(func, line) <NEW_LINE> for trial_ref in args.trials: <NEW_LINE> <INDENT> trial = Trial(trial_ref=trial_ref) <NEW_LINE> trial.prolog.load_cli_facts() <NEW_LINE> <DEDENT> result = TrialProlog.prolog_query(cell) <NEW_LINE> if args.result: <NEW_LINE> <INDENT> magic_cls.shell.user_ns[args.result] = result <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return list(result)
Query the provenance database with Prolog Examples -------- :: In [1]: %%now_prolog 1 ...: duration(1, z, X) Out [1]: [{'X': 0.10173702239990234}, ...: {'X': 0.10082292556762695}, ...: {'X': 0.1021270751953125}, ...: {'X': 0.10217714309692383}] In [2]: %%now_prolog --result tupleit ...: duration(1, z, X) In [3]: duration(1, z, X) Out [3]: <generator object __call__ at 0x7f79ed329f50>
6259903271ff763f4b5e8863
class ManagedZonesListResponse(_messages.Message): <NEW_LINE> <INDENT> header = _messages.MessageField('ResponseHeader', 1) <NEW_LINE> kind = _messages.StringField(2, default=u'dns#managedZonesListResponse') <NEW_LINE> managedZones = _messages.MessageField('ManagedZone', 3, repeated=True) <NEW_LINE> nextPageToken = _messages.StringField(4)
A ManagedZonesListResponse object. Fields: header: A ResponseHeader attribute. kind: Type of resource. managedZones: The managed zone resources. nextPageToken: The presence of this field indicates that there exist more results following your last page of results in pagination order. To fetch them, make another list request using this value as your page token. In this way you can retrieve the complete contents of even very large collections one page at a time. However, if the contents of the collection change between the first and last paginated list request, the set of all elements returned will be an inconsistent view of the collection. There is no way to retrieve a consistent snapshot of a collection larger than the maximum page size.
625990328a349b6b43687309
class DatabaseClient: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def connect(self, **args): <NEW_LINE> <INDENT> return True
Base database client
625990328a43f66fc4bf3253
@total_ordering <NEW_LINE> class SavedRoll(Base): <NEW_LINE> <INDENT> __tablename__ = 'saved_rolls' <NEW_LINE> id = sqla.Column(sqla.Integer, primary_key=True) <NEW_LINE> name = sqla.Column(sqla.String(LEN_NAME)) <NEW_LINE> roll_str = sqla.Column(sqla.String(LEN_ROLLSTR)) <NEW_LINE> user_id = sqla.Column(sqla.String(LEN_DID), sqla.ForeignKey('discord_users.id'), nullable=False) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> keys = ['id', 'user_id', 'name', 'roll_str'] <NEW_LINE> kwargs = ['{}={!r}'.format(key, getattr(self, key)) for key in keys] <NEW_LINE> return "SavedRoll({})".format(', '.join(kwargs)) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr(self) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, SavedRoll) and ( self.user_id, self.name, self.roll_str) == (other.user_id, other.name, other.roll_str) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return (self.user_id, self.name) < (other.user_id, other.name)
Represents a saved dice roll associated to a name.
625990326fece00bbaccca7c
class DbIndexNameTestCase(TestCase): <NEW_LINE> <INDENT> LIMIT = 65 <NEW_LINE> def test_index_name_length(self): <NEW_LINE> <INDENT> db_name = "st2" <NEW_LINE> for model in ALL_MODELS: <NEW_LINE> <INDENT> collection_name = model._get_collection_name() <NEW_LINE> model_indexes = model._meta["index_specs"] <NEW_LINE> for index_specs in model_indexes: <NEW_LINE> <INDENT> index_name = index_specs.get("name", None) <NEW_LINE> if index_name: <NEW_LINE> <INDENT> index_field_name = index_name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> index_fields = dict(index_specs["fields"]).keys() <NEW_LINE> index_field_name = ".".join(index_fields) <NEW_LINE> <DEDENT> index_name = "%s.%s.%s" % (db_name, collection_name, index_field_name) <NEW_LINE> if len(index_name) > self.LIMIT: <NEW_LINE> <INDENT> self.fail( 'Index name "%s" for model "%s" is longer than %s characters. ' "Please manually define name for this index so it's shorter than " "that" % (index_name, model.__name__, self.LIMIT) )
Test which verifies that model index names are not longer than the specified limit.
6259903226068e7796d4da17
class MarkdownUtil(object): <NEW_LINE> <INDENT> def head(self,text,level = 1,enter_num = 1): <NEW_LINE> <INDENT> return '{mark} {text}{enter}'.format(mark = '#' * level,text = text,enter = '\n' * enter_num) <NEW_LINE> <DEDENT> def bold(self,text,enter_num = 1): <NEW_LINE> <INDENT> return '**{text}**{enter}'.format(text = text,enter = '\n' * enter_num) <NEW_LINE> <DEDENT> def item(self,text,retract = 0,enter_num = 1): <NEW_LINE> <INDENT> return '{retract}* {text}{enter}'.format(retract = '    ' * retract,text = text,enter = '\n' * enter_num) <NEW_LINE> <DEDENT> def items(self,text_list,retract = 0,enter_num = 1): <NEW_LINE> <INDENT> return '\n'.join(self.item(text,retract,enter_num = 0) for text in text_list if text) <NEW_LINE> <DEDENT> def link(self,text,href,enter_num = 1): <NEW_LINE> <INDENT> return '[{text}]({href}){enter}'.format(text = text,href = href,enter = '\n' * enter_num) <NEW_LINE> <DEDENT> def enter(self,text,num = 1): <NEW_LINE> <INDENT> return '{text}{enter}'.format(text = text,enter = '\n' * num) <NEW_LINE> <DEDENT> def refer(self,text,enter_num = 1): <NEW_LINE> <INDENT> return '> {text}{enter}'.format(text = text,enter = '\n' * enter_num)
Utility class for building Markdown text.
625990329b70327d1c57fe51
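A short usage sketch for the helper above; the outputs in the comments follow directly from the format strings in the class (the URL is a placeholder)::

    md = MarkdownUtil()
    md.head("Changelog", level=2)  # -> '## Changelog\n'
    md.bold("important")           # -> '**important**\n'
    md.item("first point")         # -> '* first point\n'
    md.link("repo", "https://example.com", enter_num=0)  # -> '[repo](https://example.com)'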
class BaseHandler(webapp2.RequestHandler): <NEW_LINE> <INDENT> @webapp2.cached_property <NEW_LINE> def jinja2(self): <NEW_LINE> <INDENT> return jinja2.get_jinja2(app=self.app) <NEW_LINE> <DEDENT> def render_template(self, filename, template_args): <NEW_LINE> <INDENT> self.response.write(self.jinja2.render_template(filename, **template_args))
The other handlers inherit from this class. Provides some helper methods for rendering a template.
62599032d4950a0f3b1116a4
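A sketch of a concrete handler built on the base class above, assuming a ``hello.html`` Jinja2 template exists in the app's template directory::

    class HelloHandler(BaseHandler):
        def get(self):
            # render_template() writes the rendered Jinja2 output to the response
            self.render_template('hello.html', {'name': 'world'})

    app = webapp2.WSGIApplication([('/hello', HelloHandler)])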
class TestListenable(unittest.TestCase): <NEW_LINE> <INDENT> class _IntListenable(Listenable[int]): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> self._listenable = TestListenable._IntListenable() <NEW_LINE> <DEDENT> def test_add_listener_and_get_update_listeners(self): <NEW_LINE> <INDENT> listeners = TestListenable._add_n_mock_listeners_to_listenable(5, self._listenable) <NEW_LINE> self.assertCountEqual(self._listenable.get_listeners(), listeners) <NEW_LINE> <DEDENT> def test_remove_listener_with_existing_listeners(self): <NEW_LINE> <INDENT> listeners = TestListenable._add_n_mock_listeners_to_listenable(5, self._listenable) <NEW_LINE> for listener in listeners: <NEW_LINE> <INDENT> self._listenable.remove_listener(listener) <NEW_LINE> <DEDENT> self.assertEqual(len(self._listenable.get_listeners()), 0) <NEW_LINE> <DEDENT> def test_remove_listener_with_non_existing_listener(self): <NEW_LINE> <INDENT> TestListenable._add_n_mock_listeners_to_listenable(5, self._listenable) <NEW_LINE> self.assertRaises(ValueError, self._listenable.remove_listener, MagicMock()) <NEW_LINE> <DEDENT> def test_notify_listener(self): <NEW_LINE> <INDENT> listeners = TestListenable._add_n_mock_listeners_to_listenable(5, self._listenable) <NEW_LINE> self._listenable.notify_listeners(123) <NEW_LINE> for listener in listeners: <NEW_LINE> <INDENT> listener.assert_called_once_with(123) <NEW_LINE> <DEDENT> <DEDENT> def test_notify_listener_with_no_data(self): <NEW_LINE> <INDENT> listenable = Listenable() <NEW_LINE> listeners = TestListenable._add_n_mock_listeners_to_listenable(5, listenable) <NEW_LINE> listenable.notify_listeners() <NEW_LINE> for listener in listeners: <NEW_LINE> <INDENT> listener.assert_called_once_with() <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def _add_n_mock_listeners_to_listenable(number_of_listeners_to_add: int, listenable: Listenable) -> List[MagicMock]: <NEW_LINE> <INDENT> listeners = [] <NEW_LINE> for i in range(number_of_listeners_to_add): <NEW_LINE> <INDENT> listener = MagicMock() <NEW_LINE> listenable.add_listener(listener) <NEW_LINE> listeners.append(listener) <NEW_LINE> <DEDENT> return listeners
Tests for `Listenable` model.
6259903230c21e258be998d9
class ProductInfoServicer(object): <NEW_LINE> <INDENT> def getProductDetails(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
6259903250485f2cf55dc04a
class TransformedRNNModel(TransformedModel): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(TransformedRNNModel, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def default( cls, environment, dim_hidden_state, base_model=None, model_kind="dynamics", transformations=None, deterministic=True, *args, **kwargs, ): <NEW_LINE> <INDENT> if base_model is None: <NEW_LINE> <INDENT> if model_kind == "dynamics": <NEW_LINE> <INDENT> base_model = FullEnsembleNN.default( dim_hidden_state, environment, deterministic=deterministic, *args, **kwargs, ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> <DEDENT> if transformations is None: <NEW_LINE> <INDENT> transformations = [] <NEW_LINE> <DEDENT> return cls( base_model=base_model, transformations=transformations, *args, **kwargs ) <NEW_LINE> <DEDENT> def forward(self, state, action, next_state=None, prev_hidden_state=None): <NEW_LINE> <INDENT> return self.predict( state, action[..., : self.dim_action[0]], next_state, prev_hidden_state ) <NEW_LINE> <DEDENT> def predict(self, state, action, next_state=None, prev_hidden_state=None): <NEW_LINE> <INDENT> none = torch.tensor(0) <NEW_LINE> if next_state is None: <NEW_LINE> <INDENT> next_state = none <NEW_LINE> <DEDENT> obs = Observation( state, action, none, next_state, none, none, none, none, none, none ) <NEW_LINE> for transformation in self.transformations: <NEW_LINE> <INDENT> obs = transformation(obs) <NEW_LINE> <DEDENT> if self.model_kind == "dynamics": <NEW_LINE> <INDENT> reward, done = (none, none), none <NEW_LINE> if prev_hidden_state is None: <NEW_LINE> <INDENT> next_state = self.base_model(obs.state, obs.action, obs.next_state) <NEW_LINE> hidden_state = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mean_next_state, std_next_state, hidden_state = self.base_model( obs.state, obs.action, obs.next_state, prev_hidden_state ) <NEW_LINE> next_state = mean_next_state, std_next_state <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError(f"{self.model_kind} not in {self.allowed_model_kind}") <NEW_LINE> <DEDENT> if obs.state.shape != next_state[0].shape and isinstance( self.base_model, EnsembleModel ): <NEW_LINE> <INDENT> state = obs.state.unsqueeze(-2).repeat_interleave( self.base_model.num_heads, -2 ) <NEW_LINE> action = obs.action.unsqueeze(-2).repeat_interleave( self.base_model.num_heads, -2 ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> state = obs.state <NEW_LINE> action = obs.action <NEW_LINE> <DEDENT> obs = Observation( state=state, action=action, reward=reward[0], done=done, next_action=none, log_prob_action=none, entropy=none, state_scale_tril=none, next_state=next_state[0], next_state_scale_tril=next_state[1], reward_scale_tril=reward[1], ) <NEW_LINE> for transformation in reversed(list(self.transformations)): <NEW_LINE> <INDENT> obs = transformation.inverse(obs) <NEW_LINE> <DEDENT> if prev_hidden_state is None: <NEW_LINE> <INDENT> return obs.next_state, obs.next_state_scale_tril <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return obs.next_state, obs.next_state_scale_tril, hidden_state
Transformed Model computes the next state distribution.
625990321d351010ab8f4be7
class Chi5Class(ResidueFixedBins): <NEW_LINE> <INDENT> _setup = _mdt.mdt_feature_chi5_class
Residue chi5 dihedral class.
625990329b70327d1c57fe53
class NamespaceNotFoundError(CumulusCIUsageError): <NEW_LINE> <INDENT> pass
Raised when a namespace is not found in project includes.
6259903263f4b57ef00865da
class Evaluator: <NEW_LINE> <INDENT> def __init__(self, X, options, parameters=None, definitions=None, features=None, distances=None, precompute=None, labels=None, verbose=True): <NEW_LINE> <INDENT> self.generator = ClusterGenerator(X, options=options, parameters=parameters, definitions=definitions, features=features, distances=distances, precompute=precompute) <NEW_LINE> self.labels = labels <NEW_LINE> self.verbose = verbose <NEW_LINE> <DEDENT> def evaluate_configs(self, configs, criteria=('silhouette',)): <NEW_LINE> <INDENT> results = [] <NEW_LINE> for i, config in enumerate(configs): <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> print('\rConfig {}/{}'.format(i+1, len(configs)), end='\n' if i == len(configs)-1 else '', flush=True) <NEW_LINE> <DEDENT> clustering = self.generator.cluster(config) <NEW_LINE> scores = {criter: Scorer.score(clustering, criter, self.labels) for criter in criteria} <NEW_LINE> results.append({'config': config, 'scores': scores}) <NEW_LINE> <DEDENT> return results <NEW_LINE> <DEDENT> def random_search(self, n_samples=100, criterion='silhouette'): <NEW_LINE> <INDENT> scored_configs = [] <NEW_LINE> best_score = -np.inf <NEW_LINE> best_model = None <NEW_LINE> for i in range(n_samples): <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> print('\rSample {}/{}'.format(i+1, n_samples), end='\n' if i == n_samples-1 else '', flush=True) <NEW_LINE> <DEDENT> clustering = self.generator.sample() <NEW_LINE> score = Scorer.score(clustering, criterion, self.labels) <NEW_LINE> scored_configs.append((clustering['config'], score)) <NEW_LINE> if score > best_score: <NEW_LINE> <INDENT> best_score = score <NEW_LINE> best_model = clustering['model'] <NEW_LINE> <DEDENT> <DEDENT> scored_configs.sort(key=lambda x: x[1], reverse=True) <NEW_LINE> return {'best_model': best_model, 'scored_configs': scored_configs}
Evaluate given or sampled clusterings. The Evaluator can be used in two ways. The first is to provide a list of clustering configurations and evaluation metrics to be computed for each one. The second way is to sample clustering configurations and score them according to a given metric, performing a random search in the space of given parameters. The clusterings are produced by a ClusterGenerator object. Arguments: X : ndarray of shape (n_observations, n_features) The data to be clustered. options : dict Dict specifying the parameter values for the clustering algorithms, the feature transformations, the distance metrics, and the data subsampling rate. Values of type dict specify a categorical distribution over possible values. parameters : dict Dict specifying the required parameters for the clustering algorithms, feature transformations and distance metrics. Extends and/or overwrites the default parameters. definitions : dict Dict mapping function names of clustering algorithms, feature transformations, and distance metrics to function objects. Extends and/or overwrites the default definitions. features : dict Dict mapping feature config strings to feature matrices. Feature matrices are ndarrays of shape (n_observations, n_features). distances : dict Dict mapping distance config strings to distance matrices. Distance matrices are ndarrays of shape (n_observations, n_observations). precompute : {'features', 'distances', None} If 'features', all the feature transformations are precomputed and stored in the features dictionary. If 'distances', all feature transformations and all distances are precomputed and stored in the distances dictionary. labels : array-like of int The ground-truth cluster assignments. verbose : Boolean If True, a message of progress will be printed during evaluation.
62599032a8ecb033258722ec
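A sketch of the two usage modes described in the docstring; ``X``, ``labels``, ``options`` and ``configs`` are hypothetical inputs prepared as described above::

    evaluator = Evaluator(X, options=options, labels=labels)

    # Mode 1: score a fixed list of clustering configurations
    results = evaluator.evaluate_configs(configs, criteria=('silhouette',))

    # Mode 2: random search over sampled configurations
    search = evaluator.random_search(n_samples=50, criterion='silhouette')
    best_model = search['best_model']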
class DispatchProcessingHandler: <NEW_LINE> <INDENT> def __init__( self, *, messenger: 'MessengerBase', messages: Optional[List[MessageTuple]] = None ): <NEW_LINE> <INDENT> self.messenger = messenger <NEW_LINE> self.dispatches = ( chain.from_iterable(( dispatch for dispatch in (item.dispatches for item in messages) )) if messages else None ) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> messenger = self.messenger <NEW_LINE> with messenger._exception_handling(self.dispatches): <NEW_LINE> <INDENT> messenger._init_delivery_statuses_dict() <NEW_LINE> messenger.before_send() <NEW_LINE> <DEDENT> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> messenger = self.messenger <NEW_LINE> with messenger._exception_handling(self.dispatches): <NEW_LINE> <INDENT> messenger.after_send() <NEW_LINE> <DEDENT> messenger._update_dispatches() <NEW_LINE> return True
Context manager to facilitate exception handling at various message-processing stages.
62599032711fe17d825e1502
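A sketch of how the context manager above wraps a send cycle; ``my_messenger`` and ``my_messages`` are hypothetical objects of the types named in the signature::

    with DispatchProcessingHandler(messenger=my_messenger, messages=my_messages):
        ...  # actual delivery happens here, using the messenger's own send logic
    # __enter__ ran before_send(); __exit__ ran after_send() and _update_dispatches(),
    # with exceptions routed through the messenger's _exception_handling()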
class TestV1RBDVolumeSource(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testV1RBDVolumeSource(self): <NEW_LINE> <INDENT> model = lib_openshift.models.v1_rbd_volume_source.V1RBDVolumeSource()
V1RBDVolumeSource unit test stubs
625990328c3a8732951f7628
class LinkToOrderViewletTestCase(IntegrationTestCase): <NEW_LINE> <INDENT> def test_subclass(self): <NEW_LINE> <INDENT> from plone.app.layout.viewlets.common import ViewletBase as Base <NEW_LINE> self.assertTrue(issubclass(LinkToOrderViewlet, Base)) <NEW_LINE> from collective.base.interfaces import IViewlet as Base <NEW_LINE> self.assertTrue(issubclass(ILinkToOrderViewlet, Base)) <NEW_LINE> <DEDENT> def test_verifyObject(self): <NEW_LINE> <INDENT> from zope.interface.verify import verifyObject <NEW_LINE> instance = self.create_viewlet(LinkToOrderViewlet) <NEW_LINE> self.assertTrue(verifyObject(ILinkToOrderViewlet, instance)) <NEW_LINE> <DEDENT> @mock.patch('slt.theme.browser.viewlet.getToolByName') <NEW_LINE> def test_order_url(self, getToolByName): <NEW_LINE> <INDENT> view = mock.Mock() <NEW_LINE> view.order_id = 2 <NEW_LINE> getToolByName().getHomeUrl.return_value = 'HOME_URL' <NEW_LINE> instance = self.create_viewlet(LinkToOrderViewlet, view=view) <NEW_LINE> self.assertEqual(instance.order_url(), 'HOME_URL?order_number=2')
TestCase for LinkToOrderViewlet
62599032cad5886f8bdc58e3
class TipoLugar(models.Model): <NEW_LINE> <INDENT> nombre = models.CharField(max_length=50, blank=True) <NEW_LINE> codigo = models.CharField(max_length=50, default='', unique=True, blank=True) <NEW_LINE> def save(self): <NEW_LINE> <INDENT> self.nombre = self.nombre.lower() <NEW_LINE> self.codigo = self.codigo.lower() <NEW_LINE> if self.nombre and not self.codigo: <NEW_LINE> <INDENT> self.codigo = self.nombre.replace(' ', '_') <NEW_LINE> <DEDENT> if self.codigo and not self.nombre: <NEW_LINE> <INDENT> self.nombre = self.codigo.replace('_', ' ') <NEW_LINE> <DEDENT> self.nombre = self.nombre.capitalize() <NEW_LINE> super(TipoLugar, self).save() <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.nombre <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = 'tipos de lugar'
Used to classify the places a cyclist encounters, such as repair shops or bicycle parking facilities.
625990323eb6a72ae038b737
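A sketch showing how ``save()`` derives the missing field (the record itself is hypothetical, and saving of course requires a database)::

    lugar = TipoLugar(nombre='Taller de bicicletas')
    lugar.save()
    lugar.codigo  # -> 'taller_de_bicicletas' (lower-cased, spaces replaced by '_')
    lugar.nombre  # -> 'Taller de bicicletas' (lower-cased, then capitalized)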
class case_always_fail(object): <NEW_LINE> <INDENT> def test(self, handler): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def act(self, handler): <NEW_LINE> <INDENT> raise ServerException("Unknown object '{0}'".format(handler.path[1:]))
Default handler class used when none of the other cases match.
6259903223e79379d538d5da
class MetaEgg(type): <NEW_LINE> <INDENT> accessors = { 'get': ('getter', 0), 'is': ('getter', 0), 'set': ('setter', 1), 'del': ('deller', 0), } <NEW_LINE> def __init__(cls, name, bases, dict): <NEW_LINE> <INDENT> cls.createProperties(dict) <NEW_LINE> super(MetaEgg, cls).__init__(name, bases, dict) <NEW_LINE> <DEDENT> def createProperties(cls, dict): <NEW_LINE> <INDENT> props = {} <NEW_LINE> for name, object in dict.iteritems(): <NEW_LINE> <INDENT> type, prop_name = cls.getPropertyDesc(name, object) <NEW_LINE> if type is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> item = props.setdefault(prop_name, {}) <NEW_LINE> if type in item: <NEW_LINE> <INDENT> raise ValueError('More than one access method (%r) for property %r' % (item[type], prop_name)) <NEW_LINE> <DEDENT> item[type] = name <NEW_LINE> <DEDENT> for prop_name in props: <NEW_LINE> <INDENT> d = cls.getAccessors(prop_name) <NEW_LINE> getter = d['getter'] <NEW_LINE> setter = d['setter'] <NEW_LINE> deller = d['deller'] <NEW_LINE> setattr(cls, prop_name, property(getter, setter, deller)) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def getPropertyDesc(cls, name, object): <NEW_LINE> <INDENT> NO_PROPERTY = None, None <NEW_LINE> if not inspect.isfunction(object): <NEW_LINE> <INDENT> return NO_PROPERTY <NEW_LINE> <DEDENT> protected = False <NEW_LINE> if name[0] == '_': <NEW_LINE> <INDENT> protected = True <NEW_LINE> name = name[1:] <NEW_LINE> <DEDENT> for prefix, (type, argcount) in cls.accessors.iteritems(): <NEW_LINE> <INDENT> obj_argcount = object.func_code.co_argcount - 1 <NEW_LINE> if name.startswith(prefix) and obj_argcount == argcount: <NEW_LINE> <INDENT> name = name[len(prefix):] <NEW_LINE> if not name: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return NO_PROPERTY <NEW_LINE> <DEDENT> name = cls._suffixToProperty(name) <NEW_LINE> if protected: <NEW_LINE> <INDENT> name = '_' + name <NEW_LINE> <DEDENT> return type, name <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _suffixToProperty(cls, suffix): <NEW_LINE> <INDENT> if len(suffix) == 1: <NEW_LINE> <INDENT> return suffix.lower() <NEW_LINE> <DEDENT> elif suffix[0].isupper() and suffix[1].islower(): <NEW_LINE> <INDENT> return suffix[0].lower() + suffix[1:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return suffix <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def _propertyToSuffix(cls, pname): <NEW_LINE> <INDENT> return pname[0].upper() + pname[1:] <NEW_LINE> <DEDENT> def getAccessors(cls, name): <NEW_LINE> <INDENT> if name[0] == '_': <NEW_LINE> <INDENT> name = name[1:] <NEW_LINE> prefix = '_' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prefix = '' <NEW_LINE> <DEDENT> pname = cls._propertyToSuffix(name) <NEW_LINE> ret = {} <NEW_LINE> for method, (type, argcount) in cls.accessors.iteritems(): <NEW_LINE> <INDENT> accessor = getattr(cls, '%s%s%s' % (prefix, method, pname), None) <NEW_LINE> ret[type] = accessor <NEW_LINE> <DEDENT> return ret
PythonEgg metaclass
625990328e05c05ec3f6f6c3
class Datetime(Field): <NEW_LINE> <INDENT> type = 'datetime' <NEW_LINE> @staticmethod <NEW_LINE> def now(*args): <NEW_LINE> <INDENT> return datetime.now().strftime(DATETIME_FORMAT) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def context_timestamp(record, timestamp): <NEW_LINE> <INDENT> assert isinstance(timestamp, datetime), 'Datetime instance expected' <NEW_LINE> tz_name = record._context.get('tz') or record.env.user.tz <NEW_LINE> if tz_name: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> utc = pytz.timezone('UTC') <NEW_LINE> context_tz = pytz.timezone(tz_name) <NEW_LINE> utc_timestamp = utc.localize(timestamp, is_dst=False) <NEW_LINE> return utc_timestamp.astimezone(context_tz) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> _logger.debug("failed to compute context/client-specific timestamp, " "using the UTC value", exc_info=True) <NEW_LINE> <DEDENT> <DEDENT> return timestamp <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_string(value): <NEW_LINE> <INDENT> value = value[:DATETIME_LENGTH] <NEW_LINE> if len(value) == DATE_LENGTH: <NEW_LINE> <INDENT> value += " 00:00:00" <NEW_LINE> <DEDENT> return datetime.strptime(value, DATETIME_FORMAT) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def to_string(value): <NEW_LINE> <INDENT> return value.strftime(DATETIME_FORMAT) <NEW_LINE> <DEDENT> def convert_to_cache(self, value, env): <NEW_LINE> <INDENT> if not value: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if isinstance(value, basestring): <NEW_LINE> <INDENT> value = self.from_string(value) <NEW_LINE> <DEDENT> return value.strftime(DATETIME_FORMAT) <NEW_LINE> <DEDENT> def convert_to_export(self, value, env): <NEW_LINE> <INDENT> if value and env.context.get('export_raw_data'): <NEW_LINE> <INDENT> return self.from_string(value) <NEW_LINE> <DEDENT> return bool(value) and ustr(value)
Datetime field.
6259903230c21e258be998de
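A sketch of the string/``datetime`` round trip, assuming Odoo's usual ``DATETIME_FORMAT`` of ``'%Y-%m-%d %H:%M:%S'``::

    Datetime.from_string('2016-03-01 10:30:00')  # -> datetime(2016, 3, 1, 10, 30)
    Datetime.from_string('2016-03-01')           # date-only input is padded with ' 00:00:00'
    Datetime.to_string(datetime(2016, 3, 1, 10, 30))  # -> '2016-03-01 10:30:00'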
class BERTScope_TCP(BERTScope, Mainframe, Detector, ErrorQueueImplementation, ErrorQueueInstrument, IEEE4882SubsetMixin, IORateLimiterMixin, TCPDriver): <NEW_LINE> <INDENT> ENCODING = 'latin1'
Tektronix BERTScope TCP Driver
62599032d53ae8145f919535
class TaskLocation(models.Model): <NEW_LINE> <INDENT> location = models.ForeignKey( "location.MapLocation", on_delete=models.CASCADE, related_name="tasklocation_location", ) <NEW_LINE> address = models.TextField() <NEW_LINE> zip = models.CharField(max_length=6,)
Generated Model
6259903271ff763f4b5e886a
class Tower: <NEW_LINE> <INDENT> def __init__(self, frequency, earth_resistivity): <NEW_LINE> <INDENT> self.freq = frequency <NEW_LINE> self.ro = earth_resistivity <NEW_LINE> self.lines = list() <NEW_LINE> self.conductors = list() <NEW_LINE> self.positions = list() <NEW_LINE> self.neutral = None <NEW_LINE> self.neutral_pos = None <NEW_LINE> <DEDENT> def addLine(self, line): <NEW_LINE> <INDENT> self.lines.append(line) <NEW_LINE> <DEDENT> def addNeutral(self, cond, x, y): <NEW_LINE> <INDENT> self.neutral = cond <NEW_LINE> self.neutral_pos = x + y*1j <NEW_LINE> <DEDENT> def draw(self): <NEW_LINE> <INDENT> self.Compile() <NEW_LINE> for p in self.positions: <NEW_LINE> <INDENT> plt.plot(p.real, p.imag, 'o') <NEW_LINE> <DEDENT> <DEDENT> def Kron(self, z): <NEW_LINE> <INDENT> n = len(self.conductors) <NEW_LINE> zij = z[0:n-1, 0:n-1] <NEW_LINE> zin = z[n-1:n, 0:n-1] <NEW_LINE> znn = z[n-1, n-1] <NEW_LINE> znj = z[0:n-1, n-1] <NEW_LINE> return zij - zin*(1/znn) * znj <NEW_LINE> <DEDENT> def D(self, i, j): <NEW_LINE> <INDENT> return abs(self.positions[i] - self.positions[j]) <NEW_LINE> <DEDENT> def Compile(self): <NEW_LINE> <INDENT> self.conductors = list() <NEW_LINE> self.positions = list() <NEW_LINE> for line in self.lines: <NEW_LINE> <INDENT> self.conductors += line.conductors <NEW_LINE> self.positions += line.positions <NEW_LINE> <DEDENT> if self.neutral is not None: <NEW_LINE> <INDENT> self.conductors.append(self.neutral) <NEW_LINE> self.positions.append(self.neutral_pos) <NEW_LINE> <DEDENT> <DEDENT> def Z(self): <NEW_LINE> <INDENT> self.Compile() <NEW_LINE> eq = CarsonEquations(self.freq, self.ro) <NEW_LINE> n = len(self.conductors) <NEW_LINE> z = np.zeros((n,n),'complex') <NEW_LINE> for i in range(n): <NEW_LINE> <INDENT> for j in range(n): <NEW_LINE> <INDENT> if i==j: <NEW_LINE> <INDENT> z[i,j] = eq.zii(self.conductors[i].r,self.conductors[i].GMR) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> z[i,j] = eq.zij(self.D(i,j)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if self.neutral is not None: <NEW_LINE> <INDENT> return self.Kron(z) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return z
Tower that can contain many parallel circuits
62599032a4f1c619b294f6c9
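A usage sketch for the class above; ``line_abc`` and ``neutral_conductor`` stand in for line/conductor objects defined elsewhere in the same module::

    tower = Tower(frequency=60, earth_resistivity=100)
    tower.addLine(line_abc)                             # a line with its conductors and positions
    tower.addNeutral(neutral_conductor, x=0.0, y=12.0)
    z_phase = tower.Z()                                 # Kron-reduced phase impedance matrix
    tower.draw()                                        # scatter plot of conductor positions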
class BaseSeries: <NEW_LINE> <INDENT> is_2Dline = False <NEW_LINE> is_3Dline = False <NEW_LINE> is_3Dsurface = False <NEW_LINE> is_contour = False <NEW_LINE> is_implicit = False <NEW_LINE> is_parametric = False <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_3D(self): <NEW_LINE> <INDENT> flags3D = [ self.is_3Dline, self.is_3Dsurface ] <NEW_LINE> return any(flags3D) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_line(self): <NEW_LINE> <INDENT> flagslines = [ self.is_2Dline, self.is_3Dline ] <NEW_LINE> return any(flagslines)
Base class for the data objects containing stuff to be plotted.

Explanation
===========

The backend should check if it supports the data series that it's given
(e.g. TextBackend supports only LineOver1DRange).
It's the backend's responsibility to know how to use the class of data series that
it's given. Some data series classes are grouped (using a class attribute like is_2Dline)
according to the api they present (based only on convention). The backend is not
obliged to use that api (e.g. the LineOver1DRange belongs to the is_2Dline group and
presents the get_points method, but the TextBackend does not use the get_points method).
625990326e29344779b01723
class AzureFirewallNatRule(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'source_addresses': {'key': 'sourceAddresses', 'type': '[str]'}, 'destination_addresses': {'key': 'destinationAddresses', 'type': '[str]'}, 'destination_ports': {'key': 'destinationPorts', 'type': '[str]'}, 'protocols': {'key': 'protocols', 'type': '[str]'}, 'translated_address': {'key': 'translatedAddress', 'type': 'str'}, 'translated_port': {'key': 'translatedPort', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, name: Optional[str] = None, description: Optional[str] = None, source_addresses: Optional[List[str]] = None, destination_addresses: Optional[List[str]] = None, destination_ports: Optional[List[str]] = None, protocols: Optional[List[Union[str, "AzureFirewallNetworkRuleProtocol"]]] = None, translated_address: Optional[str] = None, translated_port: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(AzureFirewallNatRule, self).__init__(**kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.description = description <NEW_LINE> self.source_addresses = source_addresses <NEW_LINE> self.destination_addresses = destination_addresses <NEW_LINE> self.destination_ports = destination_ports <NEW_LINE> self.protocols = protocols <NEW_LINE> self.translated_address = translated_address <NEW_LINE> self.translated_port = translated_port
Properties of a NAT rule. :param name: Name of the NAT rule. :type name: str :param description: Description of the rule. :type description: str :param source_addresses: List of source IP addresses for this rule. :type source_addresses: list[str] :param destination_addresses: List of destination IP addresses for this rule. :type destination_addresses: list[str] :param destination_ports: List of destination ports. :type destination_ports: list[str] :param protocols: Array of AzureFirewallNetworkRuleProtocols applicable to this NAT rule. :type protocols: list[str or ~azure.mgmt.network.v2018_08_01.models.AzureFirewallNetworkRuleProtocol] :param translated_address: The translated address for this NAT rule. :type translated_address: str :param translated_port: The translated port for this NAT rule. :type translated_port: str
625990328c3a8732951f762b
class TestTokenizationPreAuthPagedMetadata(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testTokenizationPreAuthPagedMetadata(self): <NEW_LINE> <INDENT> model = billforward.models.tokenization_pre_auth_paged_metadata.TokenizationPreAuthPagedMetadata()
TokenizationPreAuthPagedMetadata unit test stubs
625990328a349b6b43687311
class MaterialSchema(Model): <NEW_LINE> <INDENT> def __init__(self, type: str=None): <NEW_LINE> <INDENT> self.swagger_types = { 'type': str } <NEW_LINE> self.attribute_map = { 'type': 'type' } <NEW_LINE> self._type = type <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dikt) -> 'MaterialSchema': <NEW_LINE> <INDENT> return util.deserialize_model(dikt, cls) <NEW_LINE> <DEDENT> @property <NEW_LINE> def type(self) -> str: <NEW_LINE> <INDENT> return self._type <NEW_LINE> <DEDENT> @type.setter <NEW_LINE> def type(self, type: str): <NEW_LINE> <INDENT> self._type = type
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62599032c432627299fa40ca
class SignupHandler(AreaRequestHandler): <NEW_LINE> <INDENT> def get(self, **kwargs): <NEW_LINE> <INDENT> if self.current_user is not None: <NEW_LINE> <INDENT> return redirect(request.args.get('redirect', '/')) <NEW_LINE> <DEDENT> current_user = users.get_current_user() <NEW_LINE> values = {} <NEW_LINE> if current_user is not None: <NEW_LINE> <INDENT> values['username'] = current_user.email().split('@')[0] <NEW_LINE> values['email'] = current_user.email() <NEW_LINE> <DEDENT> form, use_password = get_signup_form(**values) <NEW_LINE> context = { 'form': form, } <NEW_LINE> return self.render_response('users/signup.html', **context) <NEW_LINE> <DEDENT> def post(self, **kwargs): <NEW_LINE> <INDENT> if self.current_user is not None: <NEW_LINE> <INDENT> return redirect(request.args.get('redirect', '/')) <NEW_LINE> <DEDENT> user = None <NEW_LINE> error = None <NEW_LINE> form, use_password = get_signup_form() <NEW_LINE> username = form.data['username'] <NEW_LINE> email = form.data['email'] <NEW_LINE> kwargs = {'email': email} <NEW_LINE> if use_password: <NEW_LINE> <INDENT> kwargs['password'] = request.form.get('password') <NEW_LINE> if kwargs['password'] != request.form.get('confirm_password'): <NEW_LINE> <INDENT> error = True <NEW_LINE> self.messages.add_form_error(_("Passwords didn't match.")) <NEW_LINE> <DEDENT> <DEDENT> if error is None: <NEW_LINE> <INDENT> kwargs['is_admin'] = False <NEW_LINE> if use_password: <NEW_LINE> <INDENT> auth_id = 'own|%s' % username <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current_user = users.get_current_user() <NEW_LINE> if current_user is not None: <NEW_LINE> <INDENT> auth_id = 'gae|%s' % current_user.user_id() <NEW_LINE> kwargs['is_admin'] = users.is_current_user_admin() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> <DEDENT> user = get_auth_system().create_user(username, auth_id, **kwargs) <NEW_LINE> if user is None: <NEW_LINE> <INDENT> self.messages.add_form_error(_('Username already exists. ' 'Please try a different one.')) <NEW_LINE> <DEDENT> <DEDENT> if user is not None: <NEW_LINE> <INDENT> redirect_url = request.args.get('redirect', '/') <NEW_LINE> if use_password: <NEW_LINE> <INDENT> return redirect(create_login_url(redirect_url)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return redirect(redirect_url) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> context = { 'form': form, 'messages': self.messages, } <NEW_LINE> return self.render_response('users/signup.html', **context)
Performs signup after first login or creates a new account. The difference is that new accounts require a password, whereas external auth accounts do not.
625990321d351010ab8f4bed
class ZWSwitch(ZWDevice): <NEW_LINE> <INDENT> async def __init__(self, id_, zwid, endpoint=1, mqtt_prefix="zwave"): <NEW_LINE> <INDENT> await super().__init__(id_, zwid, mqtt_prefix) <NEW_LINE> await super().init_state({'switch': False, "power": 0}) <NEW_LINE> self.zwstates["switch"] = f"37/{endpoint}/0" <NEW_LINE> self.zwstates["power"] = f"50/{endpoint}/2" <NEW_LINE> await self.subscribe() <NEW_LINE> <DEDENT> async def on(self): <NEW_LINE> <INDENT> await self.set_state({'switch': True}) <NEW_LINE> <DEDENT> async def off(self): <NEW_LINE> <INDENT> await self.set_state({'switch': False}) <NEW_LINE> <DEDENT> async def toggle(self): <NEW_LINE> <INDENT> await (self.off() if self.state['switch'] else self.on()) <NEW_LINE> <DEDENT> def ui(self): <NEW_LINE> <INDENT> return {"rightIcon": "indeterminate_check_box", "rightAction": "toggle", "ui": [ {"class": "Switch", "props": {"label": "On"}, "state": "switch"}, {"class": "Text", "props": {"label": "Power", "format": "{:1} W"}, "state": "power"}, ]}
ZWave Switch.
6259903291af0d3eaad3af02
class TCPStream(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.src_addr = None <NEW_LINE> self.src_port = None <NEW_LINE> self.dest_addr = None <NEW_LINE> self.dest_port = None <NEW_LINE> self.packets = [] <NEW_LINE> self.first_timestamp = 0 <NEW_LINE> self.last_timestamp = 0 <NEW_LINE> self.packet_count = 0 <NEW_LINE> self.byte_count = 0 <NEW_LINE> self.request_count = 0 <NEW_LINE> self.response_count = 0 <NEW_LINE> self.request_byte_count = 0 <NEW_LINE> self.response_byte_count = 0 <NEW_LINE> self.conn_close = 0 <NEW_LINE> self.identifier = None <NEW_LINE> self.type = "tcp" <NEW_LINE> self.ts_all = [] <NEW_LINE> self.ts_out = [] <NEW_LINE> self.ts_in = [] <NEW_LINE> self.last_timestamps = [0,0] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.packets) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> return self.packets[index] <NEW_LINE> <DEDENT> def index(self, packet): <NEW_LINE> <INDENT> return self.packets.index(packet) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.packets) <NEW_LINE> <DEDENT> def add_packet(self, packet): <NEW_LINE> <INDENT> self.packets.append(packet) <NEW_LINE> self.packet_count = self.packet_count + 1 <NEW_LINE> self.byte_count = self.byte_count + packet["eth"].size <NEW_LINE> if len(self.packets) == 1: <NEW_LINE> <INDENT> self.src_addr = packet["ip"].src_addr <NEW_LINE> self.dest_addr = packet["ip"].dest_addr <NEW_LINE> self.src_port = packet["tcp"].src_port <NEW_LINE> self.dest_port = packet["tcp"].dest_port <NEW_LINE> self.last_timestamps[0] = packet.timestamp <NEW_LINE> self.last_timestamps[1] = packet.timestamp <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ts_all.append(packet.timestamp) <NEW_LINE> <DEDENT> self.last_timestamp = packet.timestamp <NEW_LINE> if self.src_addr == packet["ip"].src_addr: <NEW_LINE> <INDENT> self.request_count = self.request_count + 1 <NEW_LINE> self.request_byte_count = self.request_byte_count + packet["eth"].size <NEW_LINE> self.ts_out.append(packet.timestamp) <NEW_LINE> self.last_timestamps[0] = packet.timestamp <NEW_LINE> <DEDENT> if self.src_addr == packet["ip"].dest_addr: <NEW_LINE> <INDENT> self.response_count = self.response_count + 1 <NEW_LINE> self.response_byte_count = self.response_byte_count + packet["eth"].size <NEW_LINE> self.ts_in.append(packet.timestamp) <NEW_LINE> self.last_timestamps[1] = packet.timestamp <NEW_LINE> <DEDENT> <DEDENT> def remove_packet(self,packet): <NEW_LINE> <INDENT> self.packets.remove(packet) <NEW_LINE> self.packet_count = self.packet_count - 1 <NEW_LINE> <DEDENT> def is_equal(self, src_addr, dest_addr, src_port, dest_port): <NEW_LINE> <INDENT> if self.src_addr == src_addr: <NEW_LINE> <INDENT> if self.dest_addr == dest_addr: <NEW_LINE> <INDENT> if self.src_port == src_port: <NEW_LINE> <INDENT> if self.dest_port == dest_port: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> elif self.src_addr == dest_addr: <NEW_LINE> <INDENT> if self.dest_addr == src_addr: <NEW_LINE> <INDENT> if self.src_port == dest_port: <NEW_LINE> <INDENT> if self.dest_port == src_port: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def sort_packets(self): <NEW_LINE> <INDENT> self.packets = sorted(self.packets,key=self.get_key) <NEW_LINE> <DEDENT> def get_key(self, packet): <NEW_LINE> <INDENT> return packet["tcp"].seq_num
Groups the packets belonging to a single TCP stream and tracks per-direction packet, byte, and timestamp statistics.
625990325e10d32532ce416d
class RemoveDatatype: <NEW_LINE> <INDENT> def filter(self, node): <NEW_LINE> <INDENT> return is_operator_app(node, 'declare-datatypes') <NEW_LINE> <DEDENT> def mutations(self, node): <NEW_LINE> <INDENT> if len(node) != 3 or len(node[1]) != len(node[2]): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for i in range(len(node[1])): <NEW_LINE> <INDENT> yield Simplification({ node[1][i].id: None, node[2][i].id: None }, []) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'remove datatype'
Remove a datatype from a recursive datatype declaration.
625990328c3a8732951f762d
class DBController(): <NEW_LINE> <INDENT> def __init__(self, host="localhost", user="root", passwd="root", port=3306, db="spider_data"): <NEW_LINE> <INDENT> import pymysql <NEW_LINE> from pymysql.err import IntegrityError <NEW_LINE> self._conn = pymysql.connect(host=host,port=port,user=user, passwd=passwd,db=db,charset='utf8') <NEW_LINE> self.cur = self._conn.cursor() <NEW_LINE> self.IntegrityError = IntegrityError <NEW_LINE> <DEDENT> def execute(self, SQL): <NEW_LINE> <INDENT> self.cur.execute(SQL) <NEW_LINE> self._conn.commit() <NEW_LINE> <DEDENT> @property <NEW_LINE> def close(self): <NEW_LINE> <INDENT> self._conn.close() <NEW_LINE> self.cur.close()
Database operations module.
Accessible members (functions):
- cur
- IntegrityError
- execute(sql)
- close
6259903230c21e258be998e2
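A usage sketch matching the members listed in the docstring; the credentials and SQL are placeholders::

    db = DBController(host='localhost', user='root', passwd='root', db='spider_data')
    try:
        db.execute("INSERT INTO items (url) VALUES ('http://example.com')")
    except db.IntegrityError:
        pass      # e.g. duplicate key on a unique column
    db.close      # note: `close` is a property, so no parentheses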
@datatasks("information retrieval") <NEW_LINE> class Adhoc(Base): <NEW_LINE> <INDENT> documents: Param[AdhocDocuments] <NEW_LINE> topics: Param[AdhocTopics] <NEW_LINE> assessments: Param[AdhocAssessments]
An Adhoc IR collection
625990328c3a8732951f762e
class AnnouncementFeed(Feed): <NEW_LINE> <INDENT> def get_object(self, request, course_slug): <NEW_LINE> <INDENT> return get_object_or_404(Course, slug=course_slug) <NEW_LINE> <DEDENT> def title(self, obj): <NEW_LINE> <INDENT> return _("Announcements of %(course_title)s") % {"course_title": obj.name} <NEW_LINE> <DEDENT> def link(self, obj): <NEW_LINE> <INDENT> return obj.get_absolute_url() <NEW_LINE> <DEDENT> def description(self, obj): <NEW_LINE> <INDENT> return _('Announcements of the online course "%(course_title)s" on the platform "%(site_name)s".') % {"course_title": obj.name, "site_name": Site.objects.get_current().name} <NEW_LINE> <DEDENT> def items(self, obj): <NEW_LINE> <INDENT> return Announcement.objects.filter(course=obj).order_by('datetime').reverse() <NEW_LINE> <DEDENT> def item_title(self, item): <NEW_LINE> <INDENT> return item.title <NEW_LINE> <DEDENT> def item_description(self, item): <NEW_LINE> <INDENT> return item.content <NEW_LINE> <DEDENT> def item_pubdate(self, item): <NEW_LINE> <INDENT> return item.datetime
Default RSS feed for the course announcements. :returns: RSS Feed .. versionadded:: 0.1
625990326fece00bbaccca85
class EvenStShotCt(ShotEventTallyBase): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(EvenStShotCt, self).__init__( count_play=lambda play: isinstance(play.event, EV.Shot) and play.strength == St.Even )
Tallies even strength shots on goal for each team. Increments if * the play event inherits from :py:class:`.Shot` * play happened at even strength
62599032d18da76e235b79b9
class SwarmMissingNodeError(Exception): <NEW_LINE> <INDENT> pass
Raised if some nodes have not joined the cluster
62599032d164cc6175822048
class ImportManager: <NEW_LINE> <INDENT> def __init__(self, imports): <NEW_LINE> <INDENT> self.dynamic_registration = any( statement.module == '__gin__.dynamic_registration' for statement in imports) <NEW_LINE> self.imports = [] <NEW_LINE> self.module_selectors = {} <NEW_LINE> self.names = set() <NEW_LINE> for statement in sorted(imports, key=lambda s: (s.module, not s.is_from)): <NEW_LINE> <INDENT> self.add_import(statement) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def sorted_imports(self): <NEW_LINE> <INDENT> return sorted(self.imports, key=lambda s: s.module) <NEW_LINE> <DEDENT> def add_import(self, statement: config_parser.ImportStatement): <NEW_LINE> <INDENT> if statement.module in self.module_selectors: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> unique_name = _uniquify_name(statement.bound_name(), self.names) <NEW_LINE> if unique_name != statement.bound_name(): <NEW_LINE> <INDENT> statement = statement._replace(alias=unique_name) <NEW_LINE> <DEDENT> if statement.is_from or statement.alias: <NEW_LINE> <INDENT> selector = statement.bound_name() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> selector = statement.module <NEW_LINE> <DEDENT> self.module_selectors[statement.module] = selector <NEW_LINE> self.names.add(statement.bound_name()) <NEW_LINE> self.imports.append(statement) <NEW_LINE> <DEDENT> def require_configurable(self, configurable_: Configurable): <NEW_LINE> <INDENT> if not self.dynamic_registration: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if configurable_.wrapped == macro: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if configurable_.import_source: <NEW_LINE> <INDENT> self.add_import(configurable_.import_source[0]) <NEW_LINE> <DEDENT> elif hasattr(configurable_.wrapped, '__module__'): <NEW_LINE> <INDENT> module = configurable_.wrapped.__module__ <NEW_LINE> import_statement = config_parser.ImportStatement( module=module, is_from='.' in module, alias=None, location=config_parser.Location(None, 0, None, '')) <NEW_LINE> self.add_import(import_statement) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.warning( 'Configurable %r was not imported using dynamic registration and has ' 'no __module__ attribute; dynamic registration will not be used in ' 'the resulting config string. This is likely because the initial set ' 'of parsed Gin files included a mix of files with and without ' 'dynamic registration.', configurable_) <NEW_LINE> for statement in self.imports: <NEW_LINE> <INDENT> if statement.module == '__gin__.dynamic_registration': <NEW_LINE> <INDENT> self.imports.remove(statement) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> self.dynamic_registration = False <NEW_LINE> <DEDENT> <DEDENT> def minimal_selector(self, configurable_: Configurable) -> str: <NEW_LINE> <INDENT> if self.dynamic_registration: <NEW_LINE> <INDENT> if configurable_.import_source: <NEW_LINE> <INDENT> import_statement, name = configurable_.import_source <NEW_LINE> module = import_statement.module <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> module = configurable_.wrapped.__module__ <NEW_LINE> name = configurable_.wrapped.__qualname__ <NEW_LINE> <DEDENT> return f'{self.module_selectors[module]}.{name}' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> minimal_selector = _REGISTRY.minimal_selector(configurable_.selector) <NEW_LINE> if configurable_.is_method: <NEW_LINE> <INDENT> if '.' not in minimal_selector: <NEW_LINE> <INDENT> minimal_selector = '.'.join(configurable_.selector.split('.')[-2:]) <NEW_LINE> <DEDENT> <DEDENT> return minimal_selector
Manages imports required when writing out a full config string. This class does bookkeeping to ensure each import is only output once, and that each import receives a unique name/alias to avoid collisions.
62599032ac7a0e7691f735be
class CloakMiddleware(inherit_from): <NEW_LINE> <INDENT> def process_request(self, request): <NEW_LINE> <INDENT> request.user.is_cloaked = False <NEW_LINE> if SESSION_USER_KEY in request.session: <NEW_LINE> <INDENT> User = get_user_model() <NEW_LINE> try: <NEW_LINE> <INDENT> user = User._default_manager.get(pk=request.session[SESSION_USER_KEY]) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if can_cloak_as(request.user, user): <NEW_LINE> <INDENT> request.user = user <NEW_LINE> request.user.is_cloaked = True
This middleware class checks to see if a cloak session variable is set, and overrides the request.user object with the cloaked user
62599032d6c5a102081e31fd
class App_fltk(App_base): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> import fltk as fl <NEW_LINE> import types <NEW_LINE> def dummyrun(*args, **kwargs): <NEW_LINE> <INDENT> print_mainloop_warning() <NEW_LINE> <DEDENT> fl.Fl.run = types.MethodType(dummyrun, fl.Fl) <NEW_LINE> self.app = fl.Fl <NEW_LINE> self.app._in_event_loop = "Pyzo" <NEW_LINE> fl._in_event_loop = "Pyzo" <NEW_LINE> <DEDENT> def process_events(self): <NEW_LINE> <INDENT> self.app.wait(0)
Hijack fltk 1. This one is easy. Just call fl.wait(0.0) now and then. Note that both tk and fltk try to bind to PyOS_InputHook. Fltk will warn about not being able to and Tk does not, so we should just hijack (import) fltk first. The hook that they try to fetch is not required in pyzo, because the pyzo interpreter will keep all GUI backends updated when idle.
62599032d53ae8145f91953b
class RainCommand(commands.Command): <NEW_LINE> <INDENT> def __init__(self, callback: Callable, **kwargs: Any) -> None: <NEW_LINE> <INDENT> super().__init__(callback, **kwargs) <NEW_LINE> self.perm_level = kwargs.get('perm_level', 0) <NEW_LINE> self.checks.append(check_perm_level) <NEW_LINE> <DEDENT> @property <NEW_LINE> def signature(self) -> str: <NEW_LINE> <INDENT> result = [] <NEW_LINE> parent = self.full_parent_name <NEW_LINE> name = self.name if not parent else parent + ' ' + self.name <NEW_LINE> result.append(name) <NEW_LINE> if self.usage: <NEW_LINE> <INDENT> result.append(self.usage) <NEW_LINE> return ' '.join(result) <NEW_LINE> <DEDENT> params = self.clean_params <NEW_LINE> if not params: <NEW_LINE> <INDENT> return ' '.join(result) <NEW_LINE> <DEDENT> for name, param in params.items(): <NEW_LINE> <INDENT> if param.default is not param.empty: <NEW_LINE> <INDENT> should_print = param.default if isinstance(param.default, str) else param.default is not None <NEW_LINE> if should_print: <NEW_LINE> <INDENT> result.append('[%s=%s]' % (name, param.default)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result.append('[%s]' % name) <NEW_LINE> <DEDENT> <DEDENT> elif param.kind == param.VAR_POSITIONAL: <NEW_LINE> <INDENT> result.append('[%s...]' % name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result.append('<%s>' % name) <NEW_LINE> <DEDENT> <DEDENT> return ' '.join(result)
Overrides the default Command to use permission levels; overrides signature to hide aliases.
625990326fece00bbaccca87
class LibraryImportManager(ImportManager): <NEW_LINE> <INDENT> store_class = LibraryXMLModuleStore <NEW_LINE> def get_dest_id(self, courselike_key): <NEW_LINE> <INDENT> if self.target_id is not None: <NEW_LINE> <INDENT> dest_id = self.target_id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dest_id = LibraryLocator(self.target_id.org, self.target_id.library) <NEW_LINE> <DEDENT> existing_lib = self.store.get_library(dest_id, ignore_case=True) <NEW_LINE> runtime = None <NEW_LINE> if existing_lib: <NEW_LINE> <INDENT> dest_id = existing_lib.location.library_key <NEW_LINE> runtime = existing_lib.runtime <NEW_LINE> <DEDENT> if self.create_if_not_present and not existing_lib: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> library = self.store.create_library( org=self.target_id.org, library=self.target_id.library, user_id=self.user_id, fields={"display_name": ""}, ) <NEW_LINE> runtime = library.runtime <NEW_LINE> <DEDENT> except DuplicateCourseError: <NEW_LINE> <INDENT> log.debug( "Skipping import of Library with id %s, " "since it collides with an existing one", dest_id ) <NEW_LINE> if self.status: <NEW_LINE> <INDENT> self.status.fail(_('Aborting import since a library with this id already exists.')) <NEW_LINE> <DEDENT> raise <NEW_LINE> <DEDENT> <DEDENT> return dest_id, runtime <NEW_LINE> <DEDENT> def get_courselike(self, courselike_key, runtime, dest_id): <NEW_LINE> <INDENT> source_library = self.xml_module_store.get_library(courselike_key) <NEW_LINE> library, library_data_path = self.import_courselike( runtime, courselike_key, dest_id, source_library, ) <NEW_LINE> return source_library, library, library_data_path <NEW_LINE> <DEDENT> def static_updater(self, course, source_courselike, courselike_key, dest_id, runtime): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def import_children(self, source_courselike, courselike, courselike_key, dest_id): <NEW_LINE> <INDENT> self.recursive_build(source_courselike, courselike, courselike_key, dest_id) <NEW_LINE> <DEDENT> def import_drafts(self, courselike, courselike_key, data_path, dest_id): <NEW_LINE> <INDENT> return courselike
Import manager for Libraries
62599032d99f1b3c44d0677a
class ObjectCategories(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> fname = "ModelCategoryMapping.csv" <NEW_LINE> self.model_to_categories = {} <NEW_LINE> root_dir = os.path.dirname(os.path.abspath(__file__)) <NEW_LINE> model_cat_file = f"{root_dir}/{fname}" <NEW_LINE> with open(model_cat_file, "r") as f: <NEW_LINE> <INDENT> categories = csv.reader(f) <NEW_LINE> for l in categories: <NEW_LINE> <INDENT> self.model_to_categories[l[1]] = [l[2],l[3],l[5]] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_fine_category(self, model_id): <NEW_LINE> <INDENT> model_id = model_id.replace("_mirror","") <NEW_LINE> return self.model_to_categories[model_id][0] <NEW_LINE> <DEDENT> def get_coarse_category(self, model_id): <NEW_LINE> <INDENT> model_id = model_id.replace("_mirror","") <NEW_LINE> return self.model_to_categories[model_id][1] <NEW_LINE> <DEDENT> def get_final_category(self, model_id): <NEW_LINE> <INDENT> model_id = model_id.replace("_mirror","") <NEW_LINE> category = self.model_to_categories[model_id][0] <NEW_LINE> if model_id == "199": <NEW_LINE> <INDENT> category = "dressing_table_with_stool" <NEW_LINE> <DEDENT> if category == "nightstand": <NEW_LINE> <INDENT> category = "stand" <NEW_LINE> <DEDENT> if category == "bookshelf": <NEW_LINE> <INDENT> category = "shelving" <NEW_LINE> <DEDENT> return category
Determine which categories each object belongs to.
625990321d351010ab8f4bf0
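A usage sketch; ``some_model_id`` is a placeholder for an id present in ``ModelCategoryMapping.csv``::

    categories = ObjectCategories()
    categories.get_fine_category("some_model_id")         # e.g. 'nightstand'
    categories.get_final_category("some_model_id")         # 'nightstand' -> 'stand', 'bookshelf' -> 'shelving'
    categories.get_fine_category("some_model_id_mirror")   # a '_mirror' suffix is stripped before lookup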
class GhdlPragmaHandler: <NEW_LINE> <INDENT> _PRAGMA = re.compile( r"\s*--\s*ghdl\s+translate_off[\r\n].*?[\n\r]\s*--\s*ghdl\s+translate_on", flags=re.DOTALL | re.I | re.MULTILINE, ) <NEW_LINE> def run(self, code, file_name): <NEW_LINE> <INDENT> for word in ("ghdl", "translate_on", "translate_off"): <NEW_LINE> <INDENT> if word not in code: <NEW_LINE> <INDENT> return code <NEW_LINE> <DEDENT> <DEDENT> result = self._PRAGMA.sub(r"", code) <NEW_LINE> return result
Removes code between arbitrary pragmas

-- ghdl translate_off
this is ignored
-- ghdl translate_on
625990321f5feb6acb163cc8
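A small sketch of the pragma stripping; the VHDL snippet is made up, and the file name is informational only::

    vhdl = (
        "entity e is end;\n"
        "-- ghdl translate_off\n"
        "constant sim_only : boolean := true;\n"
        "-- ghdl translate_on\n"
        "architecture a of e is begin end;\n"
    )
    GhdlPragmaHandler().run(vhdl, "e.vhd")
    # -> the span between the two pragmas, pragma lines included, is removed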
class UpdateSession(VapiInterface): <NEW_LINE> <INDENT> RESOURCE_TYPE = "com.vmware.content.library.item.UpdateSession" <NEW_LINE> def __init__(self, config): <NEW_LINE> <INDENT> VapiInterface.__init__(self, config, _UpdateSessionStub) <NEW_LINE> <DEDENT> def create(self, create_spec, client_token=None, ): <NEW_LINE> <INDENT> return self._invoke('create', { 'client_token': client_token, 'create_spec': create_spec, }) <NEW_LINE> <DEDENT> def get(self, update_session_id, ): <NEW_LINE> <INDENT> return self._invoke('get', { 'update_session_id': update_session_id, }) <NEW_LINE> <DEDENT> def list(self, library_item_id=None, ): <NEW_LINE> <INDENT> return self._invoke('list', { 'library_item_id': library_item_id, }) <NEW_LINE> <DEDENT> def complete(self, update_session_id, ): <NEW_LINE> <INDENT> return self._invoke('complete', { 'update_session_id': update_session_id, }) <NEW_LINE> <DEDENT> def keep_alive(self, update_session_id, client_progress=None, ): <NEW_LINE> <INDENT> return self._invoke('keep_alive', { 'update_session_id': update_session_id, 'client_progress': client_progress, }) <NEW_LINE> <DEDENT> def cancel(self, update_session_id, ): <NEW_LINE> <INDENT> return self._invoke('cancel', { 'update_session_id': update_session_id, }) <NEW_LINE> <DEDENT> def fail(self, update_session_id, client_error_message, ): <NEW_LINE> <INDENT> return self._invoke('fail', { 'update_session_id': update_session_id, 'client_error_message': client_error_message, }) <NEW_LINE> <DEDENT> def delete(self, update_session_id, ): <NEW_LINE> <INDENT> return self._invoke('delete', { 'update_session_id': update_session_id, })
The ``UpdateSession`` class manipulates sessions that are used to upload content into the Content Library Service, and/or to remove files from a library item. An update session is a resource which tracks changes to content. An update session is created with a set of files that are intended to be uploaded to a specific :class:`com.vmware.content.library_client.ItemModel`, or removed from an item. The session object can be used to track the uploads and inspect the changes that are being made to the item by that upload. It can also serve as a channel to check on the result of the upload, and status messages such as errors and warnings for the upload. Modifications are not visible to other clients unless the session is completed and all necessary files have been received. The management of the files within the session is done through the :class:`com.vmware.content.library.item.updatesession_client.File` class.
6259903226238365f5fadc2a