Dataset columns: code (string, lengths 4 to 4.48k), docstring (string, lengths 1 to 6.45k), _id (string, length 24).
class AssociatedObject(Object): <NEW_LINE> <INDENT> superclass = Object <NEW_LINE> def __init__(self, defined_object=None, type_=None, association_type=None): <NEW_LINE> <INDENT> super(AssociatedObject, self).__init__(defined_object, type_) <NEW_LINE> self.association_type = association_type <NEW_LINE> <DEDENT> def to_obj(self, return_obj=None, ns_info=None): <NEW_LINE> <INDENT> self._collect_ns_info(ns_info) <NEW_LINE> obj = super(AssociatedObject, self).to_obj(return_obj=core_binding.AssociatedObjectType(), ns_info=ns_info) <NEW_LINE> if self.association_type is not None: <NEW_LINE> <INDENT> obj.Association_Type = self.association_type.to_obj(ns_info=ns_info) <NEW_LINE> <DEDENT> return obj <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> object_dict = super(AssociatedObject, self).to_dict() <NEW_LINE> if self.association_type is not None: <NEW_LINE> <INDENT> object_dict['association_type'] = self.association_type.to_dict() <NEW_LINE> <DEDENT> return object_dict <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_obj(object_obj): <NEW_LINE> <INDENT> if not object_obj: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> obj = Object.from_obj(object_obj, AssociatedObject()) <NEW_LINE> obj.association_type = AssociationType.from_obj(object_obj.Association_Type) <NEW_LINE> return obj <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_dict(object_dict): <NEW_LINE> <INDENT> if not object_dict: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> obj = Object.from_dict(object_dict, AssociatedObject()) <NEW_LINE> obj.association_type = AssociationType.from_dict(object_dict.get('association_type', None)) <NEW_LINE> return obj
The CybOX Associated Object element. Currently only supports the id, association_type and ObjectProperties properties
6259907e97e22403b383c975
class WatcherListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[Watcher]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(WatcherListResult, self).__init__(**kwargs) <NEW_LINE> self.value = kwargs.get('value', None) <NEW_LINE> self.next_link = kwargs.get('next_link', None)
The response model for the list watcher operation. :param value: Gets or sets a list of watchers. :type value: list[~azure.mgmt.automation.models.Watcher] :param next_link: Gets or sets the next link. :type next_link: str
6259907e1f5feb6acb16466e
class AlarmObjectFile(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'alarmobjectfile' <NEW_LINE> __table_args__ = {'extend_existing': True} <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> object_id = db.Column(db.Integer, db.ForeignKey('alarmobjects.id')) <NEW_LINE> filename = db.Column(db.String(80)) <NEW_LINE> filetype = db.Column(db.String(50)) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "<alarmobjectfile %s>" % self.filename <NEW_LINE> <DEDENT> def __init__(self, objectid, filename, filetype): <NEW_LINE> <INDENT> self.object_id = objectid <NEW_LINE> self.filename = filename <NEW_LINE> self.filetype = filetype <NEW_LINE> <DEDENT> @property <NEW_LINE> def filesize(self): <NEW_LINE> <INDENT> def sizeof_fmt(num): <NEW_LINE> <INDENT> for x in ['bytes', 'KB', 'MB', 'GB', 'TB']: <NEW_LINE> <INDENT> if num < 1024.0: <NEW_LINE> <INDENT> return "%3.1f %s" % (num, x) <NEW_LINE> <DEDENT> num /= 1024.0 <NEW_LINE> <DEDENT> <DEDENT> if os.path.exists('%salarmobjects/%s/%s' % (current_app.config.get('PATH_DATA'), self.object_id, self.filename)): <NEW_LINE> <INDENT> return sizeof_fmt(os.stat('%salarmobjects/%s/%s' % (current_app.config.get('PATH_DATA'), self.object_id, self.filename)).st_size) <NEW_LINE> <DEDENT> return sizeof_fmt(0) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getFile(id, filename=""): <NEW_LINE> <INDENT> if filename == "": <NEW_LINE> <INDENT> return db.session.query(AlarmObjectFile).filter_by(id=id).first() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return db.session.query(AlarmObjectFile).filter_by(object_id=id, filename=filename).first() <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def getAlarmObjectTypes(objectid=0): <NEW_LINE> <INDENT> if id != 0: <NEW_LINE> <INDENT> return db.session.query(AlarmObjectFile).filter_by(id=id).all() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return db.session.query(AlarmObjectFile).order_by(collate(AlarmObjectFile.name, 'NOCASE')).all()
Files for alarmobjects
6259907e3617ad0b5ee07bc4
class QueryError(ProcessError): <NEW_LINE> <INDENT> area = "QUERY"
Process errors that are related to processing queries.
6259907eaad79263cf43022f
class Solution: <NEW_LINE> <INDENT> def binary_search(self, ordered_nums: list, target) -> int: <NEW_LINE> <INDENT> start_index = 0 <NEW_LINE> end_index = len(ordered_nums) <NEW_LINE> while True: <NEW_LINE> <INDENT> if end_index == 0: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> mid_index = (start_index + end_index) // 2 <NEW_LINE> mid_value = ordered_nums[mid_index] <NEW_LINE> if start_index >= end_index: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> if target < mid_value: <NEW_LINE> <INDENT> end_index = mid_index <NEW_LINE> <DEDENT> elif target > mid_value: <NEW_LINE> <INDENT> start_index = mid_index + 1 <NEW_LINE> <DEDENT> elif target == mid_value: <NEW_LINE> <INDENT> return mid_index
The boundary logic is messy; not recommended.
6259907e66673b3332c31e75
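A minimal usage sketch for Solution.binary_search above, assuming the Solution class is defined as shown. It also demonstrates the boundary problem the docstring warns about: mid_value is read before the start_index >= end_index check, so a target larger than every element pushes start_index past the end and raises IndexError.

sol = Solution()
print(sol.binary_search([1, 3, 5, 7], 5))   # -> 2, index of the target
print(sol.binary_search([1, 3, 5, 7], 2))   # -> -1, target absent
print(sol.binary_search([], 5))             # -> -1, empty input
try:
    sol.binary_search([1, 3, 5, 7], 9)      # target above every element
except IndexError:
    print("IndexError: the messy boundary case noted in the docstring")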
class StaleGlobalConfigError(TransientError): <NEW_LINE> <INDENT> def __init__(self, msg): <NEW_LINE> <INDENT> TransientError.__init__(self, msg)
Thrown if the GlobalConfig is older than the requester's.
6259907e283ffb24f3cf5316
class Room(models.Model): <NEW_LINE> <INDENT> creater = models.ForeignKey(User, verbose_name='Создатель', on_delete=models.CASCADE) <NEW_LINE> invited = models.ManyToManyField(User, verbose_name='Участники', related_name='invited_user') <NEW_LINE> date = models.DateTimeField('Дата создания', auto_now_add=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Комната чата' <NEW_LINE> verbose_name_plural = 'Комнаты чата'
Chat room model.
6259907e167d2b6e312b82cf
class ShowPartition(ShowCommand): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ShowPartition, self).__init__(*args, **kwargs) <NEW_LINE> self.command = "show part" <NEW_LINE> self.start_key = "ID"
Show Partition. show part [part={partition-IDs} | lv={LV-IDs}] [-l]
6259907e99fddb7c1ca63b13
class ShowPlatformSoftwareDpidIndex(ShowPlatformSoftwareDpidIndexSchema): <NEW_LINE> <INDENT> cli_command = 'show platform software dpidb index' <NEW_LINE> def cli(self, output=None,): <NEW_LINE> <INDENT> if output is None: <NEW_LINE> <INDENT> out = self.device.execute(self.cli_command) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out = output <NEW_LINE> <DEDENT> result_dict = {} <NEW_LINE> p1 = re.compile(r'^Index\s+(?P<index>\d+)\s+--\s+swidb\s+(?P<interface>\S+)$') <NEW_LINE> for line in out.splitlines(): <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> m = p1.match(line) <NEW_LINE> if m: <NEW_LINE> <INDENT> group = m.groupdict() <NEW_LINE> interface = group.pop('interface') <NEW_LINE> interface_dict = result_dict.setdefault(interface,{}) <NEW_LINE> interface_dict.update({ 'index' : int(group['index']), }) <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> return result_dict
parser for: show platform software dpidb index
6259907e7d43ff2487428150
@inherit_doc <NEW_LINE> class _PowerIterationClusteringParams(HasMaxIter, HasWeightCol): <NEW_LINE> <INDENT> k: Param[int] = Param( Params._dummy(), "k", "The number of clusters to create. Must be > 1.", typeConverter=TypeConverters.toInt, ) <NEW_LINE> initMode: Param[str] = Param( Params._dummy(), "initMode", "The initialization algorithm. This can be either " + "'random' to use a random vector as vertex properties, or 'degree' to use " + "a normalized sum of similarities with other vertices. Supported options: " + "'random' and 'degree'.", typeConverter=TypeConverters.toString, ) <NEW_LINE> srcCol: Param[str] = Param( Params._dummy(), "srcCol", "Name of the input column for source vertex IDs.", typeConverter=TypeConverters.toString, ) <NEW_LINE> dstCol: Param[str] = Param( Params._dummy(), "dstCol", "Name of the input column for destination vertex IDs.", typeConverter=TypeConverters.toString, ) <NEW_LINE> def __init__(self, *args: Any): <NEW_LINE> <INDENT> super(_PowerIterationClusteringParams, self).__init__(*args) <NEW_LINE> self._setDefault(k=2, maxIter=20, initMode="random", srcCol="src", dstCol="dst") <NEW_LINE> <DEDENT> @since("2.4.0") <NEW_LINE> def getK(self) -> int: <NEW_LINE> <INDENT> return self.getOrDefault(self.k) <NEW_LINE> <DEDENT> @since("2.4.0") <NEW_LINE> def getInitMode(self) -> str: <NEW_LINE> <INDENT> return self.getOrDefault(self.initMode) <NEW_LINE> <DEDENT> @since("2.4.0") <NEW_LINE> def getSrcCol(self) -> str: <NEW_LINE> <INDENT> return self.getOrDefault(self.srcCol) <NEW_LINE> <DEDENT> @since("2.4.0") <NEW_LINE> def getDstCol(self) -> str: <NEW_LINE> <INDENT> return self.getOrDefault(self.dstCol)
Params for :py:class:`PowerIterationClustering`. .. versionadded:: 3.0.0
6259907e23849d37ff852b2f
class PolicyLoadMessage(AuditMessage): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> AuditMessage.__init__(self, message)
Audit message indicating that the policy was reloaded.
6259907e26068e7796d4e3b6
class HasMaxIter(Params): <NEW_LINE> <INDENT> maxIter: "Param[int]" = Param( Params._dummy(), "maxIter", "max number of iterations (>= 0).", typeConverter=TypeConverters.toInt, ) <NEW_LINE> def __init__(self) -> None: <NEW_LINE> <INDENT> super(HasMaxIter, self).__init__() <NEW_LINE> <DEDENT> def getMaxIter(self) -> int: <NEW_LINE> <INDENT> return self.getOrDefault(self.maxIter)
Mixin for param maxIter: max number of iterations (>= 0).
6259907e97e22403b383c977
class HCBlock(nn.Module): <NEW_LINE> <INDENT> def __init__(self, n_in, out_sz=256): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.conv = nn.Conv2d(n_in, 16, 1) <NEW_LINE> self.bn = nn.BatchNorm2d(16) <NEW_LINE> self.out_sz = out_sz <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = F.relu(self.conv(x)) <NEW_LINE> x = self.bn(x) <NEW_LINE> return interpolate(x, (self.out_sz, self.out_sz), mode='bilinear', align_corners=False)
Hypercolumn block - reduces num of channels and interpolates
6259907e099cdd3c63676135
class AppError(Exception): <NEW_LINE> <INDENT> pass
Marks an ordinary, recoverable error raised by the current application itself (rather than by a third-party library). Such errors should be raised by the modules that touch third-party interfaces, such as DAOs and clients, and then handled at the topmost layer.
6259907ebe7bc26dc9252b91
class LogAnalyticsOperationResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'properties': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'properties': {'key': 'properties', 'type': 'LogAnalyticsOutput'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(LogAnalyticsOperationResult, self).__init__(**kwargs) <NEW_LINE> self.properties = None
LogAnalytics operation status response. Variables are only populated by the server, and will be ignored when sending a request. :ivar properties: LogAnalyticsOutput. :vartype properties: ~azure.mgmt.compute.v2021_04_01.models.LogAnalyticsOutput
6259907e2c8b7c6e89bd525d
class NoGlobalSettingError(RuntimeError): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> super(NoGlobalSettingError, self).__init__( "Git on this system has no global setting '%s'" % name)
Raised when the requested global setting does not exist. Subclasses :exc:`RuntimeError`.
6259907e8a349b6b43687cd4
class TestInsight(EntityTestHelpers): <NEW_LINE> <INDENT> @pytest.fixture <NEW_LINE> def pywemo_model(self): <NEW_LINE> <INDENT> return "Insight" <NEW_LINE> <DEDENT> @pytest.fixture <NEW_LINE> def wemo_entity_suffix(self): <NEW_LINE> <INDENT> return InsightBinarySensor._name_suffix.lower() <NEW_LINE> <DEDENT> @pytest.fixture(name="pywemo_device") <NEW_LINE> def pywemo_device_fixture(self, pywemo_device): <NEW_LINE> <INDENT> pywemo_device.insight_params = { "currentpower": 1.0, "todaymw": 200000000.0, "state": "0", "onfor": 0, "ontoday": 0, "ontotal": 0, "powerthreshold": 0, } <NEW_LINE> yield pywemo_device <NEW_LINE> <DEDENT> async def test_registry_state_callback( self, hass, pywemo_registry, pywemo_device, wemo_entity ): <NEW_LINE> <INDENT> pywemo_device.get_state.return_value = 1 <NEW_LINE> pywemo_device.insight_params["state"] = "1" <NEW_LINE> pywemo_registry.callbacks[pywemo_device.name](pywemo_device, "", "") <NEW_LINE> await hass.async_block_till_done() <NEW_LINE> assert hass.states.get(wemo_entity.entity_id).state == STATE_ON <NEW_LINE> pywemo_device.get_state.return_value = 1 <NEW_LINE> pywemo_device.insight_params["state"] = "8" <NEW_LINE> pywemo_registry.callbacks[pywemo_device.name](pywemo_device, "", "") <NEW_LINE> await hass.async_block_till_done() <NEW_LINE> assert hass.states.get(wemo_entity.entity_id).state == STATE_OFF <NEW_LINE> pywemo_device.get_state.return_value = 0 <NEW_LINE> pywemo_device.insight_params["state"] = "1" <NEW_LINE> pywemo_registry.callbacks[pywemo_device.name](pywemo_device, "", "") <NEW_LINE> await hass.async_block_till_done() <NEW_LINE> assert hass.states.get(wemo_entity.entity_id).state == STATE_OFF
Test for the pyWeMo Insight device.
6259907e7cff6e4e811b74b8
class crm_phonecall2partner(osv.osv_memory): <NEW_LINE> <INDENT> _name = 'crm.phonecall2partner' <NEW_LINE> _inherit = 'crm.lead2partner' <NEW_LINE> _description = 'Phonecall to Partner' <NEW_LINE> def _select_partner(self, cr, uid, context=None): <NEW_LINE> <INDENT> if context is None: <NEW_LINE> <INDENT> context = {} <NEW_LINE> <DEDENT> phonecall_obj = self.pool.get('crm.phonecall') <NEW_LINE> partner_obj = self.pool.get('res.partner') <NEW_LINE> rec_ids = context and context.get('active_ids', []) <NEW_LINE> value = {} <NEW_LINE> partner_id = False <NEW_LINE> for phonecall in phonecall_obj.browse(cr, uid, rec_ids, context=context): <NEW_LINE> <INDENT> partner_ids = partner_obj.search(cr, uid, [('name', '=', phonecall.name or phonecall.name)]) <NEW_LINE> if not partner_ids and (phonecall.partner_phone or phonecall.partner_mobile): <NEW_LINE> <INDENT> partner_ids = partner_obj.search(cr, uid, ['|', ('phone', '=', phonecall.partner_phone), ('mobile','=',phonecall.partner_mobile)]) <NEW_LINE> <DEDENT> partner_id = partner_ids and partner_ids[0] or False <NEW_LINE> <DEDENT> return partner_id <NEW_LINE> <DEDENT> def _create_partner(self, cr, uid, ids, context=None): <NEW_LINE> <INDENT> if context is None: <NEW_LINE> <INDENT> context = {} <NEW_LINE> <DEDENT> phonecall = self.pool.get('crm.phonecall') <NEW_LINE> data = self.browse(cr, uid, ids, context=context)[0] <NEW_LINE> call_ids = context and context.get('active_ids') or [] <NEW_LINE> partner_id = data.partner_id and data.partner_id.id or False <NEW_LINE> partner_ids = phonecall.convert_partner(cr, uid, call_ids, data.action, partner_id, context=context) <NEW_LINE> return partner_ids[call_ids[0]]
Converts phonecall to partner
6259907ea05bb46b3848be64
class StmtexprHandler(IndentSyntaxNodeHandler): <NEW_LINE> <INDENT> pass
Handler for Stmtexpr nodes. Do nothing.
6259907e091ae356687066b7
class ModifyCluster: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.root = ROOT <NEW_LINE> self.content_type = CONTENT_TYPE <NEW_LINE> <DEDENT> def add_node(self, ip, x_api_session, cluster_object, vios_id): <NEW_LINE> <INDENT> log.log_debug("adding a node to a cluster process starting") <NEW_LINE> link = "https://"+ip+":12443/rest/api/uom/VirtualIOServer/"+vios_id <NEW_LINE> log.log_debug("vios to be added to the cluster -- %s"%(link)) <NEW_LINE> node_object = UOM.Node() <NEW_LINE> node_object.HostName = NODE_HOST_NAME <NEW_LINE> node_object.VirtualIOServer = pyxb.BIND() <NEW_LINE> node_object.VirtualIOServer.href = link <NEW_LINE> node_object.VirtualIOServer.rel = "related" <NEW_LINE> node_object.schemaVersion = SCHEMA_VER <NEW_LINE> cluster_object.Node.schemaVersion = SCHEMA_VER <NEW_LINE> cluster_object.Node.Node.append(node_object) <NEW_LINE> cluster_xml = cluster_object.toxml() <NEW_LINE> cluster_id = cluster_object.Metadata.Atom.AtomID.value() <NEW_LINE> http_object = HTTPClient.HTTPClient("uom", ip, self.root, self.content_type, x_api_session) <NEW_LINE> http_object.HTTPPost(cluster_xml, append = cluster_id) <NEW_LINE> log.log_debug("response for adding a node to cluster --- %s"%(http_object.response)) <NEW_LINE> if http_object.response_b: <NEW_LINE> <INDENT> print("Node is added to the cluster successfully") <NEW_LINE> log.log_debug("node added successfully") <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> log.log_error("Error occured while adding a node to the cluster")
adds a vios node to the existing cluster
6259907e1b99ca4002290271
class SlackBuilder: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def mentor_request(channel, details, attachment, matches, **kwargs): <NEW_LINE> <INDENT> return ResponseContainer(route="Slack", method="mentor_request", payload=dict(channel=channel, first=kwargs, attachment=attachment, matches=matches, second=details)) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def message(channel, **message_payload): <NEW_LINE> <INDENT> return ResponseContainer(route='Slack', method='chat.postMessage', payload=dict(channel=channel, as_user=True, **message_payload)) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def update(**message_payload): <NEW_LINE> <INDENT> return ResponseContainer(route='Slack', method='chat.update', payload=dict(**message_payload)) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def dialog(**message_payload): <NEW_LINE> <INDENT> return ResponseContainer(route='Slack', method='dialog.open', payload=dict(**message_payload))
user_id and flattened dict get passed
6259907e4527f215b58eb6dc
class TestAwsClassicElbServiceLimitsCollectorAttribute(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testAwsClassicElbServiceLimitsCollectorAttribute(self): <NEW_LINE> <INDENT> pass
AwsClassicElbServiceLimitsCollectorAttribute unit test stubs
6259907e92d797404e389898
class PReLU(base.Layer): <NEW_LINE> <INDENT> def __init__(self, alpha_initializer=init_ops.zeros_initializer(), alpha_regularizer=None, activity_regularizer=None, alpha_constraint=lambda x: clip_ops.clip_by_value(x, 0., 1.), shared_axes=None, trainable=True, name=None, **kwargs): <NEW_LINE> <INDENT> super().__init__( trainable=trainable, name=name, activity_regularizer=activity_regularizer, **kwargs) <NEW_LINE> self.supports_masking = True <NEW_LINE> self.alpha_initializer = alpha_initializer <NEW_LINE> self.alpha_regularizer = alpha_regularizer <NEW_LINE> self.alpha_constraint = alpha_constraint <NEW_LINE> if shared_axes is None: <NEW_LINE> <INDENT> self.shared_axes = None <NEW_LINE> <DEDENT> elif not isinstance(shared_axes, (list, tuple)): <NEW_LINE> <INDENT> self.shared_axes = [shared_axes] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.shared_axes = list(shared_axes) <NEW_LINE> <DEDENT> <DEDENT> def build(self, input_shape): <NEW_LINE> <INDENT> input_shape = tensor_shape.TensorShape(input_shape).as_list() <NEW_LINE> param_shape = input_shape[1:] <NEW_LINE> self.param_broadcast = [False] * len(param_shape) <NEW_LINE> if self.shared_axes is not None: <NEW_LINE> <INDENT> for i in self.shared_axes: <NEW_LINE> <INDENT> param_shape[i - 1] = 1 <NEW_LINE> self.param_broadcast[i - 1] = True <NEW_LINE> <DEDENT> <DEDENT> self.alpha = self.add_variable( 'alpha', shape=param_shape, initializer=self.alpha_initializer, regularizer=self.alpha_regularizer, constraint=self.alpha_constraint, dtype=self.dtype, trainable=True) <NEW_LINE> axes = {} <NEW_LINE> if self.shared_axes: <NEW_LINE> <INDENT> for i in range(1, len(input_shape)): <NEW_LINE> <INDENT> if i not in self.shared_axes: <NEW_LINE> <INDENT> axes[i] = input_shape[i] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.input_spec = base.InputSpec(ndim=len(input_shape), axes=axes) <NEW_LINE> self.built = True <NEW_LINE> <DEDENT> def call(self, inputs, mask=None): <NEW_LINE> <INDENT> inputs = ops.convert_to_tensor(inputs, dtype=self.dtype) <NEW_LINE> return math_ops.maximum(self.alpha * inputs, inputs)
Parametric Rectified Linear Unit. It follows: `f(x) = alpha * x for x < 0`, `f(x) = x for x >= 0`, where `alpha` is a learned array with the same shape as x. Input shape: Arbitrary. Use the keyword argument `input_shape` (tuple of integers, does not include the samples axis) when using this layer as the first layer in a model. Output shape: Same shape as the input. Arguments: alpha_initializer: initializer function for the weights. alpha_regularizer: regularizer for the weights. alpha_constraint: constraint for the weights. shared_axes: the axes along which to share learnable parameters for the activation function. For example, if the incoming feature maps are from a 2D convolution with output shape `(batch, height, width, channels)`, and you wish to share parameters across space so that each filter only has one set of parameters, set `shared_axes=[1, 2]`. activity_regularizer: Optional regularizer function for the output. trainable: Boolean, if `True` also add variables to the graph collection `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`). name: A string, the name of the layer.
6259907e5fc7496912d48fa7
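The call method above evaluates maximum(alpha * inputs, inputs); a small NumPy sketch (illustrative only, independent of the TensorFlow layer) checks that this matches the piecewise definition f(x) = alpha * x for x < 0 and f(x) = x for x >= 0 whenever alpha lies in [0, 1], which is exactly what the default alpha_constraint clips to.

import numpy as np

def prelu_piecewise(x, alpha):
    # Piecewise definition from the docstring.
    return np.where(x < 0, alpha * x, x)

def prelu_via_maximum(x, alpha):
    # Formulation used in PReLU.call above.
    return np.maximum(alpha * x, x)

x = np.array([-2.0, -0.5, 0.0, 0.5, 2.0])
alpha = 0.25  # inside the clipped [0, 1] range
assert np.allclose(prelu_piecewise(x, alpha), prelu_via_maximum(x, alpha))
print(prelu_via_maximum(x, alpha))  # [-0.5 -0.125 0. 0.5 2. ]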
class TooShortReadFilter(object): <NEW_LINE> <INDENT> name = "too_short" <NEW_LINE> def __init__(self, minimum_length): <NEW_LINE> <INDENT> self.minimum_length = minimum_length <NEW_LINE> <DEDENT> def __call__(self, read): <NEW_LINE> <INDENT> return len(read) < self.minimum_length
Returns True if the read sequence is shorter than `minimum_length`.
6259907e91f36d47f2231bca
class InterpND(object): <NEW_LINE> <INDENT> def __init__(self, xv, fxv, dfxv=None, xg=None, fpxg=None, dfpxg=None, beta=None, gamma=None, N=None, l=1, verbose=1): <NEW_LINE> <INDENT> pass
Interpolation in multi-dimensional space. xv is the ``value data points'', i.e. the points where the function value is available and given by fxv; dfxv estimates the standard deviation of the error in the function values; the default of dfxv is 0 (fxv is exact). xg is the ``gradient data points'', i.e. points where the function gradient is available and given by fpxg; dfpxg estimates the standard deviation of the error in the gradient values; the default of dfpxg is 0 (fpxg is exact). beta is the `magnitude' of the target function, can be automatically calculated. gamma is the `wave number' of the target function, can be automatically calculated. Combined with beta, it provides an estimate of the derivative growth: f^(k) = O(beta * gamma**k). Larger gamma yields more conservative, more robust and lower order interpolation. N is the order of the Taylor expansion, can be automatically calculated. Smaller N yields lower order interpolation. Numerical instability may occur when N is too large. l is the polynomial order. The interpolant is forced to interpolate order l-1 polynomials exactly. l=1 is the most robust, higher l makes a difference only when gamma is large, or when data is sparse and oscillatory if gamma is automatically calculated. verbose is the verbosity level. 0 is silent. Reference: * Q. Wang et al. A Rational Interpolation Scheme with Super-polynomial Rate of Convergence.
6259907e796e427e538501f3
class Section(models.Model): <NEW_LINE> <INDENT> type = models.PositiveSmallIntegerField() <NEW_LINE> testpaper = models.ForeignKey(Test_Paper) <NEW_LINE> label = models.CharField(max_length=30) <NEW_LINE> note = models.CharField(max_length=50) <NEW_LINE> is_show_section_header = models.BooleanField(default=True) <NEW_LINE> question_type_order = models.CharField(max_length=100)
type: the section category, Section I or Section II. testpaper: the test paper this section belongs to. label: the section label. note: the section note. question_type_order: the question-type order for this section, stored as a string of PaperQuestionType ids in order. is_show_section_header: whether the section header is shown.
6259907e1f5feb6acb164672
class BucketFactory(object): <NEW_LINE> <INDENT> BUCKET_CLASS = staticmethod(Bucket) <NEW_LINE> @classmethod <NEW_LINE> def from_api_response(cls, api, response): <NEW_LINE> <INDENT> return [cls.from_api_bucket_dict(api, bucket_dict) for bucket_dict in response['buckets']] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_api_bucket_dict(cls, api, bucket_dict): <NEW_LINE> <INDENT> type_ = bucket_dict['bucketType'] <NEW_LINE> if type_ is None: <NEW_LINE> <INDENT> raise UnrecognizedBucketType(bucket_dict['bucketType']) <NEW_LINE> <DEDENT> bucket_name = bucket_dict['bucketName'] <NEW_LINE> bucket_id = bucket_dict['bucketId'] <NEW_LINE> bucket_info = bucket_dict['bucketInfo'] <NEW_LINE> cors_rules = bucket_dict['corsRules'] <NEW_LINE> lifecycle_rules = bucket_dict['lifecycleRules'] <NEW_LINE> revision = bucket_dict['revision'] <NEW_LINE> options = set(bucket_dict['options']) <NEW_LINE> if 'defaultServerSideEncryption' not in bucket_dict: <NEW_LINE> <INDENT> raise UnexpectedCloudBehaviour('server did not provide `defaultServerSideEncryption`') <NEW_LINE> <DEDENT> default_server_side_encryption = EncryptionSettingFactory.from_bucket_dict(bucket_dict) <NEW_LINE> file_lock_configuration = FileLockConfiguration.from_bucket_dict(bucket_dict) <NEW_LINE> return cls.BUCKET_CLASS( api, bucket_id, bucket_name, type_, bucket_info, cors_rules, lifecycle_rules, revision, bucket_dict, options, default_server_side_encryption, file_lock_configuration.default_retention, file_lock_configuration.is_file_lock_enabled, )
This is a factory for creating bucket objects from different kinds of objects.
6259907ee1aae11d1e7cf54e
class MatPlotView(BoxLayout, VirtualWindowRootView): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.last_image = None <NEW_LINE> matplotlib.use('agg') <NEW_LINE> self.figure_data = FigureData() <NEW_LINE> self.image_view = AdvancedImage() <NEW_LINE> self.add_widget(self.image_view) <NEW_LINE> self.refresh_graph() <NEW_LINE> <DEDENT> def refresh_graph(self): <NEW_LINE> <INDENT> fig = self.get_figure() <NEW_LINE> self.update_image(fig) <NEW_LINE> plt.close() <NEW_LINE> <DEDENT> def update_image(self, fig): <NEW_LINE> <INDENT> fig.canvas.draw() <NEW_LINE> img = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8, sep='') <NEW_LINE> self.last_image = img.reshape(fig.canvas.get_width_height()[::-1] + (3,)) <NEW_LINE> self.last_image = ColorFormatConverter.ensure_format_cv(self.last_image, ColorFormatConverter.BGR, bgr_order=False) <NEW_LINE> self.image_view.set_image_data(self.last_image) <NEW_LINE> <DEDENT> def get_figure(self): <NEW_LINE> <INDENT> plt.clf() <NEW_LINE> fig = plt.figure() <NEW_LINE> self.figure_data.pyplot_visualize(fig) <NEW_LINE> return fig <NEW_LINE> <DEDENT> def get_optimal_size(self): <NEW_LINE> <INDENT> return (self.last_image.shape[1], self.last_image.shape[0]) if self.last_image is not None else (100, 100)
Displays a matplotlib figure
6259907e4c3428357761bd33
class Button(): <NEW_LINE> <INDENT> max = 100 <NEW_LINE> ramp_time = 0.5 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.last = False <NEW_LINE> self.value = 0 <NEW_LINE> <DEDENT> def update(self, now): <NEW_LINE> <INDENT> if now is not self.last: <NEW_LINE> <INDENT> self.last = now <NEW_LINE> self.value = 0 <NEW_LINE> if now: <NEW_LINE> <INDENT> self.start_press = time.time() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if not now: <NEW_LINE> <INDENT> self.value = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if time.time() - self.start_press > self.ramp_time: <NEW_LINE> <INDENT> self.value = self.max <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> factor = (time.time() - self.start_press) / self.ramp_time <NEW_LINE> self.value = self.max * factor
Imitates joystick behavior by gradually increasing the value
6259907ef548e778e596d00b
class ADMMPeakFinder(CSDeconPeakFinder): <NEW_LINE> <INDENT> def __init__(self, parameters = None, **kwds): <NEW_LINE> <INDENT> super(ADMMPeakFinder, self).__init__(parameters = parameters, **kwds) <NEW_LINE> self.admm_iterations = parameters.getAttr("admm_iterations") <NEW_LINE> self.admm_lambda = parameters.getAttr("admm_lambda") <NEW_LINE> self.admm_number_z = parameters.getAttr("admm_number_z") <NEW_LINE> self.admm_rho = parameters.getAttr("admm_rho") <NEW_LINE> self.admm_threshold = parameters.getAttr("admm_threshold") <NEW_LINE> <DEDENT> def deconInit(self, image): <NEW_LINE> <INDENT> self.decon_object = admmDecon.ADMMDecon(image.shape, self.psf_object, self.admm_number_z, self.admm_rho) <NEW_LINE> <DEDENT> def deconvolve(self): <NEW_LINE> <INDENT> self.decon_object.decon(self.admm_iterations, self.admm_lambda) <NEW_LINE> <DEDENT> def getPeaks(self): <NEW_LINE> <INDENT> return self.decon_object.getPeaks(self.admm_threshold, self.margin)
ADMM deconvolution peak finding.
6259907e7cff6e4e811b74ba
class Measure(BessModule): <NEW_LINE> <INDENT> def __init__( self, offset: uint64 = ..., jitter_sample_prob: double = ..., latency_ns_max: uint64 = ..., latency_ns_resolution: uint32 = ..., name: str = ... ): <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> def get_summary( self, clear: bool = ..., latency_percentiles: List[double] = ..., jitter_percentiles: List[double] = ... ) -> MeasureCommandGetSummaryResponse: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> def clear( self ): <NEW_LINE> <INDENT> ...
measures packet latency (paired with Timestamp module) The measure module tracks latencies, packets per second, and other statistics. It should be paired with a Timestamp module, which attaches a timestamp to packets. The measure module will log how long (in nanoseconds) it has been for each packet it received since it was timestamped. This module is somewhat experimental and undergoing various changes. There is a test for the Measure module in [`bessctl/module_tests/timestamp.py`](https://github.com/NetSys/bess/blob/master/bessctl/module_tests/timestamp.py). __Input Gates__: 1 __Output Gates__: 1
6259907e4428ac0f6e659fa8
class BufferedWriter(BaseWriter): <NEW_LINE> <INDENT> bufsize = 4096 <NEW_LINE> def __init__(self, raw, logger, bufsize = None): <NEW_LINE> <INDENT> super().__init__(raw, logger) <NEW_LINE> if bufsize: <NEW_LINE> <INDENT> self.bufsize = bufsize <NEW_LINE> <DEDENT> self.buffer = None <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> if self.buffer is None: <NEW_LINE> <INDENT> self.buffer = io.BytesIO() <NEW_LINE> <DEDENT> self.buffer.write(data) <NEW_LINE> if self.buffer.tell() >= self.bufsize: <NEW_LINE> <INDENT> self.flush() <NEW_LINE> <DEDENT> <DEDENT> def flush(self): <NEW_LINE> <INDENT> if self.buffer: <NEW_LINE> <INDENT> data = self.buffer.getvalue() <NEW_LINE> if data: <NEW_LINE> <INDENT> self.raw.write(data) <NEW_LINE> <DEDENT> self.buffer = None <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.flush() <NEW_LINE> self.raw.close()
A wrapper to buffer data to avoid small pieces of data.
6259907eaad79263cf430234
class Vbox(BaseScreen): <NEW_LINE> <INDENT> NAME = 'VirtualBox' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.logger = logging.getLogger() <NEW_LINE> self.logger.info('initialising screen...') <NEW_LINE> self.vbox = virtualbox.VirtualBox() <NEW_LINE> self.vm = self.vbox.machines[0] <NEW_LINE> self.vm_session = self.vm.create_session() <NEW_LINE> self.vm_res = self.vm_session.console.display.get_screen_resolution(0) <NEW_LINE> <DEDENT> def take_screen_shot(self): <NEW_LINE> <INDENT> png = self.vm_session.console.display.take_screen_shot_to_array( 0, self.vm_res[0], self.vm_res[1], virtualbox.library.BitmapFormat.png) <NEW_LINE> open('screenshot_vbox.png', 'wb').write(png) <NEW_LINE> return Image.open('screenshot_vbox.png')
Vbox screen provider
6259907e67a9b606de5477e3
class Classroom(object): <NEW_LINE> <INDENT> def __init__(self, ID): <NEW_LINE> <INDENT> self.teacher = None <NEW_LINE> self.subject = None <NEW_LINE> self.fee = None <NEW_LINE> self.free_places = None <NEW_LINE> self.ID = ID
This class represents a 'classroom-class' at the university. Attributes: 1. teacher : The teacher 2. subject : Subject being taught 3. fee : The fee to attend the class 4. free_places : The number of free places in the classroom.
6259907e23849d37ff852b33
class SpacedLinkExtension(markdown.Extension): <NEW_LINE> <INDENT> def extendMarkdown(self, md, md_globals): <NEW_LINE> <INDENT> md.inlinePatterns['link'] = markdown.inlinepatterns.LinkPattern(SPACED_LINK_RE, md) <NEW_LINE> md.inlinePatterns['reference'] = markdown.inlinepatterns.ReferencePattern(SPACED_REFERENCE_RE, md) <NEW_LINE> md.inlinePatterns['image_link'] = markdown.inlinepatterns.ImagePattern(SPACED_IMAGE_LINK_RE, md) <NEW_LINE> md.inlinePatterns['image_reference'] = markdown.inlinepatterns.ImageReferencePattern( SPACED_IMAGE_REFERENCE_RE, md)
An extension that supports links and images with additional whitespace.
6259907e5fc7496912d48fa8
class FastEncodingBuffer: <NEW_LINE> <INDENT> def __init__(self, encoding=None, errors="strict"): <NEW_LINE> <INDENT> self.data = collections.deque() <NEW_LINE> self.encoding = encoding <NEW_LINE> self.delim = "" <NEW_LINE> self.errors = errors <NEW_LINE> self.write = self.data.append <NEW_LINE> <DEDENT> def truncate(self): <NEW_LINE> <INDENT> self.data = collections.deque() <NEW_LINE> self.write = self.data.append <NEW_LINE> <DEDENT> def getvalue(self): <NEW_LINE> <INDENT> if self.encoding: <NEW_LINE> <INDENT> return self.delim.join(self.data).encode( self.encoding, self.errors ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.delim.join(self.data)
a very rudimentary buffer that is faster than StringIO, and supports unicode data.
6259907e4f88993c371f125f
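A minimal usage sketch for FastEncodingBuffer as defined above (it only needs the collections import in scope): write appends string fragments to a deque, getvalue joins them and encodes the result when an encoding was given.

buf = FastEncodingBuffer(encoding="utf-8")
buf.write("héllo")
buf.write(" world")
print(buf.getvalue())   # b'h\xc3\xa9llo world'

buf.truncate()          # reset the internal deque
buf.write("again")
print(buf.getvalue())   # b'again'

plain = FastEncodingBuffer()  # no encoding: getvalue returns a str
plain.write("a")
plain.write("b")
print(plain.getvalue())       # 'ab'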
class BBFileLikeObjectWriter(NSObject): <NEW_LINE> <INDENT> def initWithFileLikeObject_(self, fileobj): <NEW_LINE> <INDENT> self = super().init() <NEW_LINE> self.__fileobj = fileobj <NEW_LINE> return self <NEW_LINE> <DEDENT> initWithFileLikeObject_ = objc.selector(initWithFileLikeObject_, signature=b"@@:@") <NEW_LINE> def write_(self, data): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.__fileobj.write(data) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> return data.length() <NEW_LINE> <DEDENT> write_ = objc.selector(write_, signature=b"i12@0:4@8")
Provides a suitable delegate class for the BBDelegatingOutputStream class in LightAquaBlue.framework. This basically provides a wrapper for a Python file-like object so that it can be written to through an NSOutputStream.
6259907e63b5f9789fe86be2
class Topic(BrowserView): <NEW_LINE> <INDENT> def url(self, item): <NEW_LINE> <INDENT> context_url = "" <NEW_LINE> putils = getToolByName(self.context, 'plone_utils') <NEW_LINE> if putils.isDefaultPage(self.context): <NEW_LINE> <INDENT> context_url = self.context.aq_inner.aq_parent.absolute_url() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> context_url = self.context.absolute_url() <NEW_LINE> <DEDENT> item_url = item.getURL() <NEW_LINE> portal_url = getToolByName(self.context, 'portal_url')() <NEW_LINE> result_url = context_url + item_url[len(portal_url):] <NEW_LINE> return result_url <NEW_LINE> <DEDENT> def test(self, a, b , c): <NEW_LINE> <INDENT> if a: <NEW_LINE> <INDENT> return b <NEW_LINE> <DEDENT> return c
Let's render the target through acquisition.
6259907ef548e778e596d00d
class BaseDataLakeBucket(s3.Bucket): <NEW_LINE> <INDENT> def __init__(self, scope: core.Construct, deploy_env: Environment, layer: DataLakeLayer, **kwargs) -> None: <NEW_LINE> <INDENT> self.deploy_env = deploy_env <NEW_LINE> self.layer = layer <NEW_LINE> self.obj_name = f's3-contatolucas-{self.deploy_env.value}-data-lake-{self.layer.value}' <NEW_LINE> super().__init__( scope, self.obj_name, bucket_name=self.obj_name, block_public_access=self.default_block_public_access(), encryption=self.default_encryption(), versioned=True, **kwargs ) <NEW_LINE> self.set_default_lifecycle_rules() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def default_block_public_access(): <NEW_LINE> <INDENT> block_public_access = s3.BlockPublicAccess( block_public_acls=True, block_public_policy=True, ignore_public_acls=True, restrict_public_buckets=True ) <NEW_LINE> return block_public_access <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def default_encryption(): <NEW_LINE> <INDENT> encryption = s3.BucketEncryption(s3.BucketEncryption.S3_MANAGED) <NEW_LINE> return encryption <NEW_LINE> <DEDENT> def set_default_lifecycle_rules(self): <NEW_LINE> <INDENT> self.add_lifecycle_rule( abort_incomplete_multipart_upload_after=core.Duration.days(7), enabled=True ) <NEW_LINE> self.add_lifecycle_rule( noncurrent_version_transitions=[ s3.NoncurrentVersionTransition( storage_class=s3.StorageClass.INFREQUENT_ACCESS, transition_after=core.Duration.days(30) ), s3.NoncurrentVersionTransition( storage_class=s3.StorageClass.GLACIER, transition_after=core.Duration.days(60) ) ] ) <NEW_LINE> self.add_lifecycle_rule( noncurrent_version_expiration=core.Duration.days(360) )
Base class to create a data lake bucket
6259907e55399d3f05627f90
class CircularSection(SectionProperties): <NEW_LINE> <INDENT> r= 0.0 <NEW_LINE> def __init__(self,name,Rext,Rint=0): <NEW_LINE> <INDENT> super(CircularSection,self).__init__(name) <NEW_LINE> self.Rext= Rext <NEW_LINE> self.Rint=Rint <NEW_LINE> <DEDENT> def A(self): <NEW_LINE> <INDENT> return math.pi*(self.Rext*self.Rext-self.Rint*self.Rint) <NEW_LINE> <DEDENT> def getThickness(self): <NEW_LINE> <INDENT> return self.Rext-self.Rint <NEW_LINE> <DEDENT> def getAverageRadius(self): <NEW_LINE> <INDENT> return (self.Rext+self.Rint)/2.0 <NEW_LINE> <DEDENT> def getAverageDiameter(self): <NEW_LINE> <INDENT> return self.getAverageRadius*2.0 <NEW_LINE> <DEDENT> def getDiameter(self): <NEW_LINE> <INDENT> return 2.0*self.Rext <NEW_LINE> <DEDENT> def getExternalDiameter(self): <NEW_LINE> <INDENT> return 2.0*self.Rext <NEW_LINE> <DEDENT> def getInternalDiameter(self): <NEW_LINE> <INDENT> return 2.0*self.Rint <NEW_LINE> <DEDENT> def Iy(self): <NEW_LINE> <INDENT> return 1.0/4.0*math.pi*(self.Rext**4-self.Rint**4) <NEW_LINE> <DEDENT> def Iz(self): <NEW_LINE> <INDENT> return self.Iy() <NEW_LINE> <DEDENT> def J(self): <NEW_LINE> <INDENT> return 2*self.Iy()*self.torsionalStiffnessFactor <NEW_LINE> <DEDENT> def alphaY(self): <NEW_LINE> <INDENT> if self.Rint==0: <NEW_LINE> <INDENT> alpha=6.0/7.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> c=self.Rint/self.Rext <NEW_LINE> K=c/(1+c**2) <NEW_LINE> alpha=6/(7+20*K**2) <NEW_LINE> <DEDENT> return alpha <NEW_LINE> <DEDENT> def alphaZ(self): <NEW_LINE> <INDENT> return self.alphaY() <NEW_LINE> <DEDENT> def getTorsionalStiffness(self, G): <NEW_LINE> <INDENT> return G*self.J() <NEW_LINE> <DEDENT> def getShearStiffnessY(self, G): <NEW_LINE> <INDENT> retval= 0.0 <NEW_LINE> if(self.Rint==0): <NEW_LINE> <INDENT> retval= 32.0/37.0*G*self.A() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lmsg.error('getShearStiffness for tubes not implemented yet.') <NEW_LINE> <DEDENT> return retval <NEW_LINE> <DEDENT> def getShearStiffnessZ(self, G): <NEW_LINE> <INDENT> return self.getShearStiffnessY(G) <NEW_LINE> <DEDENT> def getContourPoints(self, nDiv= 100): <NEW_LINE> <INDENT> theta = np.linspace(0, 2*np.pi, nDiv) <NEW_LINE> retval= list() <NEW_LINE> r = np.sqrt(self.Rext) <NEW_LINE> if(self.Rint!=0): <NEW_LINE> <INDENT> lmsg.error('getContourPoints for tubes not implemented yet.') <NEW_LINE> <DEDENT> x1= r*np.cos(theta) <NEW_LINE> x2= r*np.sin(theta) <NEW_LINE> for x,y in zip(x1,x2): <NEW_LINE> <INDENT> retval.append(geom.Pos2d(x, y)) <NEW_LINE> <DEDENT> return retval
Geometric parameters of a circular or circular hollow section :ivar Rext: external radius :ivar Rint: internal radius (defaults to 0)
6259907e7cff6e4e811b74bc
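For a quick sanity check of the formulas used above, A = pi * (Rext^2 - Rint^2) and Iy = Iz = pi/4 * (Rext^4 - Rint^4); the numbers below are computed directly with math so they do not depend on the SectionProperties base class (the radius value is only an example).

import math

Rext, Rint = 0.05, 0.0            # solid 100 mm diameter bar, in metres
A = math.pi * (Rext**2 - Rint**2)
Iy = math.pi / 4.0 * (Rext**4 - Rint**4)
print("A  = %.6e m^2" % A)        # ~7.853982e-03
print("Iy = %.6e m^4" % Iy)       # ~4.908739e-06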
class CountCalls(object): <NEW_LINE> <INDENT> __instances = {} <NEW_LINE> def __init__(self, f): <NEW_LINE> <INDENT> self.__f = f <NEW_LINE> self.__numcalls = 0 <NEW_LINE> CountCalls.__instances[f] = self <NEW_LINE> <DEDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.__numcalls += 1 <NEW_LINE> return self.__f(*args, **kwargs) <NEW_LINE> <DEDENT> def count(self): <NEW_LINE> <INDENT> return CountCalls.__instances[self.__f].__numcalls <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def counts(): <NEW_LINE> <INDENT> return dict([(f.__name__, CountCalls.__instances[f].__numcalls) for f in CountCalls.__instances])
Decorator that keeps track of the number of times a function is called.
6259907e4527f215b58eb6de
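A minimal usage sketch for the CountCalls decorator above, assuming the class is defined as shown; count() reports the calls of one wrapped function and counts() reports all of them keyed by function name.

@CountCalls
def greet(name):
    return "hello " + name

greet("a")
greet("b")
greet("c")
print(greet.count())        # 3
print(CountCalls.counts())  # {'greet': 3}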
class CustomArgParser(argparse.ArgumentParser): <NEW_LINE> <INDENT> def error(self, message): <NEW_LINE> <INDENT> sys.stderr.write('error: %s\n' % message) <NEW_LINE> self.print_help() <NEW_LINE> sys.exit(2)
argparse.ArgumentParser subclass with an overridden error() method that prints the help text before exiting.
6259907e92d797404e38989a
class XPathSelectorList(list): <NEW_LINE> <INDENT> def __getslice__(self, i, j): <NEW_LINE> <INDENT> return XPathSelectorList(list.__getslice__(self, i, j)) <NEW_LINE> <DEDENT> def select(self, xpath): <NEW_LINE> <INDENT> return XPathSelectorList(flatten([x.select(xpath) for x in self])) <NEW_LINE> <DEDENT> def re(self, regex): <NEW_LINE> <INDENT> return flatten([x.re(regex) for x in self]) <NEW_LINE> <DEDENT> def extract(self): <NEW_LINE> <INDENT> return [x.extract() if isinstance(x, XPathSelector) else x for x in self] <NEW_LINE> <DEDENT> def extract_unquoted(self): <NEW_LINE> <INDENT> return [x.extract_unquoted() if isinstance(x, XPathSelector) else x for x in self] <NEW_LINE> <DEDENT> @deprecated(use_instead='XPathSelectorList.select') <NEW_LINE> def x(self, xpath): <NEW_LINE> <INDENT> return self.select(xpath)
List of XPathSelector objects
6259907ef548e778e596d00e
class BaseFilter(object): <NEW_LINE> <INDENT> def _filter_one(self, obj, pod_group): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def filter_all(self, filter_obj_list, pod_group): <NEW_LINE> <INDENT> for obj in filter_obj_list: <NEW_LINE> <INDENT> if self._filter_one(obj, pod_group): <NEW_LINE> <INDENT> yield obj
Base class for all pod filter classes.
6259907e76e4537e8c3f0ffc
class DataExport(FolioApi): <NEW_LINE> <INDENT> def set_export(self, export: dict): <NEW_LINE> <INDENT> return self.call("POST", "/data-export/export", data=export) <NEW_LINE> <DEDENT> def set_quickExport(self, quickExport: dict): <NEW_LINE> <INDENT> return self.call("POST", "/data-export/quick-export", data=quickExport) <NEW_LINE> <DEDENT> def get_jobExecutions(self, **kwargs): <NEW_LINE> <INDENT> return self.call("GET", "/data-export/job-executions", query=kwargs) <NEW_LINE> <DEDENT> def delete_jobExecution(self, jobExecutionsId: str): <NEW_LINE> <INDENT> return self.call("DELETE", f"/data-export/job-executions/{jobExecutionsId}") <NEW_LINE> <DEDENT> def get_download(self, jobExecutionId: str, exportFileId: str): <NEW_LINE> <INDENT> return self.call("GET", f"/data-export/job-executions/{jobExecutionId}/download/{exportFileId}") <NEW_LINE> <DEDENT> def set_expireJob(self): <NEW_LINE> <INDENT> return self.call("POST", "/data-export/expire-jobs") <NEW_LINE> <DEDENT> def set_cleanUpFile(self): <NEW_LINE> <INDENT> return self.call("POST", "/data-export/clean-up-files")
Data export API. API for exporting MARC records.
6259907e4f88993c371f1260
class projfeat3d(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_planes, out_planes, stride): <NEW_LINE> <INDENT> super(projfeat3d, self).__init__() <NEW_LINE> self.stride = stride <NEW_LINE> self.conv1 = nn.Conv2d(in_planes, out_planes, (1, 1), padding=( 0, 0), stride=stride[:2], bias=False) <NEW_LINE> self.bn = nn.BatchNorm2d(out_planes) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> b, c, d, h, w = x.size() <NEW_LINE> x = self.conv1(x.view(b, c, d, h * w)) <NEW_LINE> x = self.bn(x) <NEW_LINE> x = x.view(b, -1, d // self.stride[0], h, w) <NEW_LINE> return x
Turn 3d projection into 2d projection
6259907e1f5feb6acb164676
class TravelOptionUpdateSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> drive = serializers.JSONField(validators=[validate_drive_option]) <NEW_LINE> transit = serializers.JSONField(validators=[validate_transit_option]) <NEW_LINE> uber = serializers.JSONField(validators=[validate_uber_option]) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = TravelOption <NEW_LINE> fields = ('drive', 'transit', 'uber',)
We use this serializer to write calculated travel options back to database.
6259907e4c3428357761bd37
class BaseConverter(object): <NEW_LINE> <INDENT> netjson_key = None <NEW_LINE> intermediate_key = None <NEW_LINE> def __init__(self, backend): <NEW_LINE> <INDENT> self.backend = backend <NEW_LINE> self.netjson = backend.config <NEW_LINE> self.intermediate_data = backend.intermediate_data <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def should_run_forward(cls, config): <NEW_LINE> <INDENT> return cls.netjson_key in config <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def should_run_backward(cls, intermediate_data): <NEW_LINE> <INDENT> return cls.intermediate_key in intermediate_data <NEW_LINE> <DEDENT> def type_cast(self, item, schema=None): <NEW_LINE> <INDENT> if schema is None: <NEW_LINE> <INDENT> schema = self._schema <NEW_LINE> <DEDENT> properties = schema['properties'] <NEW_LINE> for key, value in item.items(): <NEW_LINE> <INDENT> if key not in properties: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> json_type = properties[key]['type'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> json_type = None <NEW_LINE> <DEDENT> if isinstance(json_type, list) and json_type: <NEW_LINE> <INDENT> json_type = json_type[0] <NEW_LINE> <DEDENT> if json_type == 'integer' and not isinstance(value, int): <NEW_LINE> <INDENT> value = int(value) <NEW_LINE> <DEDENT> elif json_type == 'boolean' and not isinstance(value, bool): <NEW_LINE> <INDENT> value = value == '1' <NEW_LINE> <DEDENT> item[key] = value <NEW_LINE> <DEDENT> return item <NEW_LINE> <DEDENT> def get_copy(self, dict_, key, default=None): <NEW_LINE> <INDENT> return get_copy(dict_, key, default) <NEW_LINE> <DEDENT> def sorted_dict(self, dict_): <NEW_LINE> <INDENT> return sorted_dict(dict_) <NEW_LINE> <DEDENT> def to_intermediate(self): <NEW_LINE> <INDENT> result = OrderedDict() <NEW_LINE> netjson = get_copy(self.netjson, self.netjson_key) <NEW_LINE> if isinstance(netjson, list): <NEW_LINE> <INDENT> for index, block in enumerate(netjson): <NEW_LINE> <INDENT> result = self.to_intermediate_loop(block, result, index + 1) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> result = self.to_intermediate_loop(netjson, result) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def to_intermediate_loop(self, block, result, index=None): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def to_netjson(self, remove_block=True): <NEW_LINE> <INDENT> result = OrderedDict() <NEW_LINE> intermediate_data = self.to_netjson_clean( self.intermediate_data[self.intermediate_key] ) <NEW_LINE> for index, block in enumerate(intermediate_data): <NEW_LINE> <INDENT> if self.should_skip_block(block): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if remove_block: <NEW_LINE> <INDENT> self.intermediate_data[self.intermediate_key].remove(block) <NEW_LINE> <DEDENT> result = self.to_netjson_loop(block, result, index + 1) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def to_netjson_clean(self, intermediate_data): <NEW_LINE> <INDENT> return list(intermediate_data) <NEW_LINE> <DEDENT> def to_netjson_loop(self, block, result, index=None): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def should_skip_block(self, block): <NEW_LINE> <INDENT> return not block
Base Converter class Converters are used to convert a configuration dictionary which represent a NetJSON object to a data structure that can be easily rendered as the final router configuration and vice versa.
6259907ee1aae11d1e7cf550
class Job: <NEW_LINE> <INDENT> def __init__(self,files=None): <NEW_LINE> <INDENT> self.solved = False <NEW_LINE> self.unloaded = False <NEW_LINE> self.config = {} <NEW_LINE> self.solver = SimFDTD() <NEW_LINE> self.unload() <NEW_LINE> self.load(files) <NEW_LINE> <DEDENT> def load(self, files = None): <NEW_LINE> <INDENT> if files: <NEW_LINE> <INDENT> if isinstance(files,str): <NEW_LINE> <INDENT> files = [files] <NEW_LINE> <DEDENT> for file in files: <NEW_LINE> <INDENT> with open(file) as f: <NEW_LINE> <INDENT> newsetup = yaml.load(f) <NEW_LINE> self.config = {**self.config, **newsetup} <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.unloaded = False <NEW_LINE> <DEDENT> def unload(self): <NEW_LINE> <INDENT> with open('default.json') as fp: <NEW_LINE> <INDENT> self.config = yaml.load(fp) <NEW_LINE> <DEDENT> self.unloaded = True <NEW_LINE> <DEDENT> def initialize(self): <NEW_LINE> <INDENT> if isinstance(self.config['config']['processor'],str): <NEW_LINE> <INDENT> self.config['config']['processor'] = eval(self.config['config']['processor']) <NEW_LINE> <DEDENT> self.solver.setup(**self.config['config']) <NEW_LINE> for source in self.config['config']['sources']: <NEW_LINE> <INDENT> if isinstance(self.config['config']['sources'][source],str): <NEW_LINE> <INDENT> self.config['config']['sources'][source] = eval(self.config['config']['sources'][source]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def solve(self): <NEW_LINE> <INDENT> self.solver.run() <NEW_LINE> self.solved = True <NEW_LINE> <DEDENT> def unsolve(self): <NEW_LINE> <INDENT> self.solved = False
Specifies, configures, and runs a simulation.
6259907e5166f23b2e244e55
class SingletonCreator(type): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def singleton_decorator(func): <NEW_LINE> <INDENT> @wraps(func) <NEW_LINE> def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> if args[0].sl_init: <NEW_LINE> <INDENT> return func(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> return wrapper <NEW_LINE> <DEDENT> def __new__(mcs, name, bases, attrs): <NEW_LINE> <INDENT> if not (len(bases) == 1 and object in bases): <NEW_LINE> <INDENT> if '__init__' in attrs: <NEW_LINE> <INDENT> attrs['__init__'] = mcs.singleton_decorator(attrs['__init__']) <NEW_LINE> <DEDENT> <DEDENT> return super(SingletonCreator, mcs).__new__(mcs, name, bases, attrs)
This metaclass wraps __init__ method of created class with singleton_decorator. This ensures that it's impossible to mess up the instance for example by calling __init__ with getattr.
6259907ebf627c535bcb2f4f
class UserData(Resource): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> @auth.login_required <NEW_LINE> def get(): <NEW_LINE> <INDENT> return util.load_user_data(g.user.user_id) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @auth.login_required <NEW_LINE> def put(): <NEW_LINE> <INDENT> retval = defaultdict(lambda: defaultdict(dict)) <NEW_LINE> json_data = util.load_user_data(g.user.user_id) <NEW_LINE> for section, data in request.get_json().items(): <NEW_LINE> <INDENT> if section == 'colors': <NEW_LINE> <INDENT> for name, color in data.items(): <NEW_LINE> <INDENT> if util.is_color(color): <NEW_LINE> <INDENT> if name in json_data['colors']: <NEW_LINE> <INDENT> retval['colors']['modified'][name] = { 'old': json_data['colors'][name], 'new': color } <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> retval['colors']['added'][name] = color <NEW_LINE> <DEDENT> json_data['colors'][name] = color <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif section == 'color_lists': <NEW_LINE> <INDENT> for name, color_list in data.items(): <NEW_LINE> <INDENT> if all([util.is_color(color) for color in color_list]): <NEW_LINE> <INDENT> if name in json_data['color_lists']: <NEW_LINE> <INDENT> retval['color_lists']['modified'][name] = { 'old': json_data['color_lists'][name], 'new': color_list } <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> retval['color_lists']['added'][name] = color_list <NEW_LINE> <DEDENT> json_data['color_lists'][name] = color_list <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> with open(f'{USERS_DIR}/{g.user.user_id}.json', 'w') as data_file: <NEW_LINE> <INDENT> json.dump(json_data, data_file, indent=4) <NEW_LINE> <DEDENT> return retval <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @auth.login_required <NEW_LINE> def delete(): <NEW_LINE> <INDENT> retval = defaultdict(lambda: defaultdict(dict)) <NEW_LINE> json_data = util.load_user_data(g.user.user_id) <NEW_LINE> for section, data in request.get_json().items(): <NEW_LINE> <INDENT> for name in data: <NEW_LINE> <INDENT> if name in json_data[section]: <NEW_LINE> <INDENT> retval[section]['deleted'][name] = json_data[section][name] <NEW_LINE> del json_data[section][name] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> with open(f'{USERS_DIR}/{g.user.user_id}.json', 'w') as data_file: <NEW_LINE> <INDENT> json.dump(json_data, data_file, indent=4) <NEW_LINE> <DEDENT> return retval
Manage a user's saved data, i.e. colors and color lists. Requires authentication.
6259907e4527f215b58eb6df
class SemesterResultDetail(DetailView): <NEW_LINE> <INDENT> model = Result <NEW_LINE> template_name = "cbt/semester_result.html" <NEW_LINE> context_object_name = "result" <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> return get_object_or_404( Result, user=self.request.user.userdetail, level=self.kwargs.get('level', 0), semester=self.kwargs.get('semester', 0) )
Returns the matched semester result.
6259907e92d797404e38989b
class ActionExecutionSchedulingQueueItemDB( stormbase.StormFoundationDB, stormbase.ChangeRevisionFieldMixin ): <NEW_LINE> <INDENT> RESOURCE_TYPE = ResourceType.EXECUTION_REQUEST <NEW_LINE> UID_FIELDS = ["id"] <NEW_LINE> liveaction_id = me.StringField( required=True, help_text="Foreign key to the LiveActionDB which is to be scheduled", ) <NEW_LINE> action_execution_id = me.StringField( help_text="Foreign key to the ActionExecutionDB which is to be scheduled" ) <NEW_LINE> original_start_timestamp = ComplexDateTimeField( default=date_utils.get_datetime_utc_now, help_text="The timestamp when the liveaction was created and originally be scheduled to " "run.", ) <NEW_LINE> scheduled_start_timestamp = ComplexDateTimeField( default=date_utils.get_datetime_utc_now, help_text="The timestamp when liveaction is scheduled to run.", ) <NEW_LINE> delay = me.IntField() <NEW_LINE> handling = me.BooleanField( default=False, help_text="Flag indicating if this item is currently being handled / " "processed by a scheduler service", ) <NEW_LINE> meta = { "indexes": [ {"fields": ["action_execution_id"], "name": "ac_exc_id"}, {"fields": ["liveaction_id"], "name": "lv_ac_id"}, {"fields": ["original_start_timestamp"], "name": "orig_s_ts"}, {"fields": ["scheduled_start_timestamp"], "name": "schd_s_ts"}, ] }
A model which represents a request for execution to be scheduled. These models are picked up by the scheduler and scheduled to be run by an action runner.
6259907e7b180e01f3e49da4
class DgtCn(DgtDisplayIface): <NEW_LINE> <INDENT> def __init__(self, dgtboard: DgtBoard): <NEW_LINE> <INDENT> super(DgtCn, self).__init__(dgtboard) <NEW_LINE> MsgDisplay.show(Message.DGT_CLOCK_VERSION(main=2, sub=2, dev='i2c', text=None)) <NEW_LINE> <DEDENT> def display_text_on_clock(self, message): <NEW_LINE> <INDENT> print('text ', message.l) <NEW_LINE> return True <NEW_LINE> <DEDENT> def display_move_on_clock(self, message): <NEW_LINE> <INDENT> print('move ', message.move) <NEW_LINE> return True <NEW_LINE> <DEDENT> def display_time_on_clock(self, message): <NEW_LINE> <INDENT> if self.get_name() not in message.devs: <NEW_LINE> <INDENT> logging.debug('ignored endText - devs: %s', message.devs) <NEW_LINE> return True <NEW_LINE> <DEDENT> print('time ', message.devs) <NEW_LINE> return True <NEW_LINE> <DEDENT> def light_squares_on_revelation(self, uci_move: str): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def clear_light_on_revelation(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def _resume_clock(self, side): <NEW_LINE> <INDENT> print('rsume') <NEW_LINE> return True <NEW_LINE> <DEDENT> def stop_clock(self, devs: set): <NEW_LINE> <INDENT> print('stop ', devs) <NEW_LINE> return True <NEW_LINE> <DEDENT> def start_clock(self, side: ClockSide, devs: set): <NEW_LINE> <INDENT> print('start', devs) <NEW_LINE> return True <NEW_LINE> <DEDENT> def set_clock(self, time_left: int, time_right: int, devs: set): <NEW_LINE> <INDENT> print('set ', devs) <NEW_LINE> return True <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return 'i2c'
Handle the DgtXL/3000 communication.
6259907e3317a56b869bf285
class PailgunHandler(PailgunHandlerBase): <NEW_LINE> <INDENT> def _run_pants(self, sock, arguments, environment): <NEW_LINE> <INDENT> runner = self.server.runner_factory(sock, arguments, environment) <NEW_LINE> runner.run() <NEW_LINE> <DEDENT> def handle(self): <NEW_LINE> <INDENT> _, _, arguments, environment = NailgunProtocol.parse_request(self.request) <NEW_LINE> arguments.insert(0, './pants') <NEW_LINE> self.logger.info('handling pailgun request: `{}`'.format(' '.join(arguments))) <NEW_LINE> self.logger.debug('pailgun request environment: %s', environment) <NEW_LINE> NailgunProtocol.send_start_reading_input(self.request) <NEW_LINE> with maybe_profiled(environment.get('PANTSD_PROFILE')): <NEW_LINE> <INDENT> self._run_pants(self.request, arguments, environment) <NEW_LINE> <DEDENT> <DEDENT> def handle_error(self, exc=None): <NEW_LINE> <INDENT> if exc: <NEW_LINE> <INDENT> NailgunProtocol.write_chunk(self.request, ChunkType.STDERR, traceback.format_exc()) <NEW_LINE> <DEDENT> NailgunProtocol.write_chunk(self.request, ChunkType.EXIT, '1')
A nailgun protocol handler for use with forking, SocketServer-based servers.
6259907e656771135c48ad6f
class SpotifyPlaylist: <NEW_LINE> <INDENT> SPLIT_MAX = 100 <NEW_LINE> def __init__( self, playlist_id: str, session: spotipy.Spotify, username: str, ): <NEW_LINE> <INDENT> self._playlist_id = playlist_id <NEW_LINE> self._session = session <NEW_LINE> self._username = username <NEW_LINE> <DEDENT> def _get_current_songs(self) -> List[spotify_song.SpotifySong]: <NEW_LINE> <INDENT> results = self._session.user_playlist_tracks( self._username, playlist_id=self._playlist_id ) <NEW_LINE> songs = results["items"] <NEW_LINE> while results["next"]: <NEW_LINE> <INDENT> results = self._session.next(results) <NEW_LINE> songs.extend(results["items"]) <NEW_LINE> <DEDENT> return [spotify_song.SpotifySong(self._session, song["track"]["id"]) for song in songs] <NEW_LINE> <DEDENT> def _remove_current_songs_not_in_songs_to_load( self, songs_to_load: Set[spotify_song.SpotifySong], ) -> None: <NEW_LINE> <INDENT> playlist_current_songs = self._get_current_songs() <NEW_LINE> for current_song in playlist_current_songs: <NEW_LINE> <INDENT> if current_song not in songs_to_load: <NEW_LINE> <INDENT> self._session.user_playlist_remove_all_occurrences_of_tracks( self._username, playlist_id=self._playlist_id, tracks=[current_song.song_id], ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def update( self, songs_to_load: Set[spotify_song.SpotifySong], ) -> None: <NEW_LINE> <INDENT> self._remove_current_songs_not_in_songs_to_load(songs_to_load) <NEW_LINE> playlist_current_songs = self._get_current_songs() <NEW_LINE> final_songs_to_load: List[spotify_song.SpotifySong] = [] <NEW_LINE> for song_to_load in songs_to_load: <NEW_LINE> <INDENT> if song_to_load not in playlist_current_songs: <NEW_LINE> <INDENT> final_songs_to_load.append(song_to_load) <NEW_LINE> <DEDENT> <DEDENT> if final_songs_to_load: <NEW_LINE> <INDENT> for i in range(0, len(final_songs_to_load), self.SPLIT_MAX): <NEW_LINE> <INDENT> self._session.user_playlist_add_tracks( self._username, playlist_id=self._playlist_id, tracks=[song.song_id for song in final_songs_to_load[i: i + self.SPLIT_MAX]], )
Class that represents a Spotify playlist.
6259907e5166f23b2e244e57
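A minimal usage sketch for the SpotifyPlaylist wrapper above, assuming a spotipy 2.x client and the project's spotify_song.SpotifySong helper; the playlist id, username, and track ids below are hypothetical placeholders.

import spotipy
from spotipy.oauth2 import SpotifyOAuth

import spotify_song  # the project's song wrapper used by SpotifyPlaylist

# Hypothetical credentials are read from the SPOTIPY_* environment variables.
session = spotipy.Spotify(auth_manager=SpotifyOAuth(scope="playlist-modify-private"))

playlist = SpotifyPlaylist(
    playlist_id="PLACEHOLDER_PLAYLIST_ID",   # hypothetical playlist owned by the user
    session=session,
    username="PLACEHOLDER_USERNAME",
)

# Describe the desired final contents; update() removes everything else and
# adds the missing tracks in batches of SPLIT_MAX.
track_ids = ["PLACEHOLDER_TRACK_ID_1", "PLACEHOLDER_TRACK_ID_2"]
songs_to_load = {spotify_song.SpotifySong(session, track_id) for track_id in track_ids}
playlist.update(songs_to_load)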
class User(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'users' <NEW_LINE> user_id = db.Column(db.Integer, primary_key=True, autoincrement=True) <NEW_LINE> user_name = db.Column(db.String(50), nullable=False) <NEW_LINE> password = db.Column(db.String(25), nullable=False) <NEW_LINE> posts = db.relationship("Post") <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return """<user_id = {}, user_name = {}""".format(self.user_id, self.user_name)
user information
6259907e3346ee7daa3383a1
class IntentsServicer(object): <NEW_LINE> <INDENT> def ListIntents(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def GetIntent(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def CreateIntent(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def UpdateIntent(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def DeleteIntent(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def BatchUpdateIntents(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def BatchDeleteIntents(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!')
Manages agent intents. Refer to the [Dialogflow documentation](https://dialogflow.com/docs/intents) for more details about agent intents.
6259907e7c178a314d78e92a
class ApplicationCall(Annotation): <NEW_LINE> <INDENT> def __init__(self, application): <NEW_LINE> <INDENT> self.application = application <NEW_LINE> super(ApplicationCall, self).__init__() <NEW_LINE> <DEDENT> def add_auxiliary_variable(self, variable, roles=None, name=None): <NEW_LINE> <INDENT> if name: <NEW_LINE> <INDENT> variable.name = _variable_name( self.application.brick.name, self.application.name, name) <NEW_LINE> variable.tag.name = name <NEW_LINE> name = None <NEW_LINE> <DEDENT> add_annotation(variable, self.application.brick) <NEW_LINE> return super(ApplicationCall, self).add_auxiliary_variable( variable, roles, name)
A link between the variable tags and bricks. The application call can be used to attach to an apply call auxiliary variables (e.g. monitors or regularizers) that do not form part of the main computation graph. The application call object is created before the call to the application method and can be accessed by specifying an application_call argument. Also see :class:`.Annotation`. Parameters ---------- application : :class:`BoundApplication` instance The bound application (i.e. belong to a brick instance) object being called Examples -------- >>> class Foo(Brick): ... @application ... def apply(self, x, application_call): ... application_call.add_auxiliary_variable(x.mean()) ... return x + 1 >>> x = tensor.vector() >>> y = Foo().apply(x) >>> from blocks.filter import get_application_call >>> get_application_call(y) # doctest: +ELLIPSIS <blocks.bricks.base.ApplicationCall object at ...>
6259907eaad79263cf430239
class TestWordsMethods: <NEW_LINE> <INDENT> @pytest.fixture(scope='function', autouse=True) <NEW_LINE> def setup_class(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_read_file(self): <NEW_LINE> <INDENT> assert read_file("data/projects/words/words.txt") == {3: 1294, 4: 4994, 5: 5757} <NEW_LINE> <DEDENT> def test_distance(self): <NEW_LINE> <INDENT> w1 = 'Hello' <NEW_LINE> w2 = 'Hello' <NEW_LINE> assert distance(w1, w2) == 0 <NEW_LINE> w1 = 'Hello' <NEW_LINE> w2 = 'Jello' <NEW_LINE> assert distance(w1, w2) == 1 <NEW_LINE> w1 = 'Hello' <NEW_LINE> w2 = 'Happy' <NEW_LINE> assert distance(w1, w2) == 4 <NEW_LINE> <DEDENT> def test_distance_all(self): <NEW_LINE> <INDENT> word = 'Hello' <NEW_LINE> all_words = ['Jello', 'Happy'] <NEW_LINE> used_words = [] <NEW_LINE> assert distance_all(word, all_words, used_words) == ['Jello'] <NEW_LINE> word = 'Hello' <NEW_LINE> all_words = ['Jello', 'Happy'] <NEW_LINE> used_words = ['Jello'] <NEW_LINE> assert distance_all(word, all_words, used_words) == []
Testing module words
6259907e66673b3332c31e7f
class dimension(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.associatedUnits = {} <NEW_LINE> <DEDENT> def addUnit(self, unit, conversion): <NEW_LINE> <INDENT> self.associatedUnits.update({unit:conversion}) <NEW_LINE> <DEDENT> def convert(self, sourceUnit, targetUnit): <NEW_LINE> <INDENT> if sourceUnit in self.associatedUnits: <NEW_LINE> <INDENT> baseFactor = self.associatedUnits[sourceUnit] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise errors.UnitConversionError("Source unit not found in dimension") <NEW_LINE> <DEDENT> if targetUnit in self.associatedUnits: <NEW_LINE> <INDENT> targetFactor = self.associatedUnits[targetUnit] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise errors.UnitConversionError("Target unit not found in dimension") <NEW_LINE> <DEDENT> value = float(baseFactor)/float(targetFactor) <NEW_LINE> units = unitDict({targetUnit:1}) <NEW_LINE> return dFloat(value, units)
This class defines a dimension such as distance or time.
6259907e4428ac0f6e659fae
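A simplified standalone sketch of the same conversion-factor idea (each unit stores a factor relative to the dimension's base unit, and convert() returns the ratio of the two factors); the original's errors, unitDict, and dFloat helpers are not reproduced here.

class SimpleDimension:
    def __init__(self):
        self.factors = {}  # unit name -> conversion factor relative to the base unit

    def add_unit(self, unit, factor):
        self.factors[unit] = factor

    def convert(self, source, target):
        if source not in self.factors or target not in self.factors:
            raise KeyError("unit not registered with this dimension")
        # Multiplying a value in `source` units by this ratio expresses it in `target` units.
        return self.factors[source] / self.factors[target]

length = SimpleDimension()
length.add_unit("mm", 1.0)      # base unit
length.add_unit("cm", 10.0)
length.add_unit("m", 1000.0)
print(25.4 * length.convert("mm", "cm"))   # 2.54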
@base.gf2n <NEW_LINE> @base.vectorize <NEW_LINE> class ldms(base.DirectMemoryInstruction, base.ReadMemoryInstruction): <NEW_LINE> <INDENT> __slots__ = ["code"] <NEW_LINE> code = base.opcodes['LDMS'] <NEW_LINE> arg_format = ['sw','int'] <NEW_LINE> def execute(self): <NEW_LINE> <INDENT> self.args[0].value = program.mem_s[self.args[1]]
Assigns register $s_i$ the value in memory \verb+S[n]+.
6259907e97e22403b383c980
class PaymentAPI(ShopAPI): <NEW_LINE> <INDENT> def confirm_payment(self, order, amount, transaction_id, payment_method, save=True): <NEW_LINE> <INDENT> OrderPayment.objects.create( order=order, amount=Decimal(amount), transaction_id=transaction_id, payment_method=payment_method) <NEW_LINE> if save and self.is_order_paid(order): <NEW_LINE> <INDENT> order.status = Order.COMPLETED <NEW_LINE> order.save() <NEW_LINE> try: <NEW_LINE> <INDENT> cart = Cart.objects.get(pk=order.cart_pk) <NEW_LINE> cart.empty() <NEW_LINE> <DEDENT> except Cart.DoesNotExist: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> completed.send(sender=self, order=order) <NEW_LINE> <DEDENT> <DEDENT> def cancel_payment(self, order, amount, payment_method, save=True): <NEW_LINE> <INDENT> if save: <NEW_LINE> <INDENT> order.status= Order.CANCELLED <NEW_LINE> order.save() <NEW_LINE> <DEDENT> cancelled.send(sender=self, order=order) <NEW_LINE> <DEDENT> def get_finished_url(self): <NEW_LINE> <INDENT> return reverse('thank_you_for_your_order') <NEW_LINE> <DEDENT> def get_cancel_url(self): <NEW_LINE> <INDENT> return reverse('payment_error')
This object's purpose is to expose an API to the shop system. Ideally, shops (django SHOP or others) should implement this API, so that payment plugins are interchangeable between systems. This implementation is the interface reference for django SHOP Don't forget that since plenty of methods are common to both PaymentAPI and ShippingAPI(), they are defined in the ShopAPI base class!
6259907e1b99ca4002290275
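A hedged sketch of how a payment backend might call this API once its gateway reports success; the gateway response field names and the backend identifier are assumptions, and `order` is whatever the shop's checkout flow hands the backend.

from decimal import Decimal

api = PaymentAPI()

def on_gateway_success(order, gateway_response):
    # Record the payment; PaymentAPI marks the order COMPLETED and empties the
    # cart once the order is fully paid.
    api.confirm_payment(
        order=order,
        amount=Decimal(str(gateway_response["amount"])),   # hypothetical field names
        transaction_id=gateway_response["txn_id"],
        payment_method="example-gateway",                  # hypothetical backend identifier
    )
    return api.get_finished_url()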
class Session(ndb.Model): <NEW_LINE> <INDENT> sessionName = ndb.StringProperty(required=True) <NEW_LINE> conferenceName = ndb.StringProperty() <NEW_LINE> highlights = ndb.StringProperty() <NEW_LINE> speakWSK = ndb.StringProperty() <NEW_LINE> lastName = ndb.StringProperty() <NEW_LINE> duration = ndb.IntegerProperty(repeated=True) <NEW_LINE> typeOfSession = ndb.StringProperty("TypeOfSession", default='NOT_SPECIFIED') <NEW_LINE> location = ndb.StringProperty() <NEW_LINE> sessionDate = ndb.DateProperty() <NEW_LINE> startTime = ndb.TimeProperty() <NEW_LINE> maxRegistered = ndb.IntegerProperty() <NEW_LINE> spotsAvailable = ndb.IntegerProperty()
Session -- Session object
6259907e76e4537e8c3f1000
class VectorAxesInsn(AxisMaskInsn): <NEW_LINE> <INDENT> def __init__(self, line, tab, axes): <NEW_LINE> <INDENT> super(VectorAxesInsn, self).__init__(line, tab, axes) <NEW_LINE> self.set_upper_8(0x0B) <NEW_LINE> self.set_lower_16(0)
Vector axes instructions VECTOR AXES|AXIS ARE|IS axes
6259907e1f5feb6acb16467a
class EmployeeForm(FlaskForm): <NEW_LINE> <INDENT> name = StringField( "Name", validators=[DataRequired(), Length(min=2, max=50)] ) <NEW_LINE> date_of_birth = DateField("Date of Birth", validators=[DataRequired()]) <NEW_LINE> salary = DecimalField("Salary", validators=[DataRequired()]) <NEW_LINE> department_id = SelectField("Department", coerce=int, choices=[ (department.id, department.name) for department in Department.query.order_by(Department.id).all()]) <NEW_LINE> submit = SubmitField("Submit") <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> EmployeeForm.department_id = SelectField("Department", coerce=int, choices=[ (department.id, department.name) for department in Department.query.order_by(Department.id).all()])
Form for adding and updating employees.
6259907e3346ee7daa3383a2
class RichardViewsTest(ViewTestCase): <NEW_LINE> <INDENT> def test_home(self): <NEW_LINE> <INDENT> url = reverse('home') <NEW_LINE> self.assert_HTTP_200(url) <NEW_LINE> self.assert_used_templates(url, templates=['home.html'])
Tests for the project's views.
6259907e4a966d76dd5f0967
class Cat(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> print('%s被创建' % self.name) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '我是小猫: %s' % self.name
Cat class.
6259907e099cdd3c6367613a
class DispatchingApp: <NEW_LINE> <INDENT> def __init__(self, loader, use_eager_loading=None): <NEW_LINE> <INDENT> self.loader = loader <NEW_LINE> self._app = None <NEW_LINE> self._lock = Lock() <NEW_LINE> self._bg_loading_exc = None <NEW_LINE> if use_eager_loading is None: <NEW_LINE> <INDENT> use_eager_loading = os.environ.get("WERKZEUG_RUN_MAIN") != "true" <NEW_LINE> <DEDENT> if use_eager_loading: <NEW_LINE> <INDENT> self._load_unlocked() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._load_in_background() <NEW_LINE> <DEDENT> <DEDENT> def _load_in_background(self): <NEW_LINE> <INDENT> def _load_app(): <NEW_LINE> <INDENT> __traceback_hide__ = True <NEW_LINE> with self._lock: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._load_unlocked() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self._bg_loading_exc = e <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> t = Thread(target=_load_app, args=()) <NEW_LINE> t.start() <NEW_LINE> <DEDENT> def _flush_bg_loading_exception(self): <NEW_LINE> <INDENT> __traceback_hide__ = True <NEW_LINE> exc = self._bg_loading_exc <NEW_LINE> if exc is not None: <NEW_LINE> <INDENT> self._bg_loading_exc = None <NEW_LINE> raise exc <NEW_LINE> <DEDENT> <DEDENT> def _load_unlocked(self): <NEW_LINE> <INDENT> __traceback_hide__ = True <NEW_LINE> self._app = rv = self.loader() <NEW_LINE> self._bg_loading_exc = None <NEW_LINE> return rv <NEW_LINE> <DEDENT> def __call__(self, environ, start_response): <NEW_LINE> <INDENT> __traceback_hide__ = True <NEW_LINE> if self._app is not None: <NEW_LINE> <INDENT> return self._app(environ, start_response) <NEW_LINE> <DEDENT> self._flush_bg_loading_exception() <NEW_LINE> with self._lock: <NEW_LINE> <INDENT> if self._app is not None: <NEW_LINE> <INDENT> rv = self._app <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rv = self._load_unlocked() <NEW_LINE> <DEDENT> return rv(environ, start_response)
Special application that dispatches to a Flask application which is imported by name in a background thread. If an error happens it is recorded and shown as part of the WSGI handling which in case of the Werkzeug debugger means that it shows up in the browser.
6259907e283ffb24f3cf5321
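A minimal usage sketch: wrap a slow application factory so the development server can start listening immediately while the import happens in a background thread; `myproject.create_app` is a hypothetical Flask factory.

from werkzeug.serving import run_simple

def load_app():
    from myproject import create_app   # hypothetical, possibly slow, import
    return create_app()

# With eager loading disabled, the first request blocks on the lock until the
# background load finishes (or re-raises any recorded import error).
wsgi_app = DispatchingApp(load_app, use_eager_loading=False)
run_simple("127.0.0.1", 5000, wsgi_app)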
class Quotas: <NEW_LINE> <INDENT> def __init__(self, total, total_r): <NEW_LINE> <INDENT> self.init = {'total': total, 'total_r': total_r, 'stm_max': total // 2, 'stm_max_r': total_r // 2, 'Mon': 2, 'Tue': 2, 'Wed': 2, 'Thu': 2, 'Fri': 2} <NEW_LINE> self.reset_quotas() <NEW_LINE> <DEDENT> def reset_quotas(self): <NEW_LINE> <INDENT> self.current = {} <NEW_LINE> for key, value in self.init.items(): <NEW_LINE> <INDENT> self.current[key] = value <NEW_LINE> <DEDENT> <DEDENT> def test_schedule(self, schedule_to_test): <NEW_LINE> <INDENT> if self.current[schedule_to_test.day] > 0: <NEW_LINE> <INDENT> if schedule_to_test.category == 'accueil': <NEW_LINE> <INDENT> return True if self.current['total'] > 0 else False <NEW_LINE> <DEDENT> if schedule_to_test.category == 'stm': <NEW_LINE> <INDENT> return True if self.current['stm_max'] > 0 and self.current['total'] > 0 else False <NEW_LINE> <DEDENT> if schedule_to_test.category == 'accueil_r': <NEW_LINE> <INDENT> return True if self.current['total_r'] > 0 else False <NEW_LINE> <DEDENT> if schedule_to_test.category == 'stm_r': <NEW_LINE> <INDENT> return True if self.current['stm_max_r'] > 0 and self.current['total_r'] > 0 else False <NEW_LINE> <DEDENT> <DEDENT> return False
Class representing the counter clerks' quotas.
6259907e67a9b606de5477e7
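A small usage sketch; Schedule is a stand-in for whatever object the project passes in, since test_schedule() only reads its day and category attributes.

from collections import namedtuple

Schedule = namedtuple("Schedule", ["day", "category"])

quotas = Quotas(total=4, total_r=2)
print(quotas.test_schedule(Schedule("Mon", "accueil")))   # True: daily and total quotas remain
print(quotas.test_schedule(Schedule("Mon", "stm_r")))     # True: stm_max_r = total_r // 2 = 1
quotas.reset_quotas()                                     # restore every counter to its initial value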
class CustomAppIndexDashboard(AppIndexDashboard): <NEW_LINE> <INDENT> title = '' <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> AppIndexDashboard.__init__(self, *args, **kwargs) <NEW_LINE> self.children += [ modules.ModelList(self.app_title, self.models), modules.RecentActions( _('Recent Actions'), include_list=self.get_app_content_types(), limit=5 ) ] <NEW_LINE> <DEDENT> def init_with_context(self, context): <NEW_LINE> <INDENT> return super(CustomAppIndexDashboard, self).init_with_context(context)
Custom app index dashboard for wysiweb.
6259907ed268445f2663a89f
class Key(object): <NEW_LINE> <INDENT> NOTHING = 0 <NEW_LINE> SHARP = 1 <NEW_LINE> FLAT = 2 <NEW_LINE> NATURAL = 3 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._semiMap = {} <NEW_LINE> self.major(Pitch()) <NEW_LINE> <DEDENT> def major(self, pitch): <NEW_LINE> <INDENT> mySemiList = [ self.NOTHING, self.SHARP, self.NOTHING, self.SHARP, self.NOTHING, self.NOTHING, self.SHARP, self.NOTHING, self.SHARP, self.NOTHING, self.SHARP, ] <NEW_LINE> <DEDENT> def harmonicMinor(self, pitch): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def naturalMinor(self, pitch): <NEW_LINE> <INDENT> pass
Class used for calculating the accidentals required for each note.
6259907e23849d37ff852b3b
class ScriptFile(object): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> super(ScriptFile, self).__init__() <NEW_LINE> self._path = path <NEW_LINE> self.name = os.path.basename(path) <NEW_LINE> self.version = None <NEW_LINE> <DEDENT> def ReadVersion(self): <NEW_LINE> <INDENT> with open(self._path, 'rb') as file_object: <NEW_LINE> <INDENT> for line in file_object.readlines(): <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> if line.startswith(b'# Version: '): <NEW_LINE> <INDENT> _, _, version = line.rpartition(b'# Version: ') <NEW_LINE> self.version = version.decode(u'ascii') <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return False
Class that defines a script or configuration file. Attributes: name (str): name. version (str): version.
6259907e76e4537e8c3f1002
class IDataConversionMonitor: <NEW_LINE> <INDENT> def GetVerbosity(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ProcessMessage(self,messageId,messageSeverity,entityIds): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self,*args): <NEW_LINE> <INDENT> pass
A base class for an application-specific logger. It should be used to track errors during conversion and/or track conversion progress, and to cancel a conversion process if necessary. Implementing a logger class is optional, but highly recommended for all but the most basic data converters. The base class is UI- and language-independent; it is up to the using application to implement the UI. Language-specific data may be used to communicate information to application users. English should be used to communicate data of interest to Revit development.
6259907ed486a94d0ba2da39
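A hedged sketch of an application-specific monitor built on this base; the verbosity value and the meaning of the value returned from ProcessMessage are assumptions, since they depend on the host application's conversion framework.

class ConsoleConversionMonitor(IDataConversionMonitor):
    def GetVerbosity(self):
        return 1  # hypothetical "normal" verbosity level

    def ProcessMessage(self, messageId, messageSeverity, entityIds):
        # Log every message and keep the conversion going.
        print("[severity %s] message %s, entities: %s" % (messageSeverity, messageId, list(entityIds)))
        return True  # hypothetical "continue conversion" signal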
class Place(BaseModel): <NEW_LINE> <INDENT> city_id = '' <NEW_LINE> user_id = '' <NEW_LINE> name = '' <NEW_LINE> description = '' <NEW_LINE> number_rooms = 0 <NEW_LINE> number_bathrooms = 0 <NEW_LINE> max_guest = 0 <NEW_LINE> price_by_night = 0 <NEW_LINE> latitude = 0.0 <NEW_LINE> longitude = 0.0 <NEW_LINE> amenity_ids = []
used for defining attributes of Place
6259907e3317a56b869bf287
class M109(SetTemperatureExtruder): <NEW_LINE> <INDENT> param_letters = utils.str_to_chars("srt") <NEW_LINE> def __init__( self, s: float, r: float = None, t: int = None, line_number: int = None ): <NEW_LINE> <INDENT> super().__init__(s=s, r=r, line_number=line_number) <NEW_LINE> self.t = t <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ( f"{self.__class__.__name__}" f"(line_number={self.line_number}, " f"s={self.s}, r={self.r}, t={self.t})" )
Set Extruder Temperature and Wait
6259907e7d847024c075de60
class Ant(Insect): <NEW_LINE> <INDENT> is_ant = True <NEW_LINE> implemented = False <NEW_LINE> food_cost = 0 <NEW_LINE> blocks_path = True <NEW_LINE> container = False <NEW_LINE> def __init__(self, armor=1): <NEW_LINE> <INDENT> Insect.__init__(self, armor) <NEW_LINE> <DEDENT> def can_contain(self, insect): <NEW_LINE> <INDENT> if ((self.container == True) and (self.ant == None) and (insect.container == False)): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
An Ant occupies a place and does work for the colony.
6259907e4c3428357761bd3d
class HillClimbing(object): <NEW_LINE> <INDENT> def __init__( self, iterations, target, size , type ): <NEW_LINE> <INDENT> self.iterations = iterations <NEW_LINE> self.target = target <NEW_LINE> self.height , self.width = target.shape[:2] <NEW_LINE> self.size = size <NEW_LINE> self.type = type <NEW_LINE> <DEDENT> def run( self , *args ): <NEW_LINE> <INDENT> if len(args) == 0: <NEW_LINE> <INDENT> solution = individual.IndividualGen(self.size, self.height, self.width, self.type , 0.2 ) <NEW_LINE> min_err = self.height * self.width <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> solution = individual.IndividualGen(args[0]) <NEW_LINE> data = args[0].split("_") <NEW_LINE> min_err = float(data[3][:-4]) <NEW_LINE> <DEDENT> for i in range(self.iterations): <NEW_LINE> <INDENT> temp = copy.deepcopy(solution) <NEW_LINE> temp.mutate() <NEW_LINE> err = temp.fitness(self.target) <NEW_LINE> if err < min_err : <NEW_LINE> <INDENT> min_err = err <NEW_LINE> solution = copy.deepcopy(temp) <NEW_LINE> <DEDENT> if i % 5000 == 0 : <NEW_LINE> <INDENT> solution.write("SolutionHC_Error_" + str(i) + "_" + str(min_err) + ".jpg") <NEW_LINE> solution.encode("SolutionHC_Error_" + str(i) + "_" + str(min_err) + ".txt")
Hill Climbing is a mathematical optimization technique which belongs to the family of local search. It is an iterative algorithm that starts with an arbitrary solution to a problem, then attempts to find a better solution by incrementally changing a single element of the solution. If the change produces a better solution, an incremental change is made to the new solution, repeating until no further improvements can be found. We consider an initial pool of polygons and keep adding more polygons as long as the total mean squared error is significantly reduced with respect to the target image we want to approximate. INPUT: - Number of iterations for the algorithm - Target image that we try to approximate - Number of polygons to include in the solution - Type of polygon in the solution
6259907e55399d3f05627f98
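A standalone illustration of the hill-climbing loop described above, applied to a toy 1-D function instead of polygon images (and maximizing a score rather than minimizing an image error): mutate the current solution and keep the change only when it improves.

import random

def hill_climb(score, start, iterations=10_000, step=0.1):
    best = start
    best_score = score(best)
    for _ in range(iterations):
        candidate = best + random.uniform(-step, step)   # incremental change to one element
        candidate_score = score(candidate)
        if candidate_score > best_score:                 # keep only improvements
            best, best_score = candidate, candidate_score
    return best, best_score

# Maximize -(x - 3)^2; the climber should finish near x = 3.
print(hill_climb(lambda x: -(x - 3) ** 2, start=0.0))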
class RobtexMonitorScraper(MechanizedScraper): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> MechanizedScraper.__init__(self) <NEW_LINE> <DEDENT> def run(self, ip): <NEW_LINE> <INDENT> results = [] <NEW_LINE> url_param = ip.replace(".", "/") <NEW_LINE> url = "https://www.robtex.com/en/advisory/ip/" + url_param + "/shared.html" <NEW_LINE> self.browser.open(url) <NEW_LINE> parser = self.browser.parsed <NEW_LINE> search = parser.find("ol", {"class": "xbul"}) <NEW_LINE> total = 0 <NEW_LINE> if search is not None: <NEW_LINE> <INDENT> for result in search.find_all("li"): <NEW_LINE> <INDENT> total += 1 <NEW_LINE> if total > 100: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result_value = result.text <NEW_LINE> if ' ' in result_value: <NEW_LINE> <INDENT> result_value = re.sub(' ', '.', result_value) <NEW_LINE> results.append(result_value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> results.append(result_value) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return results
Created by: LNguyen Date: 17April2017 Created class to handle web scraping logic for the certificate monitoring function. This class will be referenced by the Monitor function.
6259907e99fddb7c1ca63b1a
class PageChildrenField(Field): <NEW_LINE> <INDENT> def get_attribute(self, instance): <NEW_LINE> <INDENT> return instance <NEW_LINE> <DEDENT> def to_representation(self, page): <NEW_LINE> <INDENT> return OrderedDict([ ('count', self.context['base_queryset'].child_of(page).count()), ('listing_url', get_model_listing_url(self.context, Page) + '?child_of=' + str(page.id)), ])
Serializes the "children" field. Example: "children": { "count": 1, "listing_url": "/api/v1/pages/?child_of=2" }
6259907f5fdd1c0f98e5fa03
class DatiFatturaBodyDTRType (pyxb.binding.basis.complexTypeDefinition): <NEW_LINE> <INDENT> _TypeDefinition = None <NEW_LINE> _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY <NEW_LINE> _Abstract = False <NEW_LINE> _ExpandedName = pyxb.namespace.ExpandedName( Namespace, 'DatiFatturaBodyDTRType') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location( '../data/datifatture/DatiFatturav2.0.xsd', 113, 2) <NEW_LINE> _ElementMap = {} <NEW_LINE> _AttributeMap = {} <NEW_LINE> __DatiGenerali = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName( None, 'DatiGenerali'), 'DatiGenerali', '__httpivaservizi_agenziaentrate_gov_itdocsxsdfatturev2_0_DatiFatturaBodyDTRType_DatiGenerali', False, pyxb.utils.utility.Location('../data/datifatture/DatiFatturav2.0.xsd', 115, 6), ) <NEW_LINE> DatiGenerali = property(__DatiGenerali.value, __DatiGenerali.set, None, None) <NEW_LINE> __DatiRiepilogo = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName( None, 'DatiRiepilogo'), 'DatiRiepilogo', '__httpivaservizi_agenziaentrate_gov_itdocsxsdfatturev2_0_DatiFatturaBodyDTRType_DatiRiepilogo', True, pyxb.utils.utility.Location('../data/datifatture/DatiFatturav2.0.xsd', 116, 6), ) <NEW_LINE> DatiRiepilogo = property(__DatiRiepilogo.value, __DatiRiepilogo.set, None, None) <NEW_LINE> _ElementMap.update({ __DatiGenerali.name(): __DatiGenerali, __DatiRiepilogo.name(): __DatiRiepilogo }) <NEW_LINE> _AttributeMap.update({ })
Complex type {http://ivaservizi.agenziaentrate.gov.it/docs/xsd/fatture/v2.0}DatiFatturaBodyDTRType with content type ELEMENT_ONLY
6259907e60cbc95b06365aae
@dataclasses.dataclass(frozen=True) <NEW_LINE> class UserFacingCLIError(APDSensorsError, SystemExit): <NEW_LINE> <INDENT> message: str <NEW_LINE> return_code: int <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f"[{self.return_code}] {self.message}"
A fatal error for the CLI
6259907f97e22403b383c984
class LibvirtGuestsConfig(LibvirtConfigCommon): <NEW_LINE> <INDENT> conf_path = '/etc/sysconfig/libvirt-guests' <NEW_LINE> __option_types__ = { 'URIS': 'string', 'ON_BOOT': 'string', 'START_DELAY': 'int', 'ON_SHUTDOWN': 'string', 'PARALLEL_SHUTDOWN': 'int', 'SHUTDOWN_TIMEOUT': 'int', 'BYPASS_CACHE': 'boolean' }
Class for sysconfig libvirt-guests config file.
6259907f5fdd1c0f98e5fa04
class G_LogLocalThemeOverridesNode(G_LogChildNode, G_ThemeOverridesNode, G_ListContainedNode): <NEW_LINE> <INDENT> def BuildPage(parent): <NEW_LINE> <INDENT> me = __class__ <NEW_LINE> me._Sizer = parent.GetSizer() <NEW_LINE> G_ThemeOverridesNode.BuildControl(me, parent) <NEW_LINE> <DEDENT> def __init__(self, factory, wproject, witem, name, **kwargs): <NEW_LINE> <INDENT> G_ListContainedNode.__init__(self, factory, wproject, witem) <NEW_LINE> G_ThemeOverridesNode.__init__(self, G_Const.LogThemeCls, G_ThemeNode.DomainLogfile) <NEW_LINE> <DEDENT> def Activate(self): <NEW_LINE> <INDENT> self.SetNodeHelp("Logfile Pattern Themes", "logfile.html", "logpatterns") <NEW_LINE> self.ActivateControl()
Logfile local theme overrides saving/restoring
6259907f4f6381625f19a1f0
class CopyrightViewlet(ViewletBase): <NEW_LINE> <INDENT> pass
This viewlet shows the copyright license at the end of the page.
6259907f7b180e01f3e49da7
class _JSTestSelector(_Selector): <NEW_LINE> <INDENT> def __init__(self, test_file_explorer): <NEW_LINE> <INDENT> _Selector.__init__(self, test_file_explorer) <NEW_LINE> self._tags = self._test_file_explorer.parse_tag_file("js_test", config.TAG_FILE) <NEW_LINE> <DEDENT> def select(self, selector_config): <NEW_LINE> <INDENT> self._tags = self._test_file_explorer.parse_tag_file("js_test", selector_config.tag_file, self._tags) <NEW_LINE> return _Selector.select(self, selector_config) <NEW_LINE> <DEDENT> def get_tags(self, test_file): <NEW_LINE> <INDENT> file_tags = self._test_file_explorer.jstest_tags(test_file) <NEW_LINE> if test_file in self._tags: <NEW_LINE> <INDENT> return list(set(file_tags) | set(self._tags[test_file])) <NEW_LINE> <DEDENT> return file_tags
_Selector subclass for JavaScript tests.
6259907f4f88993c371f1264
@inherit_doc <NEW_LINE> class ChiSqSelector(JavaEstimator, HasFeaturesCol, HasOutputCol, HasLabelCol, JavaMLReadable, JavaMLWritable): <NEW_LINE> <INDENT> numTopFeatures = Param(Params._dummy(), "numTopFeatures", "Number of features that selector will select, ordered by statistics value " + "descending. If the number of features is < numTopFeatures, then this will select " + "all features.", typeConverter=TypeConverters.toInt) <NEW_LINE> @keyword_only <NEW_LINE> def __init__(self, numTopFeatures=50, featuresCol="features", outputCol=None, labelCol="label"): <NEW_LINE> <INDENT> super(ChiSqSelector, self).__init__() <NEW_LINE> self._java_obj = self._new_java_obj("org.apache.spark.ml.feature.ChiSqSelector", self.uid) <NEW_LINE> kwargs = self.__init__._input_kwargs <NEW_LINE> self.setParams(**kwargs) <NEW_LINE> <DEDENT> @keyword_only <NEW_LINE> @since("2.0.0") <NEW_LINE> def setParams(self, numTopFeatures=50, featuresCol="features", outputCol=None, labelCol="labels"): <NEW_LINE> <INDENT> kwargs = self.setParams._input_kwargs <NEW_LINE> return self._set(**kwargs) <NEW_LINE> <DEDENT> @since("2.0.0") <NEW_LINE> def setNumTopFeatures(self, value): <NEW_LINE> <INDENT> self._paramMap[self.numTopFeatures] = value <NEW_LINE> return self <NEW_LINE> <DEDENT> @since("2.0.0") <NEW_LINE> def getNumTopFeatures(self): <NEW_LINE> <INDENT> return self.getOrDefault(self.numTopFeatures) <NEW_LINE> <DEDENT> def _create_model(self, java_model): <NEW_LINE> <INDENT> return ChiSqSelectorModel(java_model)
.. note:: Experimental Chi-Squared feature selection, which selects categorical features to use for predicting a categorical label. >>> from pyspark.mllib.linalg import Vectors >>> df = sqlContext.createDataFrame( ... [(Vectors.dense([0.0, 0.0, 18.0, 1.0]), 1.0), ... (Vectors.dense([0.0, 1.0, 12.0, 0.0]), 0.0), ... (Vectors.dense([1.0, 0.0, 15.0, 0.1]), 0.0)], ... ["features", "label"]) >>> selector = ChiSqSelector(numTopFeatures=1, outputCol="selectedFeatures") >>> model = selector.fit(df) >>> model.transform(df).head().selectedFeatures DenseVector([1.0]) >>> model.selectedFeatures [3] >>> chiSqSelectorPath = temp_path + "/chi-sq-selector" >>> selector.save(chiSqSelectorPath) >>> loadedSelector = ChiSqSelector.load(chiSqSelectorPath) >>> loadedSelector.getNumTopFeatures() == selector.getNumTopFeatures() True >>> modelPath = temp_path + "/chi-sq-selector-model" >>> model.save(modelPath) >>> loadedModel = ChiSqSelectorModel.load(modelPath) >>> loadedModel.selectedFeatures == model.selectedFeatures True .. versionadded:: 2.0.0
6259907f7d847024c075de62
class ISOTopicCategoryFilter(rest_framework_filters.FilterSet): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = geospaas.vocabularies.models.ISOTopicCategory <NEW_LINE> fields = {'name': '__all__'}
Filter for ISOTopicCategories
6259907f63b5f9789fe86bec
class Page(object): <NEW_LINE> <INDENT> def __init__(self, item_count, page_index=1, page_size=10): <NEW_LINE> <INDENT> self.item_count = item_count <NEW_LINE> self.page_size = page_size <NEW_LINE> self.page_count = item_count // page_size + (1 if item_count % page_size > 0 else 0) <NEW_LINE> if item_count == 0 or page_index > self.page_count: <NEW_LINE> <INDENT> self.offset = 0 <NEW_LINE> self.limit = 0 <NEW_LINE> self.page_index = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.page_index = page_index <NEW_LINE> self.offset = self.page_size * (page_index - 1) <NEW_LINE> self.limit = self.page_size <NEW_LINE> <DEDENT> self.has_next = self.page_index < self.page_count <NEW_LINE> self.has_previous = self.page_index > 1 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'item_count: %s, page_count: %s, page_index: %s, page_size: %s, offset: %s, limit: %s' % (self.item_count, self.page_count, self.page_index, self.page_size, self.offset, self.limit) <NEW_LINE> <DEDENT> __repr__ = __str__
Page object for displaying pages.
6259907fe1aae11d1e7cf554
class BoardRestored(BoardAccessChanged): <NEW_LINE> <INDENT> pass
The board has been restored from archive.
6259907f442bda511e95da99
class Session(ndb.Model): <NEW_LINE> <INDENT> name = ndb.StringProperty(required=True) <NEW_LINE> highlights = ndb.StringProperty() <NEW_LINE> speakerId = ndb.StringProperty() <NEW_LINE> duration = ndb.IntegerProperty() <NEW_LINE> typeOfSession = ndb.StringProperty() <NEW_LINE> date = ndb.DateProperty() <NEW_LINE> startTime = ndb.TimeProperty() <NEW_LINE> location = ndb.StringProperty()
Session -- Session object for conferences
6259907f7c178a314d78e92d
class FileDataset(Dataset): <NEW_LINE> <INDENT> def __init__(self, dirPath, recursive=False, transform=lambda ndarray: ndarray): <NEW_LINE> <INDENT> self.dirPath = dirPath <NEW_LINE> if not recursive: <NEW_LINE> <INDENT> self.fileNames = seq(os.listdir(dirPath)).filter(lambda name: os.path.isfile(dirPath/name)).to_list() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError("Recursive loading not yet implemented") <NEW_LINE> <DEDENT> self.transform = transform <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.fileNames) <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> raise NotImplementedError()
Dataset which loads numpy .npy files from a directory.
6259907ff548e778e596d017
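A hedged sketch of how this base might be used: subclass it to actually load the .npy files, then hand it to a torch DataLoader. The directory name is hypothetical, and the original's own imports (torch Dataset, functional.seq, os) are assumed to be in place.

from pathlib import Path

import numpy as np
import torch
from torch.utils.data import DataLoader

class NpyFileDataset(FileDataset):
    def __getitem__(self, idx):
        # dirPath is expected to be a pathlib.Path, matching the dirPath/name use in __init__.
        arr = np.load(self.dirPath / self.fileNames[idx])
        return torch.from_numpy(self.transform(arr))

dataset = NpyFileDataset(Path("data/npy"), transform=lambda a: a.astype("float32"))
loader = DataLoader(dataset, batch_size=8, shuffle=True)
for batch in loader:
    print(batch.shape)
    break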
class TestDLLList(testlib.RekallBaseUnitTestCase): <NEW_LINE> <INDENT> PARAMETERS = dict(commandline="dlllist") <NEW_LINE> def ParseDllist(self, output): <NEW_LINE> <INDENT> map = {} <NEW_LINE> for section in self.SplitLines(output, seperator="***********"): <NEW_LINE> <INDENT> process_name, pid = section[1].split("pid:") <NEW_LINE> try: <NEW_LINE> <INDENT> preamble, dlllist = list(self.SplitLines(section, seperator="-----")) <NEW_LINE> map[int(pid)] = dlllist <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> map[int(pid)] = [] <NEW_LINE> <DEDENT> <DEDENT> return map <NEW_LINE> <DEDENT> def testDlllist(self): <NEW_LINE> <INDENT> previous_map = self.ParseDllist(self.baseline['output']) <NEW_LINE> current_map = self.ParseDllist(self.current['output']) <NEW_LINE> self.assertListEqual(previous_map, current_map) <NEW_LINE> for pid in previous_map: <NEW_LINE> <INDENT> self.assertListEqual( self.ExtractColumn(previous_map[pid], 0, 1), self.ExtractColumn(current_map[pid], 0, 1)) <NEW_LINE> self.assertListEqual( self.ExtractColumn(previous_map[pid], 2, 1), self.ExtractColumn(current_map[pid], 2, 1))
Test the dlllist module.
6259907f4428ac0f6e659fb4
class MessageKeys: <NEW_LINE> <INDENT> APP = "app" <NEW_LINE> PATH = "path" <NEW_LINE> LINE = "line" <NEW_LINE> CHAR = "char" <NEW_LINE> CODE = "code" <NEW_LINE> CODE_READABLE = "code_readable" <NEW_LINE> MESSAGE = "msg" <NEW_LINE> EXTENDS = "extends" <NEW_LINE> EXTRAS = "extras"
Message names.
6259907fad47b63b2c5a92d6
class Person(BaseModel): <NEW_LINE> <INDENT> logger.info('Note how we defined the class') <NEW_LINE> logger.info('Specify the fields in our model, their lengths and if mandatory') <NEW_LINE> logger.info('Must be a unique identifier for each person') <NEW_LINE> person_name = CharField(primary_key = True, max_length = 30) <NEW_LINE> lives_in_town = CharField(max_length = 40) <NEW_LINE> nickname = CharField(max_length = 20, null = True) <NEW_LINE> job_held = ForeignKeyField(Job, related_name='job held', null = False)
This class defines Person, which maintains details of someone whose career to date we want to research.
6259907fbe7bc26dc9252b98
class ClusterList(list, Item): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return id(self) <NEW_LINE> <DEDENT> def iterfiles(self, save=False): <NEW_LINE> <INDENT> for cluster in self: <NEW_LINE> <INDENT> yield from cluster.iterfiles(save) <NEW_LINE> <DEDENT> <DEDENT> def can_save(self): <NEW_LINE> <INDENT> return len(self) > 0 <NEW_LINE> <DEDENT> def can_analyze(self): <NEW_LINE> <INDENT> return any(cluster.can_analyze() for cluster in self) <NEW_LINE> <DEDENT> def can_autotag(self): <NEW_LINE> <INDENT> return len(self) > 0 <NEW_LINE> <DEDENT> def can_browser_lookup(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def lookup_metadata(self): <NEW_LINE> <INDENT> for cluster in self: <NEW_LINE> <INDENT> cluster.lookup_metadata()
A list of clusters.
6259907f5fdd1c0f98e5fa05
class Square: <NEW_LINE> <INDENT> def __init__(self, size=0): <NEW_LINE> <INDENT> self.__size = size <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return self.__size <NEW_LINE> <DEDENT> @size.setter <NEW_LINE> def size(self, value): <NEW_LINE> <INDENT> if type(value) is not int: <NEW_LINE> <INDENT> raise TypeError('size must be an integer') <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError('size must be >= 0') <NEW_LINE> <DEDENT> self.__size = value <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> return self.__size**2 <NEW_LINE> <DEDENT> def my_print(self): <NEW_LINE> <INDENT> if self.__size == 0: <NEW_LINE> <INDENT> print("") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for a in range(self.__size): <NEW_LINE> <INDENT> for b in range(self.__size): <NEW_LINE> <INDENT> print("#", end="") <NEW_LINE> <DEDENT> print()
Square defined by a validated size, with area() and my_print().
6259907f44b2445a339b76a0
class DynamicController(Controller): <NEW_LINE> <INDENT> def __init__(self, control_law, persistent_control=False): <NEW_LINE> <INDENT> self.persistent_control = persistent_control <NEW_LINE> super(DynamicController, self).__init__(control_law) <NEW_LINE> <DEDENT> def step(self, states, parameters): <NEW_LINE> <INDENT> if len(parameters) == 0 and not self.persistent_control: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.control_law.set_parameters(parameters) <NEW_LINE> return super(DynamicController, self).step(states)
A parameterized controller whose parameters may change over time
6259907f76e4537e8c3f1006
class Subscription(models.Model): <NEW_LINE> <INDENT> BILLING_CYCLES = ( ('one', 'One Time'), ('month', 'Monthly'), ('year', 'Yearly') ) <NEW_LINE> expires = models.DateTimeField() <NEW_LINE> school = models.OneToOneField(School, unique=True, related_name="subscription") <NEW_LINE> billing_cycle = models.CharField(choices=BILLING_CYCLES, max_length=5) <NEW_LINE> logger_access = models.BooleanField(blank=True, default=False) <NEW_LINE> max_student_ids = models.IntegerField() <NEW_LINE> max_kiosks = models.IntegerField() <NEW_LINE> max_logs = models.IntegerField() <NEW_LINE> student_rewards = models.BooleanField() <NEW_LINE> data_intel = models.BooleanField() <NEW_LINE> percentage_discount = models.FloatField() <NEW_LINE> numeric_discount = models.IntegerField() <NEW_LINE> @property <NEW_LINE> def current_student_ids(self): <NEW_LINE> <INDENT> return Student.objects.filter(school=self.school).count()
A model for paid subscriptions.
6259907f7b180e01f3e49da8
class CardState(Enum): <NEW_LINE> <INDENT> BACKLOG = 0 <NEW_LINE> IN_PROGRESS = 1 <NEW_LINE> DONE = 2 <NEW_LINE> def __str__(self) -> str: <NEW_LINE> <INDENT> if self == CardState.BACKLOG: <NEW_LINE> <INDENT> return "todo" <NEW_LINE> <DEDENT> elif self == CardState.IN_PROGRESS: <NEW_LINE> <INDENT> return "prgs" <NEW_LINE> <DEDENT> return "done"
Describes the state of a task. Can be Backlog, In Progress, or Done.
6259907f7047854f46340e3c