code: string, lengths 4 to 4.48k
docstring: string, lengths 1 to 6.45k
_id: string, length 24
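Each record that follows is one row of this table: a Python source snippet flattened onto a single line (the <NEW_LINE> / <INDENT> / <DEDENT> tokens stand in for line breaks and indentation changes), its docstring, and a 24-character object id. As a minimal sketch, the first record could be read as a plain dict like this (the code value is abbreviated here, and the newline substitution shown only handles the simplest token):

    row = {
        "code": "class ConfigDataContextFactory(DataContextFactory): <NEW_LINE> <INDENT> def __init__(self, config_file): ...",
        "docstring": ":type _config_file: str",
        "_id": "625990682c8b7c6e89bd4f9e",
    }
    # Rough decoding: restore line breaks; <INDENT>/<DEDENT> would still need to be
    # turned into real indentation to recover runnable source.
    source = row["code"].replace(" <NEW_LINE> ", "\n")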
class ConfigDataContextFactory(DataContextFactory): <NEW_LINE> <INDENT> def __init__(self, config_file): <NEW_LINE> <INDENT> self._config_file = config_file <NEW_LINE> <DEDENT> def create_data_context(self): <NEW_LINE> <INDENT> raise NotImplementedError
:type _config_file: str
625990682c8b7c6e89bd4f9e
class SettingsForm(forms.Form): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(SettingsForm, self).__init__(*args, **kwargs) <NEW_LINE> settings.use_editable() <NEW_LINE> for name in sorted(registry.keys()): <NEW_LINE> <INDENT> setting = registry[name] <NEW_LINE> if setting["editable"]: <NEW_LINE> <INDENT> field_class = FIELD_TYPES.get(setting["type"], forms.CharField) <NEW_LINE> kwargs = { "label": setting["label"] + ":", "required": setting["type"] == int, "initial": getattr(settings, name), "help_text": self.format_help(setting["description"]), } <NEW_LINE> if setting["choices"]: <NEW_LINE> <INDENT> field_class = forms.ChoiceField <NEW_LINE> kwargs["choices"] = setting["choices"] <NEW_LINE> <DEDENT> self.fields[name] = field_class(**kwargs) <NEW_LINE> css_class = field_class.__name__.lower() <NEW_LINE> self.fields[name].widget.attrs["class"] = css_class <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> fields = list(super(SettingsForm, self).__iter__()) <NEW_LINE> group = lambda field: field.name.split("_", 1)[0].title() <NEW_LINE> misc = _("Miscellaneous") <NEW_LINE> groups = defaultdict(int) <NEW_LINE> for field in fields: <NEW_LINE> <INDENT> groups[group(field)] += 1 <NEW_LINE> <DEDENT> for (i, field) in enumerate(fields): <NEW_LINE> <INDENT> setattr(fields[i], "group", group(field)) <NEW_LINE> if groups[fields[i].group] == 1: <NEW_LINE> <INDENT> fields[i].group = misc <NEW_LINE> <DEDENT> <DEDENT> return iter(sorted(fields, key=lambda x: x.group != misc or x.group)) <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> for (name, value) in self.cleaned_data.items(): <NEW_LINE> <INDENT> setting_obj, created = Setting.objects.get_or_create(name=name) <NEW_LINE> setting_obj.value = value <NEW_LINE> setting_obj.save() <NEW_LINE> <DEDENT> <DEDENT> def format_help(self, description): <NEW_LINE> <INDENT> for bold in ("``", "*"): <NEW_LINE> <INDENT> parts = [] <NEW_LINE> for i, s in enumerate(description.split(bold)): <NEW_LINE> <INDENT> parts.append(s if i % 2 == 0 else "<b>%s</b>" % s) <NEW_LINE> <DEDENT> description = "".join(parts) <NEW_LINE> <DEDENT> return mark_safe(urlize(description).replace("\n", "<br>"))
Form for settings - creates a field for each setting in ``mezzanine.conf`` that is marked as editable.
62599068e76e3b2f99fda1b9
class SetClientInfo_result: <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.STRUCT, 'success', (TSetClientInfoResp, TSetClientInfoResp.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.success = TSetClientInfoResp() <NEW_LINE> self.success.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('SetClientInfo_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.STRUCT, 0) <NEW_LINE> self.success.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.success) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - success
62599068cc40096d6161adbc
class AutoRegressiveModel: <NEW_LINE> <INDENT> def __init__(self, regression_level, epochs): <NEW_LINE> <INDENT> self.epochs = epochs <NEW_LINE> self.current_step = 0 <NEW_LINE> self.regression_level = regression_level <NEW_LINE> num_scale_nets = 2 <NEW_LINE> g_scale_fms = [[8, 16, 8, C], [8, 16, 8, C]] <NEW_LINE> g_scale_k_sizes = [[3 ,3 ,3 ,3], [5, 3, 3, 5]] <NEW_LINE> self.generator = GeneratorNetwork(INPUT_IMG_HEIGHT, INPUT_IMG_WIDTH, num_scale_nets, g_scale_fms, g_scale_k_sizes) <NEW_LINE> d_scale_fms = [[8], [8, 8, 16]] <NEW_LINE> d_scale_k_sizes = [[3], [5, 5, 5]] <NEW_LINE> d_scale_fc_sizes = [[32, 16, 1], [32, 16, 1]] <NEW_LINE> self.disriminator = DisriminatorNetwork(INPUT_IMG_HEIGHT, INPUT_IMG_WIDTH, num_scale_nets, d_scale_fms, d_scale_k_sizes, d_scale_fc_sizes) <NEW_LINE> <DEDENT> def train_ar_model(self, X_frames, batch_size): <NEW_LINE> <INDENT> d_input = X_frames[0:batch_size,:,:,0:NUM_PREV_FRAMES] <NEW_LINE> d_ground_t_seq = X_frames[0:batch_size,:,:,NUM_PREV_FRAMES:] <NEW_LINE> g_input = X_frames[batch_size:,:,:,0:NUM_PREV_FRAMES] <NEW_LINE> g_ground_t_seq = X_frames[batch_size:,:,:,NUM_PREV_FRAMES:] <NEW_LINE> for i in range(self.epochs): <NEW_LINE> <INDENT> self.disriminator.train(d_input, d_ground_t_seq, self.generator) <NEW_LINE> self.generator.train_model(g_input, g_ground_t_seq) <NEW_LINE> <DEDENT> <DEDENT> def test(self, test_frames, sample_size): <NEW_LINE> <INDENT> return self.generator.test_model(test_frames, sample_size)
Use previously trained subnetworks to predict more than one time step ahead, in an autoregressive manner
62599068aad79263cf42ff6e
class InheritanceTests: <NEW_LINE> <INDENT> subclasses = [] <NEW_LINE> superclasses = [] <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> assert self.subclasses or self.superclasses, self.__class__ <NEW_LINE> self.__test = getattr(abc, self.__class__.__name__) <NEW_LINE> <DEDENT> def test_subclasses(self): <NEW_LINE> <INDENT> for subclass in self.subclasses: <NEW_LINE> <INDENT> self.assertTrue(issubclass(subclass, self.__test), "{0} is not a subclass of {1}".format(subclass, self.__test)) <NEW_LINE> <DEDENT> <DEDENT> def test_superclasses(self): <NEW_LINE> <INDENT> for superclass in self.superclasses: <NEW_LINE> <INDENT> self.assertTrue(issubclass(self.__test, superclass), "{0} is not a superclass of {1}".format(superclass, self.__test))
Test that the specified class is a subclass/superclass of the expected classes.
625990684a966d76dd5f06ad
class DingBotMessageService: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> @service_template <NEW_LINE> async def send_text(token: str, msg: str): <NEW_LINE> <INDENT> if not token or not msg: <NEW_LINE> <INDENT> raise ValueError(f"参数非法,token:{token},msg:{msg}") <NEW_LINE> <DEDENT> await ding_bot_client.send_text(token, msg)
Sends messages via a DingTalk custom bot, with exception handling
625990687b180e01f3e49c40
class HasRevisions(ABCMixin): <NEW_LINE> <INDENT> @property <NEW_LINE> def versioned_relationships(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> @property <NEW_LINE> def propagated_attributes(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> @property <NEW_LINE> def should_suppress_transaction_creation(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> API_OBJECT_NAME = abc.abstractproperty() <NEW_LINE> def has_versioned_changes(self): <NEW_LINE> <INDENT> obj_state = inspect(self) <NEW_LINE> versioned_attribute_names = list(self.versioned_relationships) <NEW_LINE> for mapper in obj_state.mapper.iterate_to_root(): <NEW_LINE> <INDENT> for attr in mapper.column_attrs: <NEW_LINE> <INDENT> versioned_attribute_names.append(attr.key) <NEW_LINE> <DEDENT> <DEDENT> for attr_name in versioned_attribute_names: <NEW_LINE> <INDENT> if getattr(obj_state.attrs, attr_name).history.has_changes(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False
Mixin for tables that should be versioned in the transaction log.
62599068d6c5a102081e38e0
class EditProfileForm(forms.ModelForm): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(EditProfileForm, self).__init__(*args, **kwargs) <NEW_LINE> self.fields["First Name"] = forms.CharField(initial=self.instance.user.first_name) <NEW_LINE> self.fields["Last Name"] = forms.CharField(initial=self.instance.user.last_name) <NEW_LINE> del self.fields["user"] <NEW_LINE> for field in self.fields: <NEW_LINE> <INDENT> self.fields[field].required = False <NEW_LINE> <DEDENT> <DEDENT> class Meta: <NEW_LINE> <INDENT> model = ProfileCfn <NEW_LINE> exclude = ['follows', 'avatar_url', 'github_url']
Form to edit a user's profile.
62599068009cb60464d02cf2
class NetworkService(model_base.BASEV2): <NEW_LINE> <INDENT> id = sa.Column(sa.String(36), primary_key=True, nullable=False) <NEW_LINE> vnfm_id = sa.Column(sa.String(4000),nullable=False) <NEW_LINE> vdus = sa.Column(sa.String(4000), nullable=False) <NEW_LINE> networks = sa.Column(sa.String(4000), nullable=False) <NEW_LINE> subnets = sa.Column(sa.String(4000), nullable=False) <NEW_LINE> router = sa.Column(sa.String(4000), nullable=False) <NEW_LINE> service_type = sa.Column(sa.String(36), nullable=False) <NEW_LINE> status = sa.Column(sa.String(36), nullable=False)
Represents Network service details
625990687d847024c075db92
class ColorStream( Stream ): <NEW_LINE> <INDENT> _colors = 'black', 'red', 'green', 'yellow', 'blue', 'purple', 'cyan', 'white' <NEW_LINE> def __init__( self, color, bold=False ): <NEW_LINE> <INDENT> colorid = self._colors.index( color ) <NEW_LINE> boldid = 1 if bold else 0 <NEW_LINE> self.fmt = '\033[%d;3%dm%%s\033[0m' % ( boldid, colorid ) <NEW_LINE> <DEDENT> def flush( self ): <NEW_LINE> <INDENT> sys.stdout.flush() <NEW_LINE> <DEDENT> def write( self, text ): <NEW_LINE> <INDENT> sys.stdout.write( self.fmt % text )
Wraps all text in unix terminal escape sequences to select color and weight.
625990684f88993c371f10fb
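As an illustrative sketch (assuming the ColorStream class above is defined in a module that imports sys), colored output on an ANSI-capable terminal looks like:

    import sys  # ColorStream writes through sys.stdout

    err = ColorStream('red', bold=True)   # builds the format string '\033[1;31m%s\033[0m'
    err.write('something went wrong\n')
    err.flush()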
class Testing2(ModuleBase): <NEW_LINE> <INDENT> def output_usage(self): <NEW_LINE> <INDENT> print("This is the usage\n")
Some dummy module
625990683539df3088ecda58
class FactsCollectionPhase(Phase): <NEW_LINE> <INDENT> name = 'FactsCollection' <NEW_LINE> filter = TagFilter(FactsPhaseTag) <NEW_LINE> policies = Policies(Policies.Errors.FailPhase, Policies.Retry.Phase) <NEW_LINE> flags = Flags()
Get information (facts) about the system (e.g. installed packages, configuration, ...). No decisions should be made in this phase. Scan the system to get the information you need and provide it to other actors in the following phases.
6259906897e22403b383c6c6
class Comment(Content): <NEW_LINE> <INDENT> implements(IComment) <NEW_LINE> id = Column(Integer, ForeignKey('contents.id'), primary_key=True) <NEW_LINE> message = Column('message', Unicode(256)) <NEW_LINE> creator = Column('creator', Unicode(256)) <NEW_LINE> type_info = Content.type_info.copy( name=u'Comment', title=_(u'Comment'), add_view=u'add_comment', addable_to=[u'Discussion'], ) <NEW_LINE> def __init__(self, message=u'', creator=u'', **kwargs): <NEW_LINE> <INDENT> super(Comment, self).__init__(**kwargs) <NEW_LINE> self.message = message
This is your content type.
625990685fc7496912d48e44
class Game: <NEW_LINE> <INDENT> def __init__(self, game_date, home_team, away_team): <NEW_LINE> <INDENT> self.game_date = game_date <NEW_LINE> self.home_team = home_team <NEW_LINE> self.away_team = away_team <NEW_LINE> self.plays = {} <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{game_date: ' + self.game_date + ', home_team: ' + self.home_team + ', away_team: ' + self.away_team + '}'
A game between two teams on a date
625990696e29344779b01e0b
class Task(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.TaskId = None <NEW_LINE> self.TaskName = None <NEW_LINE> self.MigrationType = None <NEW_LINE> self.Status = None <NEW_LINE> self.ProjectId = None <NEW_LINE> self.ProjectName = None <NEW_LINE> self.SrcInfo = None <NEW_LINE> self.MigrationTimeLine = None <NEW_LINE> self.Updated = None <NEW_LINE> self.DstInfo = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.TaskId = params.get("TaskId") <NEW_LINE> self.TaskName = params.get("TaskName") <NEW_LINE> self.MigrationType = params.get("MigrationType") <NEW_LINE> self.Status = params.get("Status") <NEW_LINE> self.ProjectId = params.get("ProjectId") <NEW_LINE> self.ProjectName = params.get("ProjectName") <NEW_LINE> if params.get("SrcInfo") is not None: <NEW_LINE> <INDENT> self.SrcInfo = SrcInfo() <NEW_LINE> self.SrcInfo._deserialize(params.get("SrcInfo")) <NEW_LINE> <DEDENT> if params.get("MigrationTimeLine") is not None: <NEW_LINE> <INDENT> self.MigrationTimeLine = TimeObj() <NEW_LINE> self.MigrationTimeLine._deserialize(params.get("MigrationTimeLine")) <NEW_LINE> <DEDENT> self.Updated = params.get("Updated") <NEW_LINE> if params.get("DstInfo") is not None: <NEW_LINE> <INDENT> self.DstInfo = DstInfo() <NEW_LINE> self.DstInfo._deserialize(params.get("DstInfo"))
Migration task category
62599069442bda511e95d935
class TokenBucket: <NEW_LINE> <INDENT> def __init__(self, rate, capacity): <NEW_LINE> <INDENT> self._rate = rate <NEW_LINE> self._capacity = capacity <NEW_LINE> self._current_amount = 0 <NEW_LINE> self._last_consume_time = int(time.time()) <NEW_LINE> <DEDENT> def consume(self, token_amount): <NEW_LINE> <INDENT> increment = (int(time.time()) - self._last_consume_time) * self._rate <NEW_LINE> self._current_amount = min(increment + self._current_amount, self._capacity) <NEW_LINE> if token_amount > self._current_amount: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> self._last_consume_time = int(time.time()) <NEW_LINE> self._current_amount -= token_amount <NEW_LINE> return True
Token bucket
625990694e4d562566373bc1
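A minimal usage sketch for the TokenBucket above (assuming the class is defined in a module that imports time): rate is the number of tokens refilled per second and capacity is the burst limit.

    import time

    bucket = TokenBucket(rate=10, capacity=100)
    time.sleep(1)                  # let roughly 10 tokens accumulate
    if bucket.consume(5):          # True when enough tokens are available
        print("request allowed")
    else:
        print("request throttled")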
class DefaultResponseManager(AbstractJSONResponseManager): <NEW_LINE> <INDENT> def get_error_message(self, json_error_response: Any) -> Optional[str]: <NEW_LINE> <INDENT> return self.get_default_error_message(json_error_response) <NEW_LINE> <DEDENT> def get_error_response_schema(self) -> Optional[Any]: <NEW_LINE> <INDENT> return self.get_default_error_response_schema() <NEW_LINE> <DEDENT> def get_response_schema(self) -> Any: <NEW_LINE> <INDENT> return self.get_default_success_response_schema() <NEW_LINE> <DEDENT> def get_response_type(self) -> Type: <NEW_LINE> <INDENT> return DefaultSuccessResponse
JSON response manager for REST endpoints (PUT) returning a generic success response
6259906932920d7e50bc7800
class CobroCliente(ObjetoBase): <NEW_LINE> <INDENT> def __init__(self,numero,factura,tipo,importe): <NEW_LINE> <INDENT> self.numero=numero <NEW_LINE> self.id_factura=factura <NEW_LINE> self.tipo=tipo <NEW_LINE> self.importe=importe <NEW_LINE> self.nota_credito=None <NEW_LINE> <DEDENT> def setNC(self,nro_nota): <NEW_LINE> <INDENT> self.nota_credito=nro_nota <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def obtenerNumero(cls,sesion): <NEW_LINE> <INDENT> return sesion.query(cls).count()+1 <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def getTotalNC(cls,sesion,notac): <NEW_LINE> <INDENT> query = sesion.query(CobroCliente).filter(CobroCliente.nota_credito == notac) <NEW_LINE> total = 0 <NEW_LINE> for value in query: <NEW_LINE> <INDENT> total += value.importe <NEW_LINE> <DEDENT> return total
Class that models the logic of charging the customer (Cobro al Cliente)
625990698e71fb1e983bd281
class RemoteObjectInterface(object): <NEW_LINE> <INDENT> removed = False <NEW_LINE> @abstractmethod <NEW_LINE> def _remove(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def remove(self): <NEW_LINE> <INDENT> if self.removed: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._remove() <NEW_LINE> self.removed = True <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> self.remove() <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.remove()
Handle to data on workers of a |WorkerPool|. See documentation of :class:`WorkerPoolInterface` for usage of these handles in conjunction with :meth:`~WorkerPoolInterface.apply`, :meth:`~WorkerPoolInterface.scatter_array`, :meth:`~WorkerPoolInterface.scatter_list`. Remote objects can be used as a context manager: when leaving the context, the remote object's :meth:`~RemoteObjectInterface.remove` method is called to ensure proper cleanup of remote resources. Attributes ---------- removed `True`, if :meth:`RemoteObjectInterface.remove` has been called.
62599069f548e778e596cd46
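As the docstring notes, such handles are meant to be used as context managers so that remote resources are cleaned up when the block exits. A hypothetical sketch of a concrete subclass (the class name and path below are illustrative, not part of the pool implementation):

    class TempFileHandle(RemoteObjectInterface):
        """Hypothetical handle that removes a scratch file on the workers."""

        def __init__(self, path):
            self.path = path

        def _remove(self):
            print("removing", self.path, "on the workers")

    with TempFileHandle("/tmp/scratch.dat") as handle:
        pass  # use the handle here
    # leaving the with-block calls remove(); handle.removed is now True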
class Document(object): <NEW_LINE> <INDENT> def __init__(self, meta=None, body=None, uri=None, lang=None, basefile=None, version=None): <NEW_LINE> <INDENT> if meta is None: <NEW_LINE> <INDENT> self.meta = Graph() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.meta = meta <NEW_LINE> <DEDENT> if body is None: <NEW_LINE> <INDENT> self.body = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.body = body <NEW_LINE> <DEDENT> self.uri = uri <NEW_LINE> self.lang = lang <NEW_LINE> self.basefile = basefile <NEW_LINE> self.version = version <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__
A document represents the content of a document together with an RDF graph containing metadata about the document. Don't create instances of :class:`~ferenda.Document` directly. Create them through :meth:`~ferenda.DocumentRepository.make_document` in order to properly initialize the ``meta`` property. :param meta: An RDF graph containing metadata about the document :param body: A list of :mod:`ferenda.elements` based objects representing the content of the document :param uri: The canonical URI for this document :param lang: The main language of the document as an IETF language tag, i.e. "sv" or "en-GB" :param basefile: The basefile of the document
62599069796e427e5384ff31
class SonyProjector(SwitchDevice): <NEW_LINE> <INDENT> def __init__(self, sdcp_connection, name): <NEW_LINE> <INDENT> self._sdcp = sdcp_connection <NEW_LINE> self._name = name <NEW_LINE> self._state = None <NEW_LINE> self._available = False <NEW_LINE> self._attributes = {} <NEW_LINE> <DEDENT> @property <NEW_LINE> def available(self): <NEW_LINE> <INDENT> return self._available <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> @property <NEW_LINE> def state_attributes(self): <NEW_LINE> <INDENT> return self._attributes <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._state = self._sdcp.get_power() <NEW_LINE> self._available = True <NEW_LINE> <DEDENT> except ConnectionRefusedError: <NEW_LINE> <INDENT> _LOGGER.error("Projector connection refused") <NEW_LINE> self._available = False <NEW_LINE> <DEDENT> <DEDENT> def turn_on(self, **kwargs): <NEW_LINE> <INDENT> _LOGGER.debug("Powering on projector '%s'...", self.name) <NEW_LINE> if self._sdcp.set_power(True): <NEW_LINE> <INDENT> _LOGGER.debug("Powered on successfully.") <NEW_LINE> self._state = STATE_ON <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _LOGGER.error("Power on command was not successful") <NEW_LINE> <DEDENT> <DEDENT> def turn_off(self, **kwargs): <NEW_LINE> <INDENT> _LOGGER.debug("Powering off projector '%s'...", self.name) <NEW_LINE> if self._sdcp.set_power(False): <NEW_LINE> <INDENT> _LOGGER.debug("Powered off successfully.") <NEW_LINE> self._state = STATE_OFF <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _LOGGER.error("Power off command was not successful")
Represents a Sony Projector as a switch.
62599069009cb60464d02cf4
class Spectrum(metaclass=abc.ABCMeta): <NEW_LINE> <INDENT> wavenumber_resolution = None <NEW_LINE> max_mode_order = None <NEW_LINE> _required_attributes = ["wavenumber_resolution", "max_mode_order"] <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._sizes = np.zeros(3) <NEW_LINE> required_attributes = list() <NEW_LINE> for cls in reversed(self.__class__.__mro__): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> required_attributes += cls._required_attributes <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> missing = list() <NEW_LINE> for attr in set(required_attributes): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> setattr(self, attr, kwargs.pop(attr)) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> missing.append(attr) <NEW_LINE> <DEDENT> <DEDENT> if missing: <NEW_LINE> <INDENT> raise ValueError("Missing parameters: " + str(set(missing))) <NEW_LINE> <DEDENT> if kwargs: <NEW_LINE> <INDENT> raise ValueError("Unknown parameters: " + str(kwargs.keys())) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def modes(self): <NEW_LINE> <INDENT> return np.arange(0, self.max_mode_order) <NEW_LINE> <DEDENT> @property <NEW_LINE> def wavenumber(self): <NEW_LINE> <INDENT> return self.wavenumber_resolution * self.modes <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def mode_amplitude(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def spectral_density(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @abc.abstractmethod <NEW_LINE> def spectral_density_function(): <NEW_LINE> <INDENT> pass
Abstract turbulence spectrum.
6259906901c39578d7f14312
class CommentParserError(Exception): <NEW_LINE> <INDENT> def __init__(self, token, comment_block, message): <NEW_LINE> <INDENT> self.token = token <NEW_LINE> self.comment_block = comment_block <NEW_LINE> self.message = f"Error parsing the comment block \n {comment_block} from the token \n {token}. With the error message {message}" <NEW_LINE> super().__init__(self.message)
Custom class for a comment parser error.
625990694f88993c371f10fc
@provider(IFormFieldProvider) <NEW_LINE> class IGoogleNews(model.Schema): <NEW_LINE> <INDENT> model.fieldset( 'google-news', label=_(u'Google News'), fields=['standout_journalism', 'news_keywords'], ) <NEW_LINE> standout_journalism = schema.Bool( title=_(u'Standout Journalism'), description=_( u'help_standout_journalism', default=u'Used to indicate this is a big story, or an extraordinary work of journalism. ' u'You can mark as standout no more than seven news articles in the past calendar week. ' u'Implements Google News <code>standout</code> metatag.', ), required=False, ) <NEW_LINE> news_keywords = schema.Tuple( title=_(u'Keywords'), description=_( u'help_news_keywords', default=u'Used to specify keywords that are relevant to this news article. ' u'Add one phrase or keyword on each line. ' u'Implements Google News <code>news_keywords</code> metatag.', ), value_type=schema.TextLine(), required=False, ) <NEW_LINE> @invariant <NEW_LINE> def validate_standout_journalism(data): <NEW_LINE> <INDENT> context = data.__context__ <NEW_LINE> if context is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not data.standout_journalism: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if api.content.get_state(context) != 'published': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> results = get_current_standout_journalism() <NEW_LINE> assert len(results) <= 7 <NEW_LINE> results = [o for o in results if o != context] <NEW_LINE> if len(results) == 7: <NEW_LINE> <INDENT> raise Invalid(_( u"Can't mark this item as standout. " u'There are already 7 items marked in the past calendar week.' ))
Behavior interface to add some Google News features.
62599069f7d966606f749498
class A4(A3, B1): <NEW_LINE> <INDENT> def __init__(self, x0: int, y1: float = 1.): <NEW_LINE> <INDENT> A3.__init__(self, x0, y1=y1) <NEW_LINE> B1.__init__(self, x0)
This is an empty class that inherits from A3 and B1
6259906976e4537e8c3f0d3e
class JoinRequest(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(User, related_name='join_requests') <NEW_LINE> group = models.ForeignKey(Group, related_name='join_requests') <NEW_LINE> date = models.DateTimeField(auto_now_add=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = ('user', 'group') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return '#%s' % self.pk <NEW_LINE> <DEDENT> def send_creation_email(self, site): <NEW_LINE> <INDENT> context = { 'request': self, 'site': site } <NEW_LINE> to = self.group.get_emails(role=Roles.GROUP_ADMIN) <NEW_LINE> template = 'users/created_join_request.email' <NEW_LINE> send_email_template(template=template, context=context, to=to) <NEW_LINE> <DEDENT> def send_approval_email(self, site): <NEW_LINE> <INDENT> context = { 'request': self, 'site': site } <NEW_LINE> template = 'users/accepted_join_request.email' <NEW_LINE> to = self.user.email <NEW_LINE> send_email_template(template=template, context=context, to=to) <NEW_LINE> <DEDENT> def send_rejection_email(self, site): <NEW_LINE> <INDENT> context = { 'request': self, 'site': site } <NEW_LINE> template = 'users/rejected_join_request.email' <NEW_LINE> to = self.user.email <NEW_LINE> send_email_template(template=template, context=context, to=to) <NEW_LINE> <DEDENT> def accept(self, roles=[]): <NEW_LINE> <INDENT> roles_kwargs = dict(('is_%s' % role, True) for role in roles) <NEW_LINE> Roles.objects.create(user=self.user, group=self.group, **roles_kwargs) <NEW_LINE> self.delete() <NEW_LINE> <DEDENT> def reject(self): <NEW_LINE> <INDENT> self.delete()
This model's purpose is to store data temporarily for joining a group. A user can request to join a group in two main scenarios: 1. During registration: the user chooses the group(s). 2. As a registered user: they can request to join a group(s). Once the request is created, the group's admins have to review it, accepting or rejecting it.
62599069ac7a0e7691f73ca1
class TestInlineResponse20027(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return InlineResponse20027( items = [ xepmts_staging.models.xenon1t_muveto_pmt.Xenon1tMuvetoPmt( serial_number = '0', manufacturer = '0', location = '0', datasheet = '0', _id = '0', ) ] ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return InlineResponse20027( ) <NEW_LINE> <DEDENT> <DEDENT> def testInlineResponse20027(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True)
InlineResponse20027 unit test stubs
625990692ae34c7f260ac8a3
class _copyto: <NEW_LINE> <INDENT> __qualname__ = 'copyto' <NEW_LINE> def __new__(cls, inst): <NEW_LINE> <INDENT> return tuple.__new__(type(inst), inst)
U.copy() -> a shallow copy of U
62599069627d3e7fe0e08644
class SourcesTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.new_source = Sources('newsbyelkwal', 'My News', 'get the latest updates', 'https://google.com', 'general', 'kenya') <NEW_LINE> <DEDENT> def test_instance(self): <NEW_LINE> <INDENT> self.assertTrue(isinstance(self.new_source, Source)) <NEW_LINE> <DEDENT> def test_to_check_instance_variables(self): <NEW_LINE> <INDENT> self.assertEquals(self.new_source.id, 'newsbyelkwal') <NEW_LINE> self.assertEquals(self.new_source.name, 'My News') <NEW_LINE> self.assertEquals(self.new_source.description, 'get the latest updates') <NEW_LINE> self.assertEquals(self.new_source.url, 'https://google.com') <NEW_LINE> self.assertEquals(self.new_source.category, 'general') <NEW_LINE> self.assertEquals(self.new_source.country, 'kenya')
Test case to test the behavior of the Sources class
62599069d486a94d0ba2d77a
class FileTransferConfigSchema(jsl.Document): <NEW_LINE> <INDENT> local_data_dir = jsl.StringField(pattern=schemautil.StringPatterns.absOrRelativePathPatternOrEmpty, required=True) <NEW_LINE> site_mapping_push = jsl.ArrayField(items=jsl.DocumentField(_SiteMappingSchema), unique_items=True, required=True) <NEW_LINE> site_mapping_fetch = jsl.ArrayField(items=jsl.DocumentField(_SiteMappingSchema), unique_items=True, required=True) <NEW_LINE> remote_site_config = jsl.DictField(additional_properties=jsl.DocumentField(_RemoteSiteConfigSchema), required=True) <NEW_LINE> default_site = jsl.OneOfField([jsl.DocumentField(schemautil.filetransfer.FileTransferSiteLocal), jsl.DocumentField(schemautil.filetransfer.FileTransferSiteRemote)], required=True) <NEW_LINE> quiet = jsl.BooleanField(required=True) <NEW_LINE> local_fetch_option = jsl.StringField(enum=_LOCAL_FETCH_OPTIONS, required=True)
Schema for FileTransfer's config. Notice that push and fetch mappings are separate. This can be useful when we have a read-only (thus safe) mapping for fetch, and don't use a mapping at all when pushing
62599069435de62698e9d5c6
class EnterpriseApiClient(JwtLmsApiClient): <NEW_LINE> <INDENT> API_BASE_URL = settings.LMS_INTERNAL_ROOT_URL + '/enterprise/api/v1/' <NEW_LINE> APPEND_SLASH = True <NEW_LINE> ENTERPRISE_CUSTOMER_ENDPOINT = 'enterprise-customer' <NEW_LINE> ENTERPRISE_CUSTOMER_CATALOGS_ENDPOINT = 'enterprise_catalogs' <NEW_LINE> DEFAULT_VALUE_SAFEGUARD = object() <NEW_LINE> def get_content_metadata(self, enterprise_customer): <NEW_LINE> <INDENT> content_metadata = OrderedDict() <NEW_LINE> if enterprise_customer.catalog: <NEW_LINE> <INDENT> response = self._load_data( self.ENTERPRISE_CUSTOMER_ENDPOINT, detail_resource='courses', resource_id=str(enterprise_customer.uuid), traverse_pagination=True, ) <NEW_LINE> for course in response['results']: <NEW_LINE> <INDENT> for course_run in course['course_runs']: <NEW_LINE> <INDENT> course_run['content_type'] = 'courserun' <NEW_LINE> content_metadata[course_run['key']] = course_run <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for enterprise_customer_catalog in enterprise_customer.enterprise_customer_catalogs.all(): <NEW_LINE> <INDENT> response = self._load_data( self.ENTERPRISE_CUSTOMER_CATALOGS_ENDPOINT, resource_id=str(enterprise_customer_catalog.uuid), traverse_pagination=True, querystring={'page_size': 1000}, ) <NEW_LINE> for item in response['results']: <NEW_LINE> <INDENT> content_id = utils.get_content_metadata_item_id(item) <NEW_LINE> content_metadata[content_id] = item <NEW_LINE> <DEDENT> <DEDENT> return content_metadata.values() <NEW_LINE> <DEDENT> @JwtLmsApiClient.refresh_token <NEW_LINE> def _load_data( self, resource, detail_resource=None, resource_id=None, querystring=None, traverse_pagination=False, default=DEFAULT_VALUE_SAFEGUARD, ): <NEW_LINE> <INDENT> default_val = default if default != self.DEFAULT_VALUE_SAFEGUARD else {} <NEW_LINE> querystring = querystring if querystring else {} <NEW_LINE> cache_key = utils.get_cache_key( resource=resource, querystring=querystring, traverse_pagination=traverse_pagination, resource_id=resource_id ) <NEW_LINE> response = cache.get(cache_key) <NEW_LINE> if not response: <NEW_LINE> <INDENT> endpoint = getattr(self.client, resource)(resource_id) <NEW_LINE> endpoint = getattr(endpoint, detail_resource) if detail_resource else endpoint <NEW_LINE> response = endpoint.get(**querystring) <NEW_LINE> if traverse_pagination: <NEW_LINE> <INDENT> results = utils.traverse_pagination(response, endpoint) <NEW_LINE> response = { 'count': len(results), 'next': 'None', 'previous': 'None', 'results': results, } <NEW_LINE> <DEDENT> if response: <NEW_LINE> <INDENT> cache.set(cache_key, response, settings.ENTERPRISE_API_CACHE_TIMEOUT) <NEW_LINE> <DEDENT> <DEDENT> return response or default_val
Object builds an API client to make calls to the Enterprise API.
625990697d847024c075db95
class SnakeoilCACertificatePlugin(cert_manager.CertificatePluginBase): <NEW_LINE> <INDENT> def __init__(self, conf=CONF): <NEW_LINE> <INDENT> self.ca = SnakeoilCA(conf.snakeoil_ca_plugin.ca_cert_path, conf.snakeoil_ca_plugin.ca_cert_key_path) <NEW_LINE> self.cert_manager = CertManager(self.ca) <NEW_LINE> <DEDENT> def get_default_ca_name(self): <NEW_LINE> <INDENT> return "Snakeoil CA" <NEW_LINE> <DEDENT> def get_default_signing_cert(self): <NEW_LINE> <INDENT> return crypto.dump_certificate(crypto.FILETYPE_PEM, self.ca.cert) <NEW_LINE> <DEDENT> def get_default_intermediates(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def supported_request_types(self): <NEW_LINE> <INDENT> return [cert_manager.CertificateRequestType.CUSTOM_REQUEST, cert_manager.CertificateRequestType.STORED_KEY_REQUEST] <NEW_LINE> <DEDENT> def issue_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): <NEW_LINE> <INDENT> if barbican_meta_dto.generated_csr is not None: <NEW_LINE> <INDENT> encoded_csr = barbican_meta_dto.generated_csr <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> encoded_csr = order_meta['request_data'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return cert_manager.ResultDTO( cert_manager.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, status_message=u._("No request_data specified")) <NEW_LINE> <DEDENT> <DEDENT> csr = crypto.load_certificate_request(crypto.FILETYPE_PEM, encoded_csr) <NEW_LINE> cert = self.cert_manager.make_certificate(csr) <NEW_LINE> cert_enc = crypto.dump_certificate(crypto.FILETYPE_PEM, cert) <NEW_LINE> ca_enc = crypto.dump_certificate(crypto.FILETYPE_PEM, self.ca.cert) <NEW_LINE> return cert_manager.ResultDTO( cert_manager.CertificateStatus.CERTIFICATE_GENERATED, certificate=base64.b64encode(cert_enc), intermediates=base64.b64encode(ca_enc)) <NEW_LINE> <DEDENT> def modify_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def cancel_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def check_certificate_status(self, order_id, order_meta, plugin_meta, barbican_meta_dto): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def supports(self, certificate_spec): <NEW_LINE> <INDENT> request_type = certificate_spec.get( cert_manager.REQUEST_TYPE, cert_manager.CertificateRequestType.CUSTOM_REQUEST) <NEW_LINE> return request_type in self.supported_request_types()
Snakeoil CA certificate plugin. This is used for easily generating certificates which are not useful in a production environment.
625990695fcc89381b266d34
class FileFolderCopy(BaseFileFolderAction): <NEW_LINE> <INDENT> action_type = 'copy'
Copies a file or folder to a different location in the user’s Dropbox. If the source path is a folder all its content will be copied. If destination path doesn't exist it will be created.
625990693617ad0b5ee0790f
class Copy(object): <NEW_LINE> <INDENT> _filename = '' <NEW_LINE> _copy = {} <NEW_LINE> def __init__(self, filename, cell_wrapper_cls=None): <NEW_LINE> <INDENT> self._filename = filename <NEW_LINE> self._cell_wrapper_cls = cell_wrapper_cls or (lambda x: x) <NEW_LINE> self.load() <NEW_LINE> <DEDENT> def __getitem__(self, name): <NEW_LINE> <INDENT> if name not in self._copy: <NEW_LINE> <INDENT> return Error('COPY.%s [sheet does not exist]' % name) <NEW_LINE> <DEDENT> return self._copy[name] <NEW_LINE> <DEDENT> def load(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> book = load_workbook(self._filename, data_only=True) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> raise CopyException('"%s" does not exist. Have you run "fab update_copy"?' % self._filename) <NEW_LINE> <DEDENT> for sheet in book: <NEW_LINE> <INDENT> columns = [] <NEW_LINE> rows = [] <NEW_LINE> for i, row in enumerate(sheet.rows): <NEW_LINE> <INDENT> row_data = [c.internal_value for c in row] <NEW_LINE> if i == 0: <NEW_LINE> <INDENT> columns = row_data <NEW_LINE> <DEDENT> if all([c is None for c in row_data]): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> rows.append(dict(zip(columns, row_data))) <NEW_LINE> <DEDENT> self._copy[sheet.title] = Sheet(sheet.title, rows, columns, cell_wrapper_cls=self._cell_wrapper_cls) <NEW_LINE> <DEDENT> <DEDENT> def json(self): <NEW_LINE> <INDENT> import json <NEW_LINE> obj = {} <NEW_LINE> for name, sheet in self._copy.items(): <NEW_LINE> <INDENT> if 'key' in sheet._columns: <NEW_LINE> <INDENT> obj[name] = {} <NEW_LINE> for row in sheet: <NEW_LINE> <INDENT> obj[name][row['key']] = row['value'] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> obj[name] = [] <NEW_LINE> for row in sheet: <NEW_LINE> <INDENT> obj[name].append(row._row) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return json.dumps(obj)
Wraps copy text, for multiple worksheets, for error handling.
625990698a43f66fc4bf394d
class FlaskResourceful(object): <NEW_LINE> <INDENT> def init_app(self, app): <NEW_LINE> <INDENT> self.app = app <NEW_LINE> <DEDENT> def add_views(self, route, views): <NEW_LINE> <INDENT> for view in views: <NEW_LINE> <INDENT> self.add_view(route, view, [view._method]) <NEW_LINE> <DEDENT> <DEDENT> def add_view(self, route, view, methods): <NEW_LINE> <INDENT> view_func = view.as_view('{}{}'.format(route, methods)) <NEW_LINE> self.app.add_url_rule(route, view_func=view_func, methods=methods)
REST API framework.
625990697047854f46340b71
class ServersManipulator: <NEW_LINE> <INDENT> def __init__(self, servers_query): <NEW_LINE> <INDENT> self.servers_query = servers_query <NEW_LINE> <DEDENT> def get_servers_names(self): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for server in self.servers_query: <NEW_LINE> <INDENT> result.append(server.name) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def get_by_pkey(self, _key): <NEW_LINE> <INDENT> result = None <NEW_LINE> for obj in self.servers_query: <NEW_LINE> <INDENT> if obj.name == _key: <NEW_LINE> <INDENT> result = obj <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def get_columns(self): <NEW_LINE> <INDENT> result = Server().get_columns() <NEW_LINE> return result
Purpose: To manipulate the Servers data gathered in a DB query
625990692c8b7c6e89bd4fa2
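The manipulator only relies on each item of the query exposing a name attribute, so a minimal sketch with stand-in objects (the names are hypothetical) is shown below; get_columns() additionally needs the real Server model, so it is left out here.

    from types import SimpleNamespace

    servers = [SimpleNamespace(name="web-01"), SimpleNamespace(name="db-01")]
    manip = ServersManipulator(servers)

    print(manip.get_servers_names())     # ['web-01', 'db-01']
    print(manip.get_by_pkey("db-01"))    # the matching object
    print(manip.get_by_pkey("missing"))  # None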
class IdentityCertActionInvoker(certlib.BaseActionInvoker): <NEW_LINE> <INDENT> def _do_update(self): <NEW_LINE> <INDENT> action = IdentityUpdateAction() <NEW_LINE> return action.perform()
An object to update the identity certificate in the event the server deems it is about to expire. This is done to prevent the identity certificate from expiring thus disallowing connection to the server for updates.
625990695166f23b2e244b8d
class _MockCalledSubject(_MockAssertionConverter): <NEW_LINE> <INDENT> def __init__(self, actual): <NEW_LINE> <INDENT> super(_MockCalledSubject, self).__init__(actual) <NEW_LINE> self._Resolve() <NEW_LINE> <DEDENT> def Once(self): <NEW_LINE> <INDENT> with self._WrapMockAssertions(): <NEW_LINE> <INDENT> self._actual.assert_called_once() <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def Times(self, expected): <NEW_LINE> <INDENT> if self._actual.call_count != expected: <NEW_LINE> <INDENT> name = self._actual._mock_name or 'mock' <NEW_LINE> self._Fail( "Expected '{0}' to have been called {1} times. Called {2} times.\n" "All calls: {3}" .format(name, expected, self._actual.call_count, self._actual.mock_calls)) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def With(self, *args, **kwargs): <NEW_LINE> <INDENT> with self._WrapMockAssertions(): <NEW_LINE> <INDENT> self._actual.assert_any_call(*args, **kwargs) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def LastWith(self, *args, **kwargs): <NEW_LINE> <INDENT> with self._WrapMockAssertions(): <NEW_LINE> <INDENT> self._actual.assert_called_with(*args, **kwargs)
Subject for a mock already asserted [not] to have been called.
625990695fdd1c0f98e5f741
class elmundo(rss): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.doctype = "elmundo (www)" <NEW_LINE> self.rss_url = "http://estaticos.elmundo.es/elmundo/rss/portada.xml" <NEW_LINE> self.version = ".1" <NEW_LINE> self.date = datetime.datetime(year=2017, month=5, day=10) <NEW_LINE> <DEDENT> def parsehtml(self, htmlsource): <NEW_LINE> <INDENT> tree = fromstring(htmlsource) <NEW_LINE> try: <NEW_LINE> <INDENT> title = "".join(tree.xpath('//*[@class="js-headline"]/text()')).strip() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> title = "" <NEW_LINE> logger.warning("Could not parse article title") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> teaser = "\n".join( tree.xpath('//*[@class="subtitle-items"]//text()') ).strip() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> teaser = "" <NEW_LINE> logger.debug("Could not parse article teaser") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> author = ( "".join(tree.xpath('//*[@class="author-name"]//text()')) .strip() .replace("| ", "\n") ) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> author = "" <NEW_LINE> logger.debug("Could not parse article source") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> category = "".join(tree.xpath('//*[@class="first-level"]//text()')).strip() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> category = "" <NEW_LINE> logger.debug("Could not parse article category") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> text = "".join(tree.xpath('//*[@itemprop="articleBody"]//p/text()')).strip() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> logger.warning("Could not parse article text") <NEW_LINE> text = "" <NEW_LINE> <DEDENT> extractedinfo = { "title": title.strip(), "teaser": teaser.strip(), "author": author.strip(), "category": category.strip(), "text": polish(text).strip(), } <NEW_LINE> return extractedinfo
Scrapes elmundo
62599069f548e778e596cd47
class AccountsController(rest.RestController): <NEW_LINE> <INDENT> charges = ChargeController() <NEW_LINE> transfer = TransferMoneyController() <NEW_LINE> detail = DetailController() <NEW_LINE> @pecan.expose() <NEW_LINE> def _lookup(self, user_id, *remainder): <NEW_LINE> <INDENT> if remainder and not remainder[-1]: <NEW_LINE> <INDENT> remainder = remainder[:-1] <NEW_LINE> <DEDENT> if len(user_id) == 32: <NEW_LINE> <INDENT> return AccountController(user_id), remainder <NEW_LINE> <DEDENT> <DEDENT> @wsexpose(models.AdminAccounts, bool, int, int, wtypes.text) <NEW_LINE> def get_all(self, owed=None, limit=None, offset=None, duration=None): <NEW_LINE> <INDENT> check_policy(request.context, "account:all") <NEW_LINE> if limit and limit < 0: <NEW_LINE> <INDENT> raise exception.InvalidParameterValue(err="Invalid limit") <NEW_LINE> <DEDENT> if offset and offset < 0: <NEW_LINE> <INDENT> raise exception.InvalidParameterValue(err="Invalid offset") <NEW_LINE> <DEDENT> self.conn = pecan.request.db_conn <NEW_LINE> duration = gringutils.normalize_timedelta(duration) <NEW_LINE> if duration: <NEW_LINE> <INDENT> active_from = datetime.datetime.utcnow() - duration <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> active_from = None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> accounts = self.conn.get_accounts(request.context, owed=owed, limit=limit, offset=offset, active_from=active_from) <NEW_LINE> count = self.conn.get_accounts_count(request.context, owed=owed, active_from=active_from) <NEW_LINE> pecan.response.headers['X-Total-Count'] = str(count) <NEW_LINE> <DEDENT> except exception.NotAuthorized as e: <NEW_LINE> <INDENT> LOG.exception('Failed to get all accounts') <NEW_LINE> raise exception.NotAuthorized() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> LOG.exception('Failed to get all accounts') <NEW_LINE> raise exception.DBError(reason=e) <NEW_LINE> <DEDENT> accounts = [models.AdminAccount.from_db_model(account) for account in accounts] <NEW_LINE> return models.AdminAccounts(total_count=count, accounts=accounts) <NEW_LINE> <DEDENT> @wsexpose(None, body=models.AdminAccount) <NEW_LINE> def post(self, data): <NEW_LINE> <INDENT> check_policy(request.context, "account:post") <NEW_LINE> conn = pecan.request.db_conn <NEW_LINE> try: <NEW_LINE> <INDENT> account = db_models.Account(**data.as_dict()) <NEW_LINE> return conn.create_account(request.context, account) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> LOG.exception('Fail to create account: %s' % data.as_dict()) <NEW_LINE> raise exception.AccountCreateFailed(user_id=data.user_id, domain_id=data.domain_id)
Manages operations on the accounts collection.
625990693d592f4c4edbc69a
class DefaultException(Exception): <NEW_LINE> <INDENT> def __init__(self, msg=None): <NEW_LINE> <INDENT> if msg is None: <NEW_LINE> <INDENT> msg = self.default_msg <NEW_LINE> <DEDENT> super(DefaultException, self).__init__(msg)
Exceptions with a default error message.
62599069cc40096d6161adbe
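Subclasses are expected to provide a default_msg attribute, which is used whenever no message is passed in. A small sketch (the subclass and messages are made up; assumes DefaultException above is in scope):

    class ConfigMissingError(DefaultException):
        default_msg = "configuration file not found"

    print(ConfigMissingError())            # configuration file not found
    print(ConfigMissingError("bad path"))  # bad path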
class Map(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length = 100, unique = True) <NEW_LINE> root = models.ForeignKey(System, related_name="root") <NEW_LINE> explicitperms = models.BooleanField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> permissions = (("map_unrestricted", "Do not require excplicit access to maps."), ("map_admin", "Access map configuration."),) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __contains__(self, system): <NEW_LINE> <INDENT> if system is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.systems.filter(system=system).exists() <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for msys in self.systems.all(): <NEW_LINE> <INDENT> yield msys.system <NEW_LINE> <DEDENT> <DEDENT> def add_log(self, user, action, visible=False): <NEW_LINE> <INDENT> log = MapLog(user=user, map=self, action=action, timestamp=datetime.now(pytz.utc), visible=visible) <NEW_LINE> log.save() <NEW_LINE> <DEDENT> def get_permission(self, user): <NEW_LINE> <INDENT> if user.is_anonymous(): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> if user.has_perm('Map.map_admin'): <NEW_LINE> <INDENT> return 2 <NEW_LINE> <DEDENT> if user.has_perm('Map.map_unrestricted') and not self.explicitperms: <NEW_LINE> <INDENT> return 2 <NEW_LINE> <DEDENT> highestperm = 0 <NEW_LINE> groups = user.groups.all() <NEW_LINE> for perm in self.grouppermissions.filter(group__in=groups): <NEW_LINE> <INDENT> highestperm = max(highestperm, perm) <NEW_LINE> <DEDENT> return highestperm <NEW_LINE> <DEDENT> def add_system(self, user, system, friendlyname, parent=None): <NEW_LINE> <INDENT> mapsystem = MapSystem(map=self, system=system, friendlyname=friendlyname, parentsystem = parent) <NEW_LINE> mapsystem.save() <NEW_LINE> self.add_log(user, "Added system: %s" % system.name, True) <NEW_LINE> return mapsystem <NEW_LINE> <DEDENT> def as_json(self, user): <NEW_LINE> <INDENT> return utils.MapJSONGenerator(self, user).get_systems_json() <NEW_LINE> <DEDENT> def snapshot(self, user, name, description): <NEW_LINE> <INDENT> result = Snapshot(user=user, name=name, description=description, json=self.as_json(user)) <NEW_LINE> result.save() <NEW_LINE> self.add_log(user, "Created Snapshot: %s" % (name)) <NEW_LINE> return result
Stores the maps available in the map tool. root relates to System model.
62599069a219f33f346c7fc4
class ModelView(View): <NEW_LINE> <INDENT> model = None <NEW_LINE> name = None <NEW_LINE> edit = None <NEW_LINE> @admin_role_required <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> obj = self.model.objects.all() <NEW_LINE> return render(request, 'actions/model_view.html', {'objects': obj, 'name': self.name, 'edit': self.edit})
Base class to View model
625990693cc13d1c6d466f01
class MetricsStore: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._store = dict() <NEW_LINE> <DEDENT> def __contains__(self, item): <NEW_LINE> <INDENT> return item in self._store <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._store) <NEW_LINE> <DEDENT> def add(self, workload: str, device: str, scheduler: str, metrics: dict): <NEW_LINE> <INDENT> key = (workload, device, scheduler) <NEW_LINE> if key not in self._store: <NEW_LINE> <INDENT> self._store[key] = {'workload': workload, 'device': device, 'scheduler': scheduler, 'key': key, 'metrics': metrics} <NEW_LINE> <DEDENT> <DEDENT> def get(self, workload: str, device: str, scheduler: str): <NEW_LINE> <INDENT> key = (workload, device, scheduler) <NEW_LINE> if key not in self._store: <NEW_LINE> <INDENT> raise KeyError("Unable to find key: (%s, %s, %s)" % (workload, device, scheduler)) <NEW_LINE> <DEDENT> return self._store[key] <NEW_LINE> <DEDENT> def get_all(self, **kwargs): <NEW_LINE> <INDENT> workload = None <NEW_LINE> if 'workload' in kwargs: <NEW_LINE> <INDENT> workload = kwargs['workload'] <NEW_LINE> <DEDENT> device = None <NEW_LINE> if 'device' in kwargs: <NEW_LINE> <INDENT> device = kwargs['device'] <NEW_LINE> <DEDENT> scheduler = None <NEW_LINE> if 'scheduler' in kwargs: <NEW_LINE> <INDENT> scheduler = kwargs['scheduler'] <NEW_LINE> <DEDENT> items = [] <NEW_LINE> for key, value in self._store.items(): <NEW_LINE> <INDENT> if workload and key[0] != workload: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if device and key[1] != device: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if scheduler and key[2] != scheduler: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> items.append(value) <NEW_LINE> <DEDENT> return items
A datastore for saving / retrieving metrics.
625990691f5feb6acb1643aa
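A short usage sketch of the store above (workload, device, scheduler, and metric values are illustrative):

    store = MetricsStore()
    store.add("randread", "nvme0n1", "mq-deadline", {"iops": 91000, "lat_us": 120})
    store.add("randread", "nvme0n1", "none", {"iops": 98000, "lat_us": 105})

    print(len(store))                                # 2
    print(store.get("randread", "nvme0n1", "none"))  # the second entry
    for item in store.get_all(device="nvme0n1"):     # filter on device only
        print(item["scheduler"], item["metrics"]["iops"])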
class SCProjectDeactivate(ProjectServerCommand): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> ServerCommand.__init__(self, "project-deactivate") <NEW_LINE> <DEDENT> def run(self, serverState, request, response): <NEW_LINE> <INDENT> prj=self.getProject(request, serverState) <NEW_LINE> if request.hasParam('item'): <NEW_LINE> <INDENT> item=request.getParam('item') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> item="" <NEW_LINE> <DEDENT> prj.deactivate(item) <NEW_LINE> if item == "": <NEW_LINE> <INDENT> response.add("De-activated all items in project %s"%prj.getName()) <NEW_LINE> log.info("De-activated all items in project %s"%prj.getName()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response.add("De-activated: %s in project %s"%(item, prj.getName())) <NEW_LINE> log.info("De-activated: %s in project %s"%(item, prj.getName()))
De-activate all elements in a project.
6259906932920d7e50bc7803
class SystemAPI(Resource): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.parser = reqparse.RequestParser() <NEW_LINE> if not WizardMiddleware.isShowWizard(): <NEW_LINE> <INDENT> abort(503, message="The wizard and its API is not available.") <NEW_LINE> <DEDENT> <DEDENT> def get(self, system_id): <NEW_LINE> <INDENT> system = System.query.get(system_id) <NEW_LINE> if system: <NEW_LINE> <INDENT> return SYS.dump(system) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> abort(404, message="System with ID {} not found.".format(system_id)) <NEW_LINE> <DEDENT> <DEDENT> def put(self, system_id): <NEW_LINE> <INDENT> self.parser.add_argument('name', type=str) <NEW_LINE> self.parser.add_argument('ip_address', type=str) <NEW_LINE> self.parser.add_argument('location', type=str) <NEW_LINE> self.parser.add_argument('scan_enabled', type=bool) <NEW_LINE> self.parser.add_argument('ids_enabled', type=bool) <NEW_LINE> self.parser.add_argument('types', type=int, action='append') <NEW_LINE> self.parser.add_argument('network_id', type=int) <NEW_LINE> try: <NEW_LINE> <INDENT> self.args = self.parser.parse_args() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> abort(400, message="Bad Request. Failed parsing arguments.") <NEW_LINE> <DEDENT> system = System.query.get(system_id) <NEW_LINE> if not system: <NEW_LINE> <INDENT> abort(404, message="System with ID {} not found. Nothing changed.".format(system_id)) <NEW_LINE> <DEDENT> system.name = self.args['name'] <NEW_LINE> system.ip_address = self.args['ip_address'] <NEW_LINE> system.scan_enabled = self.args['scan_enabled'] <NEW_LINE> system.ids_enabled = self.args['ids_enabled'] <NEW_LINE> system.network_id = self.args['network_id'] <NEW_LINE> if self.args['types']: <NEW_LINE> <INDENT> system.types = [SystemType.query.get(tid) for tid in self.args['types']] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> system.types = [] <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> db.session.add(system) <NEW_LINE> db.session.commit() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> abort(500, message="Error while saving system to database.") <NEW_LINE> <DEDENT> <DEDENT> def delete(self, system_id): <NEW_LINE> <INDENT> system = System.query.get(system_id) <NEW_LINE> if system: <NEW_LINE> <INDENT> db.session.delete(system) <NEW_LINE> db.session.commit() <NEW_LINE> return '', 204 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> abort(404, message="System with ID {} not found. Nothing deleted.".format(system_id))
API resource for representing a single system.
625990694f6381625f19a084
class SubModule(Scope, Module): <NEW_LINE> <INDENT> def __init__(self, path, start_pos=(1, 0), top_module=None): <NEW_LINE> <INDENT> super(SubModule, self).__init__(self, start_pos) <NEW_LINE> self.path = path <NEW_LINE> self.global_vars = [] <NEW_LINE> self._name = None <NEW_LINE> self.used_names = {} <NEW_LINE> self.temp_used_names = [] <NEW_LINE> self.line_offset = 0 <NEW_LINE> self.use_as_parent = top_module or self <NEW_LINE> <DEDENT> def add_global(self, name): <NEW_LINE> <INDENT> self.global_vars.append(name) <NEW_LINE> <DEDENT> def get_set_vars(self): <NEW_LINE> <INDENT> n = super(SubModule, self).get_set_vars() <NEW_LINE> n += self.global_vars <NEW_LINE> return n <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> if self._name is not None: <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> if self.path is None: <NEW_LINE> <INDENT> string = '' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sep = (re.escape(os.path.sep),) * 2 <NEW_LINE> r = re.search(r'([^%s]*?)(%s__init__)?(\.py|\.so)?$' % sep, self.path) <NEW_LINE> string = re.sub('\.[a-z]+-\d{2}[mud]{0,3}$', '', r.group(1)) <NEW_LINE> <DEDENT> names = [(string, (0, 0))] <NEW_LINE> self._name = Name(self, names, self.start_pos, self.end_pos, self.use_as_parent) <NEW_LINE> return self._name <NEW_LINE> <DEDENT> def is_builtin(self): <NEW_LINE> <INDENT> return not (self.path is None or self.path.endswith('.py'))
The top scope, which is always a module. Depending on the underlying parser this may be a full module or just a part of a module.
625990694428ac0f6e659cee
class RFnn(RPackage): <NEW_LINE> <INDENT> homepage = "https://cloud.r-project.org/package=FNN" <NEW_LINE> url = "https://cloud.r-project.org/src/contrib/FNN_1.1.tar.gz" <NEW_LINE> list_url = "https://cloud.r-project.org/src/contrib/Archive/FNN" <NEW_LINE> version('1.1.3', sha256='de763a25c9cfbd19d144586b9ed158135ec49cf7b812938954be54eb2dc59432') <NEW_LINE> version('1.1.2.2', sha256='b51a60fbbeff58c48cc90c2023c48972d5082d68efd02284c17ccd9820986326') <NEW_LINE> version('1.1', sha256='b2a2e97af14aa50ef4dce15a170e1d7329aebb7643bab4a6cf35609555acccce') <NEW_LINE> version('1.0', sha256='5606cc656c5488b56ee9227088bec662539589fd626ea5aae0e4d57d70a6fe03') <NEW_LINE> version('0.6-4', sha256='2d0eb7b2aab9ff2e4deaf0b5e39b817f3f3701c0dcefa8a380bdc7111e68d853') <NEW_LINE> version('0.6-3', sha256='9ac1817852427a056b5c6ad6ac5212bc43abd29ce15f98441a6261b25cf5f810') <NEW_LINE> version('0.6-2', sha256='f1fc410c341175bdb11a75b063c8c987e15b632378b56148d3566b91fca53a31') <NEW_LINE> depends_on('[email protected]:', type=('build', 'run'))
Cover-tree and kd-tree fast k-nearest neighbor search algorithms and related applications including KNN classification, regression and information measures are implemented.
625990694428ac0f6e659cef
class MedicatedAllergicAdopter(AllergicAdopter): <NEW_LINE> <INDENT> def __init__(self, name, desired_species, allergic_species, medicine_effectiveness): <NEW_LINE> <INDENT> AllergicAdopter.__init__(self, name, desired_species, allergic_species) <NEW_LINE> self.medicine_effectiveness = medicine_effectiveness <NEW_LINE> <DEDENT> def get_score(self, adoption_center): <NEW_LINE> <INDENT> s1 = Adopter.get_score(self, adoption_center) <NEW_LINE> other_animals = adoption_center.get_species_count() <NEW_LINE> mul = [] <NEW_LINE> for al in self.allergic_species: <NEW_LINE> <INDENT> if al in other_animals: <NEW_LINE> <INDENT> mul.append(self.medicine_effectiveness[al]) <NEW_LINE> <DEDENT> <DEDENT> m = 0 <NEW_LINE> if mul == []: <NEW_LINE> <INDENT> m = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> m = min(mul) <NEW_LINE> <DEDENT> return m * s1
A MedicatedAllergicAdopter is extremely allergic to a particular species. However! They have a medicine of varying effectiveness, which will be given in a dictionary. To calculate the score for a specific adoption center, we want to find what is the most allergy-inducing species that the adoption center has for the particular MedicatedAllergicAdopter. To do this, first examine what species the AdoptionCenter has that the MedicatedAllergicAdopter is allergic to, then compare them to the medicine_effectiveness dictionary. Take the lowest medicine_effectiveness found for these species, and multiply that value by the Adopter's calculate score method.
625990693eb6a72ae038be1d
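A minimal worked sketch of the scoring rule described in the record above. The species names, effectiveness values, and base score below are invented purely for illustration and are not part of the dataset.

```python
# Hypothetical illustration of the MedicatedAllergicAdopter scoring rule (all values are made up).
medicine_effectiveness = {"Cat": 0.5, "Dog": 0.1}      # effectiveness of the adopter's medicine per species
species_at_center = {"Cat": 3, "Dog": 2, "Rabbit": 4}  # species counts an adoption center might report
base_score = 7.0                                       # assumed result of the plain Adopter.get_score(...)

relevant = [eff for species, eff in medicine_effectiveness.items() if species in species_at_center]
multiplier = min(relevant) if relevant else 1          # lowest effectiveness wins; 1 when no allergen is present
print(multiplier * base_score)                         # 0.1 * 7.0 = 0.7
```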
class ListPitacscomp(generics.ListAPIView): <NEW_LINE> <INDENT> queryset = models.Pitacscomp.objects.all() <NEW_LINE> serializer_class = serializers.PitacscompSerializer
Returns percentage differences between the homeless and general Multnomah population for age and gender
62599069adb09d7d5dc0bd27
class EquationSet(set): <NEW_LINE> <INDENT> equation_class = Equation <NEW_LINE> @classmethod <NEW_LINE> def from_dict_def(cls, es_def): <NEW_LINE> <INDENT> expected_arg_names = list(inspect.signature(es_def).parameters) <NEW_LINE> args = {name: Variable(name) for name in expected_arg_names} <NEW_LINE> eq_dict = es_def(**args) <NEW_LINE> return cls.from_equations(**eq_dict) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_set_def(cls, es_def, scope=None): <NEW_LINE> <INDENT> expected_arg_names = list(inspect.signature(es_def).parameters) <NEW_LINE> if scope is None: <NEW_LINE> <INDENT> args = {name: Variable(name) for name in expected_arg_names} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> args = {name: getattr(scope, name) for name in expected_arg_names} <NEW_LINE> <DEDENT> eq_set = es_def(**args) <NEW_LINE> return cls.from_equations(*eq_set) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_equations(cls, *equations, **monov_eqs): <NEW_LINE> <INDENT> return cls(equations + tuple( cls.equation_class(Variable(subj), obj) for subj, obj in monov_eqs.items())) <NEW_LINE> <DEDENT> @property <NEW_LINE> def variables(self): <NEW_LINE> <INDENT> return set().union(*(equation.variables for equation in self))
A set of Equations
6259906976e4537e8c3f0d40
class CentralMassComposite: <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.satellites = [] <NEW_LINE> <DEDENT> def count(self): <NEW_LINE> <INDENT> count = 1 <NEW_LINE> for satellite in self.satellites: <NEW_LINE> <INDENT> count += satellite.count() <NEW_LINE> <DEDENT> return count <NEW_LINE> <DEDENT> def count_orbits(self, level): <NEW_LINE> <INDENT> count = 0 <NEW_LINE> for satellite in self.satellites: <NEW_LINE> <INDENT> count += satellite.count_orbits(level + 1) <NEW_LINE> <DEDENT> return level + count <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
This class is a composite component for central mass entities - such as what planets orbit around.
625990694e4d562566373bc4
class Dummy2(BaseDummyConverter): <NEW_LINE> <INDENT> TABLE = dict(zip( u"ABCDEGHIJKLOPRTUYZabcdefghijklmnopqrstuvwxyz", u"ȺɃȻĐɆǤĦƗɈꝀŁØⱣɌŦɄɎƵɐqɔpǝɟƃɥᴉɾʞlɯuødbɹsʇnʌʍxʎz" ))
A second dummy converter. Like Dummy, but uses a different obvious but readable automatic conversion: Strikes-through many letters, and turns lower-case letters upside-down.
6259906923849d37ff852872
class Explore_SQL: <NEW_LINE> <INDENT> def __init__(self,db_name): <NEW_LINE> <INDENT> self.database = db_name <NEW_LINE> self.conn = sqlite3.connect(db_name) <NEW_LINE> self.c = sqlite3.connect(db_name).cursor() <NEW_LINE> self.tables = "Not yet defined. Run the method 'print_tables' or 'tables2list' to set this attribute" <NEW_LINE> <DEDENT> def print_tables(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.c.execute("SELECT * FROM sqlite_master WHERE type='table';") <NEW_LINE> table_list = self.c.fetchall() <NEW_LINE> tables = [table_list[table][1] for table in range(len(table_list))] <NEW_LINE> self.tables = "%s" % ", ".join(tables) <NEW_LINE> print(self.tables) <NEW_LINE> <DEDENT> except Error as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def tables2list(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.c.execute("SELECT * FROM sqlite_master WHERE type='table';") <NEW_LINE> table_list = self.c.fetchall() <NEW_LINE> tables = [table_list[table][1] for table in range(len(table_list))] <NEW_LINE> self.tables = "%s" % ", ".join(tables) <NEW_LINE> return(tables) <NEW_LINE> <DEDENT> except Error as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def table2dataframe(self,table,row_start = None,row_lim=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if row_start and row_lim: <NEW_LINE> <INDENT> first_row = str(row_start) <NEW_LINE> limit = str(row_lim) <NEW_LINE> self.c.execute("SELECT * FROM "+table+" LIMIT " + first_row +", " + limit) <NEW_LINE> <DEDENT> elif row_start: <NEW_LINE> <INDENT> limit = str(row_start) <NEW_LINE> self.c.execute("SELECT * FROM "+table+" LIMIT " + limit) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.c.execute("SELECT * FROM "+table) <NEW_LINE> <DEDENT> data = self.c.fetchall() <NEW_LINE> return(pd.DataFrame(data)) <NEW_LINE> <DEDENT> except Error as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> <DEDENT> return None
Explores what tables are available in a database. Turns a table of interest into a pandas dataframe.
625990695166f23b2e244b8f
class PremiumIdMiddleware(middleware.PremiumIdMiddleware): <NEW_LINE> <INDENT> def process_request(self, request): <NEW_LINE> <INDENT> request.premium_id = None <NEW_LINE> <DEDENT> def process_view(self, request, view_func, view_args, view_kwargs): <NEW_LINE> <INDENT> request.premium_id = request.GET.get('premium', None) or view_kwargs.get('premium', None)
Used instead of the Premium Skinner equivalent for compatibility with Wagtail
62599069009cb60464d02cf7
class BreakDataConfig(nlp.BuilderConfig): <NEW_LINE> <INDENT> def __init__(self, text_features, lexicon_tokens, **kwargs): <NEW_LINE> <INDENT> super(BreakDataConfig, self).__init__( version=nlp.Version("1.0.0", "New split API (https://tensorflow.org/datasets/splits)"), **kwargs ) <NEW_LINE> self.text_features = text_features <NEW_LINE> self.lexicon_tokens = lexicon_tokens
BuilderConfig for Break
625990693539df3088ecda5d
class TestOFPPortStatus(unittest.TestCase): <NEW_LINE> <INDENT> class Datapath(object): <NEW_LINE> <INDENT> ofproto = ofproto_v1_0 <NEW_LINE> ofproto_parser = ofproto_v1_0_parser <NEW_LINE> <DEDENT> c = OFPPortStatus(Datapath) <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_init(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_parser(self): <NEW_LINE> <INDENT> version = {'buf': '\x01', 'val': ofproto_v1_0.OFP_VERSION} <NEW_LINE> msg_type = {'buf': '\x0c', 'val': ofproto_v1_0.OFPT_PORT_STATUS} <NEW_LINE> msg_len = {'buf': '\x00\x40', 'val': ofproto_v1_0.OFP_PORT_STATUS_SIZE} <NEW_LINE> xid = {'buf': '\x06\x27\x8b\x7b', 'val': 103254907} <NEW_LINE> reason = {'buf': '\x71', 'val': 113} <NEW_LINE> zfill = '\x00' * 7 <NEW_LINE> port_no = {'buf': '\x48\xd8', 'val': 18648} <NEW_LINE> hw_addr = '41:f7:a3:52:8f:6b' <NEW_LINE> name = 'name'.ljust(16) <NEW_LINE> config = {'buf': '\xae\x73\x90\xec', 'val': 2926809324} <NEW_LINE> state = {'buf': '\x41\x37\x32\x1d', 'val': 1094136349} <NEW_LINE> curr = {'buf': '\xa9\x47\x13\x2c', 'val': 2840007468} <NEW_LINE> advertised = {'buf': '\xce\x6b\x4a\x87', 'val': 3463137927} <NEW_LINE> supported = {'buf': '\xb8\x06\x65\xa1', 'val': 3087426977} <NEW_LINE> peer = {'buf': '\x6a\x11\x52\x39', 'val': 1779520057} <NEW_LINE> buf = version['buf'] + msg_type['buf'] + msg_len['buf'] + xid['buf'] + reason['buf'] + zfill + port_no['buf'] + addrconv.mac.text_to_bin(hw_addr) + name + config['buf'] + state['buf'] + curr['buf'] + advertised['buf'] + supported['buf'] + peer['buf'] <NEW_LINE> res = OFPPortStatus.parser(object, version['val'], msg_type['val'], msg_len['val'], xid['val'], buf) <NEW_LINE> eq_(version['val'], res.version) <NEW_LINE> eq_(msg_type['val'], res.msg_type) <NEW_LINE> eq_(msg_len['val'], res.msg_len) <NEW_LINE> eq_(xid['val'], res.xid) <NEW_LINE> eq_(reason['val'], res.reason) <NEW_LINE> desc = res.desc <NEW_LINE> eq_(port_no['val'], desc.port_no) <NEW_LINE> eq_(hw_addr, desc.hw_addr) <NEW_LINE> eq_(name, desc.name) <NEW_LINE> eq_(config['val'], desc.config) <NEW_LINE> eq_(state['val'], desc.state) <NEW_LINE> eq_(curr['val'], desc.curr) <NEW_LINE> eq_(advertised['val'], desc.advertised) <NEW_LINE> eq_(supported['val'], desc.supported) <NEW_LINE> eq_(peer['val'], desc.peer) <NEW_LINE> <DEDENT> def test_serialize(self): <NEW_LINE> <INDENT> pass
Test case for ofproto_v1_0_parser.OFPPortStatus
62599069a219f33f346c7fc6
class LinearActivation(ActivationFunction): <NEW_LINE> <INDENT> def activate(self, x): <NEW_LINE> <INDENT> return x <NEW_LINE> <DEDENT> def derivative(self, output): <NEW_LINE> <INDENT> return 1.0
Linear Activation function (identity).
625990698e71fb1e983bd285
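A quick usage sketch of the LinearActivation record above, assuming the ActivationFunction base class it inherits from is importable as shown.

```python
# Hypothetical usage of LinearActivation (illustration only).
act = LinearActivation()
print(act.activate(3.5))    # 3.5 -- identity
print(act.derivative(3.5))  # 1.0 -- constant derivative
```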
class Environment(dict): <NEW_LINE> <INDENT> def __init__(self, **kw): <NEW_LINE> <INDENT> self.update(dict( HOME = kw['BUILD_DIR'], DOWNLOAD_DIR = kw['DOWNLOAD_DIR'], BUILD_DIR = kw['BUILD_DIR'], INSTALL_DIR = kw['INSTALL_DIR'], ISISROOT = P.join(kw['INSTALL_DIR'], 'isis'), GIT_SSL_NO_VERIFY = 'true' )) <NEW_LINE> self.update(kw) <NEW_LINE> self['ISIS3RDPARTY'] = P.join(self['ISISROOT'], '3rdParty', 'lib') <NEW_LINE> self.create_dirs() <NEW_LINE> <DEDENT> def create_dirs(self): <NEW_LINE> <INDENT> for d in ('DOWNLOAD_DIR', 'BUILD_DIR', 'INSTALL_DIR'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.makedirs(self[d]) <NEW_LINE> <DEDENT> except OSError as o: <NEW_LINE> <INDENT> if o.errno != errno.EEXIST: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def copy_set_default(self, **kw): <NEW_LINE> <INDENT> e = Environment(**self) <NEW_LINE> for k,v in kw.items(): <NEW_LINE> <INDENT> if k not in e: <NEW_LINE> <INDENT> e[k] = v <NEW_LINE> <DEDENT> <DEDENT> return e <NEW_LINE> <DEDENT> def append(self, key, value): <NEW_LINE> <INDENT> if key in self: <NEW_LINE> <INDENT> self[key] += ' ' + value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self[key] = value <NEW_LINE> <DEDENT> <DEDENT> def append_many(self, key_seq, value): <NEW_LINE> <INDENT> for k in key_seq: <NEW_LINE> <INDENT> self.append(k, value)
Dictionary object containing the required environment info
62599069aad79263cf42ff74
class QueryExtendedFadeTime(_StandardCommand): <NEW_LINE> <INDENT> _cmdval = 0xa8 <NEW_LINE> response = command.Response
Query Extended Fade Time. Bits 6:4 of the answer are extendedFadeTimeMultiplier, and bits 3:0 are extendedFadeTimeBase.
625990694527f215b58eb57f
class TestProjectTextUnitSimilarity(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testProjectTextUnitSimilarity(self): <NEW_LINE> <INDENT> pass
ProjectTextUnitSimilarity unit test stubs
62599069796e427e5384ff35
class TestController(BaseTestCase, BaseTestController): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(TestController, self).__init__(*args, **kwargs) <NEW_LINE> self.init()
Deprecated unittest-style test controller
62599069f548e778e596cd4a
class Parameter (models.Model): <NEW_LINE> <INDENT> name = models.CharField ( verbose_name = 'Title', max_length = 30, primary_key=True ) <NEW_LINE> text = models.TextField ( verbose_name = 'Text String to be used', ) <NEW_LINE> f = models.FileField ( upload_to = 'paramaters/', blank=True, null=True, verbose_name = "File", help_text = 'Add a file if required for parameter' ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return u'%s' % (self.name)
List of parameters changeable in Admin used within the project. These parameters are added through manage.py initialise and cannot be created or deleted in admin, only updated.
625990693317a56b869bf122
class HEALPixTransform(object): <NEW_LINE> <INDENT> def __init__(self, nside): <NEW_LINE> <INDENT> self.nside = nside <NEW_LINE> <DEDENT> def __call__(self, img): <NEW_LINE> <INDENT> if isinstance(img, np.ndarray): <NEW_LINE> <INDENT> return self.img2healpix(img, self.nside) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.img2healpix(np.array(img), self.nside) <NEW_LINE> <DEDENT> <DEDENT> def cart_healpix(self, cartview, nside): <NEW_LINE> <INDENT> healpix = np.zeros(hp.nside2npix(nside), dtype=np.double) <NEW_LINE> hptheta = np.linspace(0, np.pi, num=cartview.shape[0])[:, None] <NEW_LINE> hpphi = np.linspace(-np.pi, np.pi, num=cartview.shape[1]) <NEW_LINE> pix = hp.ang2pix(nside, hptheta, hpphi) <NEW_LINE> healpix[pix] = np.fliplr(cartview) <NEW_LINE> return healpix <NEW_LINE> <DEDENT> def ring2nest(self, healpix): <NEW_LINE> <INDENT> nest = np.zeros(healpix.shape) <NEW_LINE> ipix = hp.ring2nest(nside=hp.npix2nside(nest.shape[-1]), ipix=np.arange(nest.shape[-1])) <NEW_LINE> nest[ipix] = healpix <NEW_LINE> return nest <NEW_LINE> <DEDENT> def img2healpix(self, digit, nside): <NEW_LINE> <INDENT> h, w = digit.shape <NEW_LINE> img = np.zeros((h, 2 * h)) <NEW_LINE> img[:, h - w // 2 : h - w // 2 + w] = digit <NEW_LINE> return self.ring2nest(self.cart_healpix(img, nside))
convert a square PIL img to a HEALPix array in numpy
625990699c8ee82313040d67
@pytest.mark.components <NEW_LINE> @pytest.allure.story('ProximityZone') <NEW_LINE> @pytest.allure.feature('POST') <NEW_LINE> class Test_PFE_Components(object): <NEW_LINE> <INDENT> @pytest.allure.link('https://jira.qumu.com/browse/TC-43816') <NEW_LINE> @pytest.mark.ProximityZone <NEW_LINE> @pytest.mark.POST <NEW_LINE> def test_TC_43816_Correct_Message_Displayed_On_Providing_Invalid_Value_Starting_With_Special_Chacarter_For_Id_Parameter(self, context): <NEW_LINE> <INDENT> with pytest.allure.step('Correct Message Displayed On Providing Invalid Value Starting With Special Chacarter For Id Parameter.'): <NEW_LINE> <INDENT> proximityDetails = context.sc.ProximityDetails( cidr='0.0.0.0/0', metric=2, notes='') <NEW_LINE> proximityZone = context.sc.ProximityZoneDetails( visibleInAllConfigurations=True, configAdminCanEdit=True, configurations=[], id='@##1', name='Proxy1', proximityDetails=[proximityDetails]) <NEW_LINE> try: <NEW_LINE> <INDENT> client, response = check( context.cl.Proximity_Zones.createEntity(body=proximityZone), quiet=True, returnResponse=True) <NEW_LINE> <DEDENT> except HTTPBadRequest as e: <NEW_LINE> <INDENT> get_error_message(e) | should.start_with('Invalid identifier') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception( "Expected error message, got {} status code instead.".format( response.status_code))
PFE ProximityZone test cases.
625990694f88993c371f10fe
class Order(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(User) <NEW_LINE> phone_Number = models.CharField(max_length=20, blank=False) <NEW_LINE> address_Line_One = models.CharField(max_length=40, blank=False) <NEW_LINE> address_Line_Two = models.CharField(max_length=40, blank=False) <NEW_LINE> address_Line_Three = models.CharField(max_length=40, blank=False) <NEW_LINE> town_or_City = models.CharField(max_length=40, blank=False) <NEW_LINE> county = models.CharField(max_length=40, blank=False) <NEW_LINE> postcode = models.CharField(max_length=40, blank=True) <NEW_LINE> country = models.CharField(max_length=40, blank=False) <NEW_LINE> date = models.DateField() <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{0}-{1}-{2}".format(self.id, self.date, self.user)
A model to handle a customer's delivery address when they check out of the store from the cart.
625990697d847024c075db98
class SalesforceAuth(BaseOAuth2): <NEW_LINE> <INDENT> AUTHORIZATION_URL = SALESFORCE_AUTHORIZATION_URL <NEW_LINE> ACCESS_TOKEN_URL = SALESFORCE_ACCESS_TOKEN_URL <NEW_LINE> AUTH_BACKEND = SalesforceBackend <NEW_LINE> SETTINGS_KEY_NAME = 'SALESFORCE_CLIENT_ID' <NEW_LINE> SETTINGS_SECRET_NAME = 'SALESFORCE_CLIENT_SECRET' <NEW_LINE> def user_data(self, access_token, *args, **kwargs): <NEW_LINE> <INDENT> response = kwargs.get('response') or {} <NEW_LINE> import urllib2 <NEW_LINE> headers = {'Authorization': 'Bearer ' + access_token} <NEW_LINE> req = urllib2.Request(response.get('id'), headers=headers) <NEW_LINE> try: <NEW_LINE> <INDENT> return simplejson.load(urllib2.urlopen(req)) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return None
Salesforce OAuth mechanism
62599069fff4ab517ebcefda
class FlowInterfaceTotals(BaseFlowAggregator): <NEW_LINE> <INDENT> target_filename = 'interface_%06d.sqlite' <NEW_LINE> agg_fields = ['if', 'direction'] <NEW_LINE> @classmethod <NEW_LINE> def resolutions(cls): <NEW_LINE> <INDENT> return [30, 300, 3600, 86400] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def history_per_resolution(cls): <NEW_LINE> <INDENT> return { 30: cls.seconds_per_day(1), 300: cls.seconds_per_day(7), 3600: cls.seconds_per_day(31), 86400: cls.seconds_per_day(365) } <NEW_LINE> <DEDENT> def __init__(self, resolution, database_dir='/var/netflow'): <NEW_LINE> <INDENT> super(FlowInterfaceTotals, self).__init__(resolution, database_dir) <NEW_LINE> <DEDENT> def add(self, flow): <NEW_LINE> <INDENT> flow['if'] = flow['if_in'] <NEW_LINE> flow['direction'] = 'in' <NEW_LINE> super(FlowInterfaceTotals, self).add(flow) <NEW_LINE> flow['if'] = flow['if_out'] <NEW_LINE> flow['direction'] = 'out' <NEW_LINE> super(FlowInterfaceTotals, self).add(flow)
collect interface totals
625990690c0af96317c5793e
class GeneralLinearSolverIterationsLineAnalyzer(GeneralLinearSolverLineAnalyzer): <NEW_LINE> <INDENT> def __init__(self, doTimelines=True, doFiles=True, singleFile=False, startTime=None, endTime=None): <NEW_LINE> <INDENT> GeneralLinearSolverLineAnalyzer.__init__(self, doTimelines=doTimelines, doFiles=doFiles, singleFile=singleFile, startTime=startTime, endTime=endTime) <NEW_LINE> <DEDENT> def addToFiles(self,match): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def addToTimelines(self,match): <NEW_LINE> <INDENT> name=self.fName(match.groups()[1]) <NEW_LINE> iter=match.groups()[4] <NEW_LINE> self.lines.setAccumulator(name,"sum") <NEW_LINE> self.lines.setValue(name,iter) <NEW_LINE> <DEDENT> def getCurrentData(self,structured=False): <NEW_LINE> <INDENT> return GeneralLineAnalyzer.getCurrentData(self,structured=structured)
Parses information about the linear solver and collects the iterations
62599069435de62698e9d5c9
@keras_export('keras.losses.SquaredHinge') <NEW_LINE> class SquaredHinge(LossFunctionWrapper): <NEW_LINE> <INDENT> def __init__(self, reduction=losses_impl.ReductionV2.SUM_OVER_BATCH_SIZE, name='squared_hinge'): <NEW_LINE> <INDENT> super(SquaredHinge, self).__init__( squared_hinge, name=name, reduction=reduction)
Computes the squared hinge loss between `y_true` and `y_pred`. Usage: ```python sh = tf.losses.SquaredHinge() loss = sh([0., 1., 1.], [1., 0., 1.]) print('Loss: ', loss.numpy()) # Loss: 0.66 ``` Usage with tf.keras API: ```python model = keras.models.Model(inputs, outputs) model.compile('sgd', loss=tf.losses.SquaredHinge()) ```
6259906976e4537e8c3f0d42
class InMemoryNonceService(NonceService): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.generated_nonces = set() <NEW_LINE> self.invalid_nonces = set() <NEW_LINE> <DEDENT> def generate_nonce(self) -> str: <NEW_LINE> <INDENT> val = super().generate_nonce() <NEW_LINE> self.generated_nonces.add(val) <NEW_LINE> return val <NEW_LINE> <DEDENT> def is_nonce_already_generated(self, nonce) -> bool: <NEW_LINE> <INDENT> return nonce in self.generated_nonces or nonce in self.invalid_nonces <NEW_LINE> <DEDENT> def is_nonce_valid(self, nonce: str) -> bool: <NEW_LINE> <INDENT> return nonce in self.generated_nonces and nonce not in self.invalid_nonces <NEW_LINE> <DEDENT> def invalidate_nonce(self, nonce: str): <NEW_LINE> <INDENT> self.invalid_nonces.add(nonce)
In-memory implementation of the nonce service
62599069d486a94d0ba2d77d
class SpinBalanceDialog(QDialog): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> super(SpinBalanceDialog, self).__init__(parent) <NEW_LINE> self.setAttribute(Qt.WA_DeleteOnClose) <NEW_LINE> self.setWindowTitle('Spin balance dialog') <NEW_LINE> self.fig1 = BlankCanvas() <NEW_LINE> self.ax1 = self.fig1.ax <NEW_LINE> self.fig2 = BlankCanvas() <NEW_LINE> self.ax2 = self.fig2.ax <NEW_LINE> self.toolbar1 = NavigationToolbar2QT(self.fig1, self) <NEW_LINE> self.toolbar2 = NavigationToolbar2QT(self.fig2, self) <NEW_LINE> self.balance_label = QLabel() <NEW_LINE> layout1 = QVBoxLayout() <NEW_LINE> layout1.addWidget(self.fig1) <NEW_LINE> layout1.addWidget(self.toolbar1) <NEW_LINE> layout2 = QVBoxLayout() <NEW_LINE> layout2.addWidget(self.fig2) <NEW_LINE> layout2.addWidget(self.toolbar2) <NEW_LINE> layout = QHBoxLayout() <NEW_LINE> layout.addLayout(layout1) <NEW_LINE> layout.addLayout(layout2) <NEW_LINE> layout.addWidget(self.balance_label) <NEW_LINE> self.setLayout(layout) <NEW_LINE> <DEDENT> def main(self, img, roi): <NEW_LINE> <INDENT> self.img = img <NEW_LINE> self.roi = roi <NEW_LINE> pwa = img[:,:,0] <NEW_LINE> pwoa = img[:,:,1] <NEW_LINE> df = img[:,:,2] <NEW_LINE> pwa=pwa[roi] <NEW_LINE> pwoa=pwoa[roi] <NEW_LINE> df=df[roi] <NEW_LINE> transimg1, odimg1 = imageprocess.calc_absimage(img,norm_edge=True) <NEW_LINE> pixcal = 10e-6 <NEW_LINE> transimg, odimg, com, n0, q, bprime = norm_and_guess(transimg1) <NEW_LINE> x, y, width_x, width_y = fitfuncs2D.gaussian_moments_2D(odimg) <NEW_LINE> guess = np.zeros(10.) <NEW_LINE> guess[0:4] = [com[0], com[1], width_x, width_y] <NEW_LINE> guess[4] = n0 <NEW_LINE> guess[5:] = [q, 0., 0., 0., 0.] <NEW_LINE> ans = fitfuncs2D.fit2dfuncraw(fitfuncs2D.idealfermi_2D_angled, pwa, pwoa, df, guess, tol=1e-10) <NEW_LINE> ToverTF, N = fitfuncs2D.ideal_fermi_numbers_2D_angled(ans, pixcal) <NEW_LINE> self.ax.text(0.1, 0.8, str(ans)) <NEW_LINE> self.ax.text(0.1, 0.4, 'T/TF = %1.3f'%(ToverTF[0])) <NEW_LINE> self.ax.text(0.1, 0.2, 'N = %1.2f million'%(N * 1e-6)) <NEW_LINE> residuals = fitfuncs2D.residuals_2D(odimg, ans, func=fitfuncs2D.idealfermi_2D_angled, smooth=4, showfig=False) <NEW_LINE> self.ax1.imshow(residuals, vmin=0, vmax=.1, cmap=mpl.cm.gray) <NEW_LINE> self.ax2.imshow(odimg, vmin=0, vmax=1.35, cmap=mpl.cm.gray)
Displays two absorption images and calculates the ratio N1/N2.
62599069627d3e7fe0e08648
@view_defaults(renderer='json') <NEW_LINE> class RestHostAddressViews(RestView): <NEW_LINE> <INDENT> @view_config( route_name='rest_host_address_1', request_method='POST', require_csrf=True, permission='view' ) <NEW_LINE> def host_address_1_view_create(self): <NEW_LINE> <INDENT> host_name = self.request.matchdict['host'] <NEW_LINE> address = self.request.matchdict['address'] <NEW_LINE> description = "" <NEW_LINE> try: <NEW_LINE> <INDENT> description = self.request.json_body.get('description', '') <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> description = "" <NEW_LINE> <DEDENT> host = self.dao.host_dao.get_host_by_name(name=host_name) <NEW_LINE> if host is None: <NEW_LINE> <INDENT> return 'host-not-found' <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.dao.host_dao.add_address(host, address, description) <NEW_LINE> return {} <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return str(e) <NEW_LINE> <DEDENT> <DEDENT> @view_config( route_name='rest_host_addresses_1', request_method='GET', require_csrf=True, permission='view' ) <NEW_LINE> def host_addresses_1_view_get(self): <NEW_LINE> <INDENT> host_name = self.request.matchdict['host'] <NEW_LINE> addresses = [] <NEW_LINE> host = self.dao.host_dao.get_host_by_name(name=host_name) <NEW_LINE> if host is None: <NEW_LINE> <INDENT> return 'host-not-found' <NEW_LINE> <DEDENT> d_addr = self.dao.host_dao.get_addresses(host) <NEW_LINE> for addr in d_addr: <NEW_LINE> <INDENT> addresses.append({'name': addr.name, 'description': addr.description}) <NEW_LINE> <DEDENT> return { "addresses": addresses }
RestHostAddress View. self.request: set via parent constructor. self.dao: set via parent constructor.
62599069e5267d203ee6cf9d
class somsc: <NEW_LINE> <INDENT> def __init__(self, data, amount_clusters, epouch = 100, ccore = False): <NEW_LINE> <INDENT> self.__data_pointer = data; <NEW_LINE> self.__amount_clusters = amount_clusters; <NEW_LINE> self.__epouch = epouch; <NEW_LINE> self.__ccore = ccore; <NEW_LINE> self.__network = None; <NEW_LINE> <DEDENT> def process(self): <NEW_LINE> <INDENT> self.__network = som(1, self.__amount_clusters, type_conn.grid_four, None, self.__ccore); <NEW_LINE> self.__network.train(self.__data_pointer, self.__epouch, True); <NEW_LINE> <DEDENT> def get_clusters(self): <NEW_LINE> <INDENT> return self.__network.capture_objects; <NEW_LINE> <DEDENT> def get_cluster_encoding(self): <NEW_LINE> <INDENT> return type_encoding.CLUSTER_INDEX_LIST_SEPARATION;
! @brief Class represents a simple clustering algorithm based on a self-organized feature map. @details This algorithm uses the number of clusters that should be allocated as the size of the SOM map. The objects captured by the neurons are the clusters. The algorithm is able to process data with a Gaussian distribution that has spherical form. Example: @code # load list of points for cluster analysis sample = read_sample(path); # create instance of SOM-SC algorithm to allocate two clusters somsc_instance = somsc(sample, 2); # run cluster analysis and obtain results somsc_instance.process(); somsc_instance.get_clusters(); @endcode
625990694e4d562566373bc6
@six.add_metaclass(abc.ABCMeta) <NEW_LINE> class Reducer(object): <NEW_LINE> <INDENT> def zip_and_reduce(self, x, y): <NEW_LINE> <INDENT> if tf.contrib.framework.nest.is_sequence(x): <NEW_LINE> <INDENT> tf.contrib.framework.nest.assert_same_structure(x, y) <NEW_LINE> x_flat = tf.contrib.framework.nest.flatten(x) <NEW_LINE> y_flat = tf.contrib.framework.nest.flatten(y) <NEW_LINE> flat = [] <NEW_LINE> for x_i, y_i in zip(x_flat, y_flat): <NEW_LINE> <INDENT> flat.append(self.reduce([x_i, y_i])) <NEW_LINE> <DEDENT> return tf.contrib.framework.nest.pack_sequence_as(x, flat) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.reduce([x, y]) <NEW_LINE> <DEDENT> <DEDENT> @abc.abstractmethod <NEW_LINE> def reduce(self, inputs): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def reduce_sequence(self, inputs, sequence_lengths): <NEW_LINE> <INDENT> raise NotImplementedError()
Base class for reducers.
625990692ae34c7f260ac8a7
class ModulusPack (object): <NEW_LINE> <INDENT> def __init__(self, rpool): <NEW_LINE> <INDENT> self.pack = {} <NEW_LINE> self.discarded = [] <NEW_LINE> self.randpool = rpool <NEW_LINE> <DEDENT> def _parse_modulus(self, line): <NEW_LINE> <INDENT> timestamp, mod_type, tests, tries, size, generator, modulus = line.split() <NEW_LINE> mod_type = int(mod_type) <NEW_LINE> tests = int(tests) <NEW_LINE> tries = int(tries) <NEW_LINE> size = int(size) <NEW_LINE> generator = int(generator) <NEW_LINE> modulus = long(modulus, 16) <NEW_LINE> if (mod_type < 2) or (tests < 4) or ((tests & 4) and (tests < 8) and (tries < 100)): <NEW_LINE> <INDENT> self.discarded.append((modulus, 'does not meet basic requirements')) <NEW_LINE> return <NEW_LINE> <DEDENT> if generator == 0: <NEW_LINE> <INDENT> generator = 2 <NEW_LINE> <DEDENT> bl = util.bit_length(modulus) <NEW_LINE> if (bl != size) and (bl != size + 1): <NEW_LINE> <INDENT> self.discarded.append((modulus, 'incorrectly reported bit length %d' % size)) <NEW_LINE> return <NEW_LINE> <DEDENT> if bl not in self.pack: <NEW_LINE> <INDENT> self.pack[bl] = [] <NEW_LINE> <DEDENT> self.pack[bl].append((generator, modulus)) <NEW_LINE> <DEDENT> def read_file(self, filename): <NEW_LINE> <INDENT> self.pack = {} <NEW_LINE> f = open(filename, 'r') <NEW_LINE> for line in f: <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> if (len(line) == 0) or (line[0] == '#'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self._parse_modulus(line) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> f.close() <NEW_LINE> <DEDENT> def get_modulus(self, min, prefer, max): <NEW_LINE> <INDENT> bitsizes = self.pack.keys() <NEW_LINE> bitsizes.sort() <NEW_LINE> if len(bitsizes) == 0: <NEW_LINE> <INDENT> raise SSHException('no moduli available') <NEW_LINE> <DEDENT> good = -1 <NEW_LINE> for b in bitsizes: <NEW_LINE> <INDENT> if (b >= prefer) and (b < max) and ((b < good) or (good == -1)): <NEW_LINE> <INDENT> good = b <NEW_LINE> <DEDENT> <DEDENT> if good == -1: <NEW_LINE> <INDENT> for b in bitsizes: <NEW_LINE> <INDENT> if (b >= min) and (b < max) and (b > good): <NEW_LINE> <INDENT> good = b <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if good == -1: <NEW_LINE> <INDENT> good = bitsizes[0] <NEW_LINE> if min > good: <NEW_LINE> <INDENT> good = bitsizes[-1] <NEW_LINE> <DEDENT> <DEDENT> n = _roll_random(self.randpool, len(self.pack[good])) <NEW_LINE> return self.pack[good][n]
convenience object for holding the contents of the /etc/ssh/moduli file, on systems that have such a file.
6259906944b2445a339b753f
class ExceptionResponse(HTTPException): <NEW_LINE> <INDENT> def __init__(self, code, errorType, message, cause=None): <NEW_LINE> <INDENT> response = errorResponse(code, errorType, message, cause) <NEW_LINE> HTTPException.__init__(self, response=response)
Exception class that will be converted to error response.
625990695fcc89381b266d36
class Solution: <NEW_LINE> <INDENT> def uniqueMorseRepresentations(self, words): <NEW_LINE> <INDENT> code=[".-","-...","-.-.","-..",".","..-.","--.","....","..",".---","-.-",".-..","--","-.","---",".--.","--.-",".-.","...","-","..-","...-",".--","-..-","-.--","--.."] <NEW_LINE> transformations=[] <NEW_LINE> for word in words: <NEW_LINE> <INDENT> transformation = "" <NEW_LINE> for char in word: <NEW_LINE> <INDENT> transformation+=code[ord(char)-ord('a')] <NEW_LINE> <DEDENT> if transformation not in transformations: <NEW_LINE> <INDENT> transformations.append(transformation) <NEW_LINE> <DEDENT> <DEDENT> return len(transformations)
@param words: the given list of words @return: the number of different transformations among all words we have
62599069097d151d1a2c282d
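A short usage sketch of the Morse-code Solution record above; the word list is a standard illustrative input and is not taken from the dataset.

```python
# Hypothetical usage of Solution.uniqueMorseRepresentations (illustration only).
sol = Solution()
print(sol.uniqueMorseRepresentations(["gin", "zen", "gig", "msg"]))  # 2 distinct transformations
```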
class MessageOutputDebug(object): <NEW_LINE> <INDENT> def __init__(self, nodes_visited=None, log_messages=None, branch_exited=None, branch_exited_reason=None): <NEW_LINE> <INDENT> self.nodes_visited = nodes_visited <NEW_LINE> self.log_messages = log_messages <NEW_LINE> self.branch_exited = branch_exited <NEW_LINE> self.branch_exited_reason = branch_exited_reason <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_dict(cls, _dict): <NEW_LINE> <INDENT> args = {} <NEW_LINE> if 'nodes_visited' in _dict: <NEW_LINE> <INDENT> args['nodes_visited'] = [ DialogNodesVisited._from_dict(x) for x in (_dict.get('nodes_visited')) ] <NEW_LINE> <DEDENT> if 'log_messages' in _dict: <NEW_LINE> <INDENT> args['log_messages'] = [ DialogLogMessage._from_dict(x) for x in (_dict.get('log_messages')) ] <NEW_LINE> <DEDENT> if 'branch_exited' in _dict: <NEW_LINE> <INDENT> args['branch_exited'] = _dict.get('branch_exited') <NEW_LINE> <DEDENT> if 'branch_exited_reason' in _dict: <NEW_LINE> <INDENT> args['branch_exited_reason'] = _dict.get('branch_exited_reason') <NEW_LINE> <DEDENT> return cls(**args) <NEW_LINE> <DEDENT> def _to_dict(self): <NEW_LINE> <INDENT> _dict = {} <NEW_LINE> if hasattr(self, 'nodes_visited') and self.nodes_visited is not None: <NEW_LINE> <INDENT> _dict['nodes_visited'] = [x._to_dict() for x in self.nodes_visited] <NEW_LINE> <DEDENT> if hasattr(self, 'log_messages') and self.log_messages is not None: <NEW_LINE> <INDENT> _dict['log_messages'] = [x._to_dict() for x in self.log_messages] <NEW_LINE> <DEDENT> if hasattr(self, 'branch_exited') and self.branch_exited is not None: <NEW_LINE> <INDENT> _dict['branch_exited'] = self.branch_exited <NEW_LINE> <DEDENT> if hasattr(self, 'branch_exited_reason' ) and self.branch_exited_reason is not None: <NEW_LINE> <INDENT> _dict['branch_exited_reason'] = self.branch_exited_reason <NEW_LINE> <DEDENT> return _dict <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return json.dumps(self._to_dict(), indent=2) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
Additional detailed information about a message response and how it was generated. :attr list[DialogNodesVisited] nodes_visited: (optional) An array of objects containing detailed diagnostic information about the nodes that were triggered during processing of the input message. :attr list[DialogLogMessage] log_messages: (optional) An array of up to 50 messages logged with the request. :attr bool branch_exited: (optional) Assistant sets this to true when this message response concludes or interrupts a dialog. :attr str branch_exited_reason: (optional) When `branch_exited` is set to `true` by the Assistant, the `branch_exited_reason` specifies whether the dialog completed by itself or got interrupted.
625990695fdd1c0f98e5f745
class DescribeIngressesRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.EnvironmentId = None <NEW_LINE> self.ClusterNamespace = None <NEW_LINE> self.SourceChannel = None <NEW_LINE> self.IngressNames = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.EnvironmentId = params.get("EnvironmentId") <NEW_LINE> self.ClusterNamespace = params.get("ClusterNamespace") <NEW_LINE> self.SourceChannel = params.get("SourceChannel") <NEW_LINE> self.IngressNames = params.get("IngressNames") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
DescribeIngresses request parameter structure
62599069e76e3b2f99fda1c1
class ContextTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_push_pop(self): <NEW_LINE> <INDENT> ctx = Context(self) <NEW_LINE> Context.push(ctx) <NEW_LINE> self.assertIs(ctx, Context.top()) <NEW_LINE> Context.pop(ctx) <NEW_LINE> <DEDENT> def test_contextmanager(self): <NEW_LINE> <INDENT> with Context(self) as ctx: <NEW_LINE> <INDENT> self.assertIs(ctx, Context.top()) <NEW_LINE> with Context(self) as ctx2: <NEW_LINE> <INDENT> self.assertIsNot(ctx2, ctx) <NEW_LINE> self.assertIs(ctx2, Context.top()) <NEW_LINE> <DEDENT> self.assertIs(ctx, Context.top()) <NEW_LINE> <DEDENT> <DEDENT> def test_raises_no_context(self): <NEW_LINE> <INDENT> self.assertRaises(NoContextError, Context.top) <NEW_LINE> <DEDENT> def test_raises_unbalanced_context(self): <NEW_LINE> <INDENT> ctx1 = Context(self) <NEW_LINE> ctx2 = Context(self) <NEW_LINE> Context.push(ctx1) <NEW_LINE> Context.push(ctx2) <NEW_LINE> self.assertRaises(AssertionError, Context.pop, ctx1) <NEW_LINE> <DEDENT> def test_raises_push_invalid(self): <NEW_LINE> <INDENT> self.assertRaises(AssertionError, Context.push, self)
Unit tests for compiler Context.
62599069462c4b4f79dbd1c6
class Character(Expression): <NEW_LINE> <INDENT> value: str <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return repr(self.value)
Character literals
625990693d592f4c4edbc69e
class TestAttribute(object): <NEW_LINE> <INDENT> def test_deprecated_convert_argument(self): <NEW_LINE> <INDENT> def conv(v): <NEW_LINE> <INDENT> return v <NEW_LINE> <DEDENT> with pytest.warns(DeprecationWarning) as wi: <NEW_LINE> <INDENT> a = Attribute( "a", True, True, True, True, True, True, convert=conv ) <NEW_LINE> <DEDENT> w = wi.pop() <NEW_LINE> assert conv == a.converter <NEW_LINE> assert ( "The `convert` argument is deprecated in favor of `converter`. " "It will be removed after 2019/01.", ) == w.message.args <NEW_LINE> assert __file__ == w.filename <NEW_LINE> <DEDENT> def test_deprecated_convert_attribute(self): <NEW_LINE> <INDENT> def conv(v): <NEW_LINE> <INDENT> return v <NEW_LINE> <DEDENT> a = simple_attr("a", converter=conv) <NEW_LINE> with pytest.warns(DeprecationWarning) as wi: <NEW_LINE> <INDENT> convert = a.convert <NEW_LINE> <DEDENT> w = wi.pop() <NEW_LINE> assert conv is convert is a.converter <NEW_LINE> assert ( "The `convert` attribute is deprecated in favor of `converter`. " "It will be removed after 2019/01.", ) == w.message.args <NEW_LINE> assert __file__ == w.filename <NEW_LINE> <DEDENT> def test_convert_converter(self): <NEW_LINE> <INDENT> with pytest.raises(RuntimeError) as ei: <NEW_LINE> <INDENT> Attribute( "a", True, True, True, True, True, True, convert=lambda v: v, converter=lambda v: v, ) <NEW_LINE> <DEDENT> assert ( "Can't pass both `convert` and `converter`. " "Please use `converter` only.", ) == ei.value.args
Tests for `attr.Attribute`.
62599069baa26c4b54d50a66
class ConstraintManager(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._actual_constraints = {} <NEW_LINE> <DEDENT> def get_constraint_object(self, index, constraint_data): <NEW_LINE> <INDENT> constraint = self._actual_constraints.get(index) <NEW_LINE> if constraint is None: <NEW_LINE> <INDENT> constraint_verifier = ConstraintRegistry.get_constraint(constraint_data) <NEW_LINE> self._actual_constraints[index] = constraint_verifier <NEW_LINE> <DEDENT> return self._actual_constraints[index]
There should be one such object per rule being verified. ConstraintManager collects constraint objects, one per constraint. Constraint objects for each constraint type should not be duplicated (because e.g. two time constraints may have different time ranges), so constraints in _actual_constraints have the same numbering as in the corresponding rule.
62599069aad79263cf42ff75
class Gmkl(GccToolchain, IntelMKL, IntelFFTW): <NEW_LINE> <INDENT> NAME = 'gmkl' <NEW_LINE> SUBTOOLCHAIN = GccToolchain.NAME
Compiler toolchain with GCC, Intel Math Kernel Library (MKL) and Intel FFTW wrappers.
625990692ae34c7f260ac8a8
class Page(so.SQLObject): <NEW_LINE> <INDENT> domain = so.ForeignKey('Domain', notNull=True) <NEW_LINE> path = so.UnicodeCol(notNull=True) <NEW_LINE> title = so.UnicodeCol(default=None) <NEW_LINE> created_at = so.DateTimeCol(notNull=True, default=so.DateTimeCol.now) <NEW_LINE> image_url = so.UnicodeCol(default=None) <NEW_LINE> description = so.UnicodeCol(default=None) <NEW_LINE> folder = so.ForeignKey('Folder') <NEW_LINE> unique_idx = so.DatabaseIndex(domain, path, folder, unique=True) <NEW_LINE> source = so.ForeignKey('Source', notNull=True) <NEW_LINE> labels = so.SQLRelatedJoin('Labels', intermediateTable='page_label', createRelatedTable=False) <NEW_LINE> def get_url(self): <NEW_LINE> <INDENT> return "".join((self.domain.value, self.path))
Model a URI for a webpage on the internet. Do not worry about duplicate pairs of domain and path, since we want to allow those to occur on an import and to clean them up later. A page may have a null Folder (as unsorted), though a folder must always have a parent folder, even if the top folder is "root".
6259906926068e7796d4e0f9
class DiskRestorePointReplicationStatus(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'status': {'key': 'status', 'type': 'object'}, } <NEW_LINE> def __init__( self, *, status: Optional[Any] = None, **kwargs ): <NEW_LINE> <INDENT> super(DiskRestorePointReplicationStatus, self).__init__(**kwargs) <NEW_LINE> self.status = status
The instance view of a disk restore point. :ivar status: The resource status information. :vartype status: any
62599069f548e778e596cd4c
class KafkaConsumer: <NEW_LINE> <INDENT> def __init__( self, topic_name_pattern, message_handler, is_avro=True, offset_earliest=False, sleep_secs=1.0, consume_timeout=0.1, ): <NEW_LINE> <INDENT> self.topic_name_pattern = topic_name_pattern <NEW_LINE> self.message_handler = message_handler <NEW_LINE> self.sleep_secs = sleep_secs <NEW_LINE> self.consume_timeout = consume_timeout <NEW_LINE> self.offset_earliest = offset_earliest <NEW_LINE> if self.offset_earliest: <NEW_LINE> <INDENT> offset_policy = "earliest" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> offset_policy = "latest" <NEW_LINE> <DEDENT> self.broker_properties = { "bootstrap.servers": "PLAINTEXT://localhost:9092", } <NEW_LINE> if is_avro is True: <NEW_LINE> <INDENT> self.broker_properties["schema.registry.url"] = "http://localhost:8081" <NEW_LINE> self.consumer = AvroConsumer( { "bootstrap.servers": self.broker_properties["bootstrap.servers"], "group.id": self.topic_name_pattern, "schema.registry.url": self.broker_properties['schema.registry.url'], "auto.offset.reset": offset_policy } ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.consumer = Consumer( { "bootstrap.servers": self.broker_properties['bootstrap.servers'], "group.id": self.topic_name_pattern, "auto.offset.reset": offset_policy } ) <NEW_LINE> <DEDENT> self.consumer.subscribe( [self.topic_name_pattern], on_assign=self.on_assign ) <NEW_LINE> <DEDENT> def on_assign(self, consumer, partitions): <NEW_LINE> <INDENT> if self.offset_earliest: <NEW_LINE> <INDENT> for partition in partitions: <NEW_LINE> <INDENT> partition.offset = OFFSET_BEGINNING <NEW_LINE> <DEDENT> logger.info("partitions assigned for %s", self.topic_name_pattern) <NEW_LINE> consumer.assign(partitions) <NEW_LINE> <DEDENT> <DEDENT> async def consume(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> num_results = 1 <NEW_LINE> while num_results > 0: <NEW_LINE> <INDENT> num_results = self._consume() <NEW_LINE> <DEDENT> await gen.sleep(self.sleep_secs) <NEW_LINE> <DEDENT> <DEDENT> def _consume(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> msg = self.consumer.poll(self.consume_timeout) <NEW_LINE> <DEDENT> except SerializerError as e: <NEW_LINE> <INDENT> logger.info("Message deserialization failed for {}: {}".format(msg, e)) <NEW_LINE> return 0 <NEW_LINE> <DEDENT> if msg is None: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> elif msg.error() is not None: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.message_handler(msg) <NEW_LINE> print("topic", msg.topic()) <NEW_LINE> return 1 <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.consumer.close()
Defines the base kafka consumer class
62599069379a373c97d9a7df
class _Namespace(argparse.Namespace): <NEW_LINE> <INDENT> def __init__(self, conf): <NEW_LINE> <INDENT> self._conf = conf <NEW_LINE> self._parser = MultiConfigParser() <NEW_LINE> self._files_not_found = [] <NEW_LINE> self._files_permission_denied = [] <NEW_LINE> <DEDENT> def _parse_cli_opts_from_config_file(self, sections, normalized): <NEW_LINE> <INDENT> namespace = _Namespace(self._conf) <NEW_LINE> namespace._parser._add_parsed_config_file(sections, normalized) <NEW_LINE> for opt, group in self._conf._all_cli_opts(): <NEW_LINE> <INDENT> group_name = group.name if group is not None else None <NEW_LINE> try: <NEW_LINE> <INDENT> value = opt._get_from_namespace(namespace, group_name) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> except ValueError as ve: <NEW_LINE> <INDENT> raise ConfigFileValueError( "Value for option %s is not valid: %s" % (opt.name, str(ve))) <NEW_LINE> <DEDENT> if group_name is None: <NEW_LINE> <INDENT> dest = opt.dest <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dest = group_name + '_' + opt.dest <NEW_LINE> <DEDENT> if opt.multi: <NEW_LINE> <INDENT> if getattr(self, dest, None) is None: <NEW_LINE> <INDENT> setattr(self, dest, []) <NEW_LINE> <DEDENT> values = getattr(self, dest) <NEW_LINE> values.extend(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> setattr(self, dest, value) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _add_parsed_config_file(self, sections, normalized): <NEW_LINE> <INDENT> self._parse_cli_opts_from_config_file(sections, normalized) <NEW_LINE> self._parser._add_parsed_config_file(sections, normalized) <NEW_LINE> <DEDENT> def _file_not_found(self, config_file): <NEW_LINE> <INDENT> self._files_not_found.append(config_file) <NEW_LINE> <DEDENT> def _file_permission_denied(self, config_file): <NEW_LINE> <INDENT> self._files_permission_denied.append(config_file) <NEW_LINE> <DEDENT> def _get_cli_value(self, names, positional): <NEW_LINE> <INDENT> for group_name, name in names: <NEW_LINE> <INDENT> name = name if group_name is None else group_name + '_' + name <NEW_LINE> value = getattr(self, name, None) <NEW_LINE> if value is not None: <NEW_LINE> <INDENT> if positional and not value: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> <DEDENT> raise KeyError <NEW_LINE> <DEDENT> def _get_value(self, names, multi, positional, current_name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._get_cli_value(names, positional) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> names = [(g if g is not None else 'DEFAULT', n) for g, n in names] <NEW_LINE> values = self._parser._get(names, multi=multi, normalized=True, current_name=current_name) <NEW_LINE> return values if multi else values[-1]
An argparse namespace which also stores config file values. As we parse command line arguments, the values get set as attributes on a namespace object. However, we also want to parse config files as they are specified on the command line and collect the values alongside the option values parsed from the command line. Note, we don't actually assign values from config files as attributes on the namespace because config file options may be registered after the command line has been parsed, so we may not know how to properly parse or convert a config file value at this point.
625990694428ac0f6e659cf3
class LogReport(object): <NEW_LINE> <INDENT> _format = 'csv' <NEW_LINE> _content_type = 'text/csv' <NEW_LINE> def __init__(self, data=None, user=None): <NEW_LINE> <INDENT> self.named_fields = [ {'field': 'photo', 'label': _('Photo id')}, {'field': 'title', 'label': _('Photo title')}, {'field': 'get_action_display', 'label': _('Action')}, {'field': 'user.full_name', 'label': _('User')}, {'field': 'added_at', 'label': _('Added at')}, ] <NEW_LINE> self.file = io.StringIO() <NEW_LINE> self.writer = csv.writer(self.file) <NEW_LINE> self.data = data or [] <NEW_LINE> self.username = get(user, 'username', 'anonymous_user') <NEW_LINE> self._generate_report() <NEW_LINE> <DEDENT> @property <NEW_LINE> def content_type(self): <NEW_LINE> <INDENT> return self._content_type <NEW_LINE> <DEDENT> @property <NEW_LINE> def file_name(self): <NEW_LINE> <INDENT> return "{}_{}_{}.{}".format(self.username, slugify(self.__class__), datetime.now().isoformat(), self._format) <NEW_LINE> <DEDENT> @property <NEW_LINE> def titles(self): <NEW_LINE> <INDENT> return [named_field['label'] for named_field in self.named_fields] <NEW_LINE> <DEDENT> @property <NEW_LINE> def fields(self): <NEW_LINE> <INDENT> return [named_field['field'] for named_field in self.named_fields] <NEW_LINE> <DEDENT> @property <NEW_LINE> def csv(self): <NEW_LINE> <INDENT> return self.file.getvalue() <NEW_LINE> <DEDENT> def _generate_report(self): <NEW_LINE> <INDENT> self.writer.writerow(self.titles) <NEW_LINE> for item in self.data: <NEW_LINE> <INDENT> self.writer.writerow([get(item, field, '') for field in self.fields])
Generates a csv file with logs of downloaded and visited images
6259906901c39578d7f14315
class Solution2: <NEW_LINE> <INDENT> def getIntersectionNode(self, headA, headB): <NEW_LINE> <INDENT> if headA is None or headB is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> pa = headA <NEW_LINE> pb = headB <NEW_LINE> while pa is not pb: <NEW_LINE> <INDENT> pa = headB if pa is None else pa.next <NEW_LINE> pb = headA if pb is None else pb.next <NEW_LINE> <DEDENT> return pa
https://discuss.leetcode.com/topic/13419/concise-python-code-with-comments/2
62599069435de62698e9d5cb
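A hedged usage sketch of the Solution2 record above. The record does not define a list-node class, so a minimal Node class is invented here purely for the illustration.

```python
# Hypothetical usage of Solution2.getIntersectionNode (Node is invented for this sketch).
class Node:
    def __init__(self, val, nxt=None):
        self.val, self.next = val, nxt

shared = Node(8, Node(4, Node(5)))             # tail shared by both lists
headA = Node(4, Node(1, shared))               # 4 -> 1 -> 8 -> 4 -> 5
headB = Node(5, Node(6, Node(1, shared)))      # 5 -> 6 -> 1 -> 8 -> 4 -> 5
print(Solution2().getIntersectionNode(headA, headB).val)  # 8, the first shared node
```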
class GEOSFuncFactory: <NEW_LINE> <INDENT> argtypes = None <NEW_LINE> restype = None <NEW_LINE> errcheck = None <NEW_LINE> def __init__(self, func_name, *, restype=None, errcheck=None, argtypes=None): <NEW_LINE> <INDENT> self.func_name = func_name <NEW_LINE> if restype is not None: <NEW_LINE> <INDENT> self.restype = restype <NEW_LINE> <DEDENT> if errcheck is not None: <NEW_LINE> <INDENT> self.errcheck = errcheck <NEW_LINE> <DEDENT> if argtypes is not None: <NEW_LINE> <INDENT> self.argtypes = argtypes <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, *args): <NEW_LINE> <INDENT> return self.func(*args) <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def func(self): <NEW_LINE> <INDENT> from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc <NEW_LINE> func = GEOSFunc(self.func_name) <NEW_LINE> func.argtypes = self.argtypes or [] <NEW_LINE> func.restype = self.restype <NEW_LINE> if self.errcheck: <NEW_LINE> <INDENT> func.errcheck = self.errcheck <NEW_LINE> <DEDENT> return func
Lazy loading of GEOS functions.
6259906997e22403b383c6ce
class ItemBankSession(osid_sessions.OsidSession): <NEW_LINE> <INDENT> def can_lookup_item_bank_mappings(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def use_comparative_bank_view(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def use_plenary_bank_view(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_item_ids_by_bank(self, bank_id): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def get_items_by_bank(self, bank_id): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def get_item_ids_by_banks(self, bank_ids): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def get_items_by_banks(self, bank_ids): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def get_bank_ids_by_item(self, item_id): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def get_banks_by_item(self, item_id): <NEW_LINE> <INDENT> return
This session provides methods to retrieve ``Item`` to ``Bank`` mappings. An ``Item`` may appear in multiple ``Banks``. Each ``Bank`` may have its own authorizations governing who is allowed to look at it. This lookup session defines two views: * comparative view: elements may be silently omitted or re-ordered * plenary view: provides a complete result set or is an error condition
62599069d486a94d0ba2d77f
class GsPushCommand(WindowCommand, PushBase): <NEW_LINE> <INDENT> def run(self, local_branch_name=None, force=False): <NEW_LINE> <INDENT> self.force = force <NEW_LINE> self.local_branch_name = local_branch_name <NEW_LINE> sublime.set_timeout_async(self.run_async) <NEW_LINE> <DEDENT> def run_async(self): <NEW_LINE> <INDENT> savvy_settings = sublime.load_settings("GitSavvy.sublime-settings") <NEW_LINE> if self.local_branch_name: <NEW_LINE> <INDENT> upstream = self.get_local_branch(self.local_branch_name).tracking <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.local_branch_name = self.get_current_branch_name() <NEW_LINE> upstream = self.get_upstream_for_active_branch() <NEW_LINE> <DEDENT> if upstream: <NEW_LINE> <INDENT> remote, remote_branch = upstream.split("/", 1) <NEW_LINE> self.do_push( remote, self.local_branch_name, remote_branch=remote_branch, force=self.force) <NEW_LINE> <DEDENT> elif savvy_settings.get("prompt_for_tracking_branch"): <NEW_LINE> <INDENT> if sublime.ok_cancel_dialog(SET_UPSTREAM_PROMPT): <NEW_LINE> <INDENT> self.window.run_command("gs_push_to_branch_name", { "set_upstream": True, "force": self.force }) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.window.run_command("gs_push_to_branch_name", { "branch_name": self.local_branch_name, "set_upstream": False, "force": self.force })
Push current branch.
625990696e29344779b01e13
class See(Thread): <NEW_LINE> <INDENT> debug = False <NEW_LINE> log = True <NEW_LINE> VIEW_STOPPED = 0 <NEW_LINE> def __init__(self, outAxon): <NEW_LINE> <INDENT> Thread.__init__(self) <NEW_LINE> self.name='See' <NEW_LINE> self.outAxon = outAxon <NEW_LINE> self.inAxon = Axon(Config.DEFAULT_LOCATION) <NEW_LINE> self.inAxon.setCapabilities(self, capabilities) <NEW_LINE> self.view_queue = Queue() <NEW_LINE> self.running = False <NEW_LINE> self.canRun = True <NEW_LINE> self.view_status = See.VIEW_STOPPED <NEW_LINE> self.reported = False <NEW_LINE> self.reported_timing = False <NEW_LINE> self.reported_level = False <NEW_LINE> self.reported_single = False <NEW_LINE> self.report_time=time.time() <NEW_LINE> self.eye = None <NEW_LINE> self.view_ear_id = "camera" <NEW_LINE> self.view_ear_name = "Rapberry PI camera" <NEW_LINE> if self.canRun: <NEW_LINE> <INDENT> self.eye = Eye(id=self.view_ear_id, name=self.view_ear_name, queue=self.view_queue) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("run 'sudo python SetUpEars.py' to set up microphones to enable See") <NEW_LINE> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> if self.canRun: <NEW_LINE> <INDENT> self.eye.stop() <NEW_LINE> <DEDENT> self.running=False <NEW_LINE> <DEDENT> def isRunning(self): <NEW_LINE> <INDENT> return self.running <NEW_LINE> <DEDENT> def setOn(self, on): <NEW_LINE> <INDENT> self.eye.setOn(on) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> if self.canRun: <NEW_LINE> <INDENT> if See.log: <NEW_LINE> <INDENT> print("Starting " + self.name) <NEW_LINE> <DEDENT> self.eye.start() <NEW_LINE> self.running=True <NEW_LINE> while self.running: <NEW_LINE> <INDENT> view=self.view_queue.get() <NEW_LINE> if See.debug: <NEW_LINE> <INDENT> print("Got view from view_queue " + self.view_ear_name + "file_path " + view.get_file_path()) <NEW_LINE> <DEDENT> self.process(view) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print("Can not start " + self.name) <NEW_LINE> print("run 'sudo python SetUpEars.py' to set up microphones to enable Hearing") <NEW_LINE> <DEDENT> <DEDENT> def process(self, view): <NEW_LINE> <INDENT> id=view.get_id() <NEW_LINE> print(str(id) + " Processing " + view.get_file_path()) <NEW_LINE> sensation = Sensation() <NEW_LINE> sensation=Sensation(sensationType = Sensation.SensationType.ImageFilePath, memoryType = Sensation.MemoryType.Sensory, robotType = Sensation.RobotType.Muscle, imageFilePath=view.get_file_path()) <NEW_LINE> self.outAxon.put(sensation)
See produces visual information with the camera device
6259906944b2445a339b7540
class FixedSecrets(object): <NEW_LINE> <INDENT> def __init__(self, secrets): <NEW_LINE> <INDENT> if isinstance(secrets, str): <NEW_LINE> <INDENT> secrets = secrets.split() <NEW_LINE> <DEDENT> self._secrets = secrets <NEW_LINE> <DEDENT> def get(self, node): <NEW_LINE> <INDENT> return list(self._secrets) <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return []
Use a fixed set of secrets for all nodes. This class provides the same API as the Secrets class, but uses a single list of secrets for all nodes rather than using different secrets for each node. Options: - **secrets**: a list of hex-encoded secrets to use for all nodes.
625990692ae34c7f260ac8a9
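A brief usage sketch of the FixedSecrets record above; the hex strings and node name are placeholders, not real secrets.

```python
# Hypothetical usage of FixedSecrets (illustration only).
fs = FixedSecrets("deadbeef cafef00d")   # a space-separated string is split into a list of secrets
print(fs.get("node-1"))                  # ['deadbeef', 'cafef00d'] -- every node gets the same list
print(fs.keys())                         # []
```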
class SecretStr(SecretMixin, str): <NEW_LINE> <INDENT> _P: str = "*" <NEW_LINE> def __init__(self, value="", encoding=None, errors="strict"): <NEW_LINE> <INDENT> super().__init__(value, encoding=encoding, errors=errors)
A string that hides the true value in its repr. The hidden value may be accessed via :py:attr:`SecretStr.secret` Examples -------- >>> import typic >>> mysecret = typic.SecretStr("The Ring is in Frodo's pocket.") >>> print(mysecret) ****************************** >>> print(mysecret.secret) The Ring is in Frodo's pocket. >>> f"{mysecret}" '******************************' >>> import json >>> json.dumps([mysecret]) '["The Ring is in Frodo\'s pocket."]' Notes ----- This object inherits directly from :py:class:`str` and, so is natively JSON-serializable. There is no need to add logic to extract the secret value. See Also -------- :py:class:`SecretMixin`
625990698a43f66fc4bf3953
class Memstats(object): <NEW_LINE> <INDENT> templates = {} <NEW_LINE> def __init__(self, machine, cpu, poolname, memsecs, cell_names, space_names, mappings): <NEW_LINE> <INDENT> self.env = Environment(cell_names, space_names, mappings) <NEW_LINE> self.resources = Resources(machine, cpu, poolname) <NEW_LINE> self.revision = None <NEW_LINE> for sec in memsecs: <NEW_LINE> <INDENT> sec_id, addr, size = sec <NEW_LINE> self.resources.add_phys_mem(sec_id, addr, size) <NEW_LINE> <DEDENT> <DEDENT> def set_revision(self, stats): <NEW_LINE> <INDENT> if (stats[0] is not None) or (stats[1] is not None): <NEW_LINE> <INDENT> self.revision = stats <NEW_LINE> <DEDENT> <DEDENT> def format(self): <NEW_LINE> <INDENT> output = '<?xml version="1.0"?>\n' '<!DOCTYPE memstats SYSTEM "memstats-2.0.dtd">\n' <NEW_LINE> output += '<memstats>\n' <NEW_LINE> if self.revision is not None: <NEW_LINE> <INDENT> rep, changeset = self.revision <NEW_LINE> if rep is not None: <NEW_LINE> <INDENT> rep_str = rep <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rep_str = "" <NEW_LINE> <DEDENT> if changeset is not None: <NEW_LINE> <INDENT> changeset_str = changeset <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> changeset_str = "" <NEW_LINE> <DEDENT> output += ' <revision repository="%s" changeset="%s" />\n' % (rep_str, changeset_str) <NEW_LINE> <DEDENT> output += self.env.format() <NEW_LINE> output += self.resources.format() <NEW_LINE> output += "</memstats>\n" <NEW_LINE> return output
The top-level object representing the memory statistics collected.
6259906966673b3332c31bbf
class Data_Files: <NEW_LINE> <INDENT> def __init__(self,base_dir=None,files=None,copy_to=None,template=None,preserve_path=0,strip_dirs=0): <NEW_LINE> <INDENT> self.base_dir = base_dir <NEW_LINE> self.files = files <NEW_LINE> self.copy_to = copy_to <NEW_LINE> self.template = template <NEW_LINE> self.preserve_path = preserve_path <NEW_LINE> self.strip_dirs = strip_dirs <NEW_LINE> self.finalized = 0 <NEW_LINE> <DEDENT> def warn (self, msg): <NEW_LINE> <INDENT> sys.stderr.write ("warning: %s: %s\n" % ("install_data", msg)) <NEW_LINE> <DEDENT> def debug_print (self, msg): <NEW_LINE> <INDENT> from distutils.core import DEBUG <NEW_LINE> if DEBUG: <NEW_LINE> <INDENT> print(msg) <NEW_LINE> <DEDENT> <DEDENT> def finalize(self): <NEW_LINE> <INDENT> if self.finalized: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self.files == None: <NEW_LINE> <INDENT> self.files = [] <NEW_LINE> <DEDENT> if self.template != None: <NEW_LINE> <INDENT> if type(self.template) == str: <NEW_LINE> <INDENT> self.template = str(self.template).split(";") <NEW_LINE> <DEDENT> filelist = FileList(self.warn,self.debug_print) <NEW_LINE> for line in self.template: <NEW_LINE> <INDENT> filelist.process_template_line(str(line).strip()) <NEW_LINE> <DEDENT> filelist.sort() <NEW_LINE> filelist.remove_duplicates() <NEW_LINE> self.files.extend(filelist.files) <NEW_LINE> <DEDENT> self.finalized = 1
container for a list of data files. supports alternate base_dirs, e.g. 'install_lib','install_header',... supports a directory to copy files to. supports templates as in MANIFEST.in. supports preserving of paths in filenames, e.g. foo/xyz is copied to base_dir/foo/xyz. supports stripping of leading dirs of source paths, e.g. foo/bar1/xyz, foo/bar2/abc can be copied to bar1/xyz, bar2/abc
62599069462c4b4f79dbd1c8
class ChildIdAllocator(object): <NEW_LINE> <INDENT> def __init__(self, router): <NEW_LINE> <INDENT> self.router = router <NEW_LINE> self.lock = threading.Lock() <NEW_LINE> self.it = iter(xrange(0)) <NEW_LINE> <DEDENT> def allocate(self): <NEW_LINE> <INDENT> self.lock.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> for id_ in self.it: <NEW_LINE> <INDENT> return id_ <NEW_LINE> <DEDENT> master = self.router.context_by_id(0) <NEW_LINE> start, end = master.send_await( mitogen.core.Message(dst_id=0, handle=mitogen.core.ALLOCATE_ID) ) <NEW_LINE> self.it = iter(xrange(start, end)) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.lock.release() <NEW_LINE> <DEDENT> return self.allocate()
Allocate new context IDs from a block of unique context IDs allocated by the master process.
625990692c8b7c6e89bd4fa8