code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class OrderListView(APIView):
    """API endpoint that returns the full list of orders."""

    def get(self, request):
        """Serialize every Order and return it as a DRF Response."""
        queryset = Order.objects.all()
        payload = OrderListSerializer(queryset, many=True).data
        return Response(payload)
Вывод списка заказов
6259907399cbb53fe68327fc
class LoggingStream(object):
    """A fake 'stream' to be used for logging in tests.

    Configures the validator logger with a console handler (always) and
    an optional logfile handler when *logfile* is given.
    """

    def __init__(self, logfile, verbose_printing, formatter=None):
        logger = get_validator_logger()
        # Start from a clean slate so repeated instantiation does not
        # stack duplicate handlers.
        logger.handlers = []
        logger.setLevel(logging.DEBUG)
        logger.propagate = False

        console = logging.StreamHandler()
        console.setLevel(logging.DEBUG if verbose_printing else logging.INFO)
        console.setFormatter(fmt=formatter or color_formatting.ColorFormatter())
        logger.addHandler(console)

        if logfile:
            file_handler = LogfileHandler(logfile)
            file_handler.setLevel(logging.DEBUG)
            file_handler.setFormatter(fmt=color_formatting.ColorFormatter(tty=False))
            logger.addHandler(file_handler)

        self.__logger = logger

    def writeln(self, message=None, lvl=logging.INFO):
        """Log *message* (or an empty line when None) at level *lvl*."""
        self.__logger.log(lvl, msg='' if message is None else message)
A fake 'stream' to be used for logging in tests.
625990734428ac0f6e659e47
class TAGraphConvolution(Layer):
    """Graph convolution layer (TensorFlow 1.x style).

    Aggregates the input over two support slices, each with its own
    weight matrix, then applies an optional bias and the activation.
    """

    def __init__(self, input_dim, output_dim, placeholders, dropout=0.,
                 sparse_inputs=False, act=tf.nn.relu, bias=False,
                 featureless=False, **kwargs):
        super(TAGraphConvolution, self).__init__(**kwargs)
        # Dropout keep-rate is fed through a placeholder only when dropout
        # is requested; otherwise it is disabled entirely.
        if dropout:
            self.dropout = placeholders['dropout']
        else:
            self.dropout = 0.
        self.act = act
        self.support = placeholders['support']
        self.sparse_inputs = sparse_inputs
        self.featureless = featureless
        self.bias = bias
        # Required by sparse_dropout() when inputs are sparse tensors.
        self.num_features_nonzero = placeholders['num_features_nonzero']
        with tf.variable_scope(self.name + '_vars'):
            # One weight matrix per support slice (k = 0, 1).
            for k in range(2):
                self.vars['weights_' + str(k)] = tf.get_variable(
                    shape=[input_dim, output_dim],
                    name=('weights_' + str(k)),
                    initializer=tf.contrib.layers.xavier_initializer())
            if self.bias:
                self.vars['bias'] = zeros([output_dim], name='bias')
        self.conv = np.zeros(output_dim, dtype=np.float32)
        if self.logging:
            self._log_vars()

    def _call(self, inputs):
        """Apply dropout, then sum the per-slice support convolutions."""
        x = inputs
        if self.sparse_inputs:
            x = sparse_dropout(x, 1 - self.dropout, self.num_features_nonzero)
        else:
            x = tf.nn.dropout(x, 1 - self.dropout)
        supports = list()
        for k in range(2):
            # support[:, :, k] is the k-th adjacency slice.
            w_k = self.support[:, :, k]
            G_k = self.vars['weights_' + str(k)]
            res = dot(x, G_k, sparse=self.sparse_inputs)
            res = dot(w_k, res)
            supports.append(res)
        output = tf.add_n(supports)
        if self.bias:
            output += self.vars['bias']
        return self.act(output)
Graph convolution layer.
62599073009cb60464d02e4c
class FTPClient(ftputil.FTPHost, BaseClient):
    """Simplified yet flexible FTP client built on ftputil.FTPHost."""

    def __init__(self, factory=ftplib.FTP, **kwargs):
        # BaseClient parses the URI and exposes hostname/username/password/port
        # as attributes used as fallbacks below.
        BaseClient.__init__(self, uri=kwargs.pop('uri', 'ftp://'), **kwargs)
        # NOTE: the nested pops evaluate eagerly, so 'server', 'host' AND
        # 'hostname' are all removed from kwargs before the remainder is
        # forwarded, with 'server' taking precedence over the others.
        ftputil.FTPHost.__init__(
            self,
            kwargs.pop('server', kwargs.pop('host', kwargs.pop('hostname', self.hostname))),
            kwargs.pop('username', self.username),
            kwargs.pop('password', self.password),
            port=kwargs.pop('port', self.port or 21),  # 21 = standard FTP port
            session_factory=factory,
            **kwargs)
Simplified yet flexible FTP client
6259907344b2445a339b75e7
class UnsupportedVersion(Exception):
    """Raised when the user tries to use an unsupported version of the API."""
Indication for using an unsupported version of the API. Indicates that the user is trying to use an unsupported version of the API.
625990737d43ff248742809c
class Material(object):
    """Material()

    Stub class: ``MaterialString`` is a placeholder property whose getter
    returns a fresh ``object()`` and whose setter/deleter are no-ops.
    """

    def _get_material_string(self):
        return object()

    def _set_material_string(self, value):
        return None

    def _del_material_string(self):
        return None

    MaterialString = property(_get_material_string,
                              _set_material_string,
                              _del_material_string)
Material()
6259907338b623060ffaa4de
class _BulkNegativeKeyword(_SingleRecordBulkEntity):
    """Base class for all bulk negative keywords.

    A negative keyword is either assigned individually to a campaign or
    ad-group entity, or shared in a negative keyword list.
    """

    def __init__(self, status=None, negative_keyword=None, parent_id=None):
        super(_BulkNegativeKeyword, self).__init__()
        self._negative_keyword = negative_keyword
        self._status = status
        self._parent_id = parent_id

    @property
    def status(self):
        # Row status as read from / written to the bulk file.
        return self._status

    @status.setter
    def status(self, status):
        self._status = status

    @property
    def negative_keyword(self):
        # The wrapped campaign-management NegativeKeyword object.
        return self._negative_keyword

    @negative_keyword.setter
    def negative_keyword(self, negative_keyword):
        self._negative_keyword = negative_keyword

    # Column <-> field mappings; empty CSV cells become None.
    _MAPPINGS = [
        _SimpleBulkMapping(
            header=_StringTable.Id,
            field_to_csv=lambda c: bulk_str(c.negative_keyword.Id),
            csv_to_field=lambda c, v: setattr(c.negative_keyword, 'Id', int(v) if v else None)
        ),
        _SimpleBulkMapping(
            header=_StringTable.Status,
            field_to_csv=lambda c: bulk_str(c.status),
            csv_to_field=lambda c, v: setattr(c, '_status', v if v else None)
        ),
        _SimpleBulkMapping(
            header=_StringTable.ParentId,
            field_to_csv=lambda c: bulk_str(c._parent_id),
            csv_to_field=lambda c, v: setattr(c, '_parent_id', int(v) if v else None)
        ),
        _SimpleBulkMapping(
            header=_StringTable.Keyword,
            field_to_csv=lambda c: c.negative_keyword.Text,
            csv_to_field=lambda c, v: setattr(c.negative_keyword, 'Text', v)
        ),
        _SimpleBulkMapping(
            header=_StringTable.MatchType,
            field_to_csv=lambda c: bulk_str(c.negative_keyword.MatchType),
            csv_to_field=lambda c, v: setattr(c.negative_keyword, 'MatchType', v)
        )
    ]

    def process_mappings_to_row_values(self, row_values, exclude_readonly_data):
        # Writing a row requires a keyword object to serialize from.
        self._validate_property_not_null(self._negative_keyword, 'negative_keyword')
        self.convert_to_values(row_values, _BulkNegativeKeyword._MAPPINGS)

    def process_mappings_from_row_values(self, row_values):
        # Reading a row creates a fresh NegativeKeyword and fills it in.
        self._negative_keyword = _CAMPAIGN_OBJECT_FACTORY.create('NegativeKeyword')
        self._negative_keyword.Type = 'NegativeKeyword'
        row_values.convert_to_entity(self, _BulkNegativeKeyword._MAPPINGS)

    def read_additional_data(self, stream_reader):
        super(_BulkNegativeKeyword, self).read_additional_data(stream_reader)
The base class for all bulk negative keywords. Either assigned individually to a campaign or ad group entity, or shared in a negative keyword list. *See also:* * :class:`.BulkAdGroupNegativeKeyword` * :class:`.BulkCampaignNegativeKeyword` * :class:`.BulkSharedNegativeKeyword`
625990734a966d76dd5f07fd
class SomeGraph:
    """Defines some object graph."""

    def __init__(self, array_of_integers: typing.List[int]) -> None:
        """Keep a reference to the given list of integers."""
        self.array_of_integers = array_of_integers
defines some object graph.
625990738a43f66fc4bf3aa8
class IAREmbeddedWorkbench(Exporter):
    """Exporter class for IAR Systems Embedded Workbench projects."""

    NAME = 'IAR'
    TOOLCHAIN = 'IAR'

    # Boards for which iar_<target>.ewp/.ewd templates exist.
    TARGETS = [
        'LPC1768', 'LPC1347', 'LPC11U24', 'LPC11U35_401', 'LPC11U35_501',
        'LPC1114', 'LPC1549', 'LPC812', 'LPC4088', 'LPC4088_DM', 'LPC824',
        'UBLOX_C027', 'ARCH_PRO', 'K20D50M', 'KL05Z', 'KL25Z', 'KL46Z',
        'K22F', 'K64F', 'NUCLEO_F030R8', 'NUCLEO_F031K6', 'NUCLEO_F070RB',
        'NUCLEO_F072RB', 'NUCLEO_F091RC', 'NUCLEO_F103RB', 'NUCLEO_F302R8',
        'NUCLEO_F303RE', 'NUCLEO_F334R8', 'NUCLEO_F401RE', 'NUCLEO_F411RE',
        'NUCLEO_F446RE', 'NUCLEO_L053R8', 'NUCLEO_L073RZ', 'NUCLEO_L152RE',
        'NUCLEO_L476RG', 'DISCO_L053C8', 'DISCO_F334C8', 'DISCO_F746NG',
        'DISCO_L476VG', 'MAXWSNENV', 'MAX32600MBED', 'MTS_MDOT_F405RG',
        'MTS_MDOT_F411RE', 'MTS_DRAGONFLY_F411RE', 'NRF51822', 'NRF51_DK',
        'NRF51_DONGLE', 'DELTA_DFCM_NNN40', 'SEEED_TINY_BLE', 'HRM1017',
        'ARCH_BLE', 'MOTE_L152RC',
    ]

    def generate(self):
        """Render the .ewp, .eww and .ewd project files for the target."""
        # Gather every compilable source, then arrange them into the
        # folder tree IAR expects.
        sources = []
        sources += self.resources.c_sources
        sources += self.resources.cpp_sources
        sources += self.resources.s_sources
        iar_files = IarFolder("", "", [])
        for source in sources:
            iar_files.insert_file(source)
        # Template context shared by all three generated files.
        ctx = {
            'name': self.program_name,
            'include_paths': self.resources.inc_dirs,
            'linker_script': self.resources.linker_script,
            'object_files': self.resources.objects,
            'libraries': self.resources.libraries,
            'symbols': self.get_symbols(),
            'source_files': iar_files.__str__(),
            'binary_files': self.resources.bin_files,
        }
        # Project (.ewp), workspace (.eww) and debugger settings (.ewd).
        self.gen_file('iar_%s.ewp.tmpl' % self.target.lower(), ctx, '%s.ewp' % self.program_name)
        self.gen_file('iar.eww.tmpl', ctx, '%s.eww' % self.program_name)
        self.gen_file('iar_%s.ewd.tmpl' % self.target.lower(), ctx, '%s.ewd' % self.program_name)
Exporter class for IAR Systems.
62599073cc0a2c111447c75a
class NeXtVLADModel(object):
    """Creates a NeXtVLAD-based model (PaddlePaddle fluid).

    Video and audio frame features are each aggregated by a NeXtVLAD
    module, concatenated, projected, context-gated, and finally fed to a
    logistic classifier.
    """

    def __init__(self):
        pass

    def create_model(self, video_input, audio_input, is_training=True,
                     class_dim=None, cluster_size=None, hidden_size=None,
                     groups=None, expansion=None, drop_rate=None,
                     gating_reduction=None, l2_penalty=None, **unused_params):
        """Build the network graph and return the classifier output.

        Args:
            video_input / audio_input: frame-feature tensors for the two
                modalities (1024-d video, 128-d audio per the NeXtVLAD
                constructors below).
            class_dim: number of output classes.
            Remaining args are NeXtVLAD / gating hyper-parameters.
        """
        # Separate NeXtVLAD aggregation per modality.
        video_nextvlad = NeXtVLAD(1024, cluster_size, is_training,
                                  expansion=expansion, groups=groups, inputname='video')
        audio_nextvlad = NeXtVLAD(128, cluster_size, is_training,
                                  expansion=expansion, groups=groups, inputname='audio')
        vlad_video = video_nextvlad.forward(video_input)
        vlad_audio = audio_nextvlad.forward(audio_input)
        vlad = fluid.layers.concat([vlad_video, vlad_audio], axis=1)
        if drop_rate > 0.:
            vlad = fluid.layers.dropout(vlad, drop_rate, is_test=(not is_training))
        # Hidden projection, batch-normalized.
        activation = fluid.layers.fc(
            input=vlad, size=hidden_size, act=None, name='hidden1_fc',
            param_attr=fluid.ParamAttr(
                name='hidden1_fc_weights',
                initializer=fluid.initializer.MSRA(uniform=False)),
            bias_attr=False)
        activation = fluid.layers.batch_norm(activation, is_test=(not is_training))
        # Context gating: bottleneck (hidden // gating_reduction) then a
        # sigmoid gate applied elementwise to the activation.
        gates = fluid.layers.fc(
            input=activation, size=hidden_size // gating_reduction, act=None,
            name='gating_fc1',
            param_attr=fluid.ParamAttr(
                name='gating_fc1_weights',
                initializer=fluid.initializer.MSRA(uniform=False)),
            bias_attr=False)
        gates = fluid.layers.batch_norm(gates, is_test=(not is_training), act='relu')
        gates = fluid.layers.fc(
            input=gates, size=hidden_size, act='sigmoid', name='gating_fc2',
            param_attr=fluid.ParamAttr(
                name='gating_fc2_weights',
                initializer=fluid.initializer.MSRA(uniform=False)),
            bias_attr=False)
        activation = fluid.layers.elementwise_mul(activation, gates)
        # Final logistic classifier over the gated representation.
        aggregate_model = clf_model.LogisticModel
        return aggregate_model().create_model(
            model_input=activation, vocab_size=class_dim,
            l2_penalty=l2_penalty, is_training=is_training, **unused_params)
Creates a NeXtVLAD based model. Args: model_input: A LoDTensor of [-1, N] for the input video frames. vocab_size: The number of classes in the dataset.
625990734e4d562566373d1a
class Attachment(object):
    """A mail attachment: file metadata plus raw content.

    Content is persisted to the attachments directory under a random
    hash; the metadata row is stored in the ``attachments`` table.
    """

    file_name = ""
    content = None
    mime_type = ""
    ext = ""
    size = 0

    def __init__(self, file_name="", content="", mime_type="", ext="", size=0):
        self.file_name = file_name
        self.content = content
        self.mime_type = mime_type
        self.ext = ext
        self.size = size
        self.attachments_path = Config.get_value('attachments_dir')

    @staticmethod
    def from_db(_id, db):
        """Load attachment metadata (not content) for row *_id*."""
        # int() coercion keeps the inlined id strictly numeric; the db
        # helper takes a raw SQL string, so never interpolate free text.
        attachment_row = db.fetch_row(
            "SELECT * FROM attachments WHERE id = " + str(int(_id)))
        return Attachment(
            attachment_row['file_name'],
            "",
            attachment_row['mime_type'],
            attachment_row['ext'],
            attachment_row['size'])

    def set_file_name(self, file_name):
        self.file_name = file_name

    def set_ext(self, ext):
        self.ext = ext

    def set_content(self, content):
        # Size always mirrors the current content length (0 when falsy).
        self.content = content
        self.size = len(content) if content else 0

    def set_mime_type(self, mime_type):
        self.mime_type = mime_type

    def get_file_name(self):
        return self.file_name

    def get_content(self):
        return self.content

    def get_mime_type(self):
        return self.mime_type

    def get_size(self):
        return self.size

    def flush_to_db(self, db, letter_id):
        """Write the content to disk under a random hash and insert the row.

        Returns whatever ``db.insert`` returns (typically the new row id).
        """
        _hash = random_md5()
        # The file is opened in binary mode: default to b"" and encode
        # text content (the original wrote str to a 'wb' handle, which
        # raises TypeError on Python 3).
        payload = self.content if self.content is not None else b""
        if isinstance(payload, str):
            payload = payload.encode('utf-8')
        with open(self.attachments_path + "/" + _hash, 'wb') as fh:
            fh.write(payload)
        data_for_insert = {
            'letter_id': letter_id,
            'file_name': self.file_name,
            'mime_type': self.mime_type[0:50],  # column width limit
            'ext': self.ext,
            'size': self.size,
            'hash': _hash,
        }
        return db.insert("attachments", data_for_insert)

    # Backward-compatible alias for the historical typo.
    flus_to_db = flush_to_db
Attachment class
625990732ae34c7f260ac9f7
class Blueprint(models.Model):
    """Blueprints are used to craft upgrades for a ship's module."""

    class Meta(object):
        ordering = ["module_type__name", "name", "grade"]

    name = models.CharField(max_length=100)
    grade = models.PositiveSmallIntegerField(choices=BLUEPRINT_GRADE_CHOICES)
    module_type = models.ForeignKey("ships.ModuleType", null=True, on_delete=models.CASCADE)

    def __str__(self):
        return "{bp.name}, grade {bp.grade}".format(bp=self)

    def _get_similar(self):
        """Queryset of blueprints sharing this one's module type and name."""
        return Blueprint.objects.filter(module_type=self.module_type, name=self.name)

    def get_higher_grades(self):
        """Similar blueprints of strictly higher grade, ascending."""
        return self._get_similar().filter(grade__gt=self.grade).order_by("grade")

    def get_lower_grades(self):
        """Similar blueprints of strictly lower grade, descending."""
        return self._get_similar().filter(grade__lt=self.grade).order_by("-grade")

    def next(self):
        """Return the next higher-grade blueprint, or None.

        QuerySet.first() already returns None on an empty queryset and
        never raises DoesNotExist, so the original try/except was dead code.
        """
        return self.get_higher_grades().first()

    def prev(self):
        """Return the next lower-grade blueprint, or None."""
        return self.get_lower_grades().first()
Blueprints are used to craft upgrades for a ship's module.
6259907332920d7e50bc795b
class TestSuiteOverview(_messages.Message):
    """A summary of a test suite result parsed from XML or uploaded directly.

    Fields:
      errorCount: Number of test cases in error.
      failureCount: Number of failed test cases.
      name: The name of the test suite.
      skippedCount: Number of test cases not run.
      totalCount: Number of test cases.
      xmlSource: Reference to the original XML file, if parsed from XML.
    """

    errorCount = _messages.IntegerField(1, variant=_messages.Variant.INT32)
    failureCount = _messages.IntegerField(2, variant=_messages.Variant.INT32)
    name = _messages.StringField(3)
    skippedCount = _messages.IntegerField(4, variant=_messages.Variant.INT32)
    totalCount = _messages.IntegerField(5, variant=_messages.Variant.INT32)
    xmlSource = _messages.MessageField('FileReference', 6)
A summary of a test suite result either parsed from XML or uploaded directly by a user. Note: the API related comments are for StepService only. This message is also being used in ExecutionService in a read only mode for the corresponding step. Fields: errorCount: Number of test cases in error, typically set by the service by parsing the xml_source. - In create/response: always set - In update request: never failureCount: Number of failed test cases, typically set by the service by parsing the xml_source. May also be set by the user. - In create/response: always set - In update request: never name: The name of the test suite. - In create/response: always set - In update request: never skippedCount: Number of test cases not run, typically set by the service by parsing the xml_source. - In create/response: always set - In update request: never totalCount: Number of test cases, typically set by the service by parsing the xml_source. - In create/response: always set - In update request: never xmlSource: If this test suite was parsed from XML, this is the URI where the original XML file is stored. Note: Multiple test suites can share the same xml_source Returns INVALID_ARGUMENT if the uri format is not supported. - In create/response: optional - In update request: never
6259907366673b3332c31d12
class HomeEntities:
    """Helper that hides and restores the entities of the Home page.

    Every entity registered in the class-level ``all_entities`` list is
    toggled together.
    """

    all_entities = []

    @classmethod
    def disable(cls):
        """Hide every registered entity."""
        for entity in cls.all_entities:
            entity.enabled = False

    @classmethod
    def enable(cls):
        """Show every registered entity."""
        for entity in cls.all_entities:
            entity.enabled = True
Home 패이지를 소멸,복구하는것을 도와주는 클래스 만약 simul설계에 처음부터 이런것을 도입했다면...
625990738e7ae83300eea9a5
class RemoteSlaveContext(IModbusSlaveContext):
    """Modbus data model backed by a remote device via the given client.

    Every read/write is forwarded over the wire using the client passed
    to the constructor.
    """

    def __init__(self, client, unit=None):
        self._client = client
        self.unit = unit  # optional unit id forwarded with every request
        self.__build_mapping()

    def reset(self):
        # A remote datastore cannot be reset locally.
        raise NotImplementedException()

    def validate(self, fx, address, count=1):
        """Check that reading *count* values at *address* succeeds remotely."""
        _logger.debug("validate[%d] %d:%d" % (fx, address, count))
        result = self.__get_callbacks[self.decode(fx)](address, count)
        return not result.isError()

    def getValues(self, fx, address, count=1):
        """Read *count* values at *address* from the remote device."""
        _logger.debug("get values[%d] %d:%d" % (fx, address, count))
        result = self.__get_callbacks[self.decode(fx)](address, count)
        return self.__extract_result(self.decode(fx), result)

    def setValues(self, fx, address, values):
        """Write *values* starting at *address* on the remote device."""
        _logger.debug("set values[%d] %d:%d" % (fx, address, len(values)))
        self.__set_callbacks[self.decode(fx)](address, values)

    def __str__(self):
        return "Remote Slave Context(%s)" % self._client

    def __build_mapping(self):
        # Map each decoded table code ('d', 'c', 'h', 'i') to the
        # matching client read/write call.
        kwargs = {}
        if self.unit:
            kwargs["unit"] = self.unit
        self.__get_callbacks = {
            'd': lambda a, c: self._client.read_discrete_inputs(a, c, **kwargs),
            'c': lambda a, c: self._client.read_coils(a, c, **kwargs),
            'h': lambda a, c: self._client.read_holding_registers(a, c, **kwargs),
            'i': lambda a, c: self._client.read_input_registers(a, c, **kwargs),
        }
        # NOTE(review): 'd' (discrete inputs) and 'i' (input registers) are
        # read-only tables in Modbus, yet they are wired to write calls
        # here — looks suspicious; confirm against the upstream library.
        self.__set_callbacks = {
            'd': lambda a, v: self._client.write_coils(a, v, **kwargs),
            'c': lambda a, v: self._client.write_coils(a, v, **kwargs),
            'h': lambda a, v: self._client.write_registers(a, v, **kwargs),
            'i': lambda a, v: self._client.write_registers(a, v, **kwargs),
        }

    def __extract_result(self, fx, result):
        # On success, unwrap bits or registers depending on the table type;
        # on error, hand the error response object back to the caller.
        # NOTE: a successful result whose fx is not one of d/c/h/i falls
        # through and returns None — preserved as-is.
        if not result.isError():
            if fx in ['d', 'c']:
                return result.bits
            if fx in ['h', 'i']:
                return result.registers
        else:
            return result
TODO This creates a modbus data model that connects to a remote device (depending on the client used)
6259907397e22403b383c817
class ExportControllersHandler(webapp.RequestHandler):
    """Return all controllers for the RDM Protocol Site as JSON.

    This is used by the rdmprotocol.org site. Don't change the format
    without checking in with Peter Kirkup.
    """

    def get(self):
        self.response.headers['Content-Type'] = 'text/plain'
        # The original also ran an unused `results = Controller.all()`
        # query before the loop; removed to avoid the wasted fetch.
        controllers = []
        for controller in Controller.all():
            controller_output = {
                'manufacturer_name': controller.manufacturer.name,
                'key': str(controller.key()),
                'name': controller.name,
            }
            # Optional fields are only emitted when present.
            if controller.link:
                controller_output['link'] = controller.link
            if controller.image_url:
                controller_output['image_url'] = controller.image_url
            # Single pass over tag_set (the original iterated it twice).
            tags = [tag.tag.label for tag in controller.tag_set]
            if tags:
                controller_output['tags'] = tags
            controllers.append(controller_output)
        self.response.out.write(json.dumps({'controllers': controllers}))
Return all controllers for the RDM Protocol Site. This is used by the rdmprotocol.org site. Don't change the format without checking in with Peter Kirkup.
6259907360cbc95b063659f7
class AirflowPostAnalyzer:
    """Folds airflow analysis results into the next design State."""

    @staticmethod
    def get_next_state(results, in_state):
        """Validate *results* and attach them to a copy of *in_state*.

        The original defined this without ``self`` and without
        ``@staticmethod``, so calling it on an instance would fail;
        the decorator fixes that while keeping class-level calls working.

        Raises:
            InvalidDesign: when the analysis marked the design invalid.
        Returns:
            A deep copy of *in_state* with ``conditions.airflow`` set.
        """
        if results["valid"] is False:
            # NOTE(review): message mentions magnet temperature although the
            # check is the generic 'valid' flag — kept verbatim; confirm.
            raise InvalidDesign("Magnet temperature beyond limits")
        state_out = deepcopy(in_state)
        state_out.conditions.airflow = results
        return state_out
Converts a State into a problem
6259907344b2445a339b75e8
class FlashMessages(object):
    """Brief messages stored in the session and shown on the next page.

    Useful for create/edit/delete acknowledgements.
    """

    def __init__(self, controller):
        self.controller = controller
        # Inject the queued messages into the template context on render.
        self.controller.events.before_render += self._on_before_render

    def flash(self, message, level='info'):
        """Queue *message* at *level* ('error' is mapped to 'danger')."""
        flash = self.controller.session.get('__flash', {})
        if level == 'error':
            level = 'danger'
        import time
        # Older sessions may hold a list; reset to the dict format.
        if isinstance(flash, list):
            flash = {}
        # Keyed by time.time() so each message gets a distinct key.
        flash[time.time()] = {'message': message, 'level': level}
        self.controller.session['__flash'] = flash

    def messages(self, clear=True):
        """Return queued messages, clearing them from the session by default."""
        flashes = self.controller.session.get('__flash', None) or {}
        if clear:
            self.controller.session['__flash'] = {}
        return flashes

    def _on_before_render(self, controller, *args, **kwargs):
        # NOTE(review): passes the bound ``items`` method (not its result),
        # so the template evaluates it — presumably intentional; confirm.
        controller.context.set_dotted('this.flash_messages', self.messages().items)

    # Calling the instance directly is a shorthand for flash().
    __call__ = flash
Flash Messages are brief messages that are stored in the session and displayed to the user on the next page. These are useful for things like create/edit/delete acknowledgements.
625990734a966d76dd5f07fe
class JudgeDetailsView(generics.RetrieveUpdateDestroyAPIView):
    """Handles the HTTP GET, PUT and DELETE requests for a single Judge."""

    queryset = Judge.objects.all()
    serializer_class = JudgeSerializer
    authentication_classes = (
        TokenAuthentication,
        BasicAuthentication,
    )
This class handles the http GET, PUT and DELETE requests.
62599073adb09d7d5dc0be7e
class Collision(Sensor):
    """Sensor to detect objects colliding with the current object.

    Offers more settings than the Touch sensor: an optional
    ``collision_property`` restricts detection to objects carrying that
    property (empty string means all objects).
    """

    _name = "Collision"
    _short_desc = "Detect objects colliding with the current object."

    # Exported data fields and configurable properties (component DSL).
    add_data('collision', False, "bool", "objects colliding with the current object")
    add_data('objects', "", "string", "A list of colliding objects.")
    add_property('_collision_property', "", 'collision_property', 'string',
                 'Only look for objects with this property, '
                 'default "" (all objects)')

    def __init__(self, obj, parent=None):
        logger.info('%s initialization' % obj.name)
        Sensor.__init__(self, obj, parent)
        logger.info('Component initialized, runs at %.2f Hz', self.frequency)

    def default_action(self):
        """Refresh collision state from the game-engine collision sensor."""
        # The collision sensor is the last one in the controller's list.
        controller = blenderapi.controller()
        sensor = controller.sensors[-1]
        self.local_data['collision'] = sensor.positive
        self.local_data['objects'] = ','.join([o.name for o in sensor.hitObjectList])
Sensor to detect objects colliding with the current object, with more settings than the Touch sensor
625990735fcc89381b266de2
class StateDependentExplorer(Explorer, ParameterContainer):
    """Continuous explorer with state-dependent, additive Gaussian noise.

    The exploration matrix is drawn once per episode; the parameter(s)
    sigma relate to the noise standard deviation through the expln()
    transformation, which allows negative sigma values
    (see pybrain.tools.functions).
    """

    def __init__(self, statedim, actiondim, sigma= -2.):
        Explorer.__init__(self, actiondim, actiondim)
        self.statedim = statedim
        self.actiondim = actiondim
        # Initialize the sigma parameters.
        ParameterContainer.__init__(self, actiondim, stdParams=0)
        self.sigma = [sigma] * actiondim
        # Linear state->noise mapping, redrawn at each new episode.
        self.explmatrix = random.normal(0., expln(self.sigma), (statedim, actiondim))
        self.state = None

    def _setSigma(self, sigma):
        # Overwrite the module parameters in place with the new sigmas.
        assert len(sigma) == self.actiondim
        self._params *= 0
        self._params += sigma

    def _getSigma(self):
        return self.params

    sigma = property(_getSigma, _setSigma)

    def newEpisode(self):
        """Redraw the exploration matrix for the new episode."""
        self.explmatrix = random.normal(0., expln(self.sigma), self.explmatrix.shape)

    def activate(self, state, action):
        """Remember the current state, then run the module on the action."""
        self.state = state
        return Module.activate(self, action)

    def _forwardImplementation(self, inbuf, outbuf):
        # Perturb the action with the state-dependent noise term.
        outbuf[:] = inbuf + dot(self.state, self.explmatrix)

    def _backwardImplementation(self, outerr, inerr, outbuf, inbuf):
        # Derivatives of the log-likelihood w.r.t. the sigma parameters,
        # accumulated per (action j, state component i) pair.
        expln_params = expln(self.params).reshape(len(outbuf), len(self.state))
        explnPrime_params = explnPrime(self.params).reshape(len(outbuf), len(self.state))
        idx = 0
        for j in range(len(outbuf)):
            # Effective variance for action component j.
            sigma_subst2 = dot(self.state ** 2, expln_params[j, :]**2)
            for i in range(len(self.state)):
                self._derivs[idx] = ((outbuf[j] - inbuf[j]) ** 2 - sigma_subst2) / sigma_subst2 * self.state[i] ** 2 * expln_params[j, i] * explnPrime_params[j, i]
                idx += 1
            inerr[j] = (outbuf[j] - inbuf[j])
A continuous explorer, that perturbs the resulting action with additive, normally distributed random noise. The exploration has parameter(s) sigma, which are related to the distribution's standard deviation. In order to allow for negative values of sigma, the real std. derivation is a transformation of sigma according to the expln() function (see pybrain.tools.functions).
6259907399fddb7c1ca63a5e
class IUSToolsError(Exception):
    """Generic error carrying a message and a numeric status code."""

    def __init__(self, value, code=1):
        Exception.__init__(self)
        self.msg = value
        self.code = code

    def __str__(self):
        return self.msg

    def __unicode__(self):
        # The original called the Python-2-only builtin `unicode`, which
        # raises NameError on Python 3; __str__ already returns text, so
        # reuse it for both protocols.
        return self.__str__()
Generic errors.
625990734f6381625f19a134
class ClearNewProduct(ClearProduct, NewProductView):
    """Clear the new product from the session and redirect to Basic Info.

    Pure mix-in combination; all behaviour comes from the two parents.
    """
    pass
Clear new product from session and redirect to Basic Info.
625990737047854f46340ccd
class User(SAFRSBase, db.Model):
    """SQLAlchemy/SAFRS user model exposed through the JSON API."""

    __tablename__ = 'users'
    id = Column(String, primary_key=True)
    name = Column(String, default='')
    email = Column(String, default='')
    # CORS settings applied to the generated endpoints.
    crossdomain_kwargs = {
        'origin': '*',
        'methods': ['GET', 'PATCH', 'OPTIONS'],
        'headers': ['Content-Type'],
    }

    @documented_api_method
    def send_mail(self, email):
        """Append a fake "mail" record to /tmp/mail.txt and report it."""
        content = 'Mail to {} : {}\n'.format(self.name, email)
        with open('/tmp/mail.txt', 'a+') as mailfile:
            mailfile.write(content)
        return {'result': 'sent {}'.format(content)}
description: User description
625990738a43f66fc4bf3aaa
class EventsRequestInfo(Model):
    """The request info.

    :param name: The name of the request
    :param url: The URL of the request
    :param success: Indicates if the request was successful
    :param duration: The duration of the request
    :param performance_bucket: The performance bucket of the request
    :param result_code: The result code of the request
    :param source: The source of the request
    :param id: The ID of the request
    """

    # msrest serialization map: attribute -> (wire key, wire type).
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'url': {'key': 'url', 'type': 'str'},
        'success': {'key': 'success', 'type': 'str'},
        'duration': {'key': 'duration', 'type': 'float'},
        'performance_bucket': {'key': 'performanceBucket', 'type': 'str'},
        'result_code': {'key': 'resultCode', 'type': 'str'},
        'source': {'key': 'source', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
    }

    def __init__(self, *, name: str=None, url: str=None, success: str=None,
                 duration: float=None, performance_bucket: str=None,
                 result_code: str=None, source: str=None, id: str=None,
                 **kwargs) -> None:
        super(EventsRequestInfo, self).__init__(**kwargs)
        self.name = name
        self.url = url
        self.success = success
        self.duration = duration
        self.performance_bucket = performance_bucket
        self.result_code = result_code
        self.source = source
        self.id = id
The request info. :param name: The name of the request :type name: str :param url: The URL of the request :type url: str :param success: Indicates if the request was successful :type success: str :param duration: The duration of the request :type duration: float :param performance_bucket: The performance bucket of the request :type performance_bucket: str :param result_code: The result code of the request :type result_code: str :param source: The source of the request :type source: str :param id: The ID of the request :type id: str
6259907363b5f9789fe86a79
class GetCurrentSceneCollection(BaseRequest):
    """Get the name of the current scene collection.

    :Returns:
        *sc_name*
            type: String
            Name of the currently active scene collection.
    """

    name = 'GetCurrentSceneCollection'
    category = 'scene collections'
    fields = []

    def __init__(self):
        super().__init__()
        self.datain = {'sc-name': None}

    def __call__(self, cb=None):
        # Reuse the static payload builder instead of rebuilding the dict.
        ObsSocket().send(self.payload(), cb)

    @staticmethod
    def payload():
        return {'request-type': 'GetCurrentSceneCollection'}
Get the name of the current scene collection. :Returns: *sc_name* type: String Name of the currently active scene collection.
625990732ae34c7f260ac9f9
class Document(DocumentContent):
    """A new style Silva Document content type."""

    grok.implements(IDocument)
    meta_type = 'Silva Document'
    # Grok/Silva registration directives: versioned content class,
    # add-menu priority and icon.
    silvaconf.version_class(DocumentVersion)
    silvaconf.priority(-6)
    silvaconf.icon('document.png')
A new style Document.
62599073aad79263cf4300cc
class SaveToMongoDB(FillTraining): <NEW_LINE> <INDENT> def __init__( self, db_config: Tuple[str, str], name: str, keras_model: Model, save_initial_weights: bool=True, epoch_save_condition: Callable=None): <NEW_LINE> <INDENT> super().__init__(name, keras_model) <NEW_LINE> self._epoch_save_condition = epoch_save_condition <NEW_LINE> self._save_initial_weights = save_initial_weights <NEW_LINE> self.id = None <NEW_LINE> self.mongo_con = MongoDBConnect() <NEW_LINE> self.mongo_con.add_connections_from_config(Config.get_config_parser()) <NEW_LINE> self._db = self.mongo_con.get_db(*db_config) <NEW_LINE> self._collection = self._db["training"] <NEW_LINE> self.save() <NEW_LINE> <DEDENT> def on_train_begin(self, logs=None): <NEW_LINE> <INDENT> super().on_train_begin(logs) <NEW_LINE> self.update() <NEW_LINE> if self._save_initial_weights: <NEW_LINE> <INDENT> self.update_weights() <NEW_LINE> <DEDENT> <DEDENT> def on_batch_end(self, batch, logs=None): <NEW_LINE> <INDENT> super().on_batch_end(batch, logs) <NEW_LINE> self.update() <NEW_LINE> <DEDENT> def on_epoch_end(self, epoch, logs=None): <NEW_LINE> <INDENT> super().on_epoch_end(epoch, logs) <NEW_LINE> self.update() <NEW_LINE> if self._epoch_save_condition is None or self._epoch_save_condition(self._training): <NEW_LINE> <INDENT> self.update_weights() <NEW_LINE> <DEDENT> <DEDENT> def on_train_end(self, logs=None): <NEW_LINE> <INDENT> super().on_train_end(logs) <NEW_LINE> self.update() <NEW_LINE> self.mongo_con.reset_connections() <NEW_LINE> print("Saved Training to Database with ObjectId: " + str(self.id)) <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> data_dict = self._training.get_dict() <NEW_LINE> data_dict["metrics"] = None <NEW_LINE> data_dict["weights"] = [] <NEW_LINE> self.id = self._collection.insert_one(data_dict).inserted_id <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> data_dict = self._training.get_dict() <NEW_LINE> self._collection.update_one( {'_id': ObjectId(self.id)}, { '$set': data_dict } ) 
<NEW_LINE> <DEDENT> def update_weights(self): <NEW_LINE> <INDENT> fs = gridfs.GridFS(self._collection.database) <NEW_LINE> tmp_filename = "tmp_model_weights_save.h5" <NEW_LINE> if self._training.result.model is not None: <NEW_LINE> <INDENT> self._training.result.model.save(tmp_filename) <NEW_LINE> with open(tmp_filename, mode='rb') as file: <NEW_LINE> <INDENT> file_bytes = file.read() <NEW_LINE> model_gridfs = fs.put(file_bytes) <NEW_LINE> <DEDENT> os.remove(tmp_filename) <NEW_LINE> weights = { "model_gridfs": model_gridfs, "epoch": self._training.result.curr_epoch, "batch": self._training.result.curr_batch } <NEW_LINE> self._collection.update_one( {'_id': ObjectId(self.id)}, {'$push': {'weights': weights}} )
Callback to save Trainings & Results to a local MongoDB
625990738e7ae83300eea9a6
class MovFormat(BaseFormat): <NEW_LINE> <INDENT> format_name = 'mov' <NEW_LINE> ffmpeg_format_name = 'mov'
Mov container format, used mostly with H.264 video content, often for mobile platforms.
6259907376e4537e8c3f0e94
class Frequency(PropertyHolder): <NEW_LINE> <INDENT> enabled = BoolProperty(default=False, title="Report Frequency?") <NEW_LINE> report_interval = TimeDeltaProperty(default={"seconds": 1}, title="Report Interval") <NEW_LINE> averaging_interval = TimeDeltaProperty(default={"seconds": 5}, title="Averaging Interval")
An object to encapsulate frequency reporting configuration. Properties: enabled (bool): Is frequency reporting enabled? report_interval (timedelta): The interval at which to report the frequency. averaging_interval (timedelta): The period over which frequencies are calculated.
6259907391f36d47f2231b19
class IHostVideoInputDevice(Interface): <NEW_LINE> <INDENT> __uuid__ = 'a1ceae44-d65e-4156-9359-d390f93ee9a0' <NEW_LINE> __wsmap__ = 'managed' <NEW_LINE> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> ret = self._get_attr("name") <NEW_LINE> return ret <NEW_LINE> <DEDENT> @property <NEW_LINE> def path(self): <NEW_LINE> <INDENT> ret = self._get_attr("path") <NEW_LINE> return ret <NEW_LINE> <DEDENT> @property <NEW_LINE> def alias(self): <NEW_LINE> <INDENT> ret = self._get_attr("alias") <NEW_LINE> return ret
Represents one of host's video capture devices, for example a webcam.
625990738e7ae83300eea9a7
class ThumbnailBackend(object): <NEW_LINE> <INDENT> default_options = { 'format': settings.THUMBNAIL_FORMAT, 'quality': settings.THUMBNAIL_QUALITY, 'colorspace': settings.THUMBNAIL_COLORSPACE, 'upscale': settings.THUMBNAIL_UPSCALE, 'crop': False, } <NEW_LINE> extra_options = ( ('progressive', 'THUMBNAIL_PROGRESSIVE'), ('orientation', 'THUMBNAIL_ORIENTATION'), ) <NEW_LINE> file_extension = lambda inst, file_: str(file_).split('.')[-1].lower() <NEW_LINE> def _get_format(self, file_): <NEW_LINE> <INDENT> file_extension = self.file_extension(file_) <NEW_LINE> is_jpeg = re.match('jpg|jpeg', file_extension) <NEW_LINE> is_png = re.match('png', file_extension) <NEW_LINE> if is_jpeg: <NEW_LINE> <INDENT> format_ = 'JPEG' <NEW_LINE> <DEDENT> elif is_png: <NEW_LINE> <INDENT> format_ = 'PNG' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> format_ = default_settings.THUMBNAIL_FORMAT <NEW_LINE> <DEDENT> return str(format_) <NEW_LINE> <DEDENT> def get_thumbnail(self, file_, geometry_string, **options): <NEW_LINE> <INDENT> source = ImageFile(file_) <NEW_LINE> if settings.THUMBNAIL_PRESERVE_FORMAT: <NEW_LINE> <INDENT> self.default_options['format'] = self._get_format(file_) <NEW_LINE> <DEDENT> for key, value in self.default_options.iteritems(): <NEW_LINE> <INDENT> options.setdefault(key, value) <NEW_LINE> <DEDENT> for key, attr in self.extra_options: <NEW_LINE> <INDENT> value = getattr(settings, attr) <NEW_LINE> if value != getattr(default_settings, attr): <NEW_LINE> <INDENT> options.setdefault(key, value) <NEW_LINE> <DEDENT> <DEDENT> name = self._get_thumbnail_filename(source, geometry_string, options) <NEW_LINE> thumbnail = ImageFile(name, default.storage) <NEW_LINE> cached = default.kvstore.get(thumbnail) <NEW_LINE> if cached: <NEW_LINE> <INDENT> return cached <NEW_LINE> <DEDENT> if not thumbnail.exists(): <NEW_LINE> <INDENT> source_image = default.engine.get_image(source) <NEW_LINE> size = default.engine.get_image_size(source_image) <NEW_LINE> source.set_size(size) <NEW_LINE> 
self._create_thumbnail(source_image, geometry_string, options, thumbnail) <NEW_LINE> <DEDENT> default.kvstore.get_or_set(source) <NEW_LINE> default.kvstore.set(thumbnail, source) <NEW_LINE> return thumbnail <NEW_LINE> <DEDENT> def delete(self, file_, delete_file=True): <NEW_LINE> <INDENT> image_file = ImageFile(file_) <NEW_LINE> if delete_file: <NEW_LINE> <INDENT> image_file.delete() <NEW_LINE> <DEDENT> default.kvstore.delete(image_file) <NEW_LINE> <DEDENT> def _create_thumbnail(self, source_image, geometry_string, options, thumbnail): <NEW_LINE> <INDENT> ratio = default.engine.get_image_ratio(source_image) <NEW_LINE> geometry = parse_geometry(geometry_string, ratio) <NEW_LINE> image = default.engine.create(source_image, geometry, options) <NEW_LINE> default.engine.write(image, options, thumbnail) <NEW_LINE> size = default.engine.get_image_size(image) <NEW_LINE> thumbnail.set_size(size) <NEW_LINE> <DEDENT> def _get_thumbnail_filename(self, source, geometry_string, options): <NEW_LINE> <INDENT> key = tokey(source.key, geometry_string, serialize(options)) <NEW_LINE> path = '%s/%s/%s' % (key[:2], key[2:4], key) <NEW_LINE> return '%s%s.%s' % (settings.THUMBNAIL_PREFIX, path, EXTENSIONS[options['format']])
The main class for sorl-thumbnail, you can subclass this if you for example want to change the way destination filename is generated.
62599073d268445f2663a7e8
class EventListener(EventListenerBase): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.sock = None <NEW_LINE> self.ip_address = None <NEW_LINE> self.port = None <NEW_LINE> self.runner = None <NEW_LINE> self.site = None <NEW_LINE> self.session = None <NEW_LINE> self.start_lock = None <NEW_LINE> <DEDENT> def start(self, any_zone): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def listen(self, ip_address): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> async def async_start(self, any_zone): <NEW_LINE> <INDENT> if not self.start_lock: <NEW_LINE> <INDENT> self.start_lock = asyncio.Lock() <NEW_LINE> <DEDENT> async with self.start_lock: <NEW_LINE> <INDENT> if self.is_running: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> ip_address = get_listen_ip(any_zone.ip_address) <NEW_LINE> if not ip_address: <NEW_LINE> <INDENT> log.exception("Could not start Event Listener: check network.") <NEW_LINE> return <NEW_LINE> <DEDENT> port = await self.async_listen(ip_address) <NEW_LINE> if not port: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.address = (ip_address, port) <NEW_LINE> client_timeout = ClientTimeout(total=10) <NEW_LINE> self.session = ClientSession(raise_for_status=True, timeout=client_timeout) <NEW_LINE> self.is_running = True <NEW_LINE> log.debug("Event Listener started") <NEW_LINE> <DEDENT> <DEDENT> async def async_listen(self, ip_address): <NEW_LINE> <INDENT> for port_number in range( self.requested_port_number, self.requested_port_number + 100 ): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if port_number > self.requested_port_number: <NEW_LINE> <INDENT> log.debug("Trying next port (%d)", port_number) <NEW_LINE> <DEDENT> sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> sock.bind((ip_address, port_number)) <NEW_LINE> sock.listen(200) <NEW_LINE> self.sock = sock <NEW_LINE> self.port = port_number <NEW_LINE> break <NEW_LINE> <DEDENT> except socket.error as e: <NEW_LINE> <INDENT> log.warning("Could 
not bind to %s:%s: %s", ip_address, port_number, e) <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> if not self.port: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> self.ip_address = ip_address <NEW_LINE> await self._async_start() <NEW_LINE> return self.port <NEW_LINE> <DEDENT> async def _async_start(self): <NEW_LINE> <INDENT> handler = EventNotifyHandler() <NEW_LINE> app = web.Application() <NEW_LINE> app.add_routes([web.route("notify", "", handler.notify)]) <NEW_LINE> self.runner = web.AppRunner(app) <NEW_LINE> await self.runner.setup() <NEW_LINE> self.site = web.SockSite(self.runner, self.sock) <NEW_LINE> await self.site.start() <NEW_LINE> log.debug("Event listener running on %s", (self.ip_address, self.port)) <NEW_LINE> <DEDENT> async def async_stop(self): <NEW_LINE> <INDENT> if self.site: <NEW_LINE> <INDENT> await self.site.stop() <NEW_LINE> self.site = None <NEW_LINE> <DEDENT> if self.runner: <NEW_LINE> <INDENT> await self.runner.cleanup() <NEW_LINE> self.runner = None <NEW_LINE> <DEDENT> if self.session: <NEW_LINE> <INDENT> await self.session.close() <NEW_LINE> self.session = None <NEW_LINE> <DEDENT> if self.sock: <NEW_LINE> <INDENT> self.sock.close() <NEW_LINE> self.sock = None <NEW_LINE> <DEDENT> self.port = None <NEW_LINE> self.ip_address = None <NEW_LINE> <DEDENT> def stop_listening(self, address): <NEW_LINE> <INDENT> asyncio.ensure_future(self.async_stop())
The Event Listener. Runs an http server which is an endpoint for ``NOTIFY`` requests from Sonos devices. Inherits from `soco.events_base.EventListenerBase`.
625990731f5feb6acb164509
class ControlTool(Tool): <NEW_LINE> <INDENT> def __init__(self, name, model): <NEW_LINE> <INDENT> super(ControlTool, self).__init__(name, model) <NEW_LINE> <DEDENT> def canDraw(self): <NEW_LINE> <INDENT> return False
ControlTool abstract class. This class defines tools able to do some sort of control.
62599073442bda511e95d9e2
class DeviceBackendTestCase(TestCase): <NEW_LINE> <INDENT> def test_mds_backend(self): <NEW_LINE> <INDENT> device = Device.objects.create(name='test_mds_device', description='Test MDS Device', data_backend='mds') <NEW_LINE> device.full_clean() <NEW_LINE> <DEDENT> def test_h1_backend(self): <NEW_LINE> <INDENT> device = Device.objects.create(name='test_h1_device', description='Test H1 Device', data_backend='h1') <NEW_LINE> device.full_clean() <NEW_LINE> <DEDENT> def test_hdf5_backend(self): <NEW_LINE> <INDENT> device = Device.objects.create(name='test_hdf5_device', description='Test HDF5 Device', data_backend='hdf5') <NEW_LINE> device.full_clean()
Make sure we can create a device for each backend.
6259907399cbb53fe6832800
class KNearestNeighbor(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def train(self, X, y): <NEW_LINE> <INDENT> self.X_train = X <NEW_LINE> self.y_train = y <NEW_LINE> <DEDENT> def predict(self, X, k=1, num_loops=0): <NEW_LINE> <INDENT> if num_loops == 0: <NEW_LINE> <INDENT> dists = self.compute_distances_no_loops(X) <NEW_LINE> <DEDENT> elif num_loops == 1: <NEW_LINE> <INDENT> dists = self.compute_distances_one_loop(X) <NEW_LINE> <DEDENT> elif num_loops == 2: <NEW_LINE> <INDENT> dists = self.compute_distances_two_loops(X) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Invalid value %d for num_loops' % num_loops) <NEW_LINE> <DEDENT> return self.predict_labels(dists, k=k) <NEW_LINE> <DEDENT> def compute_distances_two_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in xrange(num_test): <NEW_LINE> <INDENT> for j in xrange(num_train): <NEW_LINE> <INDENT> dif=X[i]-self.X_train[j] <NEW_LINE> dists[i,j]=np.sqrt(np.sum(np.power(dif,2))) <NEW_LINE> <DEDENT> <DEDENT> return dists <NEW_LINE> <DEDENT> def compute_distances_one_loop(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in xrange(num_test): <NEW_LINE> <INDENT> dif=self.X_train-X[i] <NEW_LINE> p=np.power(dif,2) <NEW_LINE> dists[i]=np.sqrt(np.sum(p,axis= 1)) <NEW_LINE> <DEDENT> return dists <NEW_LINE> <DEDENT> def compute_distances_no_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> x1=np.sum(X*X,axis= 1) <NEW_LINE> x2=np.reshape(x1,(num_test,1)) <NEW_LINE> y1=np.sum(self.X_train*self.X_train,axis= 1) <NEW_LINE> xy1=np.dot(X,self.X_train.T) <NEW_LINE> dists=np.sqrt(-2*xy1+x2+y1) <NEW_LINE> return dists 
<NEW_LINE> <DEDENT> def predict_labels(self, dists, k=1): <NEW_LINE> <INDENT> num_test = dists.shape[0] <NEW_LINE> y_pred = np.zeros(num_test) <NEW_LINE> for i in xrange(num_test): <NEW_LINE> <INDENT> closest_y = [] <NEW_LINE> dist=dists[i] <NEW_LINE> list=self.y_train[np.argsort(dist)] <NEW_LINE> pred_k=list[0:k] <NEW_LINE> tmp1=np.bincount(pred_k) <NEW_LINE> y_pred[i]=np.argmax(tmp1) <NEW_LINE> <DEDENT> return y_pred
a kNN classifier with L2 distance
62599073009cb60464d02e50
class InNode(Node): <NEW_LINE> <INDENT> def __init__(self, world): <NEW_LINE> <INDENT> super().__init__(world) <NEW_LINE> self.nchannels = world.nchannels <NEW_LINE> self.w_out = [] <NEW_LINE> for i in range(self.nchannels): <NEW_LINE> <INDENT> self.w_out.append(OutWire(self, world.buf_len)) <NEW_LINE> <DEDENT> self.w_level = InWire(self, 1) <NEW_LINE> <DEDENT> def calc_func(self): <NEW_LINE> <INDENT> level = self.w_level.get_data() <NEW_LINE> for i in range(self.nchannels): <NEW_LINE> <INDENT> self.w_out[i].set_data(self.world.inBuffer[i, :] * level)
in_wires: - w_level : controlRate out_wires: - w_out : list, audioRate
625990735166f23b2e244ceb
class gwo_test_case(_ut.TestCase): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> from .core import gwo, algorithm <NEW_LINE> from pickle import loads, dumps <NEW_LINE> uda = gwo() <NEW_LINE> uda = gwo(gen=1000, seed=5) <NEW_LINE> self.assertEqual(uda.get_seed(), 5) <NEW_LINE> a = algorithm(uda) <NEW_LINE> self.assertEqual(str(a), str(loads(dumps(a)))) <NEW_LINE> log = uda.get_log()
Test case for the UDA gwo
625990734527f215b58eb62b
class LinterFailure(Exception): <NEW_LINE> <INDENT> def __init__(self, message, repl): <NEW_LINE> <INDENT> super(LinterFailure, self).__init__() <NEW_LINE> self.message = message <NEW_LINE> self.replacement = repl <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str("{0}".format(self.message))
Exception raised when the linter reports a message.
625990734f88993c371f11ac
class NellaUser: <NEW_LINE> <INDENT> username = "" <NEW_LINE> email = "" <NEW_LINE> def __init__(self, username, email): <NEW_LINE> <INDENT> self.username = username <NEW_LINE> self.email = email <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<NellaUser [\"%s\", \"%s\"]>" % (self.username, self.email)
Represents TKL user Attributes: email (str): User email address username (str): Username
62599073ec188e330fdfa1bb
class InstanceElement(use_metaclass(cache.CachedMetaClass)): <NEW_LINE> <INDENT> def __init__(self, instance, var, is_class_var=False): <NEW_LINE> <INDENT> if isinstance(var, parsing.Function): <NEW_LINE> <INDENT> var = Function(var) <NEW_LINE> <DEDENT> elif isinstance(var, parsing.Class): <NEW_LINE> <INDENT> var = Class(var) <NEW_LINE> <DEDENT> self.instance = instance <NEW_LINE> self.var = var <NEW_LINE> self.is_class_var = is_class_var <NEW_LINE> <DEDENT> @property <NEW_LINE> @cache.memoize_default() <NEW_LINE> def parent(self): <NEW_LINE> <INDENT> par = self.var.parent <NEW_LINE> if isinstance(par, Class) and par == self.instance.base or isinstance(par, parsing.Class) and par == self.instance.base.base: <NEW_LINE> <INDENT> par = self.instance <NEW_LINE> <DEDENT> elif not isinstance(par, parsing.Module): <NEW_LINE> <INDENT> par = InstanceElement(self.instance, par, self.is_class_var) <NEW_LINE> <DEDENT> return par <NEW_LINE> <DEDENT> def get_parent_until(self, *args, **kwargs): <NEW_LINE> <INDENT> return parsing.Simple.get_parent_until(self, *args, **kwargs) <NEW_LINE> <DEDENT> def get_decorated_func(self): <NEW_LINE> <INDENT> func = self.var.get_decorated_func() <NEW_LINE> if func == self.var: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> return func <NEW_LINE> <DEDENT> def get_assignment_calls(self): <NEW_LINE> <INDENT> origin = self.var.get_assignment_calls() <NEW_LINE> new = helpers.fast_parent_copy(origin) <NEW_LINE> par = InstanceElement(self.instance, origin.parent_stmt, self.is_class_var) <NEW_LINE> new.parent_stmt = par <NEW_LINE> return new <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> return getattr(self.var, name) <NEW_LINE> <DEDENT> def isinstance(self, *cls): <NEW_LINE> <INDENT> return isinstance(self.var, cls) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<%s of %s>" % (type(self).__name__, self.var)
InstanceElement is a wrapper for any object, that is used as an instance variable (e.g. self.variable or class methods).
625990734a966d76dd5f0801
class Commit(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): <NEW_LINE> <INDENT> SYNCHRONOUS_COMMIT = "SYNCHRONOUS_COMMIT" <NEW_LINE> ASYNCHRONOUS_COMMIT = "ASYNCHRONOUS_COMMIT"
Replica commit mode in availability group.
62599073fff4ab517ebcf132
class ChillTouch(Spell): <NEW_LINE> <INDENT> name = "Chill Touch" <NEW_LINE> level = 0 <NEW_LINE> casting_time = "1 action" <NEW_LINE> casting_range = "120 feet" <NEW_LINE> components = ('V', 'S') <NEW_LINE> materials = "" <NEW_LINE> duration = "1 round" <NEW_LINE> magic_school = "Necromancy" <NEW_LINE> classes = ('Sorceror', 'Warlock', 'Wizard')
You create a ghostly, skeletal hand in the space of a creature within range. Make a ranged spell attack including spell attack bonus, against the creature to assail it with the chill of the grave. On a hit, the target takes 1d8 necrotic damage, and it can't regain hit points until the start of your next turn. Until then, the hand clings to the target. If you hit an undead target, it also has disadvantage on attack rolls against you until the end of your next turn. This spell's damage increases by 1d8 when you reach 5th level (2d8), 11th level (3d8), and 17th level (4d8).
625990738e7ae83300eea9a8
class OperationDisplay(Model): <NEW_LINE> <INDENT> _validation = { 'provider': {'readonly': True}, 'resource': {'readonly': True}, 'operation': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'provider': {'key': 'provider', 'type': 'str'}, 'resource': {'key': 'resource', 'type': 'str'}, 'operation': {'key': 'operation', 'type': 'str'}, } <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.provider = None <NEW_LINE> self.resource = None <NEW_LINE> self.operation = None
The object that represents the operation. Variables are only populated by the server, and will be ignored when sending a request. :ivar provider: Service provider: Microsoft.Cdn :vartype provider: str :ivar resource: Resource on which the operation is performed: Profile, endpoint, etc. :vartype resource: str :ivar operation: Operation type: Read, write, delete, etc. :vartype operation: str
625990731b99ca40022901c1
class rotateZy(bpy.types.Operator): <NEW_LINE> <INDENT> bl_label = "Zy 45°" <NEW_LINE> bl_idname = "mesh.face_rotate_zy45" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> bpy.ops.mesh.rot_con(axis='2', caxis='1', rdeg=45) <NEW_LINE> return {"FINISHED"}
rotate selected face > Zy 45°
6259907376e4537e8c3f0e96
class TopologyMutexFormula(ActivationOutcomesFormula): <NEW_LINE> <INDENT> def __init__(self, ts): <NEW_LINE> <INDENT> super(TopologyMutexFormula, self).__init__(sys_props = [], outcomes = ['completed'], ts = ts) <NEW_LINE> self.formulas = self.gen_mutex_formulas(self.env_props, future = True) <NEW_LINE> self.type = 'env_trans'
Generate environment assumptions/constraints that enforce mutual exclusion between the topology propositions; Eq. (1) The transition system TS, is provided in the form of a dictionary.
625990738e7ae83300eea9a9
class CircularOrientedPathway(OrientedPathway): <NEW_LINE> <INDENT> def __init__(self, start, stop, normal, angle): <NEW_LINE> <INDENT> self._normal = direction(normal) <NEW_LINE> self._angle = angle <NEW_LINE> delta = stop.position - start.position <NEW_LINE> midpoint = (stop.position + start.position) / 2 <NEW_LINE> half_height = length(delta) / 2 <NEW_LINE> half_angle = angle / 2 <NEW_LINE> self._radius = half_height / math.sin(half_angle) <NEW_LINE> center_displacement = self._radius * math.cos(half_angle) <NEW_LINE> toward_center = direction(np.cross(self._normal, delta)) <NEW_LINE> self._center = midpoint + center_displacement * toward_center <NEW_LINE> self._radial = direction(start.position - self._center) <NEW_LINE> self._rotary = np.cross(self._normal, self._radial) <NEW_LINE> super().__init__( start=OrientedNode(node=start, normal=self._normal, surface=self._radial), stop=OrientedNode(node=stop, normal=self._normal, surface=direction(stop.position - self._center)), length=self._radius * self._angle, ) <NEW_LINE> <DEDENT> def _interpolation(self, phase): <NEW_LINE> <INDENT> angle = self._angle * phase <NEW_LINE> surface = math.cos(angle) * self._radial + math.sin(angle) * self._rotary <NEW_LINE> position = self._center + self._radius * surface <NEW_LINE> return OrientedNode(position=position, normal=self._normal, surface=surface)
Special case of OrientedPathway along a circular arc.
62599073627d3e7fe0e087a0
class RandomNavigator: <NEW_LINE> <INDENT> def __init__(self, seq): <NEW_LINE> <INDENT> self.seq = seq <NEW_LINE> self.last = None <NEW_LINE> self.current = random.choice(self.seq) <NEW_LINE> <DEDENT> def move_next(self): <NEW_LINE> <INDENT> self.last = self.current <NEW_LINE> self.current = random.choice(self.seq) <NEW_LINE> while self.current == self.last: <NEW_LINE> <INDENT> self.current = random.choice(self.seq) <NEW_LINE> <DEDENT> <DEDENT> def get(self): <NEW_LINE> <INDENT> return self.current
Navigates over a sequence in a random fashion, ensuring no consecutive repetition.
625990734428ac0f6e659e4c
class CreateFolderResultSet(ResultSet): <NEW_LINE> <INDENT> def getJSONFromString(self, str): <NEW_LINE> <INDENT> return json.loads(str) <NEW_LINE> <DEDENT> def get_Token(self): <NEW_LINE> <INDENT> return self._output.get('Token', None) <NEW_LINE> <DEDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None)
A ResultSet with methods tailored to the values returned by the CreateFolder Choreo. The ResultSet object is used to retrieve the results of a Choreo execution.
62599073b7558d5895464bbf
class AdSchedule(models.Model): <NEW_LINE> <INDENT> name = models.CharField(blank=True, max_length=255) <NEW_LINE> description = models.TextField(blank=True) <NEW_LINE> deleted = models.BooleanField(default=False) <NEW_LINE> date_start = models.DateTimeField(help_text="Date/Time Ad Schedule Should Begin To Appear In Feeds.") <NEW_LINE> date_end = models.DateTimeField(help_text="Date/Time Ad Schedule Should Expire (Use Year 01-JAN-2100 For No Expire).") <NEW_LINE> priority = models.IntegerField(default=10, help_text="Higher Numbers Will Insert Earlier If Conflict.") <NEW_LINE> date_created = models.DateTimeField(auto_now_add=True) <NEW_LINE> date_updated = models.DateTimeField(auto_now=True) <NEW_LINE> titles = models.ManyToManyField(Title, blank=True, related_name='ad_schedules') <NEW_LINE> class Meta(object): <NEW_LINE> <INDENT> ordering = ['name'] <NEW_LINE> verbose_name_plural = "Ad Schedules" <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.name
Ad Schedules Control What Ads Get Inserted Where For A Title.
6259907321bff66bcd724580
class TestArrayToIndexDeprecation(TestCase): <NEW_LINE> <INDENT> def test_array_to_index_error(self): <NEW_LINE> <INDENT> a = np.array([[[1]]]) <NEW_LINE> assert_raises(TypeError, operator.index, np.array([1])) <NEW_LINE> assert_raises(TypeError, np.reshape, a, (a, -1)) <NEW_LINE> assert_raises(TypeError, np.take, a, [0], a)
Creating an an index from array not 0-D is an error.
6259907360cbc95b063659f9
class AcreditadoraAutoComplete(autocomplete.Select2QuerySetView): <NEW_LINE> <INDENT> def get_queryset(self): <NEW_LINE> <INDENT> queryset = Ieu.objects.all() <NEW_LINE> if self.q: <NEW_LINE> <INDENT> queryset = queryset.filter(ieu_acreditadora_edit_icontains=self.q) <NEW_LINE> <DEDENT> return queryset
AutoComplete para filtrar listado del modelo Acreditadora.
6259907399cbb53fe6832802
class LIFOQueue(FIFOQueue): <NEW_LINE> <INDENT> def pop(self): <NEW_LINE> <INDENT> return self.nodes.pop() <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return reversed(self.nodes)
A last-in-first-out queue. Used to get depth first search behavior. >>> lifo = LIFOQueue() >>> lifo.push(0) >>> lifo.push(1) >>> lifo.push(2) >>> list(lifo) [2, 1, 0] >>> print(lifo.pop()) 2 >>> print(lifo.pop()) 1 >>> print(lifo.pop()) 0
6259907326068e7796d4e255
class DocsView(APIView): <NEW_LINE> <INDENT> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> apidocs = {'products': request.build_absolute_uri('product/'), 'categories': request.build_absolute_uri('category/'), 'providers': request.build_absolute_uri('provider/'), 'user creation': request.build_absolute_uri('user_create/'), } <NEW_LINE> return Response(apidocs)
RESTFul Documentation of my app
62599073ad47b63b2c5a9167
class BaseApiTestCase(TestCaseMixin, APITestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> APITestCase.setUp(self) <NEW_LINE> TestCaseMixin.setUp(self) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> TestCaseMixin.tearDown(self) <NEW_LINE> APITestCase.tearDown(self)
Base Django rest framework api test case class
62599073ec188e330fdfa1bd
class Control(Point): <NEW_LINE> <INDENT> def __init__(self, point, number): <NEW_LINE> <INDENT> super().__init__(point.lat, point.lon) <NEW_LINE> self.number = number <NEW_LINE> <DEDENT> def getNumber(self): <NEW_LINE> <INDENT> return self.number <NEW_LINE> <DEDENT> def getScore(self): <NEW_LINE> <INDENT> return 10 * math.ceil(self.number / 10)
A Point with a control number.
625990734f6381625f19a136
class Message: <NEW_LINE> <INDENT> def __init__(self, t: str, *args): <NEW_LINE> <INDENT> self.t = t <NEW_LINE> self.args = args
Helper class describing an IPC message.
6259907355399d3f05627e31
class nodeStatistics(): <NEW_LINE> <INDENT> def __init__(self, nodename): <NEW_LINE> <INDENT> self.nodename = nodename <NEW_LINE> self.node_mppdb_cpu_busy_time = None <NEW_LINE> self.node_host_cpu_busy_time = None <NEW_LINE> self.node_host_cpu_total_time = None <NEW_LINE> self.node_mppdb_cpu_time_in_busy_time = None <NEW_LINE> self.node_mppdb_cpu_time_in_total_time = None <NEW_LINE> self.node_physical_memory = None <NEW_LINE> self.node_db_memory_usage = None <NEW_LINE> self.node_shared_buffer_size = None <NEW_LINE> self.node_shared_buffer_hit_ratio = None <NEW_LINE> self.node_in_memory_sorts = None <NEW_LINE> self.node_in_disk_sorts = None <NEW_LINE> self.node_in_memory_sort_ratio = None <NEW_LINE> self.node_number_of_files = None <NEW_LINE> self.node_physical_reads = None <NEW_LINE> self.node_physical_writes = None <NEW_LINE> self.node_read_time = None <NEW_LINE> self.node_write_time = None
Class for stating node message
62599073baa26c4b54d50bc7
class ReaderGroupDataType(FrozenClass): <NEW_LINE> <INDENT> ua_types = [ ('Name', 'String'), ('Enabled', 'Boolean'), ('SecurityMode', 'MessageSecurityMode'), ('SecurityGroupId', 'String'), ('SecurityKeyServices', 'ListOfEndpointDescription'), ('MaxNetworkMessageSize', 'UInt32'), ('GroupProperties', 'ListOfKeyValuePair'), ('TransportSettings', 'ExtensionObject'), ('MessageSettings', 'ExtensionObject'), ('DataSetReaders', 'ListOfDataSetReaderDataType'), ] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.Name = None <NEW_LINE> self.Enabled = True <NEW_LINE> self.SecurityMode = MessageSecurityMode(0) <NEW_LINE> self.SecurityGroupId = None <NEW_LINE> self.SecurityKeyServices = [] <NEW_LINE> self.MaxNetworkMessageSize = 0 <NEW_LINE> self.GroupProperties = [] <NEW_LINE> self.TransportSettings = ExtensionObject() <NEW_LINE> self.MessageSettings = ExtensionObject() <NEW_LINE> self.DataSetReaders = [] <NEW_LINE> self._freeze = True <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f'ReaderGroupDataType(Name:{self.Name}, Enabled:{self.Enabled}, SecurityMode:{self.SecurityMode}, SecurityGroupId:{self.SecurityGroupId}, SecurityKeyServices:{self.SecurityKeyServices}, MaxNetworkMessageSize:{self.MaxNetworkMessageSize}, GroupProperties:{self.GroupProperties}, TransportSettings:{self.TransportSettings}, MessageSettings:{self.MessageSettings}, DataSetReaders:{self.DataSetReaders})' <NEW_LINE> <DEDENT> __repr__ = __str__
:ivar Name: :vartype Name: String :ivar Enabled: :vartype Enabled: Boolean :ivar SecurityMode: :vartype SecurityMode: MessageSecurityMode :ivar SecurityGroupId: :vartype SecurityGroupId: String :ivar SecurityKeyServices: :vartype SecurityKeyServices: EndpointDescription :ivar MaxNetworkMessageSize: :vartype MaxNetworkMessageSize: UInt32 :ivar GroupProperties: :vartype GroupProperties: KeyValuePair :ivar TransportSettings: :vartype TransportSettings: ExtensionObject :ivar MessageSettings: :vartype MessageSettings: ExtensionObject :ivar DataSetReaders: :vartype DataSetReaders: DataSetReaderDataType
625990737b25080760ed8971
class Rectangle: <NEW_LINE> <INDENT> def __init__(self, p1: Point, p2: Point): <NEW_LINE> <INDENT> self.p1: Point = p1 <NEW_LINE> self.p2: Point = p2 <NEW_LINE> <DEDENT> def left(self) -> int: <NEW_LINE> <INDENT> return min(self.p1.x, self.p2.x) <NEW_LINE> <DEDENT> def right(self) -> int: <NEW_LINE> <INDENT> return max(self.p1.x, self.p2.x) <NEW_LINE> <DEDENT> def top(self) -> int: <NEW_LINE> <INDENT> return min(self.p1.y, self.p2.y) <NEW_LINE> <DEDENT> def bottom(self) -> int: <NEW_LINE> <INDENT> return max(self.p1.y, self.p2.y) <NEW_LINE> <DEDENT> def width(self) -> int: <NEW_LINE> <INDENT> return abs(self.p1.x - self.p2.x) + 1 <NEW_LINE> <DEDENT> def height(self) -> int: <NEW_LINE> <INDENT> return abs(self.p1.y - self.p2.y) + 1 <NEW_LINE> <DEDENT> def area(self) -> int: <NEW_LINE> <INDENT> return self.width() * self.height() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def x_y_w_h(cls, x: int, y: int, w: int, h: int) -> "Rectangle": <NEW_LINE> <INDENT> if w < 0: <NEW_LINE> <INDENT> x += w <NEW_LINE> w = abs(w) <NEW_LINE> <DEDENT> if h < 0: <NEW_LINE> <INDENT> y += h <NEW_LINE> h = abs(h) <NEW_LINE> <DEDENT> return cls(Point(x, y), Point(x + w - 1, y + h - 1)) <NEW_LINE> <DEDENT> def intersects(self, other: "Rectangle") -> bool: <NEW_LINE> <INDENT> if not self.contains_x(other.left()) and not other.contains_x(self.left()): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if not self.contains_y(other.top()) and not other.contains_y(self.top()): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def intersection(self, other: "Rectangle") -> Optional["Rectangle"]: <NEW_LINE> <INDENT> if not self.intersects(other): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return Rectangle(Point(max(self.left(), other.left()), max(self.top(), other.top())), Point(min(self.right(), other.right()), min(self.bottom(), other.bottom()))) <NEW_LINE> <DEDENT> def contains_x(self, x: int) -> bool: <NEW_LINE> <INDENT> return self.left() <= x <= 
self.right() <NEW_LINE> <DEDENT> def contains_y(self, y: int) -> bool: <NEW_LINE> <INDENT> return self.top() <= y <= self.bottom() <NEW_LINE> <DEDENT> def __contains__(self, point: Point) -> bool: <NEW_LINE> <INDENT> return self.contains_x(point.x) and self.contains_y(point.y) <NEW_LINE> <DEDENT> def off_diagonal_points(self) -> Tuple[Point, Point]: <NEW_LINE> <INDENT> return Point(self.p1.x, self.p2.y), Point(self.p2.x, self.p1.y) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash((self.left(), self.top(), self.right(), self.bottom())) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return (isinstance(other, Rectangle) and self.left() == other.left() and self.right() == other.right() and self.top() == other.top() and self.bottom() == other.bottom())
Represents an axis-aligned rectangle in 2D integer Cartesian coordinates.
62599073009cb60464d02e54
class ApplicationGatewayBackendHealthHttpSettings(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'backend_http_settings': {'key': 'backendHttpSettings', 'type': 'ApplicationGatewayBackendHttpSettings'}, 'servers': {'key': 'servers', 'type': '[ApplicationGatewayBackendHealthServer]'}, } <NEW_LINE> def __init__( self, *, backend_http_settings: Optional["ApplicationGatewayBackendHttpSettings"] = None, servers: Optional[List["ApplicationGatewayBackendHealthServer"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ApplicationGatewayBackendHealthHttpSettings, self).__init__(**kwargs) <NEW_LINE> self.backend_http_settings = backend_http_settings <NEW_LINE> self.servers = servers
Application gateway BackendHealthHttp settings. :param backend_http_settings: Reference of an ApplicationGatewayBackendHttpSettings resource. :type backend_http_settings: ~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayBackendHttpSettings :param servers: List of ApplicationGatewayBackendHealthServer resources. :type servers: list[~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayBackendHealthServer]
625990735fcc89381b266de5
class SingleBookFactory(BaseBookFactory): <NEW_LINE> <INDENT> id = 999999 <NEW_LINE> title = "Performance optimisation" <NEW_LINE> publisher = SubFactory('factories.books_publisher.SinglePublisherFactory') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> django_get_or_create = ('id',) <NEW_LINE> <DEDENT> @post_generation <NEW_LINE> def authors(obj, created, extracted, **kwargs): <NEW_LINE> <INDENT> if created: <NEW_LINE> <INDENT> author = SingleAuthorFactory() <NEW_LINE> obj.authors.add(author)
Book factory, but limited to a single book.
62599073d486a94d0ba2d8d3
class ActiveDirectoryUPNException(Exception): <NEW_LINE> <INDENT> pass
Raised in case the user's login credentials cannot be mapped to a UPN
62599073fff4ab517ebcf135
class RegexValidator(Validator): <NEW_LINE> <INDENT> default_error_messages = { 'invalid': 'This value does not match the required pattern.' } <NEW_LINE> def __init__(self, regex, error_messages=None): <NEW_LINE> <INDENT> super().__init__(error_messages) <NEW_LINE> self.regex = regex <NEW_LINE> <DEDENT> def __call__(self, value): <NEW_LINE> <INDENT> if self.regex.match(value) is None: <NEW_LINE> <INDENT> self._fail('invalid', value=value, regex=self.regex.pattern) <NEW_LINE> <DEDENT> return value
Validator which succeeds if the `value` matches with the regex. :param regex: The regular expression string to use. Can also be a compiled regular expression pattern. :param dict error_messages: The error messages for various kinds of errors.
625990733317a56b869bf1d2
class EfergySensor(EfergyEntity, SensorEntity): <NEW_LINE> <INDENT> def __init__( self, api: Efergy, description: SensorEntityDescription, server_unique_id: str, period: str | None = None, currency: str | None = None, sid: str = "", ) -> None: <NEW_LINE> <INDENT> super().__init__(api, server_unique_id) <NEW_LINE> self.entity_description = description <NEW_LINE> if description.key == CONF_CURRENT_VALUES: <NEW_LINE> <INDENT> self._attr_name = f"{description.name}_{sid}" <NEW_LINE> <DEDENT> self._attr_unique_id = f"{server_unique_id}/{description.key}_{sid}" <NEW_LINE> if "cost" in description.key: <NEW_LINE> <INDENT> self._attr_native_unit_of_measurement = currency <NEW_LINE> <DEDENT> self.sid = sid <NEW_LINE> self.period = period <NEW_LINE> <DEDENT> async def async_update(self) -> None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._attr_native_value = await self.api.async_get_reading( self.entity_description.key, period=self.period, sid=self.sid ) <NEW_LINE> <DEDENT> except (exceptions.DataError, exceptions.ConnectError) as ex: <NEW_LINE> <INDENT> if self._attr_available: <NEW_LINE> <INDENT> self._attr_available = False <NEW_LINE> _LOGGER.error("Error getting data: %s", ex) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> if not self._attr_available: <NEW_LINE> <INDENT> self._attr_available = True <NEW_LINE> _LOGGER.info("Connection has resumed")
Implementation of an Efergy sensor.
625990734f88993c371f11ae
class magento_website(orm.Model): <NEW_LINE> <INDENT> _inherit = 'magento.website' <NEW_LINE> _columns = { 'import_pricelist_from_date':fields.datetime( 'Import Pricelist from date' ), } <NEW_LINE> def _get_store_view_ids(self, cr, uid, website, context=None): <NEW_LINE> <INDENT> stores = website.store_ids or [] <NEW_LINE> storeviews = [] <NEW_LINE> [storeviews.extend(x.storeview_ids) for x in stores] <NEW_LINE> magento_storeview_ids = [(i.id, i.magento_id) for i in storeviews if i.sync_pricelist] <NEW_LINE> return magento_storeview_ids <NEW_LINE> <DEDENT> def import_product_pricelist(self, cr, uid, ids, context=None): <NEW_LINE> <INDENT> if not hasattr(ids, '__iter__'): <NEW_LINE> <INDENT> ids = [ids] <NEW_LINE> <DEDENT> import_start_time = datetime.now() <NEW_LINE> storeview_obj = self.pool.get('magento.storeview') <NEW_LINE> for website in self.browse(cr, uid, ids, context=context): <NEW_LINE> <INDENT> backend_id = website.backend_id.id <NEW_LINE> if website.import_pricelist_from_date: <NEW_LINE> <INDENT> from_date = datetime.strptime( website.import_pricelist_from_date, DEFAULT_SERVER_DATETIME_FORMAT) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> from_date = None <NEW_LINE> <DEDENT> magento_storeview_ids = self._get_store_view_ids(cr,uid,website,context=context) <NEW_LINE> for storeview_id, magento_store_view_id in magento_storeview_ids: <NEW_LINE> <INDENT> storeview_obj.import_product_pricelist(cr, uid, storeview_id, context=context) <NEW_LINE> <DEDENT> <DEDENT> next_time = import_start_time - timedelta(seconds=IMPORT_DELTA_BUFFER) <NEW_LINE> next_time = next_time.strftime(DEFAULT_SERVER_DATETIME_FORMAT) <NEW_LINE> self.write(cr, uid, ids, {'import_pricelist_from_date': next_time}, context=context) <NEW_LINE> return True
Inherit magento website
62599073167d2b6e312b821d
class HttpNotFound(HttpException): <NEW_LINE> <INDENT> def __init__(self, uri): <NEW_LINE> <INDENT> HttpException.__init__(self, 404, "Not found\n\n'%s'" % uri)
Error "404: Not found" default exception handler.
6259907399cbb53fe6832805
class LicensesModifiedEvent(ObjectModifiedEvent): <NEW_LINE> <INDENT> implements(ILicensesModifiedEvent) <NEW_LINE> def __init__(self, product, user=None): <NEW_LINE> <INDENT> super(LicensesModifiedEvent, self).__init__( product, product, [], user)
See `ILicensesModifiedEvent`.
6259907376e4537e8c3f0e9a
class EditPostHandler(Handler): <NEW_LINE> <INDENT> def get(self, post_id): <NEW_LINE> <INDENT> post = PostData.get_by_id(int(re.escape(post_id))) <NEW_LINE> if self.user: <NEW_LINE> <INDENT> if post.creator == self.user: <NEW_LINE> <INDENT> self.render("edit.html", post=post, registered_user=self.user) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.write("cannot edit post") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.redirect("/blog/login") <NEW_LINE> <DEDENT> <DEDENT> def post(self, post_id): <NEW_LINE> <INDENT> if self.user: <NEW_LINE> <INDENT> action = self.request.get("action") <NEW_LINE> if action == "cancel": <NEW_LINE> <INDENT> self.redirect('/blog') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> edit_subject = self.request.get("subject") <NEW_LINE> edit_post = self.request.get("post") <NEW_LINE> postdata = PostData.get_by_id(int(re.escape(post_id))) <NEW_LINE> postdata.subject = edit_subject <NEW_LINE> postdata.post = edit_post <NEW_LINE> postdata.put() <NEW_LINE> self.redirect('/blog/%s' % (re.escape(post_id))) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.error(404)
Edit posts
62599073e5267d203ee6d04b
class AnyBICIdentifier (pyxb.binding.datatypes.string): <NEW_LINE> <INDENT> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'AnyBICIdentifier') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/home/toivotuo/Dropbox/Personal/Studies/UoH/tlbop/tapestry/tapestry/router/xsd/rocs.001.001.06.xsd', 7, 2) <NEW_LINE> _Documentation = None
An atomic simple type.
6259907397e22403b383c81f
class InvalidToken(Exception): <NEW_LINE> <INDENT> pass
Raised if token is token is invalid.
62599073a17c0f6771d5d839
class StandardDaterange(AbstractDaterange): <NEW_LINE> <INDENT> def __init__(self, params, parsing=True): <NEW_LINE> <INDENT> super(StandardDaterange, self).__init__(params, parsing=parsing) <NEW_LINE> self.other = params['other'] <NEW_LINE> if 'timeranges' in params: <NEW_LINE> <INDENT> self.timeranges = [Timerange(params=t) for t in params['timeranges']] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.timeranges = [] <NEW_LINE> for timeinterval in params['other'].split(','): <NEW_LINE> <INDENT> self.timeranges.append(Timerange(timeinterval.strip())) <NEW_LINE> <DEDENT> <DEDENT> self.day = params['day'] <NEW_LINE> <DEDENT> def serialize(self, no_json=True, printing=False): <NEW_LINE> <INDENT> return { 'day': self.day, 'other': self.other, 'timeranges': [t.serialize() for t in self.timeranges] } <NEW_LINE> <DEDENT> def is_correct(self): <NEW_LINE> <INDENT> valid = self.day in Daterange.weekdays <NEW_LINE> if not valid: <NEW_LINE> <INDENT> logger.error("Error: %s is not a valid day", self.day) <NEW_LINE> <DEDENT> valid &= super(StandardDaterange, self).is_correct() <NEW_LINE> return valid <NEW_LINE> <DEDENT> def get_start_and_end_time(self, ref=None): <NEW_LINE> <INDENT> now = time.localtime(ref) <NEW_LINE> self.syear = now.tm_year <NEW_LINE> self.month = now.tm_mon <NEW_LINE> self.wday = now.tm_wday <NEW_LINE> day_id = Daterange.get_weekday_id(self.day) <NEW_LINE> today_morning = get_start_of_day(now.tm_year, now.tm_mon, now.tm_mday) <NEW_LINE> tonight = get_end_of_day(now.tm_year, now.tm_mon, now.tm_mday) <NEW_LINE> day_diff = (day_id - now.tm_wday) % 7 <NEW_LINE> morning = datetime.fromtimestamp(today_morning) + timedelta(days=day_diff) <NEW_LINE> night = datetime.fromtimestamp(tonight) + timedelta(days=day_diff) <NEW_LINE> return (int(morning.strftime("%s")), int(night.strftime("%s")))
StandardDaterange is for standard entry (weekday - weekday)
6259907321bff66bcd724584
class ProgramEntryPoint(EntryPointConstruct): <NEW_LINE> <INDENT> def type(self): <NEW_LINE> <INDENT> return 'program'
Represents a Gobstones program block
625990737d43ff24874280a1
class QueryParameterBase(QueryParameter): <NEW_LINE> <INDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return self._widget_item_name <NEW_LINE> <DEDENT> def get_modified_objects( self, unused_my_objects: types.MyObjectSet ) -> types.MyObjectSet: <NEW_LINE> <INDENT> new_objects = self._get_widget_item_objects() <NEW_LINE> return new_objects
Base parameter. Can only be added to a QueryParameters in first position. A QueryParameterBase doesn't modify an existing set of objects but instead serves as the base, the first parameter.
62599073ad47b63b2c5a916b
class Batch(object): <NEW_LINE> <INDENT> def __init__(self, record_def, records, check_datatype=True): <NEW_LINE> <INDENT> if check_datatype: <NEW_LINE> <INDENT> map(lambda r: Record._chk_type(record_def, r), records) <NEW_LINE> <DEDENT> self._rdef = record_def <NEW_LINE> self._records = records <NEW_LINE> self._records_iter = 0 <NEW_LINE> <DEDENT> def record_def(self): <NEW_LINE> <INDENT> return self._rdef <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> if self._records_iter >= len(self._records): <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> self._records_iter += 1 <NEW_LINE> return self._records[self._records_iter - 1] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.formatted_str('json') <NEW_LINE> <DEDENT> def formatted_str(self, format): <NEW_LINE> <INDENT> assert(format in ('json', 'csv')) <NEW_LINE> ret_str_list = [] <NEW_LINE> for rec in self._records: <NEW_LINE> <INDENT> if format == 'json': <NEW_LINE> <INDENT> ret_str_list.append('{') <NEW_LINE> for i in xrange(len(rec)): <NEW_LINE> <INDENT> colname, colval = self._rdef[i].name, rec[i] <NEW_LINE> ret_str_list.append('"%s":"%s"' % (colname, str(colval).replace('"', r'\"'))) <NEW_LINE> ret_str_list.append(',') <NEW_LINE> <DEDENT> ret_str_list.pop() <NEW_LINE> ret_str_list.append('}%s' % (os.linesep)) <NEW_LINE> <DEDENT> elif format == 'csv': <NEW_LINE> <INDENT> for i in xrange(len(rec)): <NEW_LINE> <INDENT> colval = rec[i] <NEW_LINE> ret_str_list.append('"%s"' % (str(colval).replace('"', r'\"'))) <NEW_LINE> ret_str_list.append(',') <NEW_LINE> <DEDENT> ret_str_list.pop() <NEW_LINE> ret_str_list.append('%s' % (os.linesep)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert(False) <NEW_LINE> <DEDENT> <DEDENT> return ''.join(ret_str_list) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if len(self._records) != len(other._records): <NEW_LINE> <INDENT> return False <NEW_LINE> 
<DEDENT> for i in xrange(len(self._records)): <NEW_LINE> <INDENT> if self._records[i] != other._records[i]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._records)
Set of records
625990737047854f46340cd5
class LedgerIdentityInformation(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'ledger_id': {'readonly': True}, 'ledger_tls_certificate': {'required': True}, } <NEW_LINE> _attribute_map = { 'ledger_id': {'key': 'ledgerId', 'type': 'str'}, 'ledger_tls_certificate': {'key': 'ledgerTlsCertificate', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(LedgerIdentityInformation, self).__init__(**kwargs) <NEW_LINE> self.ledger_id = None <NEW_LINE> self.ledger_tls_certificate = kwargs['ledger_tls_certificate']
Contains the information about a Confidential Ledger. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar ledger_id: Id for the ledger. :vartype ledger_id: str :param ledger_tls_certificate: Required. PEM-encoded certificate used for TLS by the Confidential Ledger. :type ledger_tls_certificate: str
625990737b180e01f3e49cf3
class AssTweetUrl(TableMixin, Base): <NEW_LINE> <INDENT> tweet_id = Column(Integer, ForeignKey( 'tweet.id', ondelete='CASCADE', onupdate='CASCADE')) <NEW_LINE> url_id = Column(Integer, ForeignKey( 'url.id', ondelete='CASCADE', onupdate='CASCADE')) <NEW_LINE> __table_args__ = (UniqueConstraint( 'tweet_id', 'url_id', name='tweet_url_uq'), Index( 'url_tweet_idx', 'url_id', 'tweet_id'))
Association table to connect table `tweet` and `url`.
62599073aad79263cf4300d4
class EndpointTestCase(AsyncHTTPTestCase): <NEW_LINE> <INDENT> def get_app(self): <NEW_LINE> <INDENT> self.app = app = make_app(debug=False) <NEW_LINE> app.db = MagicMock() <NEW_LINE> return app <NEW_LINE> <DEDENT> def _get_secure_cookie(self, cookie_name, cookie_value): <NEW_LINE> <INDENT> cookie_name, cookie_value = 'auth', '1' <NEW_LINE> secure_cookie = create_signed_value( self.app.settings["cookie_secret"], cookie_name, cookie_value) <NEW_LINE> return 'auth="' + str(secure_cookie)[2:-1] + '"'
Endpoint TestCase a base test case used for testing endpoints
62599073e5267d203ee6d04c
class Together(Builtin): <NEW_LINE> <INDENT> attributes = ['Listable'] <NEW_LINE> def apply(self, expr, evaluation): <NEW_LINE> <INDENT> expr_sympy = expr.to_sympy() <NEW_LINE> result = sympy.together(expr_sympy) <NEW_LINE> result = from_sympy(result) <NEW_LINE> result = cancel(result) <NEW_LINE> return result
<dl> <dt>'Together[$expr$]' <dd>writes sums of fractions in $expr$ together. </dl> >> Together[a / c + b / c] = (a + b) / c 'Together' operates on lists: >> Together[{x / (y+1) + x / (y+1)^2}] = {x (2 + y) / (1 + y) ^ 2} But it does not touch other functions: >> Together[f[a / c + b / c]] = f[a / c + b / c] #> f[x]/x+f[x]/x^2//Together = (1 + x) f[x] / x ^ 2
625990734428ac0f6e659e4f
class MetadataNotFound(Exception): <NEW_LINE> <INDENT> user_id = None <NEW_LINE> data_key = None
Particualr user metadata entry was not found.
625990731f5feb6acb164511
class Database: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.url = 'https://bookstore-5217.restdb.io' <NEW_LINE> self.collection = '/rest/books' <NEW_LINE> self.headers = { 'content-type': 'application/json', 'x-apikey': '5e5f9cf028222370f14d4ece', 'cache-control': 'no-cache', } <NEW_LINE> <DEDENT> def add(self, values): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> response = requests.request( 'POST', self.url + self.collection, data=dumps(values), headers=self.headers ) <NEW_LINE> return '_id' in response.text <NEW_LINE> <DEDENT> except requests.exceptions.ConnectionError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def search(self, parameters): <NEW_LINE> <INDENT> query = f'?q={dumps(parameters)}' <NEW_LINE> try: <NEW_LINE> <INDENT> response = requests.request( 'GET', self.url + self.collection + query, headers=self.headers ) <NEW_LINE> return loads(response.text) <NEW_LINE> <DEDENT> except requests.exceptions.ConnectionError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def delete(self, record_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> response = requests.request( 'DELETE', self.url + self.collection + '/' + record_id, headers=self.headers ) <NEW_LINE> return record_id in response.text <NEW_LINE> <DEDENT> except requests.exceptions.ConnectionError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def update(self, record_id, values): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> response = requests.request( 'PATCH', self.url + self.collection + '/' + record_id, data=dumps(values), headers=self.headers ) <NEW_LINE> return record_id in response.text <NEW_LINE> <DEDENT> except requests.exceptions.ConnectionError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def upload_image(self, path): <NEW_LINE> <INDENT> headers = self.headers.copy() <NEW_LINE> headers.pop('content-type', None) <NEW_LINE> try: <NEW_LINE> <INDENT> response = requests.request( 'POST', self.url + '/media', 
files={'image': open(path, 'rb')}, headers=headers ) <NEW_LINE> return loads(response.text)["ids"][0] <NEW_LINE> <DEDENT> except (TypeError, KeyError, requests.exceptions.ConnectionError): <NEW_LINE> <INDENT> return None
A class containing methods needed for communication with database. ... Attributes ---------- url : str database address collection : str name of collection in database headers : dict dictionary containing information send with request to database
62599073b7558d5895464bc2
class Command(ProjectCommand): <NEW_LINE> <INDENT> help = "Create all databases for a new project." <NEW_LINE> option_list = ProjectCommand.option_list + ( make_option("--perftest_host", action="store", dest="perftest_host", default=None, help="The host name for the perftest database"), make_option("--objectstore_host", action="store", dest="objectstore_host", default=None, help="The host name for the objectstore database"), make_option("--perftest_type", action="store", dest="perftest_type", default=None, help="The database type (e.g. 'MySQL-InnoDB') " "for the perftest database"), make_option("--objectstore_type", action="store", dest="objectstore_type", default=None, help="The database type (e.g. 'MySQL-Aria') " "for the objectstore database"), make_option("--cron_batch", action="store", dest="cron_batch", choices=CRON_BATCH_NAMES, help=("Add this new project to this cron_batch. " "This value indicates the size of the project and may determine " "how much time between intervals should be set. Larger " "projects will likely have a longer time interval between " "execution as cron jobs." "Choices are: {0}. Default to None." ).format(", ".join(CRON_BATCH_NAMES))), ) <NEW_LINE> def handle_project(self, project, **options): <NEW_LINE> <INDENT> cron_batch = options.get("cron_batch") <NEW_LINE> hosts = dict( perftest=options.get("perftest_host"), objectstore=options.get("objectstore_host"), ) <NEW_LINE> types = dict( perftest=options.get("perftest_type"), objectstore=options.get("objectstore_type"), ) <NEW_LINE> dm = PerformanceTestModel.create( project, hosts=hosts, types=types, cron_batch=cron_batch, ) <NEW_LINE> self.stdout.write("Perftest project created: {0}\n".format(project)) <NEW_LINE> dm.disconnect()
Management command to create all databases for a new project. This extends ProjectCommandBase rather than ProjectBatchCommandBase because the latter handles not just the cron_batch param, but also looping. This mgmt command is not about looping, it's about a single project, and about adding that project to a single cron_batch.
625990735fc7496912d48ef8
class log(Elementwise): <NEW_LINE> <INDENT> def __init__(self, x): <NEW_LINE> <INDENT> super(log, self).__init__(x) <NEW_LINE> <DEDENT> @Elementwise.numpy_numeric <NEW_LINE> def numeric(self, values): <NEW_LINE> <INDENT> return np.log(values[0]) <NEW_LINE> <DEDENT> def sign_from_args(self): <NEW_LINE> <INDENT> return (False, False) <NEW_LINE> <DEDENT> def is_atom_convex(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def is_atom_concave(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def is_incr(self, idx): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def is_decr(self, idx): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def _grad(self, values): <NEW_LINE> <INDENT> rows = self.args[0].size[0]*self.args[0].size[1] <NEW_LINE> cols = self.size[0]*self.size[1] <NEW_LINE> if np.min(values[0]) <= 0: <NEW_LINE> <INDENT> return [None] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> grad_vals = 1.0/values[0] <NEW_LINE> return [log.elemwise_grad_to_diag(grad_vals, rows, cols)] <NEW_LINE> <DEDENT> <DEDENT> def _domain(self): <NEW_LINE> <INDENT> return [self.args[0] >= 0] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def graph_implementation(arg_objs, size, data=None): <NEW_LINE> <INDENT> t = lu.create_var(size) <NEW_LINE> x = arg_objs[0] <NEW_LINE> ones = lu.create_const(np.mat(np.ones(size)), size) <NEW_LINE> return (t, [ExpCone(t, ones, x)])
Elementwise :math:`\log x`.
6259907323849d37ff8529d5
class ClaimItem(backboneelement.BackboneElement): <NEW_LINE> <INDENT> resource_name = "ClaimItem" <NEW_LINE> def __init__(self, jsondict=None): <NEW_LINE> <INDENT> self.bodySite = None <NEW_LINE> self.detail = None <NEW_LINE> self.diagnosisLinkId = None <NEW_LINE> self.factor = None <NEW_LINE> self.modifier = None <NEW_LINE> self.net = None <NEW_LINE> self.points = None <NEW_LINE> self.prosthesis = None <NEW_LINE> self.provider = None <NEW_LINE> self.quantity = None <NEW_LINE> self.sequence = None <NEW_LINE> self.service = None <NEW_LINE> self.serviceDate = None <NEW_LINE> self.subSite = None <NEW_LINE> self.type = None <NEW_LINE> self.udi = None <NEW_LINE> self.unitPrice = None <NEW_LINE> super(ClaimItem, self).__init__(jsondict) <NEW_LINE> <DEDENT> def elementProperties(self): <NEW_LINE> <INDENT> js = super(ClaimItem, self).elementProperties() <NEW_LINE> js.extend([ ("bodySite", "bodySite", coding.Coding, False, None, False), ("detail", "detail", ClaimItemDetail, True, None, False), ("diagnosisLinkId", "diagnosisLinkId", int, True, None, False), ("factor", "factor", float, False, None, False), ("modifier", "modifier", coding.Coding, True, None, False), ("net", "net", quantity.Quantity, False, None, False), ("points", "points", float, False, None, False), ("prosthesis", "prosthesis", ClaimItemProsthesis, False, None, False), ("provider", "provider", fhirreference.FHIRReference, False, None, False), ("quantity", "quantity", quantity.Quantity, False, None, False), ("sequence", "sequence", int, False, None, True), ("service", "service", coding.Coding, False, None, True), ("serviceDate", "serviceDate", fhirdate.FHIRDate, False, None, False), ("subSite", "subSite", coding.Coding, True, None, False), ("type", "type", coding.Coding, False, None, True), ("udi", "udi", coding.Coding, False, None, False), ("unitPrice", "unitPrice", quantity.Quantity, False, None, False), ]) <NEW_LINE> return js
Goods and Services. First tier of goods and services.
625990737d43ff24874280a2
class ClassNode: <NEW_LINE> <INDENT> def __init__(self, name, parent_node, type_sort): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.parent_node = parent_node <NEW_LINE> self.children = [] <NEW_LINE> self.type_sort = type_sort <NEW_LINE> <DEDENT> def find(self, name): <NEW_LINE> <INDENT> if name == self.name: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> for c in self.children: <NEW_LINE> <INDENT> res = c.find(name) <NEW_LINE> if res: <NEW_LINE> <INDENT> return res <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.name) <NEW_LINE> <DEDENT> def all_children(self): <NEW_LINE> <INDENT> result = [self] <NEW_LINE> for c in self.children: <NEW_LINE> <INDENT> result += c.all_children() <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def all_parents(self): <NEW_LINE> <INDENT> result = [self] <NEW_LINE> if self.parent_node: <NEW_LINE> <INDENT> result += self.parent_node.all_parents() <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def get_literal(self): <NEW_LINE> <INDENT> if isinstance(self.name, str): <NEW_LINE> <INDENT> return getattr(self.type_sort, self.name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> constr = getattr(self.type_sort, self.name[0]) <NEW_LINE> args = self.quantified() <NEW_LINE> return constr(*args) <NEW_LINE> <DEDENT> <DEDENT> def get_literal_with_args(self, var): <NEW_LINE> <INDENT> if isinstance(self.name, str): <NEW_LINE> <INDENT> return getattr(self.type_sort, self.name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> constr = getattr(self.type_sort, self.name[0]) <NEW_LINE> args = [] <NEW_LINE> for arg in self.name[1:]: <NEW_LINE> <INDENT> args.append(getattr(self.type_sort, arg)(var)) <NEW_LINE> <DEDENT> return constr(*args) <NEW_LINE> <DEDENT> <DEDENT> def quantified(self): <NEW_LINE> <INDENT> res = [] <NEW_LINE> if isinstance(self.name, tuple): <NEW_LINE> <INDENT> for i, arg in enumerate(self.name[1:]): <NEW_LINE> <INDENT> sort = self.type_sort if not 
arg.endswith('defaults_args') else IntSort() <NEW_LINE> cur = Const("y" + str(i), sort) <NEW_LINE> res.append(cur) <NEW_LINE> <DEDENT> <DEDENT> return res
Class representing a node in a tree where each node represents a class, and has references to the base class and subclasses.
625990735166f23b2e244cf3
class Cell(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> return
This class will be a encapsulate future functionality for table cells
62599073adb09d7d5dc0be89
class Affine(snt.Linear): <NEW_LINE> <INDENT> def __init__(self, n_output, transfer=default_activation, initializers=None, transfer_based_init=False): <NEW_LINE> <INDENT> if initializers is None: <NEW_LINE> <INDENT> initializers = default_init <NEW_LINE> <DEDENT> if transfer_based_init and 'w' not in initializers: <NEW_LINE> <INDENT> initializers['w'] = activation_based_init(transfer) <NEW_LINE> <DEDENT> super(Affine, self).__init__(n_output, initializers) <NEW_LINE> self._transfer = transfer <NEW_LINE> <DEDENT> def _build(self, inpt): <NEW_LINE> <INDENT> output = super(Affine, self)._build(inpt) <NEW_LINE> if self._transfer is not None: <NEW_LINE> <INDENT> output = self._transfer(output) <NEW_LINE> <DEDENT> return output
Layer implementing an affine non-linear transformation
625990733346ee7daa3382ef
class Fellow(Person): <NEW_LINE> <INDENT> def __init__(self, first_name, second_name, wants_accommodation, office_name=None, livingspace_name=None, is_allocated=False): <NEW_LINE> <INDENT> super(Fellow, self).__init__(first_name, second_name, office_name, is_allocated) <NEW_LINE> self.wants_accommodation = wants_accommodation <NEW_LINE> self.livingspace_name = livingspace_name
Fellow class that inherits from Person but includes data for accommodation
625990735fdd1c0f98e5f89d
class Addr(common.Addr): <NEW_LINE> <INDENT> def __init__(self, host, port, ssl, default): <NEW_LINE> <INDENT> super(Addr, self).__init__((host, port)) <NEW_LINE> self.ssl = ssl <NEW_LINE> self.default = default <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def fromstring(cls, str_addr): <NEW_LINE> <INDENT> parts = str_addr.split(' ') <NEW_LINE> ssl = False <NEW_LINE> default = False <NEW_LINE> host = '' <NEW_LINE> port = '' <NEW_LINE> addr = parts.pop(0) <NEW_LINE> if addr.startswith('unix:'): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> tup = addr.partition(':') <NEW_LINE> if re.match(r'^\d+$', tup[0]): <NEW_LINE> <INDENT> host = '' <NEW_LINE> port = tup[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> host = tup[0] <NEW_LINE> port = tup[2] <NEW_LINE> <DEDENT> while len(parts) > 0: <NEW_LINE> <INDENT> nextpart = parts.pop() <NEW_LINE> if nextpart == 'ssl': <NEW_LINE> <INDENT> ssl = True <NEW_LINE> <DEDENT> elif nextpart == 'default_server': <NEW_LINE> <INDENT> default = True <NEW_LINE> <DEDENT> <DEDENT> return cls(host, port, ssl, default) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> parts = '' <NEW_LINE> if self.tup[0] and self.tup[1]: <NEW_LINE> <INDENT> parts = "%s:%s" % self.tup <NEW_LINE> <DEDENT> elif self.tup[0]: <NEW_LINE> <INDENT> parts = self.tup[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parts = self.tup[1] <NEW_LINE> <DEDENT> if self.default: <NEW_LINE> <INDENT> parts += ' default_server' <NEW_LINE> <DEDENT> if self.ssl: <NEW_LINE> <INDENT> parts += ' ssl' <NEW_LINE> <DEDENT> return parts <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, self.__class__): <NEW_LINE> <INDENT> return (self.tup == other.tup and self.ssl == other.ssl and self.default == other.default) <NEW_LINE> <DEDENT> return False
Represents an Nginx address, i.e. what comes after the 'listen' directive. According to the `documentation`_, this may be address[:port], port, or unix:path. The latter is ignored here. The default value if no directive is specified is \*:80 (superuser) or \*:8000 (otherwise). If no port is specified, the default is 80. If no address is specified, listen on all addresses. .. _documentation: http://nginx.org/en/docs/http/ngx_http_core_module.html#listen .. todo:: Old-style nginx configs define SSL vhosts in a separate block instead of using 'ssl' in the listen directive. :param str addr: addr part of vhost address, may be hostname, IPv4, IPv6, "", or "\*" :param str port: port number or "\*" or "" :param bool ssl: Whether the directive includes 'ssl' :param bool default: Whether the directive includes 'default_server'
6259907316aa5153ce401df8
class BaseDigest(object): <NEW_LINE> <INDENT> def __init__(self, wall_time, locator): <NEW_LINE> <INDENT> self._wall_time = wall_time <NEW_LINE> self._locator = locator <NEW_LINE> <DEDENT> @property <NEW_LINE> def wall_time(self): <NEW_LINE> <INDENT> return self._wall_time <NEW_LINE> <DEDENT> @property <NEW_LINE> def locator(self): <NEW_LINE> <INDENT> return self._locator <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> return {"wall_time": self.wall_time}
Base class for digest. Properties: wall_time: A timestamp for the digest as a `float` (unit: s). locator: A datum that allows tracng the digest to its original location. It can be either of the two: 1. Bytes offset from the beginning of the file as a single integer, for the case of all digests of the same kind coming from the same file. 2. A tuple of a file index and a byte offset. This applies to case in which the same type of debugger data may come from multple files, e.g., graph execution traces.
62599073435de62698e9d726
class ReceiverArray(object): <NEW_LINE> <INDENT> def __init__(self, quantities=None, last_value_only=False, filename=None): <NEW_LINE> <INDENT> self.quantities = quantities if quantities else ["pressure"] <NEW_LINE> self.last_value_only = last_value_only <NEW_LINE> self.filename = filename <NEW_LINE> <DEDENT> def shape(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def shape_with_pml(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def shape_without_pml(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def size_with_pml(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def size_without_pml(self): <NEW_LINE> <INDENT> pass
Receiver array.
62599073a8370b77170f1cea
class Params(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.params =[] <NEW_LINE> <DEDENT> def init_param(self, size, scale=.01, mode='n', name=''): <NEW_LINE> <INDENT> if mode == 'normal' or mode == 'n': <NEW_LINE> <INDENT> param = theano.shared(value=scale*self.numpy_rng.normal( size=size).astype(theano.config.floatX), name=name) <NEW_LINE> <DEDENT> elif mode == 'uniform' or mode == 'u': <NEW_LINE> <INDENT> if numpy.size(scale) == 1: <NEW_LINE> <INDENT> low = -scale <NEW_LINE> high = scale <NEW_LINE> <DEDENT> elif numpy.size(scale) == 2: <NEW_LINE> <INDENT> low = scale[0] <NEW_LINE> high = scale[1] <NEW_LINE> <DEDENT> param = theano.shared(value=self.numpy_rng.uniform(size=size, low=low, high=high).astype(theano.config.floatX), name=name) <NEW_LINE> <DEDENT> elif mode == 'log-uniform' or mode == 'lu': <NEW_LINE> <INDENT> param = theano.shared(value=numpy.exp(self.numpy_rng.uniform( low=scale[0], high=scale[1], size=size). astype(theano.config.floatX)), name=name) <NEW_LINE> <DEDENT> elif mode == 'repetitive' or mode == 'r': <NEW_LINE> <INDENT> param = theano.shared(value = scale*numpy.ones(size, dtype=theano.config.floatX), name=name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception('\''+str(mode)+'\'' + ' is not a valid mode. ') <NEW_LINE> <DEDENT> return param <NEW_LINE> <DEDENT> def set_params(self, new_params): <NEW_LINE> <INDENT> def inplace_set(x, new): <NEW_LINE> <INDENT> x[...] 
= new <NEW_LINE> return x <NEW_LINE> <DEDENT> params_counter = 0 <NEW_LINE> for p in self.params: <NEW_LINE> <INDENT> pshape = p.get_value().shape <NEW_LINE> pnum = numpy.prod(pshape) <NEW_LINE> p.set_value(inplace_set(p.get_value(borrow=True), new_params[params_counter:params_counter+pnum] .reshape(*pshape)), borrow=True) <NEW_LINE> params_counter += pnum <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def get_params(self): <NEW_LINE> <INDENT> return numpy.concatenate([p.get_value(borrow=False).flatten() for p in self.params]) <NEW_LINE> <DEDENT> def save(self, filename): <NEW_LINE> <INDENT> numpy.save(filename, self.get_params()) <NEW_LINE> <DEDENT> def load(self, filename): <NEW_LINE> <INDENT> self.set_params(numpy.load(filename))
Base class: Params
62599073097d151d1a2c2992
class _Node(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.host = None <NEW_LINE> self.port = None <NEW_LINE> self.iface= None
Object representing node details.
62599073796e427e53850098
class Messenger: <NEW_LINE> <INDENT> def __init__(self, domain): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.domain = domain <NEW_LINE> import logging <NEW_LINE> self.logger = logging.getLogger(__name__) <NEW_LINE> <DEDENT> def add_info_to_req(self, request, message): <NEW_LINE> <INDENT> messages.add_message(request, messages.INFO, message) <NEW_LINE> <DEDENT> def add_warn_to_req(self, request, message): <NEW_LINE> <INDENT> messages.add_message(request, messages.WARNING, message) <NEW_LINE> <DEDENT> def add_err_to_req(self, request, message): <NEW_LINE> <INDENT> messages.add_message(request, messages.ERROR, message) <NEW_LINE> self.logger.warning("Err message raised in domain {} with message: {}".format(self.domain, message))
Simpole wrapper for the django messages
62599073cc0a2c111447c760
class NoContextBranchListingView(BranchListingView): <NEW_LINE> <INDENT> field_names = ['lifecycle'] <NEW_LINE> no_sort_by = (BranchListingSort.DEFAULT, ) <NEW_LINE> no_branch_message = ( 'There are no branches that match the current status filter.') <NEW_LINE> extra_columns = ('author', 'product', 'date_created') <NEW_LINE> def _branches(self, lifecycle_status): <NEW_LINE> <INDENT> collection = self._getCollection() <NEW_LINE> if lifecycle_status is not None: <NEW_LINE> <INDENT> collection = collection.withLifecycleStatus(*lifecycle_status) <NEW_LINE> <DEDENT> collection = collection.visibleByUser(self.user) <NEW_LINE> return collection.getBranches(eager_load=False).order_by( self._branch_order)
A branch listing that has no associated product or person.
625990737047854f46340cd7
class Translator: <NEW_LINE> <INDENT> def __init__(self, document_markup): <NEW_LINE> <INDENT> self.document_markup = document_markup
The translator is the core algorithm of ScriptTex. It takes a document markup object and an input and produces whatever is defined in the markup.
625990738e7ae83300eea9b0
class HTTPApiKeyAuth(AuthBase): <NEW_LINE> <INDENT> def __init__(self, username, key): <NEW_LINE> <INDENT> self._username = username <NEW_LINE> self._key = key <NEW_LINE> <DEDENT> def __call__(self, req): <NEW_LINE> <INDENT> req.headers['Authorization'] = 'ApiKey {0}:{1}'.format(self._username, self._key) <NEW_LINE> return req
Use TastyPie's ApiKey authentication when communicating with the API.
6259907497e22403b383c823
class RegistrationFormNoFreeEmail(RegistrationForm): <NEW_LINE> <INDENT> bad_domains = ['aim.com', 'aol.com', 'email.com', 'gmail.com', 'googlemail.com', 'hotmail.com', 'hushmail.com', 'msn.com', 'mail.ru', 'mailinator.com', 'live.com'] <NEW_LINE> def clean_email(self): <NEW_LINE> <INDENT> email_domain = self.cleaned_data['email'].split('@')[1] <NEW_LINE> if email_domain in self.bad_domains: <NEW_LINE> <INDENT> raise forms.ValidationError(_(u'Registration using free email addresses is prohibited. Please supply a different email address.')) <NEW_LINE> <DEDENT> return self.cleaned_data['email'] <NEW_LINE> <DEDENT> def clean_tos(self): <NEW_LINE> <INDENT> if self.cleaned_data.get('tos', False): <NEW_LINE> <INDENT> return self.cleaned_data['tos'] <NEW_LINE> <DEDENT> raise forms.ValidationError(u'You must agree to the terms to register')
Subclass of ``RegistrationForm`` which disallows registration with email addresses from popular free webmail services; moderately useful for preventing automated spam registrations. To change the list of banned domains, subclass this form and override the attribute ``bad_domains``.
6259907471ff763f4b5e90c9
class EnhancedTextWidget(forms.MultiWidget): <NEW_LINE> <INDENT> def __init__(self, textareaattrs={'class': 'enhanced_text'}, selectattrs={'class' : 'enhanced_text_format',}, initial=None): <NEW_LINE> <INDENT> self.initial=initial <NEW_LINE> widgets = (forms.Textarea(attrs=textareaattrs), forms.Select(attrs=selectattrs, choices=CONTENT_FORMATS)) <NEW_LINE> super(EnhancedTextWidget, self).__init__(widgets, None) <NEW_LINE> <DEDENT> def decompress(self, value): <NEW_LINE> <INDENT> if isinstance(value, EnhancedText): <NEW_LINE> <INDENT> return value.to_tuple() <NEW_LINE> <DEDENT> elif value: <NEW_LINE> <INDENT> return EnhancedText(value).to_tuple() <NEW_LINE> <DEDENT> elif self.initial and isinstance(self.initial, EnhancedText): <NEW_LINE> <INDENT> return self.initial.to_tuple() <NEW_LINE> <DEDENT> elif self.initial and isinstance(self.initial, str): <NEW_LINE> <INDENT> return EnhancedText(self.initial).to_tuple() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return EnhancedText("", DEFAULT_FORMAT).to_tuple() <NEW_LINE> <DEDENT> <DEDENT> def render(self, name, value, attrs=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = value.to_string() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> value = "" <NEW_LINE> <DEDENT> output = super(EnhancedTextWidget, self).render(name, value, attrs) <NEW_LINE> return output <NEW_LINE> <DEDENT> def format_output(self, rendered_widgets): <NEW_LINE> <INDENT> return u''.join([rendered_widgets[0], u"<p/>", rendered_widgets[1], u'<label>', _('Words: '), u'<span class="wordcount">', _('0'), u'</span>', u'&nbsp;&nbsp;&nbsp;', _('Characters: '), u'<span class="charcount">', _('0'), u'</span>' u'</label>', ])
A multi-widget for the EnhancedTextFormField that renders a Textarea and a select widget for choosing the format of the Textarea.
6259907492d797404e3897eb
class XML2DF(): <NEW_LINE> <INDENT> def __init__(self, xml_path): <NEW_LINE> <INDENT> self.root = ET.parse(xml_path) <NEW_LINE> <DEDENT> def find_data(self): <NEW_LINE> <INDENT> income = self.root.find('income') <NEW_LINE> return income
Converts XML file to pandas dataframe. Adapted from http://austintaylor.io/lxml/python/pandas/xml/dataframe/2016/07/08/convert-xml-to-pandas-dataframe/.
62599074379a373c97d9a940