code | docstring | _id
---|---|---|
class artMediumProp(SchemaProperty): <NEW_LINE> <INDENT> _prop_schema = 'artMedium' <NEW_LINE> _expected_schema = None <NEW_LINE> _enum = False <NEW_LINE> _format_as = "TextField" | SchemaField for artMedium
Usage: Include in SchemaObject SchemaFields as your_django_field = artMediumProp()
schema.org description: The material used. (e.g. Oil, Watercolour, Acrylic, Linoprint, Marble, Cyanotype, Digital, Lithograph, DryPoint, Intaglio, Pastel, Woodcut, Pencil, Mixed Media, etc.) Supersedes material.
prop_schema returns just the property without url#
format_as is used by app templatetags based upon schema.org datatype | 62599045507cdc57c63a60d0 |
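A minimal sketch of the usage pattern described above, assuming `SchemaObject` and `artMediumProp` are importable from the surrounding app; the model name `Artwork` and the module path are illustrative only:

```python
# Hypothetical illustration only: attach the property to a SchemaObject subclass
# so it is rendered as a TextField, per _format_as above.
from myapp.schema import SchemaObject, artMediumProp  # assumed module path

class Artwork(SchemaObject):
    artMedium = artMediumProp()
```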
class Bounds: <NEW_LINE> <INDENT> def __init__(self, minx=None, maxx=None, miny=None, maxy=None, minz=None, maxz=None, value=None, channelIndex=None): <NEW_LINE> <INDENT> if minx is not None: <NEW_LINE> <INDENT> self.value = [minx, maxx, miny, maxy, minz, maxz] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.value = value <NEW_LINE> <DEDENT> self.channelIndex = channelIndex <NEW_LINE> <DEDENT> def setValue(self, value, channelData): <NEW_LINE> <INDENT> if self.channelIndex is not None: <NEW_LINE> <INDENT> channelData.setValue(self.channelIndex, value[0]) <NEW_LINE> channelData.setValue(self.channelIndex+1, value[1]) <NEW_LINE> channelData.setValue(self.channelIndex+2, value[2]) <NEW_LINE> channelData.setValue(self.channelIndex+3, value[3]) <NEW_LINE> channelData.setValue(self.channelIndex+4, value[4]) <NEW_LINE> channelData.setValue(self.channelIndex+5, value[5]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.value = value <NEW_LINE> <DEDENT> <DEDENT> def getValue(self, channelData): <NEW_LINE> <INDENT> if self.channelIndex is not None: <NEW_LINE> <INDENT> return channelData.getValues(self.channelIndex, 6) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.value <NEW_LINE> <DEDENT> <DEDENT> def writeXMLData(self, xmlBounds): <NEW_LINE> <INDENT> log.debug(' calling Bounds.writeXMLData') <NEW_LINE> if self.value is None and self.channelIndex is None: <NEW_LINE> <INDENT> raise ValueError('Bounds not set using static values or channelIndex') <NEW_LINE> <DEDENT> if self.value is not None and len(self.value) > 0: <NEW_LINE> <INDENT> xmlBounds.attrib['minx'] = str(float(self.value[0])) <NEW_LINE> xmlBounds.attrib['maxx'] = str(float(self.value[1])) <NEW_LINE> xmlBounds.attrib['miny'] = str(float(self.value[2])) <NEW_LINE> xmlBounds.attrib['maxy'] = str(float(self.value[3])) <NEW_LINE> xmlBounds.attrib['minz'] = str(float(self.value[4])) <NEW_LINE> xmlBounds.attrib['maxz'] = str(float(self.value[5])) <NEW_LINE> <DEDENT> if self.channelIndex is not None: <NEW_LINE> <INDENT> xmlBounds.attrib['channelIndex'] = str(self.channelIndex) <NEW_LINE> <DEDENT> <DEDENT> def readXMLData(self, xmlBounds): <NEW_LINE> <INDENT> log.debug(' calling Bounds.readXMLData') <NEW_LINE> minx = floatOrNone(xmlBounds.get('minx')) <NEW_LINE> maxx = floatOrNone(xmlBounds.get('maxx')) <NEW_LINE> miny = floatOrNone(xmlBounds.get('miny')) <NEW_LINE> maxy = floatOrNone(xmlBounds.get('maxy')) <NEW_LINE> minz = floatOrNone(xmlBounds.get('minz')) <NEW_LINE> maxz = floatOrNone(xmlBounds.get('maxz')) <NEW_LINE> if minx is not None: <NEW_LINE> <INDENT> self.value = [minx, maxx, miny, maxy, minz, maxz] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.value = None <NEW_LINE> <DEDENT> self.channelIndex = intOrNone(xmlBounds.get('channelIndex')) | Class to hold Bounding Box values. It also writes and reads the in-memory
XML representation of these values using ElementTree. | 62599045c432627299fa429c |
class MyPasswordChangeView(LoginRequiredMixin, PasswordChangeView, PageTitleMixin, ): <NEW_LINE> <INDENT> form_class = MyChangePasswordForm <NEW_LINE> template_name = 'pages/customer/password_change.html' <NEW_LINE> success_url = reverse_lazy("customer:account_change_password") <NEW_LINE> page_title = _("Modification du mot de passe") <NEW_LINE> page_subtitle = _("Changer le mot de passe de votre compte") <NEW_LINE> sidebar_active_link = 'profil' <NEW_LINE> sidebar_dropdown_show = 'c-show' <NEW_LINE> active_tab = 'account_change_password' | Django-allauth.
Password change | 625990453eb6a72ae038b996 |
class ComputedPath(object): <NEW_LINE> <INDENT> def __init__(self, host): <NEW_LINE> <INDENT> level = host.location.level <NEW_LINE> level.register_on_tile_change_callback(self.invalidate) <NEW_LINE> self.host = host <NEW_LINE> self.inner_map = level.inner_map <NEW_LINE> self.a_star = path.AStar(self.inner_map, 0.0) <NEW_LINE> self.path = [] <NEW_LINE> self.last_destination = None <NEW_LINE> <DEDENT> def invalidate(self): <NEW_LINE> <INDENT> self.path = [] <NEW_LINE> self.a_star = path.AStar(self.inner_map, 0.0) <NEW_LINE> if self.last_destination: <NEW_LINE> <INDENT> self.calculate(self.last_destination) <NEW_LINE> <DEDENT> <DEDENT> def calculate(self, destination_coordinates): <NEW_LINE> <INDENT> self.last_destination = destination_coordinates <NEW_LINE> origin = self.host.location.get_local_coords() <NEW_LINE> if isinstance(destination_coordinates, Point): <NEW_LINE> <INDENT> destination_coordinates = ( destination_coordinates.x, destination_coordinates.y) <NEW_LINE> <DEDENT> self.path = self.a_star.get_path(*origin, *destination_coordinates) <NEW_LINE> if not self.path: <NEW_LINE> <INDENT> self.path = self.find_close_walkable_point(origin, destination_coordinates) <NEW_LINE> <DEDENT> <DEDENT> def find_close_walkable_point(self, origin, target, recursion=0, sought_coords=None): <NEW_LINE> <INDENT> if sought_coords is None: <NEW_LINE> <INDENT> sought_coords = set() <NEW_LINE> <DEDENT> if recursion > 9: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> level = self.host.location.level <NEW_LINE> x1, y1 = target <NEW_LINE> neighbors = [ (x1 + x2, y1 + y2) for x2, y2 in move_direction_mapping.values() ] <NEW_LINE> if all(neighbor in sought_coords for neighbor in neighbors): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> walkable_neighbors = [ coord for coord in neighbors if coord not in sought_coords and level.is_coordinate_in_bounds(coord) and self.inner_map.walkable[coord] ] <NEW_LINE> if not walkable_neighbors: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> walkable_neighbors.sort(key=lambda coord: manhattan_distance_to(origin, coord)) <NEW_LINE> path = None <NEW_LINE> for neighbor in walkable_neighbors: <NEW_LINE> <INDENT> path = self.a_star.get_path(*origin, *neighbor) <NEW_LINE> if path: <NEW_LINE> <INDENT> self.last_destination = neighbor <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if path: <NEW_LINE> <INDENT> return path <NEW_LINE> <DEDENT> sought_coords.update(neighbors) <NEW_LINE> return self.find_close_walkable_point( origin, walkable_neighbors[0], recursion + 1, sought_coords) <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> if self.path: <NEW_LINE> <INDENT> origin = self.host.location.get_local_coords() <NEW_LINE> next_coordinate = self.path.pop(0) <NEW_LINE> x1, y1 = origin <NEW_LINE> x2, y2 = next_coordinate <NEW_LINE> return x2 - x1, y2 - y1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.last_destination = None | This object hooks on to the level and will invalidate itself
whenever a tile is changed in the map. | 6259904530dc7b76659a0b66 |
class btkAcquisitionFileReader(_object): <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> __setattr__ = lambda self, name, value: _swig_setattr(self, btkAcquisitionFileReader, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> __getattr__ = lambda self, name: _swig_getattr(self, btkAcquisitionFileReader, name) <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> this = _btk.new_btkAcquisitionFileReader() <NEW_LINE> try: self.this.append(this) <NEW_LINE> except: self.this = this <NEW_LINE> <DEDENT> def __deref__(self): <NEW_LINE> <INDENT> return _btk.btkAcquisitionFileReader___deref__(self) <NEW_LINE> <DEDENT> def GetAcquisitionIO(self): <NEW_LINE> <INDENT> return _btk.btkAcquisitionFileReader_GetAcquisitionIO(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _btk.delete_btkAcquisitionFileReader <NEW_LINE> __del__ = lambda self : None; <NEW_LINE> def GetOutput(self): <NEW_LINE> <INDENT> return _btk.btkAcquisitionFileReader_GetOutput(self) <NEW_LINE> <DEDENT> def GetDisableFilenameExceptionState(self): <NEW_LINE> <INDENT> return _btk.btkAcquisitionFileReader_GetDisableFilenameExceptionState(self) <NEW_LINE> <DEDENT> def SetDisableFilenameExceptionState(self, *args): <NEW_LINE> <INDENT> return _btk.btkAcquisitionFileReader_SetDisableFilenameExceptionState(self, *args) <NEW_LINE> <DEDENT> def GetFilename(self): <NEW_LINE> <INDENT> return _btk.btkAcquisitionFileReader_GetFilename(self) <NEW_LINE> <DEDENT> def SetFilename(self, *args): <NEW_LINE> <INDENT> return _btk.btkAcquisitionFileReader_SetFilename(self, *args) <NEW_LINE> <DEDENT> def SetAcquisitionIO(self, *args): <NEW_LINE> <INDENT> return _btk.btkAcquisitionFileReader_SetAcquisitionIO(self, *args) <NEW_LINE> <DEDENT> def GetInputNumber(self): <NEW_LINE> <INDENT> return _btk.btkAcquisitionFileReader_GetInputNumber(self) <NEW_LINE> <DEDENT> def GetValidInputNumber(self): <NEW_LINE> <INDENT> return _btk.btkAcquisitionFileReader_GetValidInputNumber(self) <NEW_LINE> <DEDENT> def GetOutputNumber(self): <NEW_LINE> <INDENT> return _btk.btkAcquisitionFileReader_GetOutputNumber(self) <NEW_LINE> <DEDENT> def ResetState(self): <NEW_LINE> <INDENT> return _btk.btkAcquisitionFileReader_ResetState(self) <NEW_LINE> <DEDENT> def Update(self): <NEW_LINE> <INDENT> return _btk.btkAcquisitionFileReader_Update(self) <NEW_LINE> <DEDENT> def GetTimestamp(self): <NEW_LINE> <INDENT> return _btk.btkAcquisitionFileReader_GetTimestamp(self) | Reader for files which contain acquisition data (C3D, TRC, ...).
This class uses a btkAcquisitionFileIO object to read the data from the file corresponding to the given filename.
You have two ways to set this object.
First, you can set it manually by using the method btkAcquisitionFileReader::SetAcquisitionIO().
Second, the correct object can be detected automatically by checking whether each registered btkAcquisitionFileIO can read the file.
The manual setting should only be used if you want to read a file with a specific file format. To go back to the automatic mode
from the manual mode, simply call btkAcquisitionFileReader::SetAcquisitionIO() without any argument. | 62599045d53ae8145f919793 |
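A short usage sketch in automatic mode, built only from the methods shown above; the module name `btk` and the file name are assumptions:

```python
# Sketch: read an acquisition file and let the reader pick a registered IO handler.
import btk  # assumed: the SWIG-generated BTK Python bindings

reader = btk.btkAcquisitionFileReader()
reader.SetFilename("trial01.c3d")  # hypothetical file name
reader.Update()                    # performs the read
acq = reader.GetOutput()           # the parsed acquisition object
```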
class CreateListRetrieveViewSet( CreateModelMixin, ListModelMixin, RetrieveModelMixin, GenericViewSet ): <NEW_LINE> <INDENT> pass | A viewset that provides "retrieve", "create", and "list" actions.
To use it, override the class and set the ".queryset" and ".serializer_class"
attributes" | 6259904515baa723494632c6 |
class SklearnClusteringTree(DecisionTreeRegressor, ClusteringTree): <NEW_LINE> <INDENT> def transform(self, X): <NEW_LINE> <INDENT> return self.tree_.apply(X) <NEW_LINE> <DEDENT> def get_leaf_values(self): <NEW_LINE> <INDENT> return self.tree_.value.flatten() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def prepare_data(X): <NEW_LINE> <INDENT> return numpy.array(X, dtype=DTYPE) | RegressionTree from scikit-learn, which provides transforming interface. | 6259904523e79379d538d833 |
class MsgPortModeInfoRequest(DownstreamMsg): <NEW_LINE> <INDENT> TYPE = 0x22 <NEW_LINE> INFO_NAME = 0x00 <NEW_LINE> INFO_RAW_RANGE = 0x01 <NEW_LINE> INFO_PCT_RANGE = 0x02 <NEW_LINE> INFO_SI_RANGE = 0x03 <NEW_LINE> INFO_UNITS = 0x04 <NEW_LINE> INFO_MAPPING = 0x05 <NEW_LINE> INFO_MOTOR_BIAS = 0x07 <NEW_LINE> INFO_CAPABILITY_BITS = 0x08 <NEW_LINE> INFO_VALUE_FORMAT = 0x80 <NEW_LINE> INFO_TYPES = { INFO_NAME: "Name", INFO_RAW_RANGE: "Raw range", INFO_PCT_RANGE: "Percent range", INFO_SI_RANGE: "SI value range", INFO_UNITS: "Units", INFO_MAPPING: "Mapping", INFO_MOTOR_BIAS: "Motor bias", INFO_CAPABILITY_BITS: "Capabilities", INFO_VALUE_FORMAT: "Value encoding", } <NEW_LINE> def __init__(self, port, mode, info_type): <NEW_LINE> <INDENT> super(MsgPortModeInfoRequest, self).__init__() <NEW_LINE> self.port = port <NEW_LINE> self.mode = mode <NEW_LINE> self.info_type = info_type <NEW_LINE> self.payload = pack("<B", port) + pack("<B", mode) + pack("<B", info_type) <NEW_LINE> self.needs_reply = True <NEW_LINE> <DEDENT> def is_reply(self, msg): <NEW_LINE> <INDENT> if not isinstance(msg, MsgPortModeInfo): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if msg.port != self.port or msg.mode != self.mode or msg.info_type != self.info_type: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True | https://lego.github.io/lego-ble-wireless-protocol-docs/index.html#port-mode-information-request | 6259904594891a1f408ba090 |
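A small sketch showing how such a request message is constructed from the constants defined above; the port and mode values are arbitrary:

```python
# Sketch: ask port 0, mode 0 for its human-readable name.
msg = MsgPortModeInfoRequest(port=0, mode=0,
                             info_type=MsgPortModeInfoRequest.INFO_NAME)
# msg.payload now holds the packed port/mode/info_type bytes; needs_reply is set,
# and msg.is_reply() matches the corresponding MsgPortModeInfo answer.
```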
class XMLDocument(object): <NEW_LINE> <INDENT> def __init__(self, declaration, root): <NEW_LINE> <INDENT> self.declaration = declaration <NEW_LINE> self.root = root <NEW_LINE> self.root.document = self <NEW_LINE> self.root._setDepth() | Represents an XML document.
Parameters
----------
declaration : XMLDeclaration
An XML declaration object
root : XMLElement
An XML element that is the root element of this document
Attributes
----------
declaration : XMLDeclaration
This document's XML declaration
root : XMLElement
This document's root element | 6259904507d97122c4217fd6 |
class SendSmsResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Data = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("Data") is not None: <NEW_LINE> <INDENT> self.Data = [] <NEW_LINE> for item in params.get("Data"): <NEW_LINE> <INDENT> obj = SendSmsPaasDataStruct() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.Data.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.RequestId = params.get("RequestId") | SendSms response structure
| 62599045e76e3b2f99fd9d41 |
class ServiceException(Exception): <NEW_LINE> <INDENT> pass | Custom exception type for service calls | 6259904529b78933be26aa5d |
class RequestsHosts: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def collect(): <NEW_LINE> <INDENT> g_hosts = GaugeMetricFamily("foreman_exporter_hosts", 'foreman host status', labels=['hostname', 'domain', 'configuration', 'configuration_label', 'puppet_status', 'global_label', 'puppet_environment', 'operatingsystem', 'foreman_hostname']) <NEW_LINE> if FOREMAN_HOSTS_BODY is not None: <NEW_LINE> <INDENT> for each in FOREMAN_HOSTS_BODY['results']: <NEW_LINE> <INDENT> name = str(each['name']) <NEW_LINE> domain = str(each['domain_name']) <NEW_LINE> status = (each['global_status']) <NEW_LINE> global_label = str(each['global_status_label']) <NEW_LINE> configuration_status = str(each['configuration_status']) <NEW_LINE> configuration_status_label = str(each['configuration_status_label']) <NEW_LINE> puppet_status = str(each['puppet_status']) <NEW_LINE> environment_name = str(each['environment_name']) <NEW_LINE> operatingsystem = str(each['operatingsystem_name']) <NEW_LINE> if ( name is None or domain is None or status is None or configuration_status is None or configuration_status_label is None ): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if global_label is None: <NEW_LINE> <INDENT> global_label = '199' <NEW_LINE> <DEDENT> if puppet_status is None: <NEW_LINE> <INDENT> puppet_status = '199' <NEW_LINE> <DEDENT> if environment_name is None: <NEW_LINE> <INDENT> environment_name = 'unknown' <NEW_LINE> <DEDENT> if operatingsystem is None: <NEW_LINE> <INDENT> operatingsystem = 'unknown' <NEW_LINE> <DEDENT> g_hosts.add_metric([name, domain, configuration_status, configuration_status_label, puppet_status, global_label, environment_name, operatingsystem, REQUEST_HOSTNAME], status) <NEW_LINE> <DEDENT> yield g_hosts <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if FOREMAN_HOSTS_RESPONSE.elapsed.seconds is not None: <NEW_LINE> <INDENT> g_hosts_time = GaugeMetricFamily("foreman_exporter_hosts_request_time_seconds", 'foreman host request time seconds', labels=['foreman_hostname']) <NEW_LINE> g_hosts_time.add_metric([REQUEST_HOSTNAME], int(FOREMAN_HOSTS_RESPONSE.elapsed.seconds)) <NEW_LINE> yield g_hosts_time <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if FOREMAN_HOSTS_BODY is not None: <NEW_LINE> <INDENT> g_hosts_count = GaugeMetricFamily("foreman_exporter_hosts_count", 'foreman host count', labels=['foreman_hostname']) <NEW_LINE> g_hosts_count.add_metric([REQUEST_HOSTNAME], int(FOREMAN_HOSTS_BODY['total'])) <NEW_LINE> yield g_hosts_count <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass | Register Prometheus Metrics for Foremant's hosts | 625990451f5feb6acb163f2b |
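A hedged sketch of how a collector like this is typically exposed with prometheus_client; the port is arbitrary, and the globals the class reads (FOREMAN_HOSTS_BODY, FOREMAN_HOSTS_RESPONSE, REQUEST_HOSTNAME) are assumed to be populated elsewhere:

```python
# Sketch: register the collector and serve /metrics over HTTP.
from prometheus_client import start_http_server
from prometheus_client.core import REGISTRY

REGISTRY.register(RequestsHosts())  # collect() runs on every scrape
start_http_server(9610)             # e.g. http://localhost:9610/metrics
```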
class StartGameServerSessionPlacementRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.PlacementId = None <NEW_LINE> self.GameServerSessionQueueName = None <NEW_LINE> self.MaximumPlayerSessionCount = None <NEW_LINE> self.DesiredPlayerSessions = None <NEW_LINE> self.GameProperties = None <NEW_LINE> self.GameServerSessionData = None <NEW_LINE> self.GameServerSessionName = None <NEW_LINE> self.PlayerLatencies = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.PlacementId = params.get("PlacementId") <NEW_LINE> self.GameServerSessionQueueName = params.get("GameServerSessionQueueName") <NEW_LINE> self.MaximumPlayerSessionCount = params.get("MaximumPlayerSessionCount") <NEW_LINE> if params.get("DesiredPlayerSessions") is not None: <NEW_LINE> <INDENT> self.DesiredPlayerSessions = [] <NEW_LINE> for item in params.get("DesiredPlayerSessions"): <NEW_LINE> <INDENT> obj = DesiredPlayerSession() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.DesiredPlayerSessions.append(obj) <NEW_LINE> <DEDENT> <DEDENT> if params.get("GameProperties") is not None: <NEW_LINE> <INDENT> self.GameProperties = [] <NEW_LINE> for item in params.get("GameProperties"): <NEW_LINE> <INDENT> obj = GameProperty() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.GameProperties.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.GameServerSessionData = params.get("GameServerSessionData") <NEW_LINE> self.GameServerSessionName = params.get("GameServerSessionName") <NEW_LINE> if params.get("PlayerLatencies") is not None: <NEW_LINE> <INDENT> self.PlayerLatencies = [] <NEW_LINE> for item in params.get("PlayerLatencies"): <NEW_LINE> <INDENT> obj = PlayerLatency() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.PlayerLatencies.append(obj) | StartGameServerSessionPlacement request structure
| 62599045596a897236128f4a |
class PsplupsParser(ScupsParser): <NEW_LINE> <INDENT> filetype = 'psplups' <NEW_LINE> dtypes = [int, int, int, float, float, float, 'object'] <NEW_LINE> units = [ None, None, None, u.dimensionless_unscaled, u.Ry, u.dimensionless_unscaled, u.dimensionless_unscaled ] <NEW_LINE> headings = ['lower_level', 'upper_level', 'bt_type', 'gf', 'delta_energy', 'bt_c', 'bt_rate'] <NEW_LINE> descriptions = [ 'lower level index', 'upper level index', 'Burgess-Tully scaling type', 'oscillator strength', 'delta energy', 'Burgess-Tully scaling parameter', 'Burgess-Tully scaled collision rate' ] <NEW_LINE> def preprocessor(self, table, line, index): <NEW_LINE> <INDENT> tmp = line.strip().split() <NEW_LINE> n_spline = 5 if int(tmp[2]) == 6 else 9 <NEW_LINE> fformat = fortranformat.FortranRecordReader('(3I3,{}E10.3)'.format(3+n_spline)) <NEW_LINE> line = fformat.read(line) <NEW_LINE> row = line[:6] + [np.array(line[6:])] <NEW_LINE> table.append(row) | Spline fits to scaled collision rates for protons. These files are discussed in
section 2.2 of [young]_ and the details of how these quantities are scaled are given in [burgess]_.
Notes
-----
* Unlike the electron "scups" and "splups" files which contain the collision strengths
(upsilons), these files contain the scaled *rates*.
* The number of spline points for the rates depends on the fit type, 5 points for type 6
fits and 9 points for type 2.
References
----------
.. [young] Young, P. et al., 2003, A&AS, `135, 339 <http://adsabs.harvard.edu/abs/2003ApJS..144..135Y>`_ | 625990453c8af77a43b688d8 |
class Analysis(object): <NEW_LINE> <INDENT> def __init__(self, layout, model): <NEW_LINE> <INDENT> if not isinstance(layout, BIDSLayout): <NEW_LINE> <INDENT> layout = BIDSLayout(layout) <NEW_LINE> <DEDENT> self.layout = layout <NEW_LINE> self._load_model(model) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for b in self.steps: <NEW_LINE> <INDENT> yield b <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> if isinstance(index, int): <NEW_LINE> <INDENT> return self.steps[index] <NEW_LINE> <DEDENT> level = index.lower() <NEW_LINE> name_matches = list(filter(lambda x: x.name == level, self.steps)) <NEW_LINE> if not name_matches: <NEW_LINE> <INDENT> raise KeyError('There is no block with the name "%s".' % index) <NEW_LINE> <DEDENT> return name_matches[0] <NEW_LINE> <DEDENT> def _load_model(self, model): <NEW_LINE> <INDENT> if isinstance(model, str): <NEW_LINE> <INDENT> model = json.load(open(model)) <NEW_LINE> <DEDENT> self.model = convert_JSON(model) <NEW_LINE> steps = self.model['steps'] <NEW_LINE> self.steps = [] <NEW_LINE> for i, step_args in enumerate(steps): <NEW_LINE> <INDENT> step_args['level'] = step_args['level'].lower() <NEW_LINE> step = Step(self.layout, index=i, **step_args) <NEW_LINE> self.steps.append(step) <NEW_LINE> <DEDENT> <DEDENT> def setup(self, steps=None, agg_func='mean', **kwargs): <NEW_LINE> <INDENT> input_nodes = None <NEW_LINE> selectors = self.model.get('input', {}) <NEW_LINE> selectors.update(kwargs) <NEW_LINE> for i, b in enumerate(self.steps): <NEW_LINE> <INDENT> if steps is not None and i not in steps and b.name not in steps: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> b.setup(input_nodes, **selectors) <NEW_LINE> input_nodes = b.output_nodes | Represents an entire BIDS-Model analysis.
Args:
layout (BIDSLayout, str): A BIDSLayout instance or path to pass on
to the BIDSLayout initializer.
model (str or dict): a BIDS model specification. Can either be a
string giving the path of the JSON model spec, or an already-loaded
dict containing the model info. | 6259904523e79379d538d834 |
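A short usage sketch based on the constructor and setup() shown above; the dataset path, model file, and step name are placeholders:

```python
# Sketch: load a BIDS dataset and its model spec, then resolve all steps.
analysis = Analysis(layout='/data/bids_dataset', model='model.json')
analysis.setup()            # builds each Step, chaining output nodes to inputs
run_step = analysis['run']  # steps can be looked up by name (if the model defines one)
```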
@with_incremental_state <NEW_LINE> class LSFSTransformerDecoderLayer(LSTransformerDecoderLayer): <NEW_LINE> <INDENT> def __init__(self, config, initial_weights=None, initial_biases=None): <NEW_LINE> <INDENT> super().__init__(config, initial_weights, initial_biases) <NEW_LINE> <DEDENT> def get_self_attn_cache(self, incremental_state): <NEW_LINE> <INDENT> res = self.get_incremental_state(incremental_state, "cache") <NEW_LINE> if res is not None: <NEW_LINE> <INDENT> return res <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> <DEDENT> def set_self_attn_cache(self, incremental_state, cache): <NEW_LINE> <INDENT> self.set_incremental_state(incremental_state, "cache", cache) <NEW_LINE> <DEDENT> def reorder_incremental_state(self, incremental_state, new_order): <NEW_LINE> <INDENT> cache = self.get_self_attn_cache(incremental_state) <NEW_LINE> if cache is not None: <NEW_LINE> <INDENT> for k in cache.keys(): <NEW_LINE> <INDENT> if k == "encdec_kv": <NEW_LINE> <INDENT> cur_order = new_order // self.beam_size <NEW_LINE> cur_order = cur_order[:: self.beam_size] <NEW_LINE> idx = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cur_order = new_order <NEW_LINE> idx = 0 <NEW_LINE> <DEDENT> value = cache[k] <NEW_LINE> cache[k] = value.index_select(idx, cur_order) <NEW_LINE> <DEDENT> self.set_self_attn_cache(incremental_state, cache) <NEW_LINE> <DEDENT> <DEDENT> def forward( self, x, encoder_out, encoder_padding_mask, incremental_state, self_attn_mask=None, ): <NEW_LINE> <INDENT> if incremental_state is None: <NEW_LINE> <INDENT> cache = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cache = self.get_self_attn_cache(incremental_state) <NEW_LINE> <DEDENT> self.beam_size = int(x.shape[0] / encoder_padding_mask.shape[0]) <NEW_LINE> res = super().forward(x, encoder_out, encoder_padding_mask, cache) <NEW_LINE> if cache: <NEW_LINE> <INDENT> self.set_self_attn_cache(incremental_state, cache) <NEW_LINE> <DEDENT> return res, None, None | Decoder layer only for inference. | 62599045a8ecb03325872547 |
class ManualMetadata(ContentMetadataSource): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def source_name(cls): <NEW_LINE> <INDENT> return "Manual" <NEW_LINE> <DEDENT> description = models.TextField() | Manually entered metadata. | 625990450fa83653e46f6212 |
class Attendee(People): <NEW_LINE> <INDENT> student_id = 0 <NEW_LINE> def __init__(self, name, address, phone): <NEW_LINE> <INDENT> super(Attendee, self).__init__(name, address, phone) <NEW_LINE> self.__id = Attendee.student_id <NEW_LINE> Attendee.student_id += 1 | docstring for Attendee. | 62599045004d5f362081f981 |
class Appliance(models.Model): <NEW_LINE> <INDENT> residential_home = models.ForeignKey( ResidentialHome, related_name='appliances', ) <NEW_LINE> name = models.CharField(max_length=64, choices=APPLIANCES_CHOICES) <NEW_LINE> location = models.CharField(max_length=32, choices=LOCATION_CHOICES) <NEW_LINE> created = models.DateTimeField(auto_now_add=True, db_index=True) <NEW_LINE> last_modified = models.DateTimeField(auto_now=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{} from {} located in {}".format(self.name, self.residential_home, self.location) | Represents the appliances the meters are attached to | 6259904507d97122c4217fd7 |
class PetHungryOrThirstyError(PetSadError): <NEW_LINE> <INDENT> _validation = { 'error_type': {'required': True}, } <NEW_LINE> _attribute_map = { 'error_type': {'key': 'errorType', 'type': 'str'}, 'error_message': {'key': 'errorMessage', 'type': 'str'}, 'reason': {'key': 'reason', 'type': 'str'}, 'hungry_or_thirsty': {'key': 'hungryOrThirsty', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, error_message: Optional[str] = None, reason: Optional[str] = None, hungry_or_thirsty: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(PetHungryOrThirstyError, self).__init__(error_message=error_message, reason=reason, **kwargs) <NEW_LINE> self.error_type: str = 'PetHungryOrThirstyError' <NEW_LINE> self.hungry_or_thirsty = hungry_or_thirsty | PetHungryOrThirstyError.
All required parameters must be populated in order to send to Azure.
:param error_type: Required. Constant filled by server.
:type error_type: str
:param error_message: the error message.
:type error_message: str
:param reason: why is the pet sad.
:type reason: str
:param hungry_or_thirsty: is the pet hungry or thirsty or both.
:type hungry_or_thirsty: str | 6259904507f4c71912bb0768 |
class EditToolbarDeluxe(hildon.EditToolbar): <NEW_LINE> <INDENT> def __init__(self, label_text, button_text): <NEW_LINE> <INDENT> hildon.EditToolbar.__init__(self, label_text, button_text) <NEW_LINE> alignment, separator, close_button = self.get_children() <NEW_LINE> hbox = alignment.get_child() <NEW_LINE> label, image, button = hbox.get_children() <NEW_LINE> self._action_button = button <NEW_LINE> expand, fill, padding, pack_type = hbox.query_child_packing(label) <NEW_LINE> event_box = gtk.EventBox() <NEW_LINE> event_box.connect('expose-event', self._on_expose_event) <NEW_LINE> label.reparent(event_box) <NEW_LINE> event_box.connect('button-release-event', self._on_label_clicked) <NEW_LINE> hbox.add(event_box) <NEW_LINE> hbox.reorder_child(event_box, 0) <NEW_LINE> hbox.set_child_packing(event_box, expand, fill, padding, pack_type) <NEW_LINE> self._label = label <NEW_LINE> <DEDENT> def set_button_sensitive(self, sensitivity): <NEW_LINE> <INDENT> self._action_button.set_sensitive(sensitivity) <NEW_LINE> <DEDENT> def _on_expose_event(self, widget, event): <NEW_LINE> <INDENT> style = self.get_style() <NEW_LINE> style.paint_flat_box(widget.window, gtk.STATE_NORMAL, gtk.SHADOW_NONE, event.area, widget, 'edit-toolbar', 0, 0, widget.allocation.width, widget.allocation.height) <NEW_LINE> child = widget.get_child() <NEW_LINE> widget.propagate_expose(child, event) <NEW_LINE> return True <NEW_LINE> <DEDENT> def _on_label_clicked(self, widget, event): <NEW_LINE> <INDENT> parent = self.get_parent() <NEW_LINE> app_menu = parent.get_app_menu() <NEW_LINE> if app_menu is not None: <NEW_LINE> <INDENT> app_menu.popup(parent) | HildonEditToolbar with sensitivity and AppMenu action
* Public method to set the button sensitivity
* Open the parent window's AppMenu when touching titlebar
(Enhancement requests filed as Maemo bugs #5166 and #5167) | 62599045a79ad1619776b3b6 |
class ArchiveJobsCommand(Command): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def setup(parser): <NEW_LINE> <INDENT> parser.add_argument( "ids", nargs="*", metavar="ID", help="the job ID(s) to archive") <NEW_LINE> parser.add_argument( "-a", "--all", action="store_true", help="whether to archive all jobs") <NEW_LINE> parser.add_argument( "-f", "--force", action="store_true", help="whether to force archive all without confirmation") <NEW_LINE> parser.add_argument( "--dry-run", action="store_true", help="whether to print job IDs that would be archived rather than " "actually performing the action") <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def execute(parser, args): <NEW_LINE> <INDENT> api = API() <NEW_LINE> if args.all: <NEW_LINE> <INDENT> query = (JobsQuery() .add_all_fields() .add_search("archived", False) .sort_by("upload_date", descending=False)) <NEW_LINE> jobs = api.query_jobs(query)["jobs"] <NEW_LINE> job_ids = [job["id"] for job in jobs] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> job_ids = args.ids <NEW_LINE> <DEDENT> if args.dry_run: <NEW_LINE> <INDENT> for job_id in job_ids: <NEW_LINE> <INDENT> print(job_id) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> num_jobs = len(job_ids) <NEW_LINE> if args.all: <NEW_LINE> <INDENT> print("Found %d job(s) to archive" % num_jobs) <NEW_LINE> if num_jobs > 0 and not args.force: <NEW_LINE> <INDENT> _abort_if_requested() <NEW_LINE> <DEDENT> <DEDENT> if num_jobs == 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> response = api.batch_archive_jobs(job_ids) <NEW_LINE> _log_batch_response( response, success_message="job(s) archived", failure_message="failed to archive job") | Archive jobs on the platform.
Examples:
# Archive specific jobs
voxel51 jobs archive <id> [...]
# Archive all jobs
voxel51 jobs archive --all | 6259904594891a1f408ba091 |
class TestLoginUser(testLib.RestTestCase): <NEW_LINE> <INDENT> def assertResponse(self, respData, count = 1, errCode = testLib.RestTestCase.SUCCESS): <NEW_LINE> <INDENT> expected = { 'errCode' : errCode } <NEW_LINE> if count is not None: <NEW_LINE> <INDENT> expected['count'] = count <NEW_LINE> <DEDENT> self.assertDictEqual(expected, respData) <NEW_LINE> <DEDENT> def testLoginUser(self): <NEW_LINE> <INDENT> self.makeRequest("/users/add", method="POST", data = { 'user' : 'test1', 'password' : 'password'} ) <NEW_LINE> respData = self.makeRequest("/users/login", method="POST", data = { 'user' : 'test1', 'password' : 'password'} ) <NEW_LINE> self.assertResponse(respData, 2, 1) <NEW_LINE> <DEDENT> def testLoginBadUserPasswordInput(self): <NEW_LINE> <INDENT> self.makeRequest("/users/add", method="POST", data = { 'user' : 'test1', 'password' : 'password'} ) <NEW_LINE> respData = self.makeRequest("/users/login", method="POST", data = { 'user' : 'test1', 'password' : 'password1'} ) <NEW_LINE> self.assertResponse(respData, None, -1) <NEW_LINE> <DEDENT> def testLoginBadUserUserNameInput(self): <NEW_LINE> <INDENT> self.makeRequest("/users/add", method="POST", data = { 'user' : 'test1', 'password' : 'password'} ) <NEW_LINE> respData = self.makeRequest("/users/login", method="POST", data = { 'user' : 'test', 'password' : 'password'} ) <NEW_LINE> self.assertResponse(respData, None, -1) | Test adding users | 6259904576d4e153a661dc11 |
class ContestList(generics.ListAPIView): <NEW_LINE> <INDENT> permission_classes = [IsLogined] <NEW_LINE> queryset = Contest.objects.all() <NEW_LINE> serializer_class = ContestSerializer | List all contests. | 62599045d4950a0f3b1117dd |
class HScalingTreeColumnView (Gtk.TreeViewColumn): <NEW_LINE> <INDENT> def __init__(self, name, renderer, identifier): <NEW_LINE> <INDENT> self.renderer = renderer <NEW_LINE> Gtk.TreeViewColumn.__init__(self, name, renderer, identifier=identifier) <NEW_LINE> <DEDENT> def on_scale_change(self, scale): <NEW_LINE> <INDENT> w, _ = scale() <NEW_LINE> if w != self.renderer.width: <NEW_LINE> <INDENT> self.renderer.width = w <NEW_LINE> self.queue_resize() | Gtk.TreeViewColumn that scales its width according to the scale
object it should be subscribed to.
Hook up the renderer to the scale object's on_scale_change:
scale.add(object of HScalingTreeColumnView) | 62599045b830903b9686ee16 |
class VKOAuth2(BaseOAuth2): <NEW_LINE> <INDENT> name = 'vk-oauth2' <NEW_LINE> ID_KEY = 'id' <NEW_LINE> AUTHORIZATION_URL = 'http://oauth.vk.com/authorize' <NEW_LINE> ACCESS_TOKEN_URL = 'https://oauth.vk.com/access_token' <NEW_LINE> ACCESS_TOKEN_METHOD = 'POST' <NEW_LINE> EXTRA_DATA = [ ('id', 'id'), ('expires_in', 'expires') ] <NEW_LINE> def get_user_details(self, response): <NEW_LINE> <INDENT> fullname, first_name, last_name = self.get_user_names( first_name=response.get('first_name'), last_name=response.get('last_name') ) <NEW_LINE> return {'username': response.get('screen_name'), 'email': response.get('email', ''), 'fullname': fullname, 'first_name': first_name, 'last_name': last_name} <NEW_LINE> <DEDENT> def user_data(self, access_token, *args, **kwargs): <NEW_LINE> <INDENT> request_data = ['first_name', 'last_name', 'screen_name', 'nickname', 'photo'] + self.setting('EXTRA_DATA', []) <NEW_LINE> fields = ','.join(set(request_data)) <NEW_LINE> data = vk_api(self, 'users.get', { 'access_token': access_token, 'fields': fields, }) <NEW_LINE> if data and data.get('error'): <NEW_LINE> <INDENT> error = data['error'] <NEW_LINE> msg = error.get('error_msg', 'Unknown error') <NEW_LINE> if error.get('error_code') == 5: <NEW_LINE> <INDENT> raise AuthTokenRevoked(self, msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AuthException(self, msg) <NEW_LINE> <DEDENT> <DEDENT> if data: <NEW_LINE> <INDENT> data = data.get('response')[0] <NEW_LINE> data['user_photo'] = data.get('photo') <NEW_LINE> <DEDENT> return data or {} | VKOAuth2 authentication backend | 62599045be383301e0254b51 |
class BasisFunctionRegression(LinearRegression): <NEW_LINE> <INDENT> _basis_funcs = {'gaussian': gaussian_basis} <NEW_LINE> def __init__(self, basis_func='gaussian', fit_intercept=True, regularization='none', kwds=None, **kwargs): <NEW_LINE> <INDENT> self.basis_func = basis_func <NEW_LINE> self.kwargs = kwargs <NEW_LINE> LinearRegression.__init__(self, fit_intercept, regularization, kwds) <NEW_LINE> <DEDENT> def _transform_X(self, X): <NEW_LINE> <INDENT> if callable(self.basis_func): <NEW_LINE> <INDENT> basis_func = self.basis_func <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> basis_func = self._basis_funcs.get(self.basis_func, None) <NEW_LINE> <DEDENT> X = basis_func(X, **self.kwargs) <NEW_LINE> if self.fit_intercept: <NEW_LINE> <INDENT> X = np.hstack([np.ones((X.shape[0], 1)), X]) | Basis Function with errors in y
Parameters
----------
basis_func : str or function
specify the basis function to use. This should take an input matrix
of size (n_samples, n_features), along with optional parameters,
and return a matrix of size (n_samples, n_bases).
fit_intercept : bool (optional)
if True (default) then fit the intercept of the data
regularization : string (optional)
['l1'|'l2'|'none'] Use L1 (Lasso) or L2 (Ridge) regression
kwds: dict
additional keyword arguments passed to sklearn estimators:
LinearRegression, Lasso (L1), or Ridge (L2) | 6259904507d97122c4217fda |
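A hedged fitting sketch, assuming the LinearRegression base class follows the usual fit/predict convention and that the 'gaussian' basis accepts mu and sigma keyword arguments (consistent with basis_func(X, **self.kwargs) above):

```python
# Sketch: one-dimensional Gaussian basis-function regression.
import numpy as np

rng = np.random.default_rng(0)
X = np.linspace(0, 1, 100)[:, None]
y = np.sin(2 * np.pi * X).ravel() + 0.1 * rng.standard_normal(100)

mu = np.linspace(0, 1, 10)[:, None]   # basis centres (assumed kwarg name)
model = BasisFunctionRegression('gaussian', mu=mu, sigma=0.1)
model.fit(X, y)                       # assumed inherited interface
y_fit = model.predict(X)
```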
class MockCollection(object): <NEW_LINE> <INDENT> def __init__(self, label, service): <NEW_LINE> <INDENT> self.label = label <NEW_LINE> self.service = service <NEW_LINE> self.items = [] <NEW_LINE> <DEDENT> def create_item(self, label, attr, value, replace=True): <NEW_LINE> <INDENT> item = MockItem(label, self, attr, value) <NEW_LINE> self.items.append(item) <NEW_LINE> return defer.succeed(item) | A collection of items containing secrets. | 62599045c432627299fa429f |
class NavigableMap(p.SingletonPlugin): <NEW_LINE> <INDENT> p.implements(p.IConfigurer, inherit=True) <NEW_LINE> p.implements(p.IResourceView, inherit=True) <NEW_LINE> def update_config(self, config): <NEW_LINE> <INDENT> p.toolkit.add_template_directory(config, 'theme/templates') <NEW_LINE> p.toolkit.add_resource('theme/public', 'mapviews') <NEW_LINE> <DEDENT> def info(self): <NEW_LINE> <INDENT> schema = { 'geojson_url': [not_empty, url_is_relative_or_in_same_domain], 'geojson_key_field': [not_empty], 'resource_key_field': [not_empty], 'resource_label_field': [not_empty], 'redirect_to_url': [ignore_missing], 'filter_fields': [ignore_missing], } <NEW_LINE> return {'name': 'navigable-map', 'title': 'Navigable Map', 'icon': 'map-marker', 'sizex': 6, 'sizey': 4, 'schema': schema, 'iframed': False} <NEW_LINE> <DEDENT> def can_view(self, data_dict): <NEW_LINE> <INDENT> return data_dict['resource'].get('datastore_active', False) <NEW_LINE> <DEDENT> def setup_template_variables(self, context, data_dict): <NEW_LINE> <INDENT> resource = data_dict['resource'] <NEW_LINE> resource_view = data_dict['resource_view'] <NEW_LINE> filter_fields = aslist(resource_view.get('filter_fields', [])) <NEW_LINE> resource_view['filter_fields'] = filter_fields <NEW_LINE> geojson_resources = _get_geojson_resources() <NEW_LINE> fields = _get_fields(resource) <NEW_LINE> fields_without_id = _remove_id_and_prepare_to_template(fields) <NEW_LINE> numeric_fields = _filter_numeric_fields_without_id(fields) <NEW_LINE> textual_fields = _filter_textual_fields_without_id(fields) <NEW_LINE> return {'resource': resource, 'resource_view': resource_view, 'geojson_resources': geojson_resources, 'fields': fields_without_id, 'numeric_fields': numeric_fields, 'textual_fields': textual_fields} <NEW_LINE> <DEDENT> def view_template(self, context, data_dict): <NEW_LINE> <INDENT> return 'navigablemap_view.html' <NEW_LINE> <DEDENT> def form_template(self, context, data_dict): <NEW_LINE> <INDENT> return 'navigablemap_form.html' | Creates a map view | 62599045b57a9660fecd2db6 |
class Entry(models.Model): <NEW_LINE> <INDENT> blog = models.ForeignKey(Blog, on_delete=models.CASCADE) <NEW_LINE> headline = models.CharField(max_length=255) <NEW_LINE> body_text = models.TextField() <NEW_LINE> pub_date = models.DateField(auto_now_add=True) <NEW_LINE> mod_date = models.DateField(auto_now=True) <NEW_LINE> authors = models.ManyToManyField(Author) <NEW_LINE> n_comments = models.IntegerField(default=0) <NEW_LINE> n_pingbacks = models.IntegerField(default=0) <NEW_LINE> rating = models.IntegerField(default=5) <NEW_LINE> search_vector = search.SearchVectorField(null=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.headline | A blog entry with authors field. | 625990458a43f66fc4bf34ce |
class SpannerProjectsInstancesDatabasesSessionsBatchCreateRequest(_messages.Message): <NEW_LINE> <INDENT> batchCreateSessionsRequest = _messages.MessageField('BatchCreateSessionsRequest', 1) <NEW_LINE> database = _messages.StringField(2, required=True) | A SpannerProjectsInstancesDatabasesSessionsBatchCreateRequest object.
Fields:
batchCreateSessionsRequest: A BatchCreateSessionsRequest resource to be
passed as the request body.
database: Required. The database in which the new sessions are created. | 62599045097d151d1a2c23a5 |
class TargetSmokyDingo(SmokyDingo, metaclass=ABCMeta): <NEW_LINE> <INDENT> group = "admin" <NEW_LINE> permission = "target" | A SmokyDingo which checks for the 'target' or 'admin' permission
after activation. | 62599045a4f1c619b294f825 |
class UBloxError(Exception): <NEW_LINE> <INDENT> def __init__(self, msg): <NEW_LINE> <INDENT> Exception.__init__(self, msg) <NEW_LINE> self.message = msg | Ublox error class | 62599045004d5f362081f983 |
class VisitorsResource(CachedResource): <NEW_LINE> <INDENT> date = fields.DateField('date') <NEW_LINE> visitors = fields.IntegerField('visitors', default=0) <NEW_LINE> def get_object_list(self, request): <NEW_LINE> <INDENT> kind = MetricKind.objects.get(code=VISITORS_METRIC_CODE) <NEW_LINE> qs = Metric.objects.filter(kind=kind).order_by('-start') <NEW_LINE> return [Struct(date=m.start, visitors=m.value) for m in qs] <NEW_LINE> <DEDENT> class Meta(object): <NEW_LINE> <INDENT> cache = SimpleCache() <NEW_LINE> resource_name = 'kpi_visitors' <NEW_LINE> allowed_methods = ['get'] | Returns the number of unique visitors per day. | 625990451d351010ab8f4e54 |
class MyRoot(Tk): <NEW_LINE> <INDENT> def __init__(self, rows, cols): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.rows = rows <NEW_LINE> self.cols = cols <NEW_LINE> self.configure() <NEW_LINE> <DEDENT> def configure(self): <NEW_LINE> <INDENT> for c in range(self.cols): <NEW_LINE> <INDENT> self.columnconfigure(c, weight = 1) <NEW_LINE> <DEDENT> for r in range(self.rows): <NEW_LINE> <INDENT> if r == 0: <NEW_LINE> <INDENT> weight = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> weight = 1 <NEW_LINE> <DEDENT> self.rowconfigure(r, weight = weight) | Class derived from tkinter.Tk.
Sets and configures the rows and columns
of the main widget. | 6259904523e79379d538d838 |
class PollingHandler(BaseSocketHandler): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(PollingHandler, self).__init__(*args, **kwargs) <NEW_LINE> self.tracker = PeriodicCallback(self.get_location, 5000) <NEW_LINE> self.q = Queue(maxsize=5) <NEW_LINE> self.updating = False <NEW_LINE> <DEDENT> async def get_location(self): <NEW_LINE> <INDENT> location = await self.character(self.user_id, '/location/', 'GET') <NEW_LINE> if location: <NEW_LINE> <INDENT> self.updating = True <NEW_LINE> user = self.user <NEW_LINE> graph_data = await user['router'].update( location['solarSystem']['name'] ) <NEW_LINE> if graph_data: <NEW_LINE> <INDENT> message = ['update', graph_data] <NEW_LINE> logging.warning(graph_data) <NEW_LINE> await self.safe_write(message) <NEW_LINE> <DEDENT> self.updating = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> message = ['warning', 'Log into game to track your route'] <NEW_LINE> await self.safe_write(message) <NEW_LINE> <DEDENT> <DEDENT> async def scheduler(self): <NEW_LINE> <INDENT> logging.info(f"Scheduler started for {self.request.remote_ip}") <NEW_LINE> async for item in self.q: <NEW_LINE> <INDENT> logging.debug(f"Started resolving task for {item}...") <NEW_LINE> user = self.user <NEW_LINE> try: <NEW_LINE> <INDENT> if item == 'recover': <NEW_LINE> <INDENT> await self.safe_write(['recover', user['router'].recovery]) <NEW_LINE> <DEDENT> elif item == 'track': <NEW_LINE> <INDENT> if not self.tracker.is_running(): <NEW_LINE> <INDENT> self.tracker.start() <NEW_LINE> <DEDENT> <DEDENT> elif item in ['stop', 'reset']: <NEW_LINE> <INDENT> if self.tracker.is_running(): <NEW_LINE> <INDENT> self.tracker.stop() <NEW_LINE> <DEDENT> if item == 'reset': <NEW_LINE> <INDENT> await user['router'].reset() <NEW_LINE> <DEDENT> <DEDENT> elif item[0] == 'backup': <NEW_LINE> <INDENT> if not self.updating: <NEW_LINE> <INDENT> await user['router'].backup(item[1]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> self.q.task_done() <NEW_LINE> logging.debug(f'Task "{item}" done.') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> async def task(self, item): <NEW_LINE> <INDENT> await self.q.put(item) <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> logging.info(f"Connection received from {self.request.remote_ip}") <NEW_LINE> if self.user_id: <NEW_LINE> <INDENT> self.spawn(self.scheduler) <NEW_LINE> self.vagrants.append(self) <NEW_LINE> self.spawn(self.task, 'recover') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.close() <NEW_LINE> <DEDENT> <DEDENT> def on_message(self, message): <NEW_LINE> <INDENT> self.spawn(self.task, json_decode(message)) <NEW_LINE> <DEDENT> def on_close(self): <NEW_LINE> <INDENT> self.vagrants.remove(self) <NEW_LINE> if self.tracker.is_running(): <NEW_LINE> <INDENT> self.tracker.stop() <NEW_LINE> <DEDENT> logging.info("Connection closed, " + self.request.remote_ip) | This class represents separate websocket connection.
Attributes:
tracker: tornado.ioloop.PeriodicCallback with get_location method as a
callback. Starts when user pushes "track" button. When started, it
runs every 5 seconds to find out and update character's location.
q: tornado.queues.Queue used for running tasks successively.
updating: A flag that indicates whether the router is being updated. Required
to avoid race conditions. | 625990450fa83653e46f6216 |
class List(RedisSortable, Sequence): <NEW_LINE> <INDENT> __slots__ = ("_key", "_client", "_pipe") <NEW_LINE> def __init__(self, client, key, iter=[]): <NEW_LINE> <INDENT> super(List, self).__init__(client, key) <NEW_LINE> if hasattr(iter, "__iter__") and len(iter): <NEW_LINE> <INDENT> for val in iter: <NEW_LINE> <INDENT> self._pipe.rpush(self.key, val) <NEW_LINE> <DEDENT> self._pipe.execute() <NEW_LINE> <DEDENT> <DEDENT> def __contains__(self, el): <NEW_LINE> <INDENT> return el in self._client.lrange(self.key, 0, -1) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for el in self._client.lrange(self.key, 0, -1): <NEW_LINE> <INDENT> yield el <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self._client.llen(self.key) <NEW_LINE> <DEDENT> def __reversed__(self): <NEW_LINE> <INDENT> return self._client.lrange(self.key, 0, -1).reverse() <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> return self._client.lindex(self.key, idx) <NEW_LINE> <DEDENT> def __setitem__(self, idx, el): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._client.lset(self.key, idx, el) <NEW_LINE> <DEDENT> except ResponseError: <NEW_LINE> <INDENT> raise IndexError("Index out of range") <NEW_LINE> <DEDENT> <DEDENT> def __delitem__(self, idx): <NEW_LINE> <INDENT> raise NotImplementedError("Method '__delitem__' not implemented yet") <NEW_LINE> <DEDENT> def append(self, el): <NEW_LINE> <INDENT> self._client.rpush(self.key, el) <NEW_LINE> <DEDENT> def count(self, el): <NEW_LINE> <INDENT> return self._client.lrange(self.key, 0, -1).count(el) <NEW_LINE> <DEDENT> def extend(self, iter): <NEW_LINE> <INDENT> if hasattr(iter, "__iter__"): <NEW_LINE> <INDENT> map(lambda el: self._pipe.rpush(el), iter) <NEW_LINE> self._pipe.execute() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RedisTypeError("Argument must be iterable") <NEW_LINE> <DEDENT> <DEDENT> def insert(self, idx, el): <NEW_LINE> <INDENT> count = self._client.llen(self.key) <NEW_LINE> if count < idx: <NEW_LINE> <INDENT> raise IndexError("Index out of range") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._client.lset(self.key, idx, el) <NEW_LINE> <DEDENT> <DEDENT> def index(self, el): <NEW_LINE> <INDENT> return self._client.lindex(self.key, el) <NEW_LINE> <DEDENT> def pop(self, idx=None): <NEW_LINE> <INDENT> if idx is not None: <NEW_LINE> <INDENT> return self._client.rpop(self.key) <NEW_LINE> <DEDENT> elif isinstance(idx, int): <NEW_LINE> <INDENT> self.__delitem__(idx) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RedisTypeError("Argument must be type of 'int' or 'NoneType'") <NEW_LINE> <DEDENT> <DEDENT> def remove(self, val, n=1, all=False): <NEW_LINE> <INDENT> if all: <NEW_LINE> <INDENT> if self._client.lrem(self.key, val, 0): return <NEW_LINE> <DEDENT> elif isinstance(n, int): <NEW_LINE> <INDENT> if self._client.lrem(self.key, val, n): return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RedisTypeError("Argument 'count' must be type of 'int'") <NEW_LINE> <DEDENT> raise RedisValueError("Value '" + str(val) + "' not present") <NEW_LINE> <DEDENT> def reverse(self): <NEW_LINE> <INDENT> raise NotImplementedError("Method 'reverse' not yet implemented") | Sequence datatype that tries to meme a native ``list`` datatype by
implementing *most* of its methods. Be aware that some methods still exist,
but will throw ``NotImplementedError``s. | 6259904516aa5153ce401828 |
class PDBWarning(ChemWarning): <NEW_LINE> <INDENT> pass | A non-fatal error to indicate a problematic PDB file | 6259904573bcbd0ca4bcb5c7 |
class RoomSafetyItem(models.Model): <NEW_LINE> <INDENT> unit = models.ForeignKey(Unit, null=False, blank=False, on_delete=models.PROTECT) <NEW_LINE> label = models.CharField(null=False, blank=False, max_length=50) <NEW_LINE> description = models.CharField(null=True, blank=True, max_length=500) <NEW_LINE> hidden = models.BooleanField(null=False, blank=False, default=False, editable=False) <NEW_LINE> config = JSONField(null=False, blank=False, default=dict, editable=False) <NEW_LINE> def autoslug(self): <NEW_LINE> <INDENT> return make_slug(self.unit.slug + '-' + self.label) <NEW_LINE> <DEDENT> slug = AutoSlugField(populate_from='autoslug', null=False, editable=False, unique=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.label <NEW_LINE> <DEDENT> objects = RoomSafetyItemQuerySet.as_manager() <NEW_LINE> def delete(self): <NEW_LINE> <INDENT> self.hidden = True <NEW_LINE> self.save() | Allow each unit to manage the categories which are now included in a visit. | 6259904526238365f5fade96 |
class DescribeChartPlaceholder(object): <NEW_LINE> <INDENT> def it_can_insert_a_chart_into_itself(self, request, part_prop_): <NEW_LINE> <INDENT> slide_part_ = instance_mock(request, SlidePart) <NEW_LINE> slide_part_.add_chart_part.return_value = "rId6" <NEW_LINE> part_prop_.return_value = slide_part_ <NEW_LINE> graphicFrame = element("p:graphicFrame") <NEW_LINE> _new_chart_graphicFrame_ = method_mock( request, ChartPlaceholder, "_new_chart_graphicFrame", return_value=graphicFrame, ) <NEW_LINE> _replace_placeholder_with_ = method_mock( request, ChartPlaceholder, "_replace_placeholder_with" ) <NEW_LINE> placeholder_graphic_frame_ = instance_mock(request, PlaceholderGraphicFrame) <NEW_LINE> PlaceholderGraphicFrame_ = class_mock( request, "pptx.shapes.placeholder.PlaceholderGraphicFrame", return_value=placeholder_graphic_frame_, ) <NEW_LINE> chart_data_ = instance_mock(request, ChartData) <NEW_LINE> chart_ph = ChartPlaceholder( element("p:sp/p:spPr/a:xfrm/(a:off{x=1,y=2},a:ext{cx=3,cy=4})"), "parent" ) <NEW_LINE> ph_graphic_frame = chart_ph.insert_chart(XCT.PIE, chart_data_) <NEW_LINE> slide_part_.add_chart_part.assert_called_once_with(XCT.PIE, chart_data_) <NEW_LINE> _new_chart_graphicFrame_.assert_called_once_with(chart_ph, "rId6", 1, 2, 3, 4) <NEW_LINE> _replace_placeholder_with_.assert_called_once_with(chart_ph, graphicFrame) <NEW_LINE> PlaceholderGraphicFrame_.assert_called_once_with(graphicFrame, chart_ph._parent) <NEW_LINE> assert ph_graphic_frame is placeholder_graphic_frame_ <NEW_LINE> <DEDENT> def it_creates_a_graphicFrame_element_to_help(self, new_fixture): <NEW_LINE> <INDENT> chart_ph, rId, x, y, cx, cy, expected_xml = new_fixture <NEW_LINE> graphicFrame = chart_ph._new_chart_graphicFrame(rId, x, y, cx, cy) <NEW_LINE> assert graphicFrame.xml == expected_xml <NEW_LINE> <DEDENT> @pytest.fixture <NEW_LINE> def new_fixture(self): <NEW_LINE> <INDENT> sp_cxml = "p:sp/p:nvSpPr/p:cNvPr{id=4,name=bar}" <NEW_LINE> chart_ph = ChartPlaceholder(element(sp_cxml), None) <NEW_LINE> rId, x, y, cx, cy = "rId42", 1, 2, 3, 4 <NEW_LINE> expected_xml = snippet_seq("placeholders")[1] <NEW_LINE> return chart_ph, rId, x, y, cx, cy, expected_xml <NEW_LINE> <DEDENT> @pytest.fixture <NEW_LINE> def part_prop_(self, request, slide_): <NEW_LINE> <INDENT> return property_mock(request, ChartPlaceholder, "part", return_value=slide_) <NEW_LINE> <DEDENT> @pytest.fixture <NEW_LINE> def slide_(self, request): <NEW_LINE> <INDENT> return instance_mock(request, SlidePart) | Unit-test suite for `pptx.shapes.placeholder.ChartPlaceholder` object. | 62599045d4950a0f3b1117df |
class BIM(Attack): <NEW_LINE> <INDENT> def __init__(self, model, eps=4/255, alpha=1/255, steps=0): <NEW_LINE> <INDENT> super(BIM, self).__init__("BIM", model) <NEW_LINE> self.eps = eps <NEW_LINE> self.alpha = alpha <NEW_LINE> if steps == 0: <NEW_LINE> <INDENT> self.steps = int(min(eps*255 + 4, 1.25*eps*255)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.steps = steps <NEW_LINE> <DEDENT> <DEDENT> def forward(self, images, labels): <NEW_LINE> <INDENT> images = images.clone().detach().to(self.device) <NEW_LINE> labels = labels.clone().detach().to(self.device) <NEW_LINE> labels = self._transform_label(images, labels) <NEW_LINE> loss = nn.CrossEntropyLoss() <NEW_LINE> ori_images = images.clone().detach() <NEW_LINE> for i in range(self.steps): <NEW_LINE> <INDENT> images.requires_grad = True <NEW_LINE> outputs = self.model(images) <NEW_LINE> cost = self._targeted*loss(outputs, labels) <NEW_LINE> grad = torch.autograd.grad(cost, images, retain_graph=False, create_graph=False)[0] <NEW_LINE> adv_images = images + self.alpha*grad.sign() <NEW_LINE> a = torch.clamp(ori_images - self.eps, min=0) <NEW_LINE> b = (adv_images >= a).float()*adv_images + (adv_images < a).float()*a <NEW_LINE> c = (b > ori_images+self.eps).float()*(ori_images+self.eps) + (b <= ori_images + self.eps).float()*b <NEW_LINE> images = torch.clamp(c, max=1).detach() <NEW_LINE> <DEDENT> return images | BIM or iterative-FGSM in the paper 'Adversarial Examples in the Physical World'
[https://arxiv.org/abs/1607.02533]
Distance Measure : Linf
Arguments:
model (nn.Module): model to attack.
eps (float): maximum perturbation. (DEFAULT: 4/255)
alpha (float): step size. (DEFAULT: 1/255)
steps (int): number of steps. (DEFAULT: 0)
.. note:: If steps set to 0, steps will be automatically decided following the paper.
Shape:
- images: :math:`(N, C, H, W)` where `N = number of batches`, `C = number of channels`, `H = height` and `W = width`. It must have a range [0, 1].
- labels: :math:`(N)` where each value :math:`y_i` is :math:`0 \leq y_i \leq` `number of labels`.
- output: :math:`(N, C, H, W)`.
Examples::
>>> attack = torchattacks.BIM(model, eps=4/255, alpha=1/255, steps=0)
>>> adv_images = attack(images, labels) | 62599045c432627299fa42a0 |
class Section(TexEnvironment): <NEW_LINE> <INDENT> def __init__(self, name, label=''): <NEW_LINE> <INDENT> super().__init__('section', name, label=label) <NEW_LINE> <DEDENT> def new_subsection(self, name, label=''): <NEW_LINE> <INDENT> return self.new(Subsection(name, label=label)) | Implements a LaTeX section. | 62599045b57a9660fecd2db8 |
class Config(object): <NEW_LINE> <INDENT> def __init__(self, config_file, env_tag): <NEW_LINE> <INDENT> self.configs = self.load_config_file(config_file, env_tag) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def load_config_file(config_file, env_name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(config_file) as f: <NEW_LINE> <INDENT> configs = yaml.load(f, Loader=Loader)[env_name] <NEW_LINE> <DEDENT> <DEDENT> except (OSError, TypeError, IOError): <NEW_LINE> <INDENT> configs = dict() <NEW_LINE> <DEDENT> return configs <NEW_LINE> <DEDENT> def get_variable(self, variable_name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = self.configs[variable_name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> value = None <NEW_LINE> <DEDENT> if value is None: <NEW_LINE> <INDENT> raise ValueError("Failed to get variable") <NEW_LINE> <DEDENT> return value | Test Env Config | 62599045b830903b9686ee18 |
class OS(Collection): <NEW_LINE> <INDENT> _validation = { 'on_save': False, 'on_set': False, 'allow_foreign_fields': True } <NEW_LINE> _fields = { 'name': Field(), 'description': Field() } | The operating system which DataModels
may apply to. | 62599045d6c5a102081e3458 |
class ArrayValue(_messages.Message): <NEW_LINE> <INDENT> values = _messages.MessageField('Value', 1, repeated=True) | An array value.
Fields:
values: Values in the array. The order of this array may not be preserved
if it contains a mix of indexed and unindexed values. | 6259904521a7993f00c672a5 |
class DagExecutor(object): <NEW_LINE> <INDENT> def __init__(self, dag_config, req_state): <NEW_LINE> <INDENT> self.dag_config = dag_config <NEW_LINE> self.context = state.DagExecutionContext( dag_config, req_state, ) <NEW_LINE> <DEDENT> def call_hook(self, hook_name): <NEW_LINE> <INDENT> if hook_name not in self.dag_config.config['hook_dag_map']: <NEW_LINE> <INDENT> raise RuntimeError('InvalidHook!! {0} not in {1}'.format(hook_name, self.dag_config.config['hook_dag_map'].keys())) <NEW_LINE> <DEDENT> hook_meta = self.dag_config.config['hook_dag_map'][hook_name] <NEW_LINE> dag = self.dag_config.dags[hook_meta['dag_name']] <NEW_LINE> try: <NEW_LINE> <INDENT> orig_options = self.context.options <NEW_LINE> if 'global_option_data_key' in hook_meta: <NEW_LINE> <INDENT> self.context.options = self.dag_config.config['global_option_data'][hook_meta['global_option_data_key']] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.context.options = {} <NEW_LINE> <DEDENT> return dag(self.context) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.context.options = orig_options | This object is responsible for stepping a transaction through a Dagyr
| 6259904515baa723494632ce |
class PruebaForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Prueba <NEW_LINE> fields = ('titulo', 'subtitulo', 'cantidad') <NEW_LINE> widgets = { 'titulo': forms.TextInput(attrs={'size': 12, 'Placeholder': 'Ingrese titulo'}), 'subtitulo': forms.TextInput(attrs={'size': 10, 'Placeholder': 'Ingrese subtitulo'}), 'cantidad': forms.TextInput(attrs={'size': 10, 'Placeholder': 'Ingrese Cantidad'}), } <NEW_LINE> <DEDENT> def clean_cantidad (self): <NEW_LINE> <INDENT> cantidad = self.cleaned_data['cantidad'] <NEW_LINE> if cantidad < 10: <NEW_LINE> <INDENT> raise forms.ValidationError('Ingrese un numero mayor a 10') <NEW_LINE> <DEDENT> print(cantidad) <NEW_LINE> return cantidad | Form definition for Prueba. | 6259904507d97122c4217fdd |
class Static(Downloadable): <NEW_LINE> <INDENT> @property <NEW_LINE> def should_refresh(self): <NEW_LINE> <INDENT> return False | For requests that do not change once downloaded. | 6259904507f4c71912bb076e |
class TraceVariable: <NEW_LINE> <INDENT> def __init__(self, func, interval: float = 1.0, name: str = 'variable', verbose: bool = False, ): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.name = name <NEW_LINE> self.func = func <NEW_LINE> self.interval = interval <NEW_LINE> self.verbose = verbose <NEW_LINE> self._traced = [] <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> print(f'Trace started: {self.name}') <NEW_LINE> self.update() <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self._timer.cancel() <NEW_LINE> print(f'Trace canceled: {self.name}') <NEW_LINE> return self._traced <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> ret = self.func() <NEW_LINE> now = datetime.datetime.now().strftime('%H:%M:%S.%f') <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> print(f'{now} | Trace {self.name}: {ret}') <NEW_LINE> <DEDENT> self._traced.append((now, ret)) <NEW_LINE> self._timer = threading.Timer(self.interval, self.update) <NEW_LINE> self._timer.start() | Simple class to trace a variable over time.
Usage:
t = TraceVariable(ctrl.stage.get, verbose=True)
t.start()
ctrl.stage.set(x=0, y=0, wait=False)
...
values = t.stop() | 62599045379a373c97d9a367 |
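A self-contained usage sketch of the TraceVariable class above, assuming the class and its module imports (datetime, threading) are available; time.time stands in here for the instrument getter (ctrl.stage.get in the original docstring), so those names are illustrative only.

import time

t = TraceVariable(time.time, interval=0.2, name='clock', verbose=True)
t.start()                 # prints "Trace started: clock" and begins polling
time.sleep(1.0)           # let the background timer fire a few times
values = t.stop()         # cancels the timer and returns a list of (timestamp, value) tuples
print(len(values), values[0])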
class DDD(D): <NEW_LINE> <INDENT> regex = "ddd(?!d)" <NEW_LINE> def format(self, value: Union[date, datetime], tokens: List[Token]) -> str: <NEW_LINE> <INDENT> return value.strftime("%a") <NEW_LINE> <DEDENT> def parse(self, value: str, tokens: List[Token]) -> Tuple[Dict[str, Any], str]: <NEW_LINE> <INDENT> return {"weekday": datetime.strptime(value[:3], "%a").weekday()}, value[3:] | Day of the week, three letter abbreviation (e.g., "Mon"). | 6259904596565a6dacd2d928 |
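The DDD token above leans entirely on the standard library's %a directive; a quick standard-library illustration of both directions (note that strptime given only a weekday falls back to the default date 1900-01-01, which happens to be a Monday):

from datetime import datetime

d = datetime(2023, 1, 2)                          # a Monday
print(d.strftime("%a"))                           # 'Mon'  <- what format() emits
print(datetime.strptime("Mon", "%a").weekday())   # 0      <- what parse() stores as "weekday"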
class RetargetScheduleProperties(Model): <NEW_LINE> <INDENT> _attribute_map = { 'current_resource_id': {'key': 'currentResourceId', 'type': 'str'}, 'target_resource_id': {'key': 'targetResourceId', 'type': 'str'}, } <NEW_LINE> def __init__(self, current_resource_id=None, target_resource_id=None): <NEW_LINE> <INDENT> super(RetargetScheduleProperties, self).__init__() <NEW_LINE> self.current_resource_id = current_resource_id <NEW_LINE> self.target_resource_id = target_resource_id | Properties for retargeting a virtual machine schedule.
:param current_resource_id: The resource Id of the virtual machine on
which the schedule operates
:type current_resource_id: str
:param target_resource_id: The resource Id of the virtual machine that the
schedule should be retargeted to
:type target_resource_id: str | 6259904526238365f5fade98 |
class DeletePost(FlaskForm): <NEW_LINE> <INDENT> post = HiddenField() <NEW_LINE> reason = StringField() <NEW_LINE> send_to_admin = BooleanField() | Post deletion form. | 62599045d6c5a102081e3459 |
class ProcessingInstallationError(InaSAFEError): <NEW_LINE> <INDENT> pass | When there is an error with Processing. | 625990450a366e3fb87ddd23 |
class CompetitionGroup(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=128) <NEW_LINE> competition_type = models.ForeignKey(CompetitionType, on_delete=models.PROTECT, null=False) <NEW_LINE> end_date = models.DateField() <NEW_LINE> active = models.BooleanField(default=False) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return u"%s" % (self.name) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_comp_entrants(*group_types): <NEW_LINE> <INDENT> clause = None <NEW_LINE> for group_type in group_types: <NEW_LINE> <INDENT> q = models.Q(competition_type__id=group_type) <NEW_LINE> if clause is None: <NEW_LINE> <INDENT> clause = q <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> clause |= q <NEW_LINE> <DEDENT> <DEDENT> groups = CompetitionGroup.objects.filter(clause).filter(active=True) .prefetch_related("competition_set__entrant_set__player1", "competition_set__entrant_set__player2") <NEW_LINE> players = set() <NEW_LINE> for group in groups: <NEW_LINE> <INDENT> for comp in group.competition_set.all(): <NEW_LINE> <INDENT> for entrants in comp.entrant_set.all(): <NEW_LINE> <INDENT> players.add(entrants.player1) <NEW_LINE> if entrants.player2 is not None: <NEW_LINE> <INDENT> players.add(entrants.player2) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return players <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ["-end_date", "competition_type"] <NEW_LINE> verbose_name = "Competition Group" | A grouping of competitions, e.g. a set of league boxes | 62599045b57a9660fecd2dba |
class _ipv4iter(object): <NEW_LINE> <INDENT> def __init__(self, addr): <NEW_LINE> <INDENT> self.mask = addr.netmask() <NEW_LINE> self.last = addr.net() <NEW_LINE> self.cur = addr.addr() <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> m = 0xffffffff & (self.cur & self.mask) <NEW_LINE> if m != self.last: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> n = self.cur <NEW_LINE> self.cur += 1 <NEW_LINE> return ipv4(n, self.mask) | An IPv4 address iterator | 62599045498bea3a75a58e5c |
class WebHandler(DjangoMixin, APIRequestHandler): <NEW_LINE> <INDENT> def initialize(self): <NEW_LINE> <INDENT> self.config = Settings.instance() | Base class for user <-> core APIs (client/async). | 6259904550485f2cf55dc2c5 |
class Worker(BlockchainObject): <NEW_LINE> <INDENT> type_id = 14 <NEW_LINE> def refresh(self): <NEW_LINE> <INDENT> worker = self.transnet.rpc.get_object(self.identifier) <NEW_LINE> if not worker: <NEW_LINE> <INDENT> raise WorkerDoesNotExistsException <NEW_LINE> <DEDENT> worker["work_end_date"] = formatTimeString(worker["work_end_date"]) <NEW_LINE> worker["work_begin_date"] = formatTimeString(worker["work_begin_date"]) <NEW_LINE> super(Worker, self).__init__(worker, transnet_instance=self.transnet) <NEW_LINE> self.cached = True <NEW_LINE> <DEDENT> @property <NEW_LINE> def account(self): <NEW_LINE> <INDENT> return Account( self["worker_account"], transnet_instance=self.transnet) | Read data about a worker in the chain
:param str id: id of the worker
:param transnet transnet_instance: Transnet() instance to use when
accessing an RPC | 62599045a4f1c619b294f827
class StaticService(object): <NEW_LINE> <INDENT> def __init__(self, name, ip, system_template, user_template=None, user_defined=True): <NEW_LINE> <INDENT> self.name = name.lower() <NEW_LINE> self.ip = IP(ip) <NEW_LINE> self.system_template = system_template <NEW_LINE> self.user_template = user_template <NEW_LINE> self.user_defined = user_defined <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "StaticService({}, {}, '{}', {}, user_defined={})".format( self.name, self.ip.as_string(), self.system_template, self.user_template, self.user_defined ) | A Static Service is a particular container that, when it is executed,
uses the static network
Args:
- name (str): the name of the service
- ip (str): the ip on the static network vpn
- system_template (str): the first part of the Dockerfile that will be
used to build the image
- user_template (str): the last part of the Dockerfile, written by the
user (if it is None, just the system_template will be used)
- user_defined (boolean): This is False if this static service is a
service built by the system without any request from the user. The
services that are not user_defined are | 6259904523e79379d538d83c |
class Meta(type): <NEW_LINE> <INDENT> def __new__(cls, name, bases, namespace, **kwargs): <NEW_LINE> <INDENT> return super().__new__(cls, name, bases, namespace, **kwargs) <NEW_LINE> <DEDENT> def __init__(cls, name, bases, namespace, **kwargs): <NEW_LINE> <INDENT> super().__init__(name, bases, namespace, **kwargs) <NEW_LINE> if not hasattr(cls, 'registory'): <NEW_LINE> <INDENT> cls.registory = {} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cls.registory[name] = cls | Meta class of LayerCreator. | 6259904524f1403a9268626c |
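A minimal sketch of what the registry metaclass above does, assuming the Meta class is importable: every subclass of a base that uses it gets recorded in the shared registory dict (the class names Base, Dense and Conv are illustrative, not from the original source).

class Base(metaclass=Meta):     # first class: creates the empty registory
    pass

class Dense(Base):              # later classes register themselves by name
    pass

class Conv(Base):
    pass

print(Base.registory)           # {'Dense': <class ...Dense>, 'Conv': <class ...Conv>}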
class Group(Base): <NEW_LINE> <INDENT> __tablename__ = 'group' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> name = Column(String(64), unique=True, nullable=False) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "<id=%s,name=%s>" % (self.id, self.name) | Host group table; associates host users and bastion host users | 62599045d10714528d69f02c
class Ipv6NdNeighborOriginEnum(Enum): <NEW_LINE> <INDENT> other = 0 <NEW_LINE> static = 1 <NEW_LINE> dynamic = 2 <NEW_LINE> @staticmethod <NEW_LINE> def _meta_info(): <NEW_LINE> <INDENT> from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv6_nd_oper as meta <NEW_LINE> return meta._meta_table['Ipv6NdNeighborOriginEnum'] | Ipv6NdNeighborOriginEnum
IPv6 ND Neighbor Origin Type
.. data:: other = 0
Other Address
.. data:: static = 1
Static Address
.. data:: dynamic = 2
Dynamic Address | 6259904516aa5153ce40182c |
class set_environ: <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.kwargs = dict( kwargs ) <NEW_LINE> self.save_environ = None <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.save_environ = dict( os.environ ) <NEW_LINE> for n,v in self.kwargs.items(): <NEW_LINE> <INDENT> if v == None: <NEW_LINE> <INDENT> if n in os.environ: <NEW_LINE> <INDENT> del os.environ[n] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> os.environ[n] = v <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __exit__(self, type, value, traceback): <NEW_LINE> <INDENT> if self.save_environ != None: <NEW_LINE> <INDENT> for n,v in list( os.environ.items() ): <NEW_LINE> <INDENT> if n not in self.save_environ: <NEW_LINE> <INDENT> del os.environ[n] <NEW_LINE> <DEDENT> <DEDENT> for n,v in self.save_environ.items(): <NEW_LINE> <INDENT> if n not in os.environ or os.environ[n] != v: <NEW_LINE> <INDENT> os.environ[n] = v | with set_environ( name=value, name=value, ... ):
pass | 6259904521bff66bcd723fab |
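A runnable sketch of the context manager above: TESTVAR exists only inside the with block, passing None removes a variable for its duration, and __exit__ restores the original environment (this assumes PATH is set in the parent environment).

import os

os.environ.pop('TESTVAR', None)
with set_environ(TESTVAR='1', PATH=None):
    print(os.environ.get('TESTVAR'))   # '1'
    print('PATH' in os.environ)        # False (temporarily removed)
print(os.environ.get('TESTVAR'))       # None (addition rolled back)
print('PATH' in os.environ)            # True (deletion rolled back)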
class PERDocsViewset(viewsets.ReadOnlyModelViewSet): <NEW_LINE> <INDENT> queryset = NiceDocument.objects.all() <NEW_LINE> authentication_classes = (TokenAuthentication,) <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> get_request_user_regions = RegionRestrictedAdmin.get_request_user_regions <NEW_LINE> get_filtered_queryset = RegionRestrictedAdmin.get_filtered_queryset <NEW_LINE> filter_class = PERDocsFilter <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> queryset = NiceDocument.objects.all() <NEW_LINE> cond1 = Q() <NEW_LINE> cond2 = Q() <NEW_LINE> cond3 = Q() <NEW_LINE> if 'new' in self.request.query_params.keys(): <NEW_LINE> <INDENT> last_duedate = settings.PER_LAST_DUEDATE <NEW_LINE> timezone = pytz.timezone("Europe/Zurich") <NEW_LINE> if not last_duedate: <NEW_LINE> <INDENT> last_duedate = timezone.localize(datetime(2000, 11, 15, 9, 59, 25, 0)) <NEW_LINE> <DEDENT> cond1 = Q(created_at__gt=last_duedate) <NEW_LINE> <DEDENT> if 'country' in self.request.query_params.keys(): <NEW_LINE> <INDENT> cid = self.request.query_params.get('country', None) or 0 <NEW_LINE> country = Country.objects.filter(pk=cid) <NEW_LINE> if country: <NEW_LINE> <INDENT> cond2 = Q(country_id=country[0].id) <NEW_LINE> <DEDENT> <DEDENT> if 'visible' in self.request.query_params.keys(): <NEW_LINE> <INDENT> cond3 = Q(visibility=1) <NEW_LINE> <DEDENT> queryset = NiceDocument.objects.filter(cond1 & cond2 & cond3) <NEW_LINE> if queryset.exists(): <NEW_LINE> <INDENT> queryset = self.get_filtered_queryset(self.request, queryset, 4) <NEW_LINE> <DEDENT> return queryset <NEW_LINE> <DEDENT> def get_serializer_class(self): <NEW_LINE> <INDENT> if self.action == 'list': <NEW_LINE> <INDENT> return ListNiceDocSerializer <NEW_LINE> <DEDENT> ordering_fields = ('name', 'country',) | To collect PER Documents | 6259904523e79379d538d83d |
class _ProxyTraversedSite(ProxyBase): <NEW_LINE> <INDENT> def __new__(cls, base, site_manager): <NEW_LINE> <INDENT> return ProxyBase.__new__(cls, base) <NEW_LINE> <DEDENT> def __init__(self, base, site_manager): <NEW_LINE> <INDENT> ProxyBase.__init__(self, base) <NEW_LINE> self.__site_manager = site_manager <NEW_LINE> <DEDENT> @non_overridable <NEW_LINE> def getSiteManager(self): <NEW_LINE> <INDENT> return self.__site_manager <NEW_LINE> <DEDENT> @non_overridable <NEW_LINE> def setSiteManager(self, new_man): <NEW_LINE> <INDENT> raise ValueError("Cannot set site manager on proxy") | We need to be able to control the site manager used
by sites we traverse to in order to ensure that host
configuration is at the right place in the resolution order.
But a site can be literally any type of object. So we fake out the
siteManager methods but proxy everything else. | 62599045ec188e330fdf9bda |
class itemizeResponseType (pyxb.binding.basis.complexTypeDefinition): <NEW_LINE> <INDENT> _TypeDefinition = None <NEW_LINE> _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY <NEW_LINE> _Abstract = False <NEW_LINE> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'itemizeResponseType') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('https://poms-test.omroep.nl/schema/update/vproMediaUpdate.xsd', 663, 2) <NEW_LINE> _ElementMap = {} <NEW_LINE> _AttributeMap = {} <NEW_LINE> __request = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'request'), 'request', '__urnvpromediaupdate2009_itemizeResponseType_urnvpromediaupdate2009request', False, pyxb.utils.utility.Location('https://poms-test.omroep.nl/schema/update/vproMediaUpdate.xsd', 666, 8), ) <NEW_LINE> request = property(__request.value, __request.set, None, None) <NEW_LINE> __liverequest = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'liverequest'), 'liverequest', '__urnvpromediaupdate2009_itemizeResponseType_urnvpromediaupdate2009liverequest', False, pyxb.utils.utility.Location('https://poms-test.omroep.nl/schema/update/vproMediaUpdate.xsd', 667, 8), ) <NEW_LINE> liverequest = property(__liverequest.value, __liverequest.set, None, None) <NEW_LINE> __result = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'result'), 'result', '__urnvpromediaupdate2009_itemizeResponseType_urnvpromediaupdate2009result', True, pyxb.utils.utility.Location('https://poms-test.omroep.nl/schema/update/vproMediaUpdate.xsd', 669, 6), ) <NEW_LINE> result = property(__result.value, __result.set, None, None) <NEW_LINE> __id = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'id'), 'id', '__urnvpromediaupdate2009_itemizeResponseType_urnvpromediaupdate2009id', False, pyxb.utils.utility.Location('https://poms-test.omroep.nl/schema/update/vproMediaUpdate.xsd', 671, 6), ) <NEW_LINE> id = property(__id.value, __id.set, None, None) <NEW_LINE> __success = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'success'), 'success', '__urnvpromediaupdate2009_itemizeResponseType_success', pyxb.binding.datatypes.boolean, required=True) <NEW_LINE> __success._DeclarationLocation = pyxb.utils.utility.Location('https://poms-test.omroep.nl/schema/update/vproMediaUpdate.xsd', 673, 4) <NEW_LINE> __success._UseLocation = pyxb.utils.utility.Location('https://poms-test.omroep.nl/schema/update/vproMediaUpdate.xsd', 673, 4) <NEW_LINE> success = property(__success.value, __success.set, None, None) <NEW_LINE> _ElementMap.update({ __request.name() : __request, __liverequest.name() : __liverequest, __result.name() : __result, __id.name() : __id }) <NEW_LINE> _AttributeMap.update({ __success.name() : __success }) | Complex type {urn:vpro:media:update:2009}itemizeResponseType with content type ELEMENT_ONLY | 625990450a366e3fb87ddd25 |
class CharRNN(Chain): <NEW_LINE> <INDENT> def __init__(self, n_vocab, n_units): <NEW_LINE> <INDENT> super(CharRNN, self).__init__( embed = F.EmbedID(n_vocab, n_units), l1_x = L.Linear(n_units, 4*n_units), l1_h = L.Linear(n_units, 4*n_units), l2_h = L.Linear(n_units, 4*n_units), l2_x = L.Linear(n_units, 4*n_units), l3 = L.Linear(n_units, n_vocab), ) <NEW_LINE> for param in self.params(): <NEW_LINE> <INDENT> param.data[:] = np.random.uniform(-0.08, 0.08, param.data.shape) <NEW_LINE> <DEDENT> <DEDENT> def forward_one_step(self, x_data, y_data, state, train=True, dropout_ratio=0.5): <NEW_LINE> <INDENT> x = Variable(x_data, volatile=not train) <NEW_LINE> t = Variable(y_data, volatile=not train) <NEW_LINE> h0 = self.embed(x) <NEW_LINE> h1_in = self.l1_x(F.dropout(h0, ratio=dropout_ratio, train=train)) + self.l1_h(state['h1']) <NEW_LINE> c1, h1 = F.lstm(state['c1'], h1_in) <NEW_LINE> h2_in = self.l2_x(F.dropout(h1, ratio=dropout_ratio, train=train)) + self.l2_h(state['h2']) <NEW_LINE> c2, h2 = F.lstm(state['c2'], h2_in) <NEW_LINE> y = self.l3(F.dropout(h2, ratio=dropout_ratio, train=train)) <NEW_LINE> state = {'c1': c1, 'h1': h1, 'c2': c2, 'h2': h2} <NEW_LINE> if train: <NEW_LINE> <INDENT> return state, F.softmax_cross_entropy(y, t) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return state, F.softmax(y) <NEW_LINE> <DEDENT> <DEDENT> def featurize(self, x_data, y_data, state, dropout_ratio=0.5): <NEW_LINE> <INDENT> pass | Class: CharRNNFeaturizer
========================
Contains a CharRNN that is capable of featurizing arbitrary text, one vector
per character.
Usage:
-----
>> model = CharRNNFeaturizer(n_vocab, n_units)
>> model.train(train_data)
>> model.import(other_model)
>> model. | 62599045b57a9660fecd2dbc |
class DemoSiteImporter(object): <NEW_LINE> <INDENT> def __init__(self, logger): <NEW_LINE> <INDENT> self.logger = logger <NEW_LINE> <DEDENT> @atomic <NEW_LINE> def handle(self, product_class_name, filepath): <NEW_LINE> <INDENT> product_class = ProductClass.objects.get( name=product_class_name) <NEW_LINE> attribute_codes = [] <NEW_LINE> with UnicodeCSVReader(filepath) as reader: <NEW_LINE> <INDENT> for row in reader: <NEW_LINE> <INDENT> if row[1] == 'UPC': <NEW_LINE> <INDENT> attribute_codes = row[9:] <NEW_LINE> continue <NEW_LINE> <DEDENT> self.create_product(product_class, attribute_codes, row) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def create_product(self, product_class, attribute_codes, row): <NEW_LINE> <INDENT> (ptype, upc, title, description, category, partner, sku, price, stock) = row[0:9] <NEW_LINE> is_child = ptype.lower() == 'variant' <NEW_LINE> is_parent = ptype.lower() == 'group' <NEW_LINE> if upc: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> product = Product.objects.get(upc=upc) <NEW_LINE> <DEDENT> except Product.DoesNotExist: <NEW_LINE> <INDENT> product = Product(upc=upc) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> product = Product() <NEW_LINE> <DEDENT> if is_child: <NEW_LINE> <INDENT> product.structure = Product.CHILD <NEW_LINE> product.parent = self.parent <NEW_LINE> <DEDENT> elif is_parent: <NEW_LINE> <INDENT> product.structure = Product.PARENT <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> product.structure = Product.STANDALONE <NEW_LINE> <DEDENT> if not product.is_child: <NEW_LINE> <INDENT> product.title = title <NEW_LINE> product.description = description <NEW_LINE> product.product_class = product_class <NEW_LINE> <DEDENT> if not product.is_parent: <NEW_LINE> <INDENT> for code, value in zip(attribute_codes, row[9:]): <NEW_LINE> <INDENT> attr = product_class.attributes.get( code=code) <NEW_LINE> if attr.is_option: <NEW_LINE> <INDENT> value = attr.option_group.options.get(option=value) <NEW_LINE> <DEDENT> if attr.type == 'date': <NEW_LINE> <INDENT> value = datetime.strptime(value, "%d/%m/%Y").date() <NEW_LINE> <DEDENT> setattr(product.attr, code, value) <NEW_LINE> <DEDENT> <DEDENT> product.save() <NEW_LINE> if is_parent: <NEW_LINE> <INDENT> self.parent = product <NEW_LINE> <DEDENT> if category: <NEW_LINE> <INDENT> leaf = create_from_breadcrumbs(category) <NEW_LINE> ProductCategory.objects.get_or_create( product=product, category=leaf) <NEW_LINE> <DEDENT> if partner: <NEW_LINE> <INDENT> partner, __ = Partner.objects.get_or_create(name=partner) <NEW_LINE> try: <NEW_LINE> <INDENT> record = StockRecord.objects.get(product=product) <NEW_LINE> <DEDENT> except StockRecord.DoesNotExist: <NEW_LINE> <INDENT> record = StockRecord(product=product) <NEW_LINE> <DEDENT> record.partner = partner <NEW_LINE> record.partner_sku = sku <NEW_LINE> record.price_excl_tax = D(price) <NEW_LINE> if stock != 'NULL': <NEW_LINE> <INDENT> record.num_in_stock = stock <NEW_LINE> <DEDENT> record.save() | Another quick and dirty catalogue product importer. Used to built the
demo site, and most likely not useful outside of it. | 6259904515baa723494632d1 |
class GoogleDirectionsFinder(DirectionsFinder, APIRequest): <NEW_LINE> <INDENT> def __init__(self, cfg): <NEW_LINE> <INDENT> DirectionsFinder.__init__(self) <NEW_LINE> APIRequest.__init__(self, cfg, 'google-directions', 'Google directions query') <NEW_LINE> self.directions_url = 'http://maps.googleapis.com/maps/api/directions/json' <NEW_LINE> <DEDENT> @lru_cache(maxsize=10) <NEW_LINE> def get_directions(self, waypoints, departure_time=None, arrival_time=None): <NEW_LINE> <INDENT> data = { 'origin': ('"zastávka %s", %s, Česká republika' % (waypoints.from_stop, waypoints.from_city)).encode('utf-8'), 'destination': ('"zastávka %s", %s, Česká republika' % (waypoints.to_stop, waypoints.to_city)).encode('utf-8'), 'region': 'cz', 'sensor': 'false', 'alternatives': 'true', 'mode': 'transit', } <NEW_LINE> if departure_time: <NEW_LINE> <INDENT> data['departure_time'] = int(time.mktime(departure_time.timetuple())) <NEW_LINE> <DEDENT> elif arrival_time: <NEW_LINE> <INDENT> data['arrival_time'] = int(time.mktime(arrival_time.timetuple())) <NEW_LINE> <DEDENT> self.system_logger.info("Google Directions request:\n" + str(data)) <NEW_LINE> page = urllib.urlopen(self.directions_url + '?' + urllib.urlencode(data)) <NEW_LINE> response = json.load(page) <NEW_LINE> self._log_response_json(response) <NEW_LINE> directions = GoogleDirections(input_json=response, travel=waypoints) <NEW_LINE> self.system_logger.info("Google Directions response:\n" + unicode(directions)) <NEW_LINE> return directions | Transit direction finder using the Google Maps query engine. | 62599045009cb60464d02874 |
class Bullet(Sprite): <NEW_LINE> <INDENT> dmg = 0.0 <NEW_LINE> sprite_move_action = None <NEW_LINE> def __init__(self, image, dmg=0.1): <NEW_LINE> <INDENT> super(Bullet, self).__init__(image) <NEW_LINE> self.scale = 0.25 <NEW_LINE> self.dmg = dmg <NEW_LINE> self.cshape = collision.AARectShape(self.position, self.width, self.height) <NEW_LINE> <DEDENT> def info(self): <NEW_LINE> <INDENT> info = "Dmg: %f\nFather: %s\nPosition: %s" % (self.dmg, self.father, self.position) <NEW_LINE> return info + '\n-------------------\n' | Generic representation for spaceship and rohenian bullets. | 625990458a43f66fc4bf34d4
class BaseConsensusRules(): <NEW_LINE> <INDENT> voting = Voting <NEW_LINE> @staticmethod <NEW_LINE> def validate_transaction(bigchain, transaction): <NEW_LINE> <INDENT> return transaction.validate(bigchain) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def validate_block(bigchain, block): <NEW_LINE> <INDENT> return block.validate(bigchain) | Base consensus rules for Bigchain.
A consensus plugin must expose a class inheriting from this one via an entry_point.
All methods listed below must be implemented. | 625990453c8af77a43b688dd |
class ScManager(Manager, models.BaseModelManager): <NEW_LINE> <INDENT> def __init__(self, model_class): <NEW_LINE> <INDENT> self.model_class = model_class <NEW_LINE> <DEDENT> @property <NEW_LINE> def _db(self): <NEW_LINE> <INDENT> return env.mongo_connection | Mix of managers | 6259904515baa723494632d2 |
class BlockedRequestList: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._requests_data = deque() <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def redis_conn(self): <NEW_LINE> <INDENT> if settings.RQ_BLOCKED_REQUESTS_LIST: <NEW_LINE> <INDENT> return get_redis_connection("default") <NEW_LINE> <DEDENT> <DEDENT> def lpush(self, request_data): <NEW_LINE> <INDENT> if self.redis_conn: <NEW_LINE> <INDENT> self.redis_conn.lpush(settings.RQ_BLOCKED_REQUESTS_LIST, json.dumps(request_data)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._requests_data.appendleft(request_data) <NEW_LINE> <DEDENT> <DEDENT> def lpop(self): <NEW_LINE> <INDENT> if self.redis_conn: <NEW_LINE> <INDENT> return json.loads(self.redis_conn.lpop(settings.RQ_BLOCKED_REQUESTS_LIST)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._requests_data.popleft() <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> if self.redis_conn: <NEW_LINE> <INDENT> return self.redis_conn.llen(settings.RQ_BLOCKED_REQUESTS_LIST) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return len(self._requests_data) <NEW_LINE> <DEDENT> <DEDENT> def clear(self): <NEW_LINE> <INDENT> if self.redis_conn: <NEW_LINE> <INDENT> while len(self) > 0: <NEW_LINE> <INDENT> self.lpop() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._requests_data.clear() | Singleton indirection between our code and Redis to isolate how we're enqueing the request
data to be further read by the system. Doing this also allows us to unit test it without
relying on complicated mocking strategies. Import as:
from traffic_control.blocked_list import blocked_requests | 6259904507d97122c4217fe1 |
class GeventTask(Task): <NEW_LINE> <INDENT> def start(self, *args, **kwargs): <NEW_LINE> <INDENT> return spawn(functools.partial(Task.start, self, *args, **kwargs)) | Task that spawns a greenlet | 62599045b5575c28eb713669 |
class Txt: <NEW_LINE> <INDENT> def __init__(self,path, mode='r',coding='utf8'): <NEW_LINE> <INDENT> self.data = [] <NEW_LINE> self.f = None <NEW_LINE> if mode =='r': <NEW_LINE> <INDENT> for line in open(path, encoding=coding): <NEW_LINE> <INDENT> self.data.append(line) <NEW_LINE> <DEDENT> for i in range(self.data.__len__()): <NEW_LINE> <INDENT> self.data[i] = self.data[i].encode('utf-8').decode('utf-8-sig') <NEW_LINE> self.data[i] = self.data[i].replace('\n','') <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> if mode == 'w': <NEW_LINE> <INDENT> self.f = open(path,'a',encoding=coding) <NEW_LINE> return <NEW_LINE> <DEDENT> if mode == 'rw': <NEW_LINE> <INDENT> for line in open(path,encoding=coding): <NEW_LINE> <INDENT> self.data.append(line) <NEW_LINE> <DEDENT> for i in range(self.data.__len__()): <NEW_LINE> <INDENT> self.data[i] = self.data[i].encode('utf-8').decode('utf-8-sig') <NEW_LINE> self.data[i] = self.data[i].replace('\n','') <NEW_LINE> <DEDENT> self.f = open(path,'a',encoding=coding) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> def read(self): <NEW_LINE> <INDENT> return self.data <NEW_LINE> <DEDENT> def writeline(self,s): <NEW_LINE> <INDENT> if self.f is None: <NEW_LINE> <INDENT> logger.error("未打开可写入txt文件") <NEW_LINE> <DEDENT> self.f.write(str(s)) <NEW_LINE> <DEDENT> def save_close(self): <NEW_LINE> <INDENT> if self.f is None: <NEW_LINE> <INDENT> logger.error("未打开可写入txt文件") <NEW_LINE> <DEDENT> self.f.close() | Used to read and write text files | 62599045b5575c28eb71366b
class NoResponse(TestServerError): <NEW_LINE> <INDENT> pass | Raised by get_response method when no response data is available
to handle the request | 6259904526068e7796d4dc87
class RequireDocs(type): <NEW_LINE> <INDENT> def __new__(mcs, name, bases, dct): <NEW_LINE> <INDENT> for attr_name, value in dct.items(): <NEW_LINE> <INDENT> if callable(value) and not hasattr(value, "__doc__"): <NEW_LINE> <INDENT> raise exception.DocstringsMissing(attr_name) <NEW_LINE> <DEDENT> <DEDENT> return super(RequireDocs, mcs).__new__(mcs, name, bases, dct) | Requires from all methods to include docstrings | 6259904530c21e258be99b47 |
class ArtifactFileInfoResponse(ArtifactInfoResponseBase): <NEW_LINE> <INDENT> downloadUri: Optional[str] = None <NEW_LINE> remoteUrl: Optional[str] = None <NEW_LINE> mimeType: Optional[str] = None <NEW_LINE> size: Optional[int] = None <NEW_LINE> checksums: Optional[Checksums] = None <NEW_LINE> originalChecksums: Optional[OriginalChecksums] = None | Models an artifact file info response. | 62599045507cdc57c63a60de |
class Commands: <NEW_LINE> <INDENT> def setRawIOLoopback( self, enable=True ): <NEW_LINE> <INDENT> data.rawio_loopback = enable <NEW_LINE> <DEDENT> def setRawIOPacketSize( self, packet_size=8 ): <NEW_LINE> <INDENT> control.kiibohd.HIDIO_packet_size.argtypes = [ c_uint16 ] <NEW_LINE> return control.kiibohd.HIDIO_packet_size( packet_size ) <NEW_LINE> <DEDENT> def HIDIO_test_2_request( self, payload_len, payload_value ): <NEW_LINE> <INDENT> control.kiibohd.HIDIO_test_2_request.argtypes = [ c_uint16, c_uint16 ] <NEW_LINE> return control.kiibohd.HIDIO_test_2_request( payload_len, payload_value ) <NEW_LINE> <DEDENT> def HIDIO_invalid_65535_request( self ): <NEW_LINE> <INDENT> control.kiibohd.HIDIO_invalid_65535_request.argtypes = [] <NEW_LINE> return control.kiibohd.HIDIO_invalid_65535_request() | Container class of commands available to control the host-side KLL implementation | 62599045c432627299fa42a3 |
class MathInlineMixin(object): <NEW_LINE> <INDENT> def enable_math(self): <NEW_LINE> <INDENT> self.rules.math = re.compile(r'^\$(.+?)\$') <NEW_LINE> self.default_rules.insert(0, 'math') <NEW_LINE> self.rules.text = re.compile( r'^[\s\S]+?(?=[\\<!\[_*`~\$]|https?://| {2,}\n|\{\{|$)') <NEW_LINE> <DEDENT> def output_math(self, m): <NEW_LINE> <INDENT> return self.renderer.math(m.group(1)) | Math mixin for InlineLexer, mix this with InlineLexer::
class MathInlineLexer(InlineLexer, MathInlineMixin):
def __init__(self, *args, **kwargs):
super(MathInlineLexer, self).__init__(*args, **kwargs)
self.enable_math() | 62599045b830903b9686ee1b |
class UserSessionData(db.Model): <NEW_LINE> <INDENT> session = db.ReferenceProperty(UserSession) <NEW_LINE> keyname = db.StringProperty() <NEW_LINE> content = db.BlobProperty() | Model for the session data in the datastore. | 625990456fece00bbaccccf7 |
class Employee: <NEW_LINE> <INDENT> def __init__(self, first_name, last_name, annual_salary): <NEW_LINE> <INDENT> self.first_name = first_name.title() <NEW_LINE> self.last_name = last_name.title() <NEW_LINE> self.annual_salary = annual_salary <NEW_LINE> <DEDENT> def give_raise(self, amount=5000): <NEW_LINE> <INDENT> self.annual_salary += amount | Collect the first name, last name and annual salary and store them. | 62599045cad5886f8bdc5a1f |
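A short usage sketch of the Employee class above; the numbers are arbitrary.

emp = Employee('ada', 'lovelace', 60000)
print(emp.first_name, emp.last_name)   # Ada Lovelace (title-cased in __init__)
emp.give_raise()                       # default raise of 5000
emp.give_raise(12000)                  # explicit amount
print(emp.annual_salary)               # 77000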
class PreparerSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ('id', 'first_name', 'last_name', 'username') | Serializer for User Info in a workflow | 6259904573bcbd0ca4bcb5ce |
class Poster(models.Model): <NEW_LINE> <INDENT> poster = models.ImageField(upload_to='posters/') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return '{}'.format(self.poster) | Poster model : Table for movie posters | 6259904523849d37ff8523fe
class _BrowserSwitch(str): <NEW_LINE> <INDENT> def string_to_browser(self, browser_string): <NEW_LINE> <INDENT> method = getattr(self, browser_string.lower(), lambda: "invalid") <NEW_LINE> return method() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def android(): <NEW_LINE> <INDENT> return webdriver.Android() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def chrome(): <NEW_LINE> <INDENT> return webdriver.Chrome() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def edge(): <NEW_LINE> <INDENT> return webdriver.Edge() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def ff(): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def firefox(): <NEW_LINE> <INDENT> return webdriver.Firefox() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def ie(): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def internet_explorer(): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def internetexplorer(): <NEW_LINE> <INDENT> return webdriver.Ie() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def opera(): <NEW_LINE> <INDENT> return webdriver.Opera() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def phantom(): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def phantomjs(): <NEW_LINE> <INDENT> return webdriver.PhantomJS() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def safari(): <NEW_LINE> <INDENT> return webdriver.Safari() | Get a new instance of a specific driver. (ChromeDriver, FirefoxDriver,
etc). | 62599045a79ad1619776b3c2 |
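string_to_browser above is a dispatch-by-name pattern: getattr looks up a method matching the lowercased string and falls back to a lambda returning "invalid". The same idea in a selenium-free sketch; the Greeter class and its methods are made up for illustration.

class Greeter:
    def dispatch(self, name):
        # resolve the method by name, with a safe fallback
        method = getattr(self, name.lower(), lambda: "invalid")
        return method()

    @staticmethod
    def english():
        return "hello"

    @staticmethod
    def spanish():
        return "hola"

print(Greeter().dispatch("English"))   # hello
print(Greeter().dispatch("Klingon"))   # invalid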
class AiRecognitionTaskFaceResult(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Status = None <NEW_LINE> self.ErrCode = None <NEW_LINE> self.Message = None <NEW_LINE> self.Input = None <NEW_LINE> self.Output = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Status = params.get("Status") <NEW_LINE> self.ErrCode = params.get("ErrCode") <NEW_LINE> self.Message = params.get("Message") <NEW_LINE> if params.get("Input") is not None: <NEW_LINE> <INDENT> self.Input = AiRecognitionTaskFaceResultInput() <NEW_LINE> self.Input._deserialize(params.get("Input")) <NEW_LINE> <DEDENT> if params.get("Output") is not None: <NEW_LINE> <INDENT> self.Output = AiRecognitionTaskFaceResultOutput() <NEW_LINE> self.Output._deserialize(params.get("Output")) | Face recognition result.
| 625990458da39b475be04532 |
class TestS3SearchOptionsWidget(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.resource = current.manager.define_resource("hrm", "human_resource") <NEW_LINE> <DEDENT> def testQuery(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testWidget(self): <NEW_LINE> <INDENT> widget = S3SearchOptionsWidget("virtual_field", options={}) <NEW_LINE> output = widget.widget(self.resource, {}) <NEW_LINE> self.assertEqual(str(output), str(SPAN(T("No options available"), _class="no-options-available"))) <NEW_LINE> widget = S3SearchOptionsWidget("virtual_field", options={1:"One"}) <NEW_LINE> output = widget.widget(self.resource, {}) <NEW_LINE> self.assertEqual(str(output), str(TABLE(TR(TD(LABEL(INPUT(_name="human_resource_search_select_virtual_field", _type="checkbox", _value="1"), "One"))), _class="generic-widget", _id="None_human_resource_search_select_virtual_field", _name="human_resource_search_select_virtual_field"))) | Test S3SearchOptionsWidget | 6259904576d4e153a661dc17 |
class StaticSite(StoredFileMixin, db.Model): <NEW_LINE> <INDENT> __tablename__ = 'static_sites' <NEW_LINE> __table_args__ = {'schema': 'events'} <NEW_LINE> add_file_date_column = False <NEW_LINE> file_required = False <NEW_LINE> id = db.Column( db.Integer, primary_key=True ) <NEW_LINE> event_id = db.Column( db.Integer, db.ForeignKey('events.events.id'), index=True, nullable=False ) <NEW_LINE> state = db.Column( PyIntEnum(StaticSiteState), default=StaticSiteState.pending, nullable=False ) <NEW_LINE> requested_dt = db.Column( UTCDateTime, default=now_utc, nullable=False ) <NEW_LINE> creator_id = db.Column( db.Integer, db.ForeignKey('users.users.id'), index=True, nullable=False ) <NEW_LINE> creator = db.relationship( 'User', lazy=False, backref=db.backref( 'static_sites', lazy='dynamic' ) ) <NEW_LINE> event = db.relationship( 'Event', lazy=True, backref=db.backref( 'static_sites', lazy='dynamic' ) ) <NEW_LINE> @property <NEW_LINE> def locator(self): <NEW_LINE> <INDENT> return {'confId': self.event_id, 'id': self.id} <NEW_LINE> <DEDENT> def _build_storage_path(self): <NEW_LINE> <INDENT> path_segments = ['event', strict_unicode(self.event.id), 'static'] <NEW_LINE> self.assign_id() <NEW_LINE> filename = '{}-{}'.format(self.id, self.filename) <NEW_LINE> path = posixpath.join(*(path_segments + [filename])) <NEW_LINE> return config.STATIC_SITE_STORAGE, path <NEW_LINE> <DEDENT> @return_ascii <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return format_repr(self, 'id', 'event_id', 'state') | Static site for an Indico event. | 62599045e76e3b2f99fd9d4f |
class NameNotSpecifiedException(MCVirtException): <NEW_LINE> <INDENT> pass | A name has not been specified and cannot be determined by the path/URL. | 6259904545492302aabfd81c |
class NZBGetConfigFlow(ConfigFlow, domain=DOMAIN): <NEW_LINE> <INDENT> VERSION = 1 <NEW_LINE> @staticmethod <NEW_LINE> @callback <NEW_LINE> def async_get_options_flow(config_entry): <NEW_LINE> <INDENT> return NZBGetOptionsFlowHandler(config_entry) <NEW_LINE> <DEDENT> async def async_step_import( self, user_input: dict[str, Any] | None = None ) -> FlowResult: <NEW_LINE> <INDENT> if CONF_SCAN_INTERVAL in user_input: <NEW_LINE> <INDENT> user_input[CONF_SCAN_INTERVAL] = user_input[ CONF_SCAN_INTERVAL ].total_seconds() <NEW_LINE> <DEDENT> return await self.async_step_user(user_input) <NEW_LINE> <DEDENT> async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> FlowResult: <NEW_LINE> <INDENT> if self._async_current_entries(): <NEW_LINE> <INDENT> return self.async_abort(reason="single_instance_allowed") <NEW_LINE> <DEDENT> errors = {} <NEW_LINE> if user_input is not None: <NEW_LINE> <INDENT> if CONF_VERIFY_SSL not in user_input: <NEW_LINE> <INDENT> user_input[CONF_VERIFY_SSL] = DEFAULT_VERIFY_SSL <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> await self.hass.async_add_executor_job( validate_input, self.hass, user_input ) <NEW_LINE> <DEDENT> except NZBGetAPIException: <NEW_LINE> <INDENT> errors["base"] = "cannot_connect" <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> _LOGGER.exception("Unexpected exception") <NEW_LINE> return self.async_abort(reason="unknown") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.async_create_entry( title=user_input[CONF_HOST], data=user_input, ) <NEW_LINE> <DEDENT> <DEDENT> data_schema = { vol.Required(CONF_HOST): str, vol.Optional(CONF_NAME, default=DEFAULT_NAME): str, vol.Optional(CONF_USERNAME): str, vol.Optional(CONF_PASSWORD): str, vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, vol.Optional(CONF_SSL, default=DEFAULT_SSL): bool, } <NEW_LINE> if self.show_advanced_options: <NEW_LINE> <INDENT> data_schema[ vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL) ] = bool <NEW_LINE> <DEDENT> return self.async_show_form( step_id="user", data_schema=vol.Schema(data_schema), errors=errors or {}, ) | Handle a config flow for NZBGet. | 6259904550485f2cf55dc2ca |
class BaseFlowModel(models.Model): <NEW_LINE> <INDENT> title = models.CharField(verbose_name=_('Title'), max_length=50) <NEW_LINE> description = models.TextField(verbose_name=_('Description')) <NEW_LINE> stage = models.CharField(verbose_name=_('Stage'), max_length=25) <NEW_LINE> remind_me = models.BooleanField(verbose_name=_('Notify'), default=False) <NEW_LINE> receivers = models.ManyToManyField( to=MailGroup, verbose_name=_('Receivers')) <NEW_LINE> result_url = models.URLField(verbose_name=_( 'Result URL'), null=True, blank=True, editable=False) <NEW_LINE> active = models.BooleanField(verbose_name=_('Status'), default=True) <NEW_LINE> create_time = models.DateTimeField( verbose_name=_('Create Time'), auto_now_add=True) <NEW_LINE> update_time = models.DateTimeField( verbose_name=_('Create Time'), auto_now=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> ordering = ('-active', '-stage', 'update_time') | Flow | 62599045435de62698e9d147
class RoutePather(IPather): <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> for _s in [IPather]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{})) <NEW_LINE> __setattr__ = lambda self, name, value: _swig_setattr(self, RoutePather, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> for _s in [IPather]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{})) <NEW_LINE> __getattr__ = lambda self, name: _swig_getattr(self, RoutePather, name) <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> this = _fife.new_RoutePather() <NEW_LINE> try: self.this.append(this) <NEW_LINE> except: self.this = this <NEW_LINE> <DEDENT> __swig_destroy__ = _fife.delete_RoutePather <NEW_LINE> __del__ = lambda self : None; <NEW_LINE> def getName(self): <NEW_LINE> <INDENT> return _fife.RoutePather_getName(self) | Proxy of C++ FIFE::RoutePather class | 62599045b57a9660fecd2dc0 |
class SquareCompactness: <NEW_LINE> <INDENT> def __init__(self, gdf, areas=None, perimeters=None): <NEW_LINE> <INDENT> self.gdf = gdf <NEW_LINE> gdf = gdf.copy() <NEW_LINE> if perimeters is None: <NEW_LINE> <INDENT> gdf["mm_p"] = gdf.geometry.length <NEW_LINE> perimeters = "mm_p" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not isinstance(perimeters, str): <NEW_LINE> <INDENT> gdf["mm_p"] = perimeters <NEW_LINE> perimeters = "mm_p" <NEW_LINE> <DEDENT> <DEDENT> self.perimeters = gdf[perimeters] <NEW_LINE> if areas is None: <NEW_LINE> <INDENT> areas = gdf.geometry.area <NEW_LINE> <DEDENT> if not isinstance(areas, str): <NEW_LINE> <INDENT> gdf["mm_a"] = areas <NEW_LINE> areas = "mm_a" <NEW_LINE> <DEDENT> self.areas = gdf[areas] <NEW_LINE> self.series = ((np.sqrt(gdf[areas]) * 4) / gdf[perimeters]) ** 2 | Calculates compactness index of each object in given GeoDataFrame.
.. math::
\begin{equation*}
\left(\frac{4 \sqrt{area}}{perimeter}\right) ^ 2
\end{equation*}
Adapted from :cite:`feliciotti2018`.
Parameters
----------
gdf : GeoDataFrame
GeoDataFrame containing objects
areas : str, list, np.array, pd.Series (default None)
the name of the dataframe column, ``np.array``, or ``pd.Series`` where the
area values are stored. If set to ``None``, the function will calculate areas
during the process without saving them separately.
perimeters : str, list, np.array, pd.Series (default None)
the name of the dataframe column, ``np.array``, or ``pd.Series`` where the
perimeter values are stored. If set to ``None``, the function will calculate perimeters
during the process without saving them separately.
Attributes
----------
series : Series
Series containing resulting values
gdf : GeoDataFrame
original GeoDataFrame
areas : Series
Series containing used area values
perimeters : Series
Series containing used perimeter values
Examples
--------
>>> buildings_df['squ_comp'] = momepy.SquareCompactness(buildings_df).series
>>> buildings_df['squ_comp'][0]
0.6193872538650996 | 62599045b830903b9686ee1c |
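A quick numeric check of the formula without GeoDataFrames: a unit square (area 1, perimeter 4) scores exactly 1, while an elongated rectangle of the same area scores far lower.

import numpy as np

def square_compactness(area, perimeter):
    return (4 * np.sqrt(area) / perimeter) ** 2

print(square_compactness(1.0, 4.0))    # 1.0   (unit square)
print(square_compactness(1.0, 8.5))    # ~0.22 (4 x 0.25 rectangle, perimeter 2*(4+0.25))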
class Inform(UserOwnedModel, EntityModel): <NEW_LINE> <INDENT> STATUS_PENDING = 'PENDING' <NEW_LINE> STATUS_SUCCESS = 'SUCCESS' <NEW_LINE> STATUS_FAIL = 'FAIL' <NEW_LINE> STATUS_CHOICES = ( (STATUS_PENDING, '等待处理'), (STATUS_SUCCESS, '举报成功'), (STATUS_FAIL, '举报失败'), ) <NEW_LINE> status = models.CharField( verbose_name='状态', max_length=20, choices=STATUS_CHOICES, default=STATUS_PENDING, ) <NEW_LINE> excerpt = models.CharField( verbose_name='摘要', max_length=150, blank=True, default='', ) <NEW_LINE> images = models.ManyToManyField( verbose_name='图片', to=ImageModel, related_name='informs', blank=True, ) <NEW_LINE> inform_type = models.CharField( verbose_name='举报类型', max_length=50, blank=True, default='', ) <NEW_LINE> reason = models.TextField( verbose_name='举报内容', blank=True, default='', ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = '举报' <NEW_LINE> verbose_name_plural = '举报' <NEW_LINE> db_table = 'core_inform' <NEW_LINE> <DEDENT> def get_accused_object(self): <NEW_LINE> <INDENT> if self.lives.first(): <NEW_LINE> <INDENT> return self.lives.first() <NEW_LINE> <DEDENT> elif self.activeevents.first(): <NEW_LINE> <INDENT> return self.activeevents.first() <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def accused_person(self): <NEW_LINE> <INDENT> accused_object = self.get_accused_object() <NEW_LINE> return dict( accused_id=accused_object.author.id if accused_object else None, accused_mobile=accused_object.author.member.mobile if accused_object else None, ) <NEW_LINE> <DEDENT> def accused_object_info(self): <NEW_LINE> <INDENT> accused_object = self.get_accused_object() <NEW_LINE> if not accused_object: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return dict( object_id=accused_object.id, object_type=type(accused_object)._meta.model_name, object_name=accused_object.name if hasattr(accused_object, 'name') else accused_object.author.member.nickname, ) | Report message
| 6259904582261d6c52730867 |
class FIOMinutely(object): <NEW_LINE> <INDENT> minutely = None <NEW_LINE> def __init__(self, forecast_io): <NEW_LINE> <INDENT> if forecast_io.has_minutely(): <NEW_LINE> <INDENT> self.minutely = forecast_io.get_minutely() <NEW_LINE> for item in list(forecast_io.get_minutely().keys()): <NEW_LINE> <INDENT> setattr(self, item, forecast_io.get_minutely()[item]) <NEW_LINE> <DEDENT> for minute in range(0, self.minutes()): <NEW_LINE> <INDENT> for item in list(self.get_minute(minute).keys()): <NEW_LINE> <INDENT> setattr(self, 'minute_'+str(minute+1)+'_'+item, self.get_minute(minute)[item]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def get(self, minute=None): <NEW_LINE> <INDENT> if minute is None: <NEW_LINE> <INDENT> return self.minutely <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.get_minute(minute) <NEW_LINE> <DEDENT> <DEDENT> def get_minute(self, minute): <NEW_LINE> <INDENT> if minute > self.minutes(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.get()['data'][minute-1] <NEW_LINE> <DEDENT> <DEDENT> def minutes(self): <NEW_LINE> <INDENT> return len(self.get()['data']) | This class receives a ForecastIO object and holds the minutely weather
conditions. It has one class for this purpose. | 625990456fece00bbaccccf9 |
class PkgIndex(object): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> self.index_db = TinyDB(path, create_dirs=True) <NEW_LINE> <DEDENT> def add(self, pkg): <NEW_LINE> <INDENT> query = Query() <NEW_LINE> if not self.index_db.search(query.name == pkg["pkg_name"]): <NEW_LINE> <INDENT> self.index_db.insert({"name": pkg["pkg_name"], "ver": pkg["pkg_ver"], "desc": pkg["pkg_desc"] }) <NEW_LINE> <DEDENT> elif self.index_db.search((query.name == pkg["pkg_name"]) & (query.ver != pkg["pkg_ver"])): <NEW_LINE> <INDENT> self.index_db.update(self.__update_record(pkg), query.name == pkg["pkg_name"]) <NEW_LINE> <DEDENT> <DEDENT> def get_installed(self): <NEW_LINE> <INDENT> pkgs = [] <NEW_LINE> for pkg in self.index_db.all(): <NEW_LINE> <INDENT> pkgs.append([pkg["name"], pkg["ver"], pkg["desc"]]) <NEW_LINE> <DEDENT> return pkgs <NEW_LINE> <DEDENT> def get_pkg(self, pkg_name): <NEW_LINE> <INDENT> return self.index_db.search(where('name') == std_helper.stringify(pkg_name)) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __update_record(pkg): <NEW_LINE> <INDENT> def transform(element): <NEW_LINE> <INDENT> element["ver"] = pkg["pkg_ver"] <NEW_LINE> element["desc"] = pkg["pkg_desc"] <NEW_LINE> <DEDENT> return transform | Helper for managing the packages index.json file | 62599045b5575c28eb71366b |
class CGInfo(TypedDict): <NEW_LINE> <INDENT> id: int <NEW_LINE> users: List[str] | Data representing a koji Content Generator. A dict of these is
typically obtained via the ``listCGs`` XMLRPC call, mapping their
friendly names to the CGInfo structure | 6259904573bcbd0ca4bcb5d1 |
class UserDetailView(RetrieveAPIView): <NEW_LINE> <INDENT> serializer_class = serializers.UserDetailSerializer <NEW_LINE> permission_classes = [IsAuthenticated] <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> return self.request.user | User's basic information details
RetrieveAPIView provides the get method:
serializes the data from the database and returns it wrapped in the Response's data | 6259904507d97122c4217fe6
class DataChangeFilter(FrozenClass): <NEW_LINE> <INDENT> ua_types = [ ('Trigger', 'DataChangeTrigger'), ('DeadbandType', 'UInt32'), ('DeadbandValue', 'Double'), ] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.Trigger = DataChangeTrigger(0) <NEW_LINE> self.DeadbandType = 0 <NEW_LINE> self.DeadbandValue = 0 <NEW_LINE> self._freeze = True <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'DataChangeFilter(' + 'Trigger:' + str(self.Trigger) + ', ' + 'DeadbandType:' + str(self.DeadbandType) + ', ' + 'DeadbandValue:' + str(self.DeadbandValue) + ')' <NEW_LINE> <DEDENT> __repr__ = __str__ | :ivar Trigger:
:vartype Trigger: DataChangeTrigger
:ivar DeadbandType:
:vartype DeadbandType: UInt32
:ivar DeadbandValue:
:vartype DeadbandValue: Double | 625990458da39b475be04534 |
class Document( object ): <NEW_LINE> <INDENT> def __init__( self, file, outpath ): <NEW_LINE> <INDENT> self.file = file <NEW_LINE> self.filename = os.path.splitext( os.path.basename( self.file ) )[0] <NEW_LINE> self.outpath = outpath <NEW_LINE> self.extract() <NEW_LINE> <DEDENT> def ext_text( self, doc ): <NEW_LINE> <INDENT> pages = [] <NEW_LINE> for idx, page in enumerate(doc, start=1): <NEW_LINE> <INDENT> text = page.getText() <NEW_LINE> if re.search( '(figure|fig\.|abbildung|abb\.)', text, re.IGNORECASE ): <NEW_LINE> <INDENT> pages.append( idx ) <NEW_LINE> <DEDENT> <DEDENT> return pages <NEW_LINE> <DEDENT> def sav_text( self, pages ): <NEW_LINE> <INDENT> with open( self.outpath + self.filename + '.txt', 'w' ) as text_out: <NEW_LINE> <INDENT> for p in pages: <NEW_LINE> <INDENT> text_out.write( str( p ) ) <NEW_LINE> text_out.write( '\n' ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def ext_img( self, doc ): <NEW_LINE> <INDENT> for f in range( len( doc ) ): <NEW_LINE> <INDENT> tot_ims = len( doc.getPageImageList( f ) ) <NEW_LINE> if tot_ims > 5: <NEW_LINE> <INDENT> for img in doc.getPageImageList( f ): <NEW_LINE> <INDENT> xref = img[0] <NEW_LINE> pix = fitz.Pixmap( doc, xref ) <NEW_LINE> if pix.n < 2: <NEW_LINE> <INDENT> pix.writePNG( self.outpath + self.filename + "_p_%s.png" % (f + 1) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pix1 = fitz.Pixmap( fitz.csRGB, pix ) <NEW_LINE> pix1.writePNG( self.outpath + self.filename + "_p_%s.png" % (f + 1) ) <NEW_LINE> pix1 = None <NEW_LINE> <DEDENT> pix = None <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> count_i = 1 <NEW_LINE> for img in doc.getPageImageList( f ): <NEW_LINE> <INDENT> xref = img[0] <NEW_LINE> pix = fitz.Pixmap( doc, xref ) <NEW_LINE> if pix.n < 2: <NEW_LINE> <INDENT> pix.writePNG( self.outpath + self.filename + "_p_%s_%s.png" % (f + 1, count_i) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pix1 = fitz.Pixmap( fitz.csRGB, pix ) <NEW_LINE> pix1.writePNG( self.outpath + self.filename + "_p_%s_%s.png" % (f + 1, count_i) ) <NEW_LINE> pix1 = None <NEW_LINE> <DEDENT> pix = None <NEW_LINE> count_i += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def extract( self ): <NEW_LINE> <INDENT> with fitz.open( self.file ) as doc: <NEW_LINE> <INDENT> pages = self.ext_text( doc ) <NEW_LINE> if pages: <NEW_LINE> <INDENT> self.sav_text( pages ) <NEW_LINE> <DEDENT> try: self.ext_img( doc ) <NEW_LINE> except: pass | document object | 6259904521bff66bcd723fb1 |
class Point: <NEW_LINE> <INDENT> def index(self): <NEW_LINE> <INDENT> return "point" | A Point object describes a position in three different ways. These are accessed through the following attributes:
image
The pixel coordinates of the point in the image, with the origin (0,0) in the top-left of the image. This has two attributes: x and y.
world
The Cartesian coordinates of the point in 3D space. This has three attributes: x, y, and z, each of which specifies a distance in metres. Positions in front of, to the right, or above the camera are positive. Positions to the left or below are negative.
polar
The polar coordinates of the point in 3D space.
This has three attributes:
length
The distance to the point.
rot_x
Rotation about the x-axis in degrees. Positions above the camera are positive.
rot_y
Rotation about the y-axis in degrees. Positions to the right of the camera are positive. | 6259904563b5f9789fe864b1 |
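A minimal stand-in that mirrors the attribute layout described above (image.x/y in pixels, world.x/y/z in metres, polar.length/rot_x/rot_y); the concrete numbers are made up for illustration, since a real Point would presumably be populated by the vision system.

from dataclasses import dataclass

@dataclass
class ImageCoord:
    x: int          # pixels from the left edge
    y: int          # pixels from the top edge

@dataclass
class WorldCoord:
    x: float        # metres
    y: float
    z: float

@dataclass
class PolarCoord:
    length: float   # distance to the point in metres
    rot_x: float    # degrees, positive above the camera
    rot_y: float    # degrees, positive to the right of the camera

@dataclass
class PointData:
    image: ImageCoord
    world: WorldCoord
    polar: PolarCoord

p = PointData(ImageCoord(320, 240), WorldCoord(0.1, 0.0, 1.5), PolarCoord(1.5, 0.0, 3.8))
print(p.world.z, p.polar.rot_y)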