code: string (4 to 4.48k chars) · docstring: string (1 to 6.45k chars) · _id: string (24 chars)
class DlnaOrgPs(Enum): <NEW_LINE> <INDENT> INVALID = 0 <NEW_LINE> NORMAL = 1
DLNA.ORG_PS (PlaySpeed) flags.
6259904915baa7234946333f
class LayerModel(QgsMapLayerProxyModel): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> self.setFilters(QgsMapLayerProxyModel.VectorLayer) <NEW_LINE> <DEDENT> def name_list(self): <NEW_LINE> <INDENT> names = [] <NEW_LINE> for i in range(self.rowCount()): <NEW_LINE> <INDENT> names.append(super().data(self.index(i, 0))) <NEW_LINE> <DEDENT> return names <NEW_LINE> <DEDENT> def non_unique_names(self): <NEW_LINE> <INDENT> return [k for k, v in Counter(self.name_list()).items() if v > 1] <NEW_LINE> <DEDENT> def include_all(self): <NEW_LINE> <INDENT> for layer in QgsProject.instance().mapLayers().values(): <NEW_LINE> <INDENT> layer.removeCustomProperty("plugins/multilayerselect/excluded") <NEW_LINE> <DEDENT> self.dataChanged.emit( self.index(0, 0), self.index(self.rowCount(), 0), [Qt.CheckStateRole] ) <NEW_LINE> <DEDENT> def exlude_all(self): <NEW_LINE> <INDENT> for layer in QgsProject.instance().mapLayers().values(): <NEW_LINE> <INDENT> layer.setCustomProperty("plugins/multilayerselect/excluded", True) <NEW_LINE> <DEDENT> self.dataChanged.emit( self.index(0, 0), self.index(self.rowCount(), 0), [Qt.CheckStateRole] ) <NEW_LINE> <DEDENT> def flags(self, index): <NEW_LINE> <INDENT> return Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsUserCheckable <NEW_LINE> <DEDENT> def setData(self, index, value, role=Qt.DisplayRole): <NEW_LINE> <INDENT> layer = super().data(index, QgsMapLayerModel.LayerRole) <NEW_LINE> if not layer: <NEW_LINE> <INDENT> return super().setData(index, value, role) <NEW_LINE> <DEDENT> if role == Qt.CheckStateRole: <NEW_LINE> <INDENT> layer.setCustomProperty( "plugins/multilayerselect/excluded", value == Qt.Unchecked ) <NEW_LINE> return True <NEW_LINE> <DEDENT> return super().setData(index, value, role) <NEW_LINE> <DEDENT> def data(self, index, role=Qt.DisplayRole): <NEW_LINE> <INDENT> layer = super().data(index, QgsMapLayerModel.LayerRole) <NEW_LINE> if not layer: <NEW_LINE> <INDENT> return super().data(index, role) <NEW_LINE> <DEDENT> if role == Qt.DisplayRole: <NEW_LINE> <INDENT> original_name = super().data(index, role) <NEW_LINE> if original_name in self.non_unique_names(): <NEW_LINE> <INDENT> return f"{original_name} ({super().data(index, QgsMapLayerModel.LayerIdRole)})" <NEW_LINE> <DEDENT> <DEDENT> if role == Qt.DecorationRole: <NEW_LINE> <INDENT> return icon_from_layer(layer) <NEW_LINE> <DEDENT> if role == Qt.CheckStateRole: <NEW_LINE> <INDENT> excluded = layer.customProperty( "plugins/multilayerselect/excluded", False ) in (True, "true", "True", "1") <NEW_LINE> return 0 if excluded else 2 <NEW_LINE> <DEDENT> return super().data(index, role)
Checkable Layer Model to include/exclude vector layers from the multilayer selection tools
6259904945492302aabfd881
class CollectionManagerForModelWithDynamicField(CollectionManagerForModelWithDynamicFieldMixin, ExtendedCollectionManager): <NEW_LINE> <INDENT> pass
A collection manager based on ExtendedCollectionManager that adds the "dynamic_filter" method.
6259904950485f2cf55dc337
class SysTray(wx.TaskBarIcon): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> wx.TaskBarIcon.__init__(self) <NEW_LINE> self.parentApp = parent <NEW_LINE> self.menu = None <NEW_LINE> self.CreateMenu() <NEW_LINE> <DEDENT> def CreateMenu(self): <NEW_LINE> <INDENT> self.Bind(wx.EVT_TASKBAR_RIGHT_UP, self.ShowMenu) <NEW_LINE> self.menu = wx.Menu() <NEW_LINE> self.menu.Append(wx.ID_VIEW_LIST, 'Select a Project') <NEW_LINE> self.menu.Append(wx.ID_NEW, 'Session') <NEW_LINE> self.menu.Append(wx.ID_INDEX, 'Open Project Directory') <NEW_LINE> self.menu.Append(wx.ID_SETUP, 'Preferences') <NEW_LINE> self.menu.Append(wx.ID_ABOUT, 'About') <NEW_LINE> self.menu.AppendSeparator() <NEW_LINE> self.menu.Append(wx.ID_EXIT, 'Exit') <NEW_LINE> <DEDENT> def ShowMenu(self, event): <NEW_LINE> <INDENT> event = event <NEW_LINE> self.PopupMenu(self.menu) <NEW_LINE> <DEDENT> def ShowNotification(self, title, message): <NEW_LINE> <INDENT> args = [title, message] <NEW_LINE> notify_thread = threading.Thread(target=self._show_notification, name='Notification', args=args) <NEW_LINE> notify_thread.run() <NEW_LINE> <DEDENT> def _show_notification(self, title, message): <NEW_LINE> <INDENT> self.ShowBalloon(title, message, 10000, wx.ICON_NONE)
Taskbar Icon class. :param parent: Parent frame :type parent: :class:`wx.Frame`
62599049baa26c4b54d50658
class StorageClient(object): <NEW_LINE> <INDENT> def __init__( self, url, swissnum, treq=treq ): <NEW_LINE> <INDENT> self._base_url = url <NEW_LINE> self._swissnum = swissnum <NEW_LINE> self._treq = treq <NEW_LINE> <DEDENT> def relative_url(self, path): <NEW_LINE> <INDENT> return self._base_url.click(path) <NEW_LINE> <DEDENT> def _get_headers(self, headers): <NEW_LINE> <INDENT> if headers is None: <NEW_LINE> <INDENT> headers = Headers() <NEW_LINE> <DEDENT> headers.addRawHeader( "Authorization", swissnum_auth_header(self._swissnum), ) <NEW_LINE> return headers <NEW_LINE> <DEDENT> def request( self, method, url, lease_renew_secret=None, lease_cancel_secret=None, upload_secret=None, headers=None, **kwargs ): <NEW_LINE> <INDENT> headers = self._get_headers(headers) <NEW_LINE> for secret, value in [ (Secrets.LEASE_RENEW, lease_renew_secret), (Secrets.LEASE_CANCEL, lease_cancel_secret), (Secrets.UPLOAD, upload_secret), ]: <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> headers.addRawHeader( "X-Tahoe-Authorization", b"%s %s" % (secret.value.encode("ascii"), b64encode(value).strip()), ) <NEW_LINE> <DEDENT> return self._treq.request(method, url, headers=headers, **kwargs)
Low-level HTTP client that talks to the HTTP storage server.
6259904976d4e153a661dc4d
class DocManager(): <NEW_LINE> <INDENT> def __init__(self, url=None, unique_key='_id'): <NEW_LINE> <INDENT> self.unique_key = unique_key <NEW_LINE> self.doc_dict = {} <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def upsert(self, doc): <NEW_LINE> <INDENT> self.doc_dict[doc[self.unique_key]] = doc <NEW_LINE> <DEDENT> def remove(self, doc): <NEW_LINE> <INDENT> del self.doc_dict[doc[self.unique_key]] <NEW_LINE> <DEDENT> def search(self, start_ts, end_ts): <NEW_LINE> <INDENT> ret_list = [] <NEW_LINE> for stored_doc in self.doc_dict.values(): <NEW_LINE> <INDENT> ts = stored_doc['_ts'] <NEW_LINE> if ts <= end_ts or ts >= start_ts: <NEW_LINE> <INDENT> ret_list.append(stored_doc) <NEW_LINE> <DEDENT> <DEDENT> return ret_list <NEW_LINE> <DEDENT> def commit(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_last_doc(self): <NEW_LINE> <INDENT> last_doc = None <NEW_LINE> last_ts = None <NEW_LINE> for stored_doc in self.doc_dict.values(): <NEW_LINE> <INDENT> ts = stored_doc['_ts'] <NEW_LINE> if last_ts is None or ts >= last_ts: <NEW_LINE> <INDENT> last_doc = stored_doc <NEW_LINE> last_ts = ts <NEW_LINE> <DEDENT> <DEDENT> return last_doc <NEW_LINE> <DEDENT> def _search(self): <NEW_LINE> <INDENT> ret_list = [] <NEW_LINE> for doc in self.doc_dict.values(): <NEW_LINE> <INDENT> ret_list.append(doc) <NEW_LINE> <DEDENT> return ret_list <NEW_LINE> <DEDENT> def _delete(self): <NEW_LINE> <INDENT> self.doc_dict = {}
BackendSimulator emulates both a target DocManager and a server. The DocManager class creates a connection to the backend engine and adds/removes documents, and in the case of rollback, searches for them. The reason for storing id/doc pairs, as opposed to docs alone, is so that multiple updates to the same doc reflect the most up-to-date version rather than multiple, slightly different versions of a doc.
625990498a349b6b436875fb
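A minimal usage sketch (hypothetical documents) of the simulator above: because documents are keyed by their _id, a second upsert with the same key overwrites the earlier version, and get_last_doc() returns the document with the highest _ts.

# Hypothetical documents; DocManager is the simulator class defined above.
dm = DocManager()
dm.upsert({'_id': 'a', '_ts': 1, 'text': 'first version'})
dm.upsert({'_id': 'a', '_ts': 2, 'text': 'second version'})  # replaces the first
dm.upsert({'_id': 'b', '_ts': 3, 'text': 'another doc'})
assert len(dm._search()) == 2             # one entry per _id
assert dm.get_last_doc()['_ts'] == 3      # document with the highest timestamp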
class LexCompileError(LexError): <NEW_LINE> <INDENT> pass
Errors raised during compilation of lexical analysis rules.
62599049d7e4931a7ef3d425
class DirectoryListing(datatype('DirectoryListing', ['directory', 'dependencies', 'exists'])): <NEW_LINE> <INDENT> pass
A list of Stat objects representing a directory listing. If exists=False, then the entries list will be empty.
62599049d10714528d69f065
class TensorBoardDebugWrapperSession(GrpcDebugWrapperSession): <NEW_LINE> <INDENT> def __init__(self, sess, grpc_debug_server_addresses, thread_name_filter=None, log_usage=True): <NEW_LINE> <INDENT> def _gated_grpc_watch_fn(fetches, feeds): <NEW_LINE> <INDENT> del fetches, feeds <NEW_LINE> return framework.WatchOptions( debug_ops=["DebugIdentity(gated_grpc=true)"]) <NEW_LINE> <DEDENT> super(TensorBoardDebugWrapperSession, self).__init__( sess, grpc_debug_server_addresses, watch_fn=_gated_grpc_watch_fn, thread_name_filter=thread_name_filter, log_usage=log_usage) <NEW_LINE> self._sent_graph_version = -1 <NEW_LINE> <DEDENT> def run(self, fetches, feed_dict=None, options=None, run_metadata=None, callable_runner=None, callable_runner_args=None): <NEW_LINE> <INDENT> self._sent_graph_version = publish_traceback( self._grpc_debug_server_urls, self.graph, feed_dict, fetches, self._sent_graph_version) <NEW_LINE> return super(TensorBoardDebugWrapperSession, self).run( fetches, feed_dict=feed_dict, options=options, run_metadata=run_metadata, callable_runner=callable_runner, callable_runner_args=callable_runner_args)
A tfdbg Session wrapper that can be used with the TensorBoard Debugger Plugin. This wrapper is the same as `GrpcDebugWrapperSession`, except that it uses a predefined `watch_fn` that 1) uses `DebugIdentity` debug ops with the `gated_grpc` attribute set to `True` to allow interactive enabling and disabling of tensor breakpoints, and 2) watches all tensors in the graph. This saves the need for the user to define a `watch_fn`.
6259904915baa72349463340
class HubKerasLayerV1V2(hub.KerasLayer): <NEW_LINE> <INDENT> def _setup_layer(self, trainable=False, **kwargs): <NEW_LINE> <INDENT> if self._is_hub_module_v1: <NEW_LINE> <INDENT> self._setup_layer_v1(trainable, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(HubKerasLayerV1V2, self)._setup_layer(trainable, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def _check_trainability(self): <NEW_LINE> <INDENT> if self._is_hub_module_v1: <NEW_LINE> <INDENT> self._check_trainability_v1() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(HubKerasLayerV1V2, self)._check_trainability() <NEW_LINE> <DEDENT> <DEDENT> def _setup_layer_v1(self, trainable=False, **kwargs): <NEW_LINE> <INDENT> super(hub.KerasLayer, self).__init__(trainable=trainable, **kwargs) <NEW_LINE> if not self._is_hub_module_v1: <NEW_LINE> <INDENT> raise ValueError( 'Only supports to set up v1 hub module in this function.') <NEW_LINE> <DEDENT> if hasattr(self._func, 'trainable_variables'): <NEW_LINE> <INDENT> for v in self._func.trainable_variables: <NEW_LINE> <INDENT> self._add_existing_weight(v, trainable=True) <NEW_LINE> <DEDENT> trainable_variables = {id(v) for v in self._func.trainable_variables} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> trainable_variables = set() <NEW_LINE> <DEDENT> if not hasattr(self._func, '_self_unconditional_checkpoint_dependencies'): <NEW_LINE> <INDENT> raise ValueError('_func doesn\'t contains attribute ' '_self_unconditional_checkpoint_dependencies.') <NEW_LINE> <DEDENT> dependencies = self._func._self_unconditional_checkpoint_dependencies <NEW_LINE> for dep in dependencies: <NEW_LINE> <INDENT> if dep.name == 'variables': <NEW_LINE> <INDENT> for v in dep.ref: <NEW_LINE> <INDENT> if id(v) not in trainable_variables: <NEW_LINE> <INDENT> self._add_existing_weight(v, trainable=True) <NEW_LINE> trainable_variables.add(id(v)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> if hasattr(self._func, 'variables'): <NEW_LINE> <INDENT> for v in self._func.variables: <NEW_LINE> <INDENT> if id(v) not in trainable_variables: <NEW_LINE> <INDENT> self._add_existing_weight(v, trainable=False) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if hasattr(self._func, 'regularization_losses'): <NEW_LINE> <INDENT> for l in self._func.regularization_losses: <NEW_LINE> <INDENT> if not callable(l): <NEW_LINE> <INDENT> raise ValueError( 'hub.KerasLayer(obj) expects obj.regularization_losses to be an ' 'iterable of callables, each returning a scalar loss term.') <NEW_LINE> <DEDENT> self.add_loss(self._call_loss_if_trainable(l)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _check_trainability_v1(self): <NEW_LINE> <INDENT> if self._is_hub_module_v1: <NEW_LINE> <INDENT> return
Class that loads TF v1 and TF v2 hub modules that can be fine-tuned. Since TF v1 modules cannot be retrained through hub.KerasLayer, this class provides a workaround for retraining the whole TF1 model in TF2. In particular, it extracts self._func._self_unconditional_checkpoint_dependencies into trainable variables in TF1. It doesn't update moving-mean/moving-variance for BatchNormalization during fine-tuning.
62599049596a897236128f86
class BaseSubmissionDeleteView(DbfvFormMixin, generic.DeleteView): <NEW_LINE> <INDENT> permission_required = 'submission.delete_submissiongym' <NEW_LINE> template_name = 'delete.html' <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(BaseSubmissionDeleteView, self).get_context_data(**kwargs) <NEW_LINE> context['title'] = u'Antrag {0} löschen?'.format(self.object.id) <NEW_LINE> return context
Deletes a submission
6259904923849d37ff85246c
class PandasTable(Table): <NEW_LINE> <INDENT> TRANSFORMS_MAPPING = { 'query': query_transform, 'sample': sample_transform, 'sort': sort_transform, 'quantile_range': quantile_range_transform, 'search': search_transform, 'nans': nans_transform, 'histogram': histogram_transform } <NEW_LINE> def __init__(self, dataframe, side_result=None): <NEW_LINE> <INDENT> super(PandasTable, self).__init__(PandasTable.TRANSFORMS_MAPPING, side_result) <NEW_LINE> self.dataframe = dataframe <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.dataframe) <NEW_LINE> <DEDENT> def apply_bounds(self, offset, length): <NEW_LINE> <INDENT> return PandasTable(self.dataframe.iloc[offset:(offset + length)]) <NEW_LINE> <DEDENT> def to_pandas(self): <NEW_LINE> <INDENT> return self.dataframe
Pynorama table backed by a fully-materialised Pandas dataframe
625990498e05c05ec3f6f832
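An illustrative sketch (made-up dataframe) of the apply_bounds slicing above; it assumes the base Table class and the referenced transform functions are importable from the same module.

import pandas as pd

# Made-up dataframe; PandasTable is the class defined above.
table = PandasTable(pd.DataFrame({'x': range(10)}))
page = table.apply_bounds(offset=2, length=3)   # rows 2..4 of the dataframe
assert len(page) == 3
assert list(page.to_pandas()['x']) == [2, 3, 4]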
class GetImages(APIView): <NEW_LINE> <INDENT> permission_classes = (AllowAny, ) <NEW_LINE> def get(self, request, pk): <NEW_LINE> <INDENT> images = Image.objects.filter(title__pk=pk) <NEW_LINE> serializer = ImageSerializer(images, context={"request": request}, many=True) <NEW_LINE> return Response(serializer.data)
Returns all images for a specific title
6259904907d97122c4218052
class Attenuation(Enum): <NEW_LINE> <INDENT> CONSTANT = 'constant' <NEW_LINE> LINEAR = 'linear' <NEW_LINE> QUADRATIC = 'quadratic'
Light attenuation attributes
6259904907f4c71912bb07e2
class SurveyController(Survey): <NEW_LINE> <INDENT> @http.route(['/survey/start/<model("survey.survey"):survey>', '/survey/start/<model("survey.survey"):survey>/<string:token>'], type='http', auth='public', website=True, sitemap=False) <NEW_LINE> def start_survey(self, survey, token=None, **post): <NEW_LINE> <INDENT> return super().start_survey(survey, token, **post) <NEW_LINE> <DEDENT> @http.route(['/survey/fill/<model("survey.survey"):survey>/<string:token>', '/survey/fill/<model("survey.survey"):survey>/<string:token>/' '<string:prev>'], type='http', auth='public', website=True, sitemap=False) <NEW_LINE> def fill_survey(self, survey, token, prev=None, **post): <NEW_LINE> <INDENT> return super().fill_survey(survey, token, prev, **post) <NEW_LINE> <DEDENT> @http.route(['/survey/prefill/<model("survey.survey"):survey>/<string:token>', '/survey/prefill/<model("survey.survey"):survey>/<string:token>/' '<model("survey.page"):page>'], type='http', auth='public', website=True, sitemap=False) <NEW_LINE> def prefill(self, survey, token, page=None, **post): <NEW_LINE> <INDENT> return super().prefill(survey, token, page, **post) <NEW_LINE> <DEDENT> @http.route(['/survey/scores/<model("survey.survey"):survey>/<string:token>'], type='http', auth='public', website=True, sitemap=False) <NEW_LINE> def get_scores(self, survey, token, page=None, **post): <NEW_LINE> <INDENT> return super().get_scores(survey, token, page, **post) <NEW_LINE> <DEDENT> @http.route(['/survey/results/<model("survey.survey"):survey>'], type='http', auth='user', website=True, sitemap=False) <NEW_LINE> def survey_reporting(self, survey, token=None, **post): <NEW_LINE> <INDENT> return super().survey_reporting(survey, token, **post)
Disable survey from sitemaps
62599049287bf620b6272f98
class Scanner: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.coord_mapping = None <NEW_LINE> <DEDENT> def scan_for_changes(self): <NEW_LINE> <INDENT> pass
Base class for scanners.
625990498e71fb1e983bce74
class UpperBoundCondition(BoundCondition): <NEW_LINE> <INDENT> upper = True
A condition that checks a numeric attribute against an upper bound, and raises the alert if it exceeds that bound :: UpperBoundCondition('temperature', error_bound = 85, message = "Maximum operating temperature exceeded")
625990493cc13d1c6d466ae7
class EnclosedDocDescriptor: <NEW_LINE> <INDENT> _DOC_CLS = 'document_class' <NEW_LINE> _RECURSIVE_REF_CONST = 'self' <NEW_LINE> def __init__(self, enclosed_cls_type): <NEW_LINE> <INDENT> if enclosed_cls_type in ('embedded', 'reference'): <NEW_LINE> <INDENT> self.attr_name = '_{}_{}'.format(enclosed_cls_type, self._DOC_CLS) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> <DEDENT> def __get__(self, instance, owner): <NEW_LINE> <INDENT> if not instance: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> value = getattr(instance, self.attr_name, None) <NEW_LINE> if isinstance(value, str): <NEW_LINE> <INDENT> if self._RECURSIVE_REF_CONST == value: <NEW_LINE> <INDENT> value = getattr(instance, self._DOC_CLS) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = resolve(value) <NEW_LINE> <DEDENT> self.__set__(instance, value) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def __set__(self, instance, value): <NEW_LINE> <INDENT> setattr(instance, self.attr_name, value) <NEW_LINE> <DEDENT> def __delete__(self, instance): <NEW_LINE> <INDENT> delattr(instance, self.attr_name)
Descriptor for accessing an enclosed document within embedded (:py:class:`yadm.fields.embedded.EmbeddedDocumentField`) and reference (:py:class:`yadm.fields.reference.ReferenceField`) fields. :param str enclosed_cls_type: Enclosed class type. Can take the `embedded` or `reference` value; otherwise :py:exc:`ValueError` will be raised.
625990498a43f66fc4bf3545
class Beeper: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.pin = machine.Pin(2) <NEW_LINE> self.pwm = machine.PWM(self.pin) <NEW_LINE> self.freq = {DIT: 500, DAH: 500} <NEW_LINE> self.duty = {DIT: 512, DAH: 512} <NEW_LINE> <DEDENT> def beep(self, signal): <NEW_LINE> <INDENT> if signal == WORD_PAUSE: <NEW_LINE> <INDENT> time.sleep(DIT * 7) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.pwm.init(freq=500, duty=512) <NEW_LINE> time.sleep(signal) <NEW_LINE> self.pwm.deinit() <NEW_LINE> <DEDENT> <DEDENT> def on(self, tone_index=DIT): <NEW_LINE> <INDENT> self.pwm.init(freq=self.freq[tone_index], duty=self.duty[tone_index]) <NEW_LINE> <DEDENT> def off(self): <NEW_LINE> <INDENT> self.pwm.deinit()
Piezo PWM Beeper
6259904926068e7796d4dcf4
class SleuthError(Exception): <NEW_LINE> <INDENT> def __init__(self, error_type, message=None): <NEW_LINE> <INDENT> self.error_type = error_type <NEW_LINE> if message is not None: <NEW_LINE> <INDENT> self.message = message <NEW_LINE> return <NEW_LINE> <DEDENT> if error_type is ErrorTypes.UNEXPECTED_SERVER_ERROR: <NEW_LINE> <INDENT> self.message = 'An unexpected error occurred in the Sleuth ' + 'backend during the handling of your request.' <NEW_LINE> <DEDENT> elif error_type is ErrorTypes.SOLR_CONNECTION_ERROR: <NEW_LINE> <INDENT> self.message = 'Failed to connect to the Solr instance' <NEW_LINE> <DEDENT> elif error_type is ErrorTypes.SOLR_SEARCH_ERROR: <NEW_LINE> <INDENT> self.message = 'Solr returned an error response to the search query.' <NEW_LINE> <DEDENT> elif error_type is ErrorTypes.INVALID_SEARCH_REQUEST: <NEW_LINE> <INDENT> self.message = 'Must supply a search term with the parameter "q".' <NEW_LINE> <DEDENT> elif error_type is ErrorTypes.INVALID_GETDOCUMENT_REQUEST: <NEW_LINE> <INDENT> self.message = 'Must supply an ID (url) with the parameter "id".' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Invalid error type. Must be on of ErrorTypes.') <NEW_LINE> <DEDENT> <DEDENT> def json(self): <NEW_LINE> <INDENT> error = { 'message': self.message, 'errorType': self.error_type.name, } <NEW_LINE> return json.dumps(error)
Represents an error that occurred during the processing of a request to the Sleuth API.
62599049b5575c28eb7136a1
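A brief usage sketch (hypothetical call site) of the error class above; ErrorTypes is the enum the constructor checks against and is assumed to come from the same package.

# Hypothetical call site; SleuthError and ErrorTypes come from the code above.
try:
    raise SleuthError(ErrorTypes.INVALID_SEARCH_REQUEST)
except SleuthError as err:
    payload = err.json()
    # payload is a JSON string with "message" and "errorType" keys;
    # the errorType here would be "INVALID_SEARCH_REQUEST".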
class GuestMonitor(Monitor, threading.Thread): <NEW_LINE> <INDENT> def __init__(self, config, id, libvirt_iface): <NEW_LINE> <INDENT> threading.Thread.__init__(self, name="guest:%s" % id) <NEW_LINE> self.config = config <NEW_LINE> self.logger = logging.getLogger('mom.GuestMonitor') <NEW_LINE> self.libvirt_iface = libvirt_iface <NEW_LINE> self.guest_domain = self.libvirt_iface.getDomainFromID(id) <NEW_LINE> info = self.get_guest_info() <NEW_LINE> if info is None: <NEW_LINE> <INDENT> self.logger.error("Failed to get guest:%s information -- monitor " "can't start", id) <NEW_LINE> return <NEW_LINE> <DEDENT> self.setName("GuestMonitor-%s" % info['name']) <NEW_LINE> Monitor.__init__(self, config, self.getName()) <NEW_LINE> self.setDaemon(True) <NEW_LINE> self.data_sem.acquire() <NEW_LINE> self.properties.update(info) <NEW_LINE> self.properties['id'] = id <NEW_LINE> self.properties['libvirt_iface'] = libvirt_iface <NEW_LINE> self.data_sem.release() <NEW_LINE> collector_list = self.config.get('guest', 'collectors') <NEW_LINE> self.collectors = Collector.get_collectors(collector_list, self.properties, self.config) <NEW_LINE> if self.collectors is None: <NEW_LINE> <INDENT> self.logger.error("Guest Monitor initialization failed") <NEW_LINE> return <NEW_LINE> <DEDENT> self.start() <NEW_LINE> <DEDENT> def get_guest_info(self): <NEW_LINE> <INDENT> if self.guest_domain is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> data = {} <NEW_LINE> data['uuid'] = self.libvirt_iface.domainGetUUID(self.guest_domain) <NEW_LINE> data['name'] = self.libvirt_iface.domainGetName(self.guest_domain) <NEW_LINE> data['pid'] = self.get_guest_pid(data['uuid']) <NEW_LINE> if None in data.values(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.logger.info("%s starting", self.getName()) <NEW_LINE> interval = self.config.getint('main', 'guest-monitor-interval') <NEW_LINE> while self._should_run(): <NEW_LINE> <INDENT> self.collect() <NEW_LINE> time.sleep(interval) <NEW_LINE> <DEDENT> self.logger.info("%s ending", self.getName()) <NEW_LINE> <DEDENT> def get_guest_pid(self, uuid): <NEW_LINE> <INDENT> p1 = Popen(["ps", "axww"], stdout=PIPE).communicate()[0] <NEW_LINE> matches = re.findall("^\s*(\d+)\s+.*" + uuid, p1, re.M) <NEW_LINE> if len(matches) < 1: <NEW_LINE> <INDENT> self.logger.warn("No matching process for domain with uuid %s", uuid) <NEW_LINE> return None <NEW_LINE> <DEDENT> elif len(matches) > 1: <NEW_LINE> <INDENT> self.logger.warn("Too many process matches for domain with uuid %s", uuid) <NEW_LINE> return None <NEW_LINE> <DEDENT> return int(matches[0]) <NEW_LINE> <DEDENT> def getGuestName(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.properties['name'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return None
A GuestMonitor thread collects and reports statistics about one running guest
6259904971ff763f4b5e8b55
class PlaybackTarget(object): <NEW_LINE> <INDENT> def on_event(self, event): <NEW_LINE> <INDENT> raise Exception("must implement this method") <NEW_LINE> <DEDENT> def debug_message(self, message): <NEW_LINE> <INDENT> print(message) <NEW_LINE> <DEDENT> def publish_stats(self, stats): <NEW_LINE> <INDENT> print(stats)
A class that receives on_event() callbacks.
62599049498bea3a75a58ed0
class PlayerPipeline(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> engine = db_connect() <NEW_LINE> create_skater_table(engine) <NEW_LINE> self.Session = sessionmaker(bind=engine) <NEW_LINE> <DEDENT> def process_item(self, item, spider): <NEW_LINE> <INDENT> session = self.Session() <NEW_LINE> whois = spider.name <NEW_LINE> if whois == 'skatsum': <NEW_LINE> <INDENT> player = SkaterSum(**item) <NEW_LINE> <DEDENT> elif whois == 'skatbio': <NEW_LINE> <INDENT> player = SkaterBio(**item) <NEW_LINE> <DEDENT> elif whois == 'skateng': <NEW_LINE> <INDENT> player = SkaterEng(**item) <NEW_LINE> <DEDENT> elif whois == 'skatpim': <NEW_LINE> <INDENT> player = SkaterPIM(**item) <NEW_LINE> <DEDENT> elif whois == 'skatpm': <NEW_LINE> <INDENT> player = SkaterPM(**item) <NEW_LINE> <DEDENT> elif whois == 'skatrts': <NEW_LINE> <INDENT> player = SkaterRTS(**item) <NEW_LINE> <DEDENT> elif whois == 'skatso': <NEW_LINE> <INDENT> player = SkaterSO(**item) <NEW_LINE> <DEDENT> elif whois == 'skatot': <NEW_LINE> <INDENT> player = SkaterOT(**item) <NEW_LINE> <DEDENT> elif whois == 'skattoi': <NEW_LINE> <INDENT> player = SkaterTOI(**item) <NEW_LINE> <DEDENT> elif whois == 'goalsum': <NEW_LINE> <INDENT> player = GoalieSum(**item) <NEW_LINE> <DEDENT> elif whois == 'goalbio': <NEW_LINE> <INDENT> player = GoalieBio(**item) <NEW_LINE> <DEDENT> elif whois == 'goalps': <NEW_LINE> <INDENT> player = GoaliePS(**item) <NEW_LINE> <DEDENT> elif whois == 'goalso': <NEW_LINE> <INDENT> player = GoalieSO(**item) <NEW_LINE> <DEDENT> elif whois == 'goalst': <NEW_LINE> <INDENT> player = GoalieST(**item) <NEW_LINE> <DEDENT> elif whois == 'standings': <NEW_LINE> <INDENT> player = StandingsModel(**item) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> session.add(player) <NEW_LINE> session.commit() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> session.rollback() <NEW_LINE> raise <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> session.close() <NEW_LINE> <DEDENT> return item
pipeline for storing skater summary items in the database
62599049d7e4931a7ef3d427
class Topic(models.Model): <NEW_LINE> <INDENT> text = models.CharField(max_length=200) <NEW_LINE> date_added = models.DateTimeField(auto_now_add=True) <NEW_LINE> owner = models.ForeignKey(User, on_delete=models.PROTECT) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.text
A topic the user is learning about
62599049596a897236128f87
@attr.s(cmp=False, hash=False, slots=True) <NEW_LINE> class SummaryStats(object): <NEW_LINE> <INDENT> succeeded = attr.ib() <NEW_LINE> min_response_time = attr.ib() <NEW_LINE> avg_response_time = attr.ib() <NEW_LINE> max_response_time = attr.ib() <NEW_LINE> req_s = attr.ib() <NEW_LINE> failed = attr.ib() <NEW_LINE> @property <NEW_LINE> def total(self): <NEW_LINE> <INDENT> return self.succeeded + self.failed <NEW_LINE> <DEDENT> @property <NEW_LINE> def failed_pct(self): <NEW_LINE> <INDENT> return self.failed / self.total * 100
Represents summarized performance statistics for a TaskQueue.
62599049097d151d1a2c241d
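A small worked example (made-up numbers) showing how the derived total and failed_pct properties above combine the stored fields.

# Made-up numbers; SummaryStats is the attrs class defined above.
stats = SummaryStats(
    succeeded=950,
    min_response_time=0.012,
    avg_response_time=0.080,
    max_response_time=1.430,
    req_s=125.0,
    failed=50,
)
assert stats.total == 1000        # succeeded + failed
assert stats.failed_pct == 5.0    # failed / total * 100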
class HostTimelinesView(TimelinesView, ComputeInfrastructureHostsView): <NEW_LINE> <INDENT> breadcrumb = BreadCrumb() <NEW_LINE> @property <NEW_LINE> def is_displayed(self): <NEW_LINE> <INDENT> return ( self.in_compute_infrastructure_hosts and '{} (Summary)'.format(self.context['object'].name) in self.breadcrumb.locations and self.is_timelines)
Represents a Host Timelines page.
62599049b830903b9686ee53
class VolitileProducer(BaseProducer): <NEW_LINE> <INDENT> @timeit <NEW_LINE> def produce(self, topic:str, value:str, key:str): <NEW_LINE> <INDENT> producer = Producer(KAFKA_CONFIG) <NEW_LINE> producer.produce(topic, value, key) <NEW_LINE> producer.flush()
Producer gets created and discarded after sending a single message.
62599049a79ad1619776b431
class PrivacyIdCountCombiner(Combiner): <NEW_LINE> <INDENT> AccumulatorType = int <NEW_LINE> def __init__(self, params: CombinerParams): <NEW_LINE> <INDENT> self._params = params <NEW_LINE> <DEDENT> def create_accumulator(self, values: Sized) -> AccumulatorType: <NEW_LINE> <INDENT> return 1 if values else 0 <NEW_LINE> <DEDENT> def merge_accumulators(self, accumulator1: AccumulatorType, accumulator2: AccumulatorType): <NEW_LINE> <INDENT> return accumulator1 + accumulator2 <NEW_LINE> <DEDENT> def compute_metrics(self, accumulator: AccumulatorType) -> dict: <NEW_LINE> <INDENT> return { 'privacy_id_count': dp_computations.compute_dp_count(accumulator, self._params.mean_var_params) } <NEW_LINE> <DEDENT> def metrics_names(self) -> List[str]: <NEW_LINE> <INDENT> return ['privacy_id_count']
Combiner for computing the DP privacy id count. The type of the accumulator is int, which represents the count of elements in the dataset for which this accumulator is computed.
625990496fece00bbacccd68
class DescribeBrandCommentCountRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.BrandId = None <NEW_LINE> self.StartDate = None <NEW_LINE> self.EndDate = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.BrandId = params.get("BrandId") <NEW_LINE> self.StartDate = params.get("StartDate") <NEW_LINE> self.EndDate = params.get("EndDate") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
DescribeBrandCommentCount request parameter structure
62599049b57a9660fecd2e2d
class PrepareMapResultTestCase(unittest.TestCase, DbTestMixin): <NEW_LINE> <INDENT> def tearDown(self): <NEW_LINE> <INDENT> self.teardown_output(self.output) <NEW_LINE> <DEDENT> def test_prepare_map_result_with_hazard(self): <NEW_LINE> <INDENT> self.output = self.setup_output() <NEW_LINE> self.output.min_value, self.output.max_value = (10.0, 20.0) <NEW_LINE> name = os.path.basename(self.output.path) <NEW_LINE> map_type = ("hazard map" if self.output.output_type == "hazard_map" else "loss map") <NEW_LINE> expected = { "layer": { "layer": "geonode:hazard_map_data", "filter": "output_id=%s" % self.output.id, "ows": "http://gemsun02.ethz.ch/geoserver-geonode-dev/ows"}, "name": "%s-%s" % (self.output.oq_job.id, name), "min": view_utils.round_float(self.output.min_value), "max": view_utils.round_float(self.output.max_value), "type": map_type, "id": self.output.id} <NEW_LINE> actual = prepare_map_result(self.output) <NEW_LINE> self.assertEqual(expected, actual) <NEW_LINE> <DEDENT> def test_prepare_map_result_with_loss(self): <NEW_LINE> <INDENT> self.output = self.setup_output(output_type="loss_map") <NEW_LINE> self.output.min_value, self.output.max_value = (30.0, 40.0) <NEW_LINE> name = os.path.basename(self.output.path) <NEW_LINE> map_type = ("loss map" if self.output.output_type == "loss_map" else "loss map") <NEW_LINE> expected = { "layer": { "layer": "geonode:loss_map_data", "filter": "output_id=%s" % self.output.id, "ows": "http://gemsun02.ethz.ch/geoserver-geonode-dev/ows"}, "name": "%s-%s" % (self.output.oq_job.id, name), "min": view_utils.round_float(self.output.min_value), "max": view_utils.round_float(self.output.max_value), "type": map_type, "id": self.output.id} <NEW_LINE> actual = prepare_map_result(self.output) <NEW_LINE> self.assertEqual(expected, actual)
Tests the behaviour of views.prepare_map_result().
6259904923849d37ff85246e
class RegistroX356(Registro): <NEW_LINE> <INDENT> campos = [ CampoFixo(1, 'REG', 'X356'), CampoNumerico(2, 'PERC_PART'), CampoNumerico(3, 'ATIVO_TOTAL'), CampoNumerico(4, 'PAT_LIQUIDO'), ]
Statement of corporate ownership structure (Demonstrativo de Estrutura Societária)
62599049b57a9660fecd2e2e
class PacketCaptureListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[PacketCaptureResult]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(PacketCaptureListResult, self).__init__(**kwargs) <NEW_LINE> self.value = kwargs.get('value', None)
List of packet capture sessions. :param value: Information about packet capture sessions. :type value: list[~azure.mgmt.network.v2020_11_01.models.PacketCaptureResult]
625990498e05c05ec3f6f833
class LanguageBase(object): <NEW_LINE> <INDENT> lpar = "(" <NEW_LINE> rpar = ")" <NEW_LINE> func_lpar = "(" <NEW_LINE> func_delim = "," <NEW_LINE> func_rpar = ")" <NEW_LINE> array_lpar = "[" <NEW_LINE> array_delim = "," <NEW_LINE> array_rpar = "]" <NEW_LINE> op_assign = "=" <NEW_LINE> eol = "\n" <NEW_LINE> replacements = {} <NEW_LINE> operators = {} <NEW_LINE> def get_parser_atoms(self): <NEW_LINE> <INDENT> point = Literal(".") <NEW_LINE> e = CaselessLiteral("E") <NEW_LINE> return { "int": Combine(Word("+-" + nums, nums)), "float": Combine( Word("+-" + nums, nums) + Optional(point + Optional(Word(nums))) + Optional(e + Word("+-" + nums, nums)) ), "variable": Word(alphas, alphas + nums + "_"), "array_lpar": Literal(self.array_lpar), "array_delim": Literal(self.array_delim), "array_rpar": Literal(self.array_rpar), "function": Word(alphas, alphas + nums + "_$"), "func_lpar": Literal(self.func_lpar), "func_delim": Literal(self.func_delim), "func_rpar": Literal(self.func_rpar), "assign": Literal(self.op_assign), "equal": Literal("=="), "plus": Literal("+"), "minus": Literal("-"), "mult": Literal("*"), "div": Literal("/"), "lpar": Literal(self.lpar), "rpar": Literal(self.rpar), "exp": Literal("^"), "consts": CaselessKeyword("PI").setParseAction(upcaseTokens) | CaselessKeyword("E").setParseAction(upcaseTokens), } <NEW_LINE> <DEDENT> def format_atom(self, s): <NEW_LINE> <INDENT> return str(s) <NEW_LINE> <DEDENT> def pre_process(self, s): <NEW_LINE> <INDENT> return s
Base class defining a generic language. Derive from this class to include your own language
62599049cad5886f8bdc5a57
class VirtualCollection(DAVCollection): <NEW_LINE> <INDENT> def __init__(self, path, environ, displayInfo, memberNameList): <NEW_LINE> <INDENT> DAVCollection.__init__(self, path, environ) <NEW_LINE> if isinstance(displayInfo, basestring): <NEW_LINE> <INDENT> displayInfo = {"type": displayInfo} <NEW_LINE> <DEDENT> assert type(displayInfo) is dict <NEW_LINE> assert type(memberNameList) is list <NEW_LINE> self.displayInfo = displayInfo <NEW_LINE> self.memberNameList = memberNameList <NEW_LINE> <DEDENT> def getDisplayInfo(self): <NEW_LINE> <INDENT> return self.displayInfo <NEW_LINE> <DEDENT> def getMemberNames(self): <NEW_LINE> <INDENT> return self.memberNameList <NEW_LINE> <DEDENT> def preventLocking(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def getMember(self, name): <NEW_LINE> <INDENT> return self.provider.getResourceInst(util.joinUri(self.path, name), self.environ)
Abstract base class for collections that contain a list of static members. Member names are passed to the constructor. getMember() is implemented by calling self.provider.getResourceInst()
62599049d53ae8145f919811
class DefaultDiscovery(object): <NEW_LINE> <INDENT> METHODS = [StaticHostDiscovery, NUPNPDiscovery, SSDPDiscovery] <NEW_LINE> def discover(self): <NEW_LINE> <INDENT> for cls in self.METHODS: <NEW_LINE> <INDENT> method = cls() <NEW_LINE> try: <NEW_LINE> <INDENT> return method.discover() <NEW_LINE> <DEDENT> except DiscoveryFailed: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> raise DiscoveryFailed
Discovery method that tries all other discovery methods sequentially.
6259904996565a6dacd2d962
class itkImageFileReaderVIUL2(itkImageSourcePython.itkImageSourceVIUL2): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __New_orig__(): <NEW_LINE> <INDENT> return _itkImageFileReaderPython.itkImageFileReaderVIUL2___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> def SetFileName(self, *args): <NEW_LINE> <INDENT> return _itkImageFileReaderPython.itkImageFileReaderVIUL2_SetFileName(self, *args) <NEW_LINE> <DEDENT> def GetFileName(self): <NEW_LINE> <INDENT> return _itkImageFileReaderPython.itkImageFileReaderVIUL2_GetFileName(self) <NEW_LINE> <DEDENT> def SetImageIO(self, *args): <NEW_LINE> <INDENT> return _itkImageFileReaderPython.itkImageFileReaderVIUL2_SetImageIO(self, *args) <NEW_LINE> <DEDENT> def GetImageIO(self): <NEW_LINE> <INDENT> return _itkImageFileReaderPython.itkImageFileReaderVIUL2_GetImageIO(self) <NEW_LINE> <DEDENT> def GenerateOutputInformation(self): <NEW_LINE> <INDENT> return _itkImageFileReaderPython.itkImageFileReaderVIUL2_GenerateOutputInformation(self) <NEW_LINE> <DEDENT> def SetUseStreaming(self, *args): <NEW_LINE> <INDENT> return _itkImageFileReaderPython.itkImageFileReaderVIUL2_SetUseStreaming(self, *args) <NEW_LINE> <DEDENT> def GetUseStreaming(self): <NEW_LINE> <INDENT> return _itkImageFileReaderPython.itkImageFileReaderVIUL2_GetUseStreaming(self) <NEW_LINE> <DEDENT> def UseStreamingOn(self): <NEW_LINE> <INDENT> return _itkImageFileReaderPython.itkImageFileReaderVIUL2_UseStreamingOn(self) <NEW_LINE> <DEDENT> def UseStreamingOff(self): <NEW_LINE> <INDENT> return _itkImageFileReaderPython.itkImageFileReaderVIUL2_UseStreamingOff(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkImageFileReaderPython.delete_itkImageFileReaderVIUL2 <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkImageFileReaderPython.itkImageFileReaderVIUL2_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkImageFileReaderPython.itkImageFileReaderVIUL2_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkImageFileReaderVIUL2.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New)
Proxy of C++ itkImageFileReaderVIUL2 class
6259904929b78933be26aa9b
class metavfs(object): <NEW_LINE> <INDENT> metapaths = {} <NEW_LINE> @util.propertycache <NEW_LINE> def metalog(self): <NEW_LINE> <INDENT> vfs = self.vfs <NEW_LINE> metalog = bindings.metalog.metalog.openfromenv(self.vfs.join("metalog")) <NEW_LINE> tracked = set(pycompat.decodeutf8((metalog.get("tracked") or b"")).split()) <NEW_LINE> desired = set(self.metapaths) <NEW_LINE> for name in desired.difference(tracked): <NEW_LINE> <INDENT> data = vfs.tryread(name) <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> metalog[name] = data <NEW_LINE> <DEDENT> <DEDENT> metalog["tracked"] = "\n".join(sorted(desired)).encode("utf-8") <NEW_LINE> try: <NEW_LINE> <INDENT> metalog.commit("migrate from vfs", int(util.timer())) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return metalog <NEW_LINE> <DEDENT> def metaopen(self, path, mode="r"): <NEW_LINE> <INDENT> assert path in self.metapaths <NEW_LINE> if mode in {"r", "rb"}: <NEW_LINE> <INDENT> return readablestream(self.metalog[path] or b"") <NEW_LINE> <DEDENT> elif mode in {"w", "wb"}: <NEW_LINE> <INDENT> def write(content, path=path, self=self): <NEW_LINE> <INDENT> self.metalog.set(path, content) <NEW_LINE> legacypath = self.join(path) <NEW_LINE> util.replacefile(legacypath, content) <NEW_LINE> <DEDENT> return writablestream(write) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise error.ProgrammingError("mode %s is unsupported for %s" % (mode, path))
Wrapper vfs that writes data to metalog
625990498e71fb1e983bce77
class Tipo(Enum): <NEW_LINE> <INDENT> UNO = 1 <NEW_LINE> DOS = 2 <NEW_LINE> TRES = 3 <NEW_LINE> CUATRO = 4
Helper class to enumerate the type of element to use (Tipo1, Tipo2, Tipo3, or Tipo4)
6259904945492302aabfd884
class UnsupportedRootElementError(Exception): <NEW_LINE> <INDENT> def __init__(self, message, expected=None, found=None): <NEW_LINE> <INDENT> super(UnsupportedRootElementError, self).__init__(message) <NEW_LINE> self.expected = expected <NEW_LINE> self.found = found
Raised when an input STIX document does not contain a supported root-level element.
6259904907f4c71912bb07e5
class Register(APIView): <NEW_LINE> <INDENT> authentication_classes = (CsrfExemptSessionAuthentication, BasicAuthentication) <NEW_LINE> @classmethod <NEW_LINE> def post(cls, request): <NEW_LINE> <INDENT> password = request.data.get('password') <NEW_LINE> email = request.data.get('email') <NEW_LINE> username = request.data.get('username') <NEW_LINE> if User.objects.filter(Q(username=username) | Q(email=email)).exists(): <NEW_LINE> <INDENT> return Response({ "status" : "nok", "response" : "This username/ email is already registered with the system. Kindly login to proceed further with your application." }) <NEW_LINE> <DEDENT> user_obj = User.objects.create(username=username) <NEW_LINE> user_obj.email = email <NEW_LINE> user_obj.set_password(password) <NEW_LINE> user_obj.save() <NEW_LINE> authenticated_user = authenticate(request, username=username, password=password) <NEW_LINE> login(request, authenticated_user) <NEW_LINE> user_data_obj, _ = UserData.objects.get_or_create(user=user_obj) <NEW_LINE> cls.create_userdata(user_data_obj) <NEW_LINE> cls.add_anon_data_to_userdata(request.data.get('uuid'), user_obj) <NEW_LINE> return Response({ "status" : "ok", "user" : { "status" : "ok", "username" : user_obj.username, }, "current_state" : user_data_obj.session_data.get('current_state') }) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create_userdata(cls, user_data_obj): <NEW_LINE> <INDENT> user_data_obj.session_data['current_state'] = 'indepth_details' <NEW_LINE> user_data_obj.save() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def add_anon_data_to_userdata(cls, identifier, user_obj): <NEW_LINE> <INDENT> anon_data = AnonData.objects.get(identifier=identifier) <NEW_LINE> user_obj.first_name = anon_data.data['company_name'] <NEW_LINE> user_obj.save() <NEW_LINE> _ = CompanyData.objects.create( business=user_obj, revenue=anon_data.data['revenue'], amount_requested=anon_data.data['amount_requested'], date_of_registration=timezone.datetime( year=int(anon_data.data['year_of_registration'].split('-')[0]), month=1, day=1) ) <NEW_LINE> anon_data.delete()
To register a user onto the platform
6259904923e79379d538d8b0
class PDS4StreamHandler(logging.StreamHandler): <NEW_LINE> <INDENT> def __init__(self, name, level=_loud): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> logging.StreamHandler.__init__(self, stream=sys.stdout) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> logging.StreamHandler.__init__(self, strm=sys.stdout) <NEW_LINE> <DEDENT> self._name = name <NEW_LINE> if not hasattr(self, 'terminator'): <NEW_LINE> <INDENT> self.terminator = '\n' <NEW_LINE> <DEDENT> self.set_level(level) <NEW_LINE> <DEDENT> def emit(self, record): <NEW_LINE> <INDENT> if six.PY3: <NEW_LINE> <INDENT> super(PDS4StreamHandler, self).emit(record) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> unicode <NEW_LINE> _unicode = True <NEW_LINE> <DEDENT> except NameError: <NEW_LINE> <INDENT> _unicode = False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> msg = self.format(record) <NEW_LINE> stream = self.stream <NEW_LINE> fs = b"%s{0}".format(self.terminator) <NEW_LINE> if not _unicode: <NEW_LINE> <INDENT> stream.write(fs % msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if (isinstance(msg, unicode) and getattr(stream, 'encoding', None)): <NEW_LINE> <INDENT> ufs = u'%s{0}'.format(self.terminator) <NEW_LINE> try: <NEW_LINE> <INDENT> stream.write(ufs % msg) <NEW_LINE> <DEDENT> except UnicodeEncodeError: <NEW_LINE> <INDENT> stream.write((ufs % msg).encode(stream.encoding)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> stream.write(fs % msg) <NEW_LINE> <DEDENT> <DEDENT> except UnicodeError: <NEW_LINE> <INDENT> stream.write(fs % msg.encode("UTF-8")) <NEW_LINE> <DEDENT> <DEDENT> self.flush() <NEW_LINE> <DEDENT> except (KeyboardInterrupt, SystemExit): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.handleError(record) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_quiet(self): <NEW_LINE> <INDENT> return self.level >= _quiet <NEW_LINE> <DEDENT> def set_level(self, level): <NEW_LINE> <INDENT> self.setLevel(level) <NEW_LINE> <DEDENT> def get_level(self): <NEW_LINE> <INDENT> return self.level <NEW_LINE> <DEDENT> def setLevel(self, level): <NEW_LINE> <INDENT> if isinstance(level, six.string_types): <NEW_LINE> <INDENT> level = level.upper() <NEW_LINE> <DEDENT> logging.StreamHandler.setLevel(self, level)
Custom StreamHandler that has *name* and *is_quiet* attributes.
6259904926068e7796d4dcf6
@dataclass <NEW_LINE> class AssetSerializer(BaseEntitySerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Asset <NEW_LINE> <DEDENT> asset_id: AssetIdType = field(default_factory=uuid4, repr=False) <NEW_LINE> symbol: str = field(default_factory=str) <NEW_LINE> asset_class: str = field(default_factory=str)
AssetSerializer object is used for serializing the entity and deserializing data
62599049dc8b845886d5496e
class CheckLoginHandler(BaseHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> if self.get_current_user(): <NEW_LINE> <INDENT> self.write({"errno":RET.OK, "errmsg":"true", "data":{"name":self.session.data.get("name")}}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.write({"errno":RET.SESSIONERR, "errmsg":"false"})
Check login status
6259904976d4e153a661dc4f
class HiliteTreeprocessor(Treeprocessor): <NEW_LINE> <INDENT> def run(self, root): <NEW_LINE> <INDENT> blocks = root.iter("pre") <NEW_LINE> for block in blocks: <NEW_LINE> <INDENT> if len(block) == 1 and block[0].tag == "code": <NEW_LINE> <INDENT> html = highlight(block[0].text, self.config, self.markdown.tab_length) <NEW_LINE> placeholder = self.markdown.htmlStash.store(html) <NEW_LINE> block.clear() <NEW_LINE> block.tag = "p" <NEW_LINE> block.text = placeholder
Highlight source code in code blocks.
62599049009cb60464d028e8
class NameOfCommand(gdb.Command): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(NameOfCommand, self).__init__('nameof', gdb.COMMAND_DATA) <NEW_LINE> <DEDENT> @errorwrap <NEW_LINE> def invoke(self, args, from_tty): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> obj = gdb.parse_and_eval(args) <NEW_LINE> <DEDENT> except gdb.error: <NEW_LINE> <INDENT> print('Usage: nameof <object>') <NEW_LINE> return <NEW_LINE> <DEDENT> name = nameof(obj) <NEW_LINE> if name is not None: <NEW_LINE> <INDENT> print('"' + name + '"')
Print the name of an HHVM object.
625990490fa83653e46f6290
class ServiceContext(InstanceContext): <NEW_LINE> <INDENT> def __init__(self, version, sid): <NEW_LINE> <INDENT> super(ServiceContext, self).__init__(version) <NEW_LINE> self._solution = {'sid': sid, } <NEW_LINE> self._uri = '/Services/{sid}'.format(**self._solution) <NEW_LINE> self._environments = None <NEW_LINE> self._functions = None <NEW_LINE> self._assets = None <NEW_LINE> self._builds = None <NEW_LINE> <DEDENT> def fetch(self): <NEW_LINE> <INDENT> payload = self._version.fetch(method='GET', uri=self._uri, ) <NEW_LINE> return ServiceInstance(self._version, payload, sid=self._solution['sid'], ) <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> return self._version.delete(method='DELETE', uri=self._uri, ) <NEW_LINE> <DEDENT> def update(self, include_credentials=values.unset, friendly_name=values.unset, ui_editable=values.unset): <NEW_LINE> <INDENT> data = values.of({ 'IncludeCredentials': include_credentials, 'FriendlyName': friendly_name, 'UiEditable': ui_editable, }) <NEW_LINE> payload = self._version.update(method='POST', uri=self._uri, data=data, ) <NEW_LINE> return ServiceInstance(self._version, payload, sid=self._solution['sid'], ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def environments(self): <NEW_LINE> <INDENT> if self._environments is None: <NEW_LINE> <INDENT> self._environments = EnvironmentList(self._version, service_sid=self._solution['sid'], ) <NEW_LINE> <DEDENT> return self._environments <NEW_LINE> <DEDENT> @property <NEW_LINE> def functions(self): <NEW_LINE> <INDENT> if self._functions is None: <NEW_LINE> <INDENT> self._functions = FunctionList(self._version, service_sid=self._solution['sid'], ) <NEW_LINE> <DEDENT> return self._functions <NEW_LINE> <DEDENT> @property <NEW_LINE> def assets(self): <NEW_LINE> <INDENT> if self._assets is None: <NEW_LINE> <INDENT> self._assets = AssetList(self._version, service_sid=self._solution['sid'], ) <NEW_LINE> <DEDENT> return self._assets <NEW_LINE> <DEDENT> @property <NEW_LINE> def builds(self): <NEW_LINE> <INDENT> if self._builds is None: <NEW_LINE> <INDENT> self._builds = BuildList(self._version, service_sid=self._solution['sid'], ) <NEW_LINE> <DEDENT> return self._builds <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items()) <NEW_LINE> return '<Twilio.Serverless.V1.ServiceContext {}>'.format(context)
PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you currently do not have developer preview access, please contact [email protected].
62599049d53ae8145f919813
class GitFetchStrategy(VCSFetchStrategy): <NEW_LINE> <INDENT> enabled = True <NEW_LINE> required_attributes = ('git',) <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(GitFetchStrategy, self).__init__( 'git', 'tag', 'branch', 'commit', **kwargs) <NEW_LINE> self._git = None <NEW_LINE> if not self.branch: <NEW_LINE> <INDENT> self.branch = self.tag <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def git_version(self): <NEW_LINE> <INDENT> vstring = self.git('--version', return_output=True).lstrip('git version ') <NEW_LINE> return Version(vstring) <NEW_LINE> <DEDENT> @property <NEW_LINE> def git(self): <NEW_LINE> <INDENT> if not self._git: <NEW_LINE> <INDENT> self._git = which('git', required=True) <NEW_LINE> <DEDENT> return self._git <NEW_LINE> <DEDENT> @_needs_stage <NEW_LINE> def fetch(self): <NEW_LINE> <INDENT> self.stage.chdir() <NEW_LINE> if self.stage.source_path: <NEW_LINE> <INDENT> tty.msg("Already fetched %s." % self.stage.source_path) <NEW_LINE> return <NEW_LINE> <DEDENT> args = [] <NEW_LINE> if self.commit: <NEW_LINE> <INDENT> args.append('at commit %s' % self.commit) <NEW_LINE> <DEDENT> elif self.tag: <NEW_LINE> <INDENT> args.append('at tag %s' % self.tag) <NEW_LINE> <DEDENT> elif self.branch: <NEW_LINE> <INDENT> args.append('on branch %s' % self.branch) <NEW_LINE> <DEDENT> tty.msg("Trying to clone git repository:", self.url, *args) <NEW_LINE> if self.commit: <NEW_LINE> <INDENT> self.git('clone', self.url) <NEW_LINE> self.stage.chdir_to_source() <NEW_LINE> self.git('checkout', self.commit) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> args = ['clone'] <NEW_LINE> if self.branch: <NEW_LINE> <INDENT> args.extend(['--branch', self.branch]) <NEW_LINE> <DEDENT> if self.git_version > ver('1.7.10'): <NEW_LINE> <INDENT> args.append('--single-branch') <NEW_LINE> <DEDENT> args.append(self.url) <NEW_LINE> self.git(*args) <NEW_LINE> self.stage.chdir_to_source() <NEW_LINE> <DEDENT> <DEDENT> def archive(self, destination): <NEW_LINE> <INDENT> super(GitFetchStrategy, self).archive(destination, exclude='.git') <NEW_LINE> <DEDENT> @_needs_stage <NEW_LINE> def reset(self): <NEW_LINE> <INDENT> self.stage.chdir_to_source() <NEW_LINE> self.git('checkout', '.') <NEW_LINE> self.git('clean', '-f') <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "[git] %s" % self.url
Fetch strategy that gets source code from a git repository. Use like this in a package: version('name', git='https://github.com/project/repo.git') Optionally, you can provide a branch, tag, or commit to check out, e.g.: version('1.1', git='https://github.com/project/repo.git', tag='v1.1') You can use these three optional attributes in addition to ``git``: * ``branch``: Particular branch to build from (default is master) * ``tag``: Particular tag to check out * ``commit``: Particular commit hash in the repo
625990498da39b475be045a4
class UpdateProfileForm(BaseModel): <NEW_LINE> <INDENT> bio: str = None <NEW_LINE> gender: str = None
Form to update user profile
6259904991af0d3eaad3b1d9
class BasisState(object): <NEW_LINE> <INDENT> def __init__(self, state, msb=None): <NEW_LINE> <INDENT> if msb is None: <NEW_LINE> <INDENT> self.bit_seq = state <NEW_LINE> self.decimal = self._binary_to_decimal(state) <NEW_LINE> self.msb = len(state) - 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.bit_seq = self._decimal_to_binary(state, msb) <NEW_LINE> self.decimal = state <NEW_LINE> self.msb = msb <NEW_LINE> <DEDENT> <DEDENT> def generate_flipped_states(self): <NEW_LINE> <INDENT> bit_seq_nearest_neighbour = np.roll(self.bit_seq, -1) <NEW_LINE> is_flipable = self.bit_seq != bit_seq_nearest_neighbour <NEW_LINE> flip_states = [] <NEW_LINE> for ind in np.arange(self.msb + 1)[is_flipable]: <NEW_LINE> <INDENT> flipped_seq = copy.deepcopy(self.bit_seq) <NEW_LINE> flipped_seq[ind], flipped_seq[(ind + 1) % (self.msb + 1)] = flipped_seq[(ind + 1) % (self.msb + 1)], flipped_seq[ind] <NEW_LINE> flip_states.append(BasisState(flipped_seq)) <NEW_LINE> <DEDENT> return flip_states <NEW_LINE> <DEDENT> def _binary_to_decimal(self, bit_array): <NEW_LINE> <INDENT> exponents = np.arange(len(bit_array))[bit_array > 0 ] <NEW_LINE> return np.sum(2**exponents) <NEW_LINE> <DEDENT> def _decimal_to_binary(self, decimal, msb): <NEW_LINE> <INDENT> if decimal >= 2**(msb+1): <NEW_LINE> <INDENT> raise ValueError("Decimal value is out of range!") <NEW_LINE> <DEDENT> rest, i, bit_seq = decimal, msb, np.zeros(msb+1) <NEW_LINE> while(rest > 0): <NEW_LINE> <INDENT> if 2**i <= rest: <NEW_LINE> <INDENT> bit_seq[i] = 1 <NEW_LINE> rest -= 2**i <NEW_LINE> <DEDENT> i -= 1 <NEW_LINE> <DEDENT> return bit_seq <NEW_LINE> <DEDENT> def energy(self, jz): <NEW_LINE> <INDENT> sz = self.bit_seq - 0.5 <NEW_LINE> return np.sum(sz * np.roll(sz, -1)) * jz <NEW_LINE> <DEDENT> def magnetisation(self): <NEW_LINE> <INDENT> sz = self.bit_seq - 0.5 <NEW_LINE> ind = np.arange(1, self.msb - int(self.msb % 2 == 0) + 1, 2) <NEW_LINE> sz[ind] *= -1 <NEW_LINE> return np.sum(sz) / (self.msb + 1) <NEW_LINE> <DEDENT> def magnetisation_squared(self): <NEW_LINE> <INDENT> sz = self.bit_seq - 0.5 <NEW_LINE> sign = np.ones((self.msb+1, self.msb+1)) <NEW_LINE> sign[1::2,::2] *= -1 <NEW_LINE> sign[::2, 1::2] *= -1 <NEW_LINE> return np.dot(sz, np.dot(sign, sz)) / (self.msb + 1)**2 <NEW_LINE> <DEDENT> def correlation(self): <NEW_LINE> <INDENT> sz = self.bit_seq - 0.5 <NEW_LINE> return sz[0] * sz
The basis states of the system
62599049d7e4931a7ef3d42b
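A short worked example (hypothetical values) of the decimal/bit round trip above: with msb=3 the decimal 5 becomes the least-significant-bit-first sequence [1, 0, 1, 0], and converting that sequence back recovers 5.

import numpy as np

# Hypothetical values; BasisState is the class defined above.
state = BasisState(5, msb=3)                  # 4-site chain, decimal value 5
assert list(state.bit_seq) == [1, 0, 1, 0]    # LSB first: 2**0 + 2**2 == 5
assert BasisState(np.array([1, 0, 1, 0])).decimal == 5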
class Graph: <NEW_LINE> <INDENT> sort_key = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._inputs_of = defaultdict(set) <NEW_LINE> self._consequences_of = defaultdict(set) <NEW_LINE> <DEDENT> def sorted(self, nodes, reverse=False): <NEW_LINE> <INDENT> nodes = list(nodes) <NEW_LINE> try: <NEW_LINE> <INDENT> nodes.sort(key=self.sort_key, reverse=reverse) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return nodes <NEW_LINE> <DEDENT> def add_edge(self, input_task, consequence_task): <NEW_LINE> <INDENT> self._consequences_of[input_task].add(consequence_task) <NEW_LINE> self._inputs_of[consequence_task].add(input_task) <NEW_LINE> <DEDENT> def remove_edge(self, input_task, consequence_task): <NEW_LINE> <INDENT> self._consequences_of[input_task].remove(consequence_task) <NEW_LINE> self._inputs_of[consequence_task].remove(input_task) <NEW_LINE> <DEDENT> def inputs_of(self, task): <NEW_LINE> <INDENT> return self.sorted(self._inputs_of[task]) <NEW_LINE> <DEDENT> def clear_inputs_of(self, task): <NEW_LINE> <INDENT> input_tasks = self._inputs_of.pop(task, ()) <NEW_LINE> for input_task in input_tasks: <NEW_LINE> <INDENT> self._consequences_of[input_task].remove(task) <NEW_LINE> <DEDENT> <DEDENT> def edges(self): <NEW_LINE> <INDENT> return [(a, b) for a in self.sorted(self._consequences_of) for b in self.sorted(self._consequences_of[a])] <NEW_LINE> <DEDENT> def tasks(self): <NEW_LINE> <INDENT> return self.sorted(set(self._inputs_of).union(self._consequences_of)) <NEW_LINE> <DEDENT> def immediate_consequences_of(self, task): <NEW_LINE> <INDENT> return self.sorted(self._consequences_of[task]) <NEW_LINE> <DEDENT> def recursive_consequences_of(self, tasks, include=False): <NEW_LINE> <INDENT> def visit(task): <NEW_LINE> <INDENT> visited.add(task) <NEW_LINE> consequences = self._consequences_of[task] <NEW_LINE> for consequence in self.sorted(consequences, reverse=True): <NEW_LINE> <INDENT> if consequence not in visited: <NEW_LINE> <INDENT> visit(consequence) <NEW_LINE> <DEDENT> <DEDENT> stack.insert(0, task) <NEW_LINE> <DEDENT> def generate_consequences_backwards(): <NEW_LINE> <INDENT> for task in self.sorted(tasks, reverse=True): <NEW_LINE> <INDENT> visit(task) <NEW_LINE> if include is False: <NEW_LINE> <INDENT> stack.remove(task) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> visited = set() <NEW_LINE> stack =[] <NEW_LINE> generate_consequences_backwards() <NEW_LINE> return stack
A directed graph of the relationships between build tasks. A task is looked up directly by its hash value; here Python dictionary keys are used.
62599049d10714528d69f068
class Enemies(pg.sprite.DirtySprite): <NEW_LINE> <INDENT> def __init__(self, game, player, pos, size, *groups): <NEW_LINE> <INDENT> super().__init__(*groups) <NEW_LINE> self.game = game <NEW_LINE> self.image = pg.Surface(size).convert() <NEW_LINE> self.image.fill(c.SILVER) <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> self.rect.topleft = pos <NEW_LINE> self.player = player <NEW_LINE> self.health_bar = None <NEW_LINE> <DEDENT> def update(self, room, dt): <NEW_LINE> <INDENT> self.check_if_alive(room, dt) <NEW_LINE> <DEDENT> def check_if_alive(self, room, dt): <NEW_LINE> <INDENT> if self.health > 0: <NEW_LINE> <INDENT> for health_bar in room.health_bar_container: <NEW_LINE> <INDENT> health_bar.move_health_bar(dt) <NEW_LINE> <DEDENT> self.handle_bullet_hit(room) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.kill() <NEW_LINE> room.health_bar_container.update(self.health) <NEW_LINE> <DEDENT> <DEDENT> def handle_bullet_hit(self, room): <NEW_LINE> <INDENT> bullet_hit = pg.sprite.spritecollide(self, room.bullet_container, True) <NEW_LINE> for bullet in bullet_hit: <NEW_LINE> <INDENT> self.health -= bullet.gun.output_damage <NEW_LINE> bullet.kill() <NEW_LINE> self.handle_health_bar(room) <NEW_LINE> <DEDENT> <DEDENT> def handle_health_bar(self, room): <NEW_LINE> <INDENT> if self.health > 0: <NEW_LINE> <INDENT> self.health_bar = HealthBar(self, (self.health, 10), (self.rect.centerx, self.rect.center[1]-30), c.RED) <NEW_LINE> self.health_bar.rect.centerx = self.rect.centerx <NEW_LINE> self.health_bar.add_to_group(room.health_bar_container) <NEW_LINE> self.game.all_sprites.add(self.health_bar) <NEW_LINE> self.health_bar.update(self.health)
Basic enemy class.
6259904950485f2cf55dc33e
class RemoveContactNotificationProtocolEntity(ContactNotificationProtocolEntity): <NEW_LINE> <INDENT> def __init__(self, _id, _from, timestamp, notify, offline, contactJid): <NEW_LINE> <INDENT> super(RemoveContactNotificationProtocolEntity, self).__init__(_id, _from, timestamp, notify, offline) <NEW_LINE> self.setData(contactJid) <NEW_LINE> <DEDENT> def setData(self, jid): <NEW_LINE> <INDENT> self.contactJid = jid <NEW_LINE> <DEDENT> def toProtocolTreeNode(self): <NEW_LINE> <INDENT> node = super(RemoveContactNotificationProtocolEntity, self).toProtocolTreeNode() <NEW_LINE> removeNode = ProtocolTreeNode("remove", {"jid": self.contactJid}, None, None) <NEW_LINE> node.addChild(removeNode) <NEW_LINE> return node <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def fromProtocolTreeNode(node): <NEW_LINE> <INDENT> entity = ContactNotificationProtocolEntity.fromProtocolTreeNode(node) <NEW_LINE> entity.__class__ = RemoveContactNotificationProtocolEntity <NEW_LINE> removeNode = node.getChild("remove") <NEW_LINE> entity.setData(removeNode.getAttributeValue("jid")) <NEW_LINE> return entity
<notification offline="0" id="{{NOTIFICATION_ID}}" notify="{{NOTIFY_NAME}}" type="contacts" t="{{TIMESTAMP}}" from="{{SENDER_JID}}"> <remove jid="{{SET_JID}}"> </remove> </notification>
6259904982261d6c527308a0
class TestUnicode(BaseS3CLICommand): <NEW_LINE> <INDENT> def test_cp(self): <NEW_LINE> <INDENT> bucket_name = self.create_bucket() <NEW_LINE> local_example1_txt = self.files.create_file('êxample.txt', 'example1 contents') <NEW_LINE> s3_example1_txt = 's3://%s/%s' % (bucket_name, os.path.basename(local_example1_txt)) <NEW_LINE> local_example2_txt = self.files.full_path('êxample2.txt') <NEW_LINE> p = aws('s3 cp %s %s' % (local_example1_txt, s3_example1_txt)) <NEW_LINE> self.assert_no_errors(p) <NEW_LINE> p = aws('s3 cp %s %s --quiet' % (s3_example1_txt, local_example2_txt)) <NEW_LINE> self.assert_no_errors(p) <NEW_LINE> with open(local_example2_txt, 'rb') as f: <NEW_LINE> <INDENT> self.assertEqual(f.read(), b'example1 contents') <NEW_LINE> <DEDENT> <DEDENT> def test_recursive_cp(self): <NEW_LINE> <INDENT> bucket_name = self.create_bucket() <NEW_LINE> local_example1_txt = self.files.create_file('êxample1.txt', 'example1 contents') <NEW_LINE> local_example2_txt = self.files.create_file('êxample2.txt', 'example2 contents') <NEW_LINE> p = aws('s3 cp %s s3://%s --recursive --quiet' % ( self.files.rootdir, bucket_name)) <NEW_LINE> self.assert_no_errors(p) <NEW_LINE> os.remove(local_example1_txt) <NEW_LINE> os.remove(local_example2_txt) <NEW_LINE> p = aws('s3 cp s3://%s %s --recursive --quiet' % ( bucket_name, self.files.rootdir)) <NEW_LINE> self.assert_no_errors(p) <NEW_LINE> self.assertEqual(open(local_example1_txt).read(), 'example1 contents') <NEW_LINE> self.assertEqual(open(local_example2_txt).read(), 'example2 contents')
The purpose of these tests is to ensure that the commands can handle unicode characters both in key names and in generated file names, for both uploading and downloading files.
625990498e05c05ec3f6f835
class ModelTestCase(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> user = User.objects.create(username="testbot") <NEW_LINE> self.bucketlist_name = "Write world class code" <NEW_LINE> self.bucketlist = Bucketlist(name=self.bucketlist_name, owner=user) <NEW_LINE> <DEDENT> def test_model_can_create_a_bucketlist(self): <NEW_LINE> <INDENT> old_count = Bucketlist.objects.count() <NEW_LINE> self.bucketlist.save() <NEW_LINE> new_count = Bucketlist.objects.count() <NEW_LINE> self.assertNotEqual(old_count, new_count)
This class defines the test suite for the bucketlist model.
6259904916aa5153ce4018a2
@model <NEW_LINE> class AudioData(MetaData): <NEW_LINE> <INDENT> Length = int <NEW_LINE> AudioEncoding = str <NEW_LINE> SampleRate = int <NEW_LINE> Channels = str <NEW_LINE> AudioBitrate = int <NEW_LINE> Title = str <NEW_LINE> Artist = str <NEW_LINE> Track = int <NEW_LINE> Album = str <NEW_LINE> Genre = str <NEW_LINE> Tcmp = int <NEW_LINE> AlbumArtist = str <NEW_LINE> Year = int <NEW_LINE> Disk = int <NEW_LINE> Tbpm = int <NEW_LINE> Composer = str
Provides the meta data that is extracted based on the content.
6259904930c21e258be99bbb
class NDArrayInterface(base.BaseMatrixInterface): <NEW_LINE> <INDENT> TARGET_MATRIX = numpy.ndarray <NEW_LINE> def const_to_matrix(self, value, convert_scalars=False): <NEW_LINE> <INDENT> if isinstance(value, cvxopt.spmatrix): <NEW_LINE> <INDENT> value = cvxopt.matrix(value) <NEW_LINE> value = numpy.array(value, dtype='float64') <NEW_LINE> <DEDENT> elif isinstance(value, list): <NEW_LINE> <INDENT> value = numpy.atleast_2d(value) <NEW_LINE> value = value.T <NEW_LINE> <DEDENT> elif scipy.sparse.issparse(value): <NEW_LINE> <INDENT> value = value.A <NEW_LINE> <DEDENT> elif isinstance(value, numpy.matrix): <NEW_LINE> <INDENT> value = value.A <NEW_LINE> <DEDENT> return numpy.atleast_2d(value) <NEW_LINE> <DEDENT> def identity(self, size): <NEW_LINE> <INDENT> return numpy.eye(size) <NEW_LINE> <DEDENT> def size(self, matrix): <NEW_LINE> <INDENT> if len(matrix.shape) == 1: <NEW_LINE> <INDENT> return (matrix.size, 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return matrix.shape <NEW_LINE> <DEDENT> <DEDENT> def scalar_value(self, matrix): <NEW_LINE> <INDENT> return numpy.asscalar(matrix) <NEW_LINE> <DEDENT> def scalar_matrix(self, value, rows, cols): <NEW_LINE> <INDENT> return numpy.zeros((rows,cols), dtype='float64') + value <NEW_LINE> <DEDENT> def reshape(self, matrix, size): <NEW_LINE> <INDENT> return numpy.reshape(matrix, size, order='F')
An interface to convert constant values to the numpy ndarray class.
625990494e696a045264e7fb
class MockKLDivergence(object): <NEW_LINE> <INDENT> def __init__(self, result): <NEW_LINE> <INDENT> self.result = result <NEW_LINE> self.args = [] <NEW_LINE> self.called = Counter() <NEW_LINE> <DEDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.called() <NEW_LINE> self.args.append(args) <NEW_LINE> return self.result
Monitors DenseVariational calls to the divergence implementation.
625990490a366e3fb87ddd9b
class PreserveEphemeralRebuild(extensions.V21APIExtensionBase): <NEW_LINE> <INDENT> name = "PreserveEphemeralOnRebuild" <NEW_LINE> alias = ALIAS <NEW_LINE> version = 1 <NEW_LINE> def get_controller_extensions(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def get_resources(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def server_rebuild(self, rebuild_dict, rebuild_kwargs, body_deprecated_param=None): <NEW_LINE> <INDENT> if 'preserve_ephemeral' in rebuild_dict: <NEW_LINE> <INDENT> rebuild_kwargs['preserve_ephemeral'] = strutils.bool_from_string( rebuild_dict['preserve_ephemeral'], strict=True) <NEW_LINE> <DEDENT> <DEDENT> def get_server_rebuild_schema(self, version): <NEW_LINE> <INDENT> return preserve_ephemeral_rebuild.server_rebuild
Allow preservation of the ephemeral partition on rebuild.
6259904921a7993f00c6731f
class FoursquareVenue: <NEW_LINE> <INDENT> __slots__ = ('foursquare_id', 'lat', 'lng', 'name', 'xtile', 'ytile') <NEW_LINE> def __init__(self, json_venue): <NEW_LINE> <INDENT> self.foursquare_id = json_venue['id'] <NEW_LINE> self.lat = json_venue['location']['lat'] <NEW_LINE> self.lng = json_venue['location']['lng'] <NEW_LINE> self.name = json_venue['name'] <NEW_LINE> (self.xtile, self.ytile) = osm_deg2num(self.lat, self.lng, 16)
Class for transporting information about the venue. Includes the identifier for the tiles used from openstreetmap.
6259904907f4c71912bb07e9
class EntryAdmin(SeoEntryAdminMixin, _entry_admin_base): <NEW_LINE> <INDENT> FIELDSET_GENERAL = (None, { 'fields': ('title', 'slug', 'status',), }) <NEW_LINE> declared_fieldsets = ( FIELDSET_GENERAL, AbstractEntryBaseAdmin.FIELDSET_PUBLICATION, SeoEntryAdminMixin.FIELDSET_SEO, ) <NEW_LINE> list_filter = list(_entry_admin_base.list_filter) <NEW_LINE> formfield_overrides = {} <NEW_LINE> formfield_overrides.update(SeoEntryAdminMixin.formfield_overrides) <NEW_LINE> formfield_overrides.update({ 'intro': { 'widget': widgets.AdminTextareaWidget(attrs={'rows': 4}) }, })
The Django admin class for the default blog :class:`~fluent_blogs.models.Entry` model. When using a custom model, you can use :class:`AbstractEntryBaseAdmin`, which isn't attached to any of the optional fields.
6259904945492302aabfd889
class Array(_DatatypeBase): <NEW_LINE> <INDENT> def __init__(self, *dimensions): <NEW_LINE> <INDENT> assert len(dimensions) >= 1 <NEW_LINE> assert all(isinstance(d, (int, long)) for d in dimensions), "Dimensions must be ints, not %s" % (str(dimensions)) <NEW_LINE> self.dimensions = dimensions <NEW_LINE> num_elements = 1 <NEW_LINE> for d in self.dimensions: <NEW_LINE> <INDENT> num_elements *= d <NEW_LINE> <DEDENT> _DatatypeBase.__init__(self, "Array", "Array({%s})" % (",".join("%d" % d for d in self.dimensions)), num_elements)
Array Data Type
6259904923e79379d538d8b4
class CategoryTreeRobot: <NEW_LINE> <INDENT> def __init__(self, catTitle, catDB, filename=None, maxDepth=10): <NEW_LINE> <INDENT> self.catTitle = catTitle <NEW_LINE> self.catDB = catDB <NEW_LINE> if filename and not os.path.isabs(filename): <NEW_LINE> <INDENT> filename = config.datafilepath(filename) <NEW_LINE> <DEDENT> self.filename = filename <NEW_LINE> self.maxDepth = maxDepth <NEW_LINE> self.site = pywikibot.Site() <NEW_LINE> <DEDENT> def treeview(self, cat, currentDepth=0, parent=None): <NEW_LINE> <INDENT> result = u'#' * currentDepth <NEW_LINE> if currentDepth > 0: <NEW_LINE> <INDENT> result += u' ' <NEW_LINE> <DEDENT> result += cat.title(asLink=True, textlink=True, withNamespace=False) <NEW_LINE> result += ' (%d)' % cat.categoryinfo['pages'] <NEW_LINE> if currentDepth < self.maxDepth // 2: <NEW_LINE> <INDENT> pywikibot.output('.', newline=False) <NEW_LINE> <DEDENT> supercat_names = [] <NEW_LINE> for cat in self.catDB.getSupercats(cat): <NEW_LINE> <INDENT> if cat != parent: <NEW_LINE> <INDENT> supercat_names.append(cat.title(asLink=True, textlink=True, withNamespace=False)) <NEW_LINE> <DEDENT> <DEDENT> if supercat_names: <NEW_LINE> <INDENT> result += ' ' + i18n.twtranslate(self.site, 'category-also-in', {'alsocat': ', '.join( supercat_names)}) <NEW_LINE> <DEDENT> del supercat_names <NEW_LINE> result += '\n' <NEW_LINE> if currentDepth < self.maxDepth: <NEW_LINE> <INDENT> for subcat in self.catDB.getSubcats(cat): <NEW_LINE> <INDENT> result += self.treeview(subcat, currentDepth + 1, parent=cat) <NEW_LINE> <DEDENT> <DEDENT> elif self.catDB.getSubcats(cat): <NEW_LINE> <INDENT> result += '#' * (currentDepth + 1) + ' [...]\n' <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> cat = pywikibot.Category(self.site, self.catTitle) <NEW_LINE> pywikibot.output('Generating tree...', newline=False) <NEW_LINE> tree = self.treeview(cat) <NEW_LINE> pywikibot.output(u'') <NEW_LINE> if self.filename: <NEW_LINE> <INDENT> pywikibot.output(u'Saving results in %s' % self.filename) <NEW_LINE> import codecs <NEW_LINE> f = codecs.open(self.filename, 'a', 'utf-8') <NEW_LINE> f.write(tree) <NEW_LINE> f.close() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pywikibot.output(tree, toStdout=True)
Robot to create tree overviews of the category structure. Parameters: * catTitle - The category which will be the tree's root. * catDB - A CategoryDatabase object * maxDepth - The limit beyond which no subcategories will be listed. This also guarantees that loops in the category structure won't be a problem. * filename - The textfile where the tree should be saved; None to print the tree to stdout.
6259904950485f2cf55dc33f
class Country(htb.HTBObject): <NEW_LINE> <INDENT> rank: int <NEW_LINE> country_code: str <NEW_LINE> members: int <NEW_LINE> points: int <NEW_LINE> user_owns: int <NEW_LINE> root_owns: int <NEW_LINE> challenge_owns: int <NEW_LINE> user_bloods: int <NEW_LINE> root_bloods: int <NEW_LINE> fortress: int <NEW_LINE> endgame: int <NEW_LINE> name: str <NEW_LINE> def __init__(self, data: dict): <NEW_LINE> <INDENT> self.rank = data['rank'] <NEW_LINE> self.country_code = data['country'] <NEW_LINE> self.members = data['members'] <NEW_LINE> self.points = data['points'] <NEW_LINE> self.user_owns = data['user_owns'] <NEW_LINE> self.root_owns = data['root_owns'] <NEW_LINE> self.challenge_owns = data['challenge_owns'] <NEW_LINE> self.user_bloods = data['user_bloods'] <NEW_LINE> self.root_bloods = data['root_bloods'] <NEW_LINE> self.fortress = data['fortress'] <NEW_LINE> self.endgame = data['endgame'] <NEW_LINE> self.name = data['name']
The class representing a Country Attributes: rank: The Country's global rank country_code: The Country's country code members: The number of members from the Country points: The Country's total points user_owns: The Country's total user owns root_owns: The Country's total root owns challenge_owns: The Country's total challenge owns user_bloods: The Country's total user bloods root_bloods: The Country's total root bloods fortress: The Country's total Fortress flags endgame: The Country's total Endgame flags name: The name of the Country Args: data: The data of the country
62599049ec188e330fdf9c53
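A minimal construction sketch, assuming the Country class above and its htb.HTBObject base are importable; the field values are hypothetical and only mirror the keys read in __init__.

    data = {
        "rank": 1, "country": "AT", "members": 120, "points": 54321,
        "user_owns": 900, "root_owns": 850, "challenge_owns": 400,
        "user_bloods": 12, "root_bloods": 9, "fortress": 30, "endgame": 14,
        "name": "Austria",
    }
    country = Country(data)
    print(country.rank, country.name, country.points)  # 1 Austria 54321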
class HelpCommand(commands.DefaultHelpCommand): <NEW_LINE> <INDENT> async def send_bot_help(self, mapping): <NEW_LINE> <INDENT> embed = self.create_embed( title=f"`{self.clean_prefix}help`", fields=[{ "name": cog.qualified_name if cog else '\u200B', "value": "\n".join([ self.short(command) for command in await self.filter_commands(cog_commands) ])} for cog, cog_commands in mapping.items() if cog_commands][::-1] ) <NEW_LINE> await self.get_destination().send(embed=embed) <NEW_LINE> <DEDENT> async def send_cog_help(self, cog): <NEW_LINE> <INDENT> embed = self.create_embed( title=cog.qualified_name.capitalize(), description=cog.description, **({"fields": [{ "name": f"{cog.qualified_name.capitalize()} Commands:", "value": "\n".join([ self.short(command) for command in cog.get_commands()]) }]} if cog.get_commands() else {})) <NEW_LINE> await self.get_destination().send(embed=embed) <NEW_LINE> <DEDENT> async def send_group_help(self, group): <NEW_LINE> <INDENT> embed = self.create_embed( title=self.short(group, False), description=group.help, fields=[{ "name": f"Subcommands:", "value": "\n".join([ self.short(command) for command in await self.filter_commands(group.commands) ]) }] ) <NEW_LINE> await self.get_destination().send(embed=embed) <NEW_LINE> <DEDENT> async def send_command_help(self, command): <NEW_LINE> <INDENT> sig = self.get_command_signature(command) <NEW_LINE> embed = self.create_embed( title=f"`{sig[:-1] if sig.endswith(' ') else sig}`", description=command.help, ) <NEW_LINE> await self.get_destination().send(embed=embed) <NEW_LINE> <DEDENT> def command_not_found(self, string): <NEW_LINE> <INDENT> return f"Command {self.short(string, False)} does not exist." <NEW_LINE> <DEDENT> async def subcommand_not_found(self, command, string): <NEW_LINE> <INDENT> if isinstance(command, commands.Group) and len(command.all_commands) > 0: <NEW_LINE> <INDENT> return f"Command {self.short(command, False)} has no subcommand named `{string}`." <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return f"Command {self.short(command, False)} has no subcommands." <NEW_LINE> <DEDENT> <DEDENT> async def send_error_message(self, error): <NEW_LINE> <INDENT> await self.get_destination().send( embed=discord.Embed(title="Command/Subcommand not found.", description=error, color=self.EMBED_COLOR)) <NEW_LINE> <DEDENT> def create_embed(self, fields: list = (), **kwargs): <NEW_LINE> <INDENT> embed = discord.Embed(color=discord.Color.blurple(), **kwargs) <NEW_LINE> for field in fields: <NEW_LINE> <INDENT> embed.add_field(**field, inline=False) <NEW_LINE> <DEDENT> embed.set_footer( text=f"Type {self.clean_prefix}help command for more info on a command. You can also type {self.clean_prefix}help category for more info on a category.") <NEW_LINE> return embed <NEW_LINE> <DEDENT> def short(self, command, doc=True): <NEW_LINE> <INDENT> return f'`{self.clean_prefix}{command}` {(command.short_doc if doc else "")}'
Set up help command for the bot.
62599049b5575c28eb7136a4
class VQE_light: <NEW_LINE> <INDENT> params = ([1, 3], [False, True]) <NEW_LINE> param_names = ["n_steps", "optimize"] <NEW_LINE> def time_hydrogen(self, n_steps, optimize): <NEW_LINE> <INDENT> hyperparams = {"n_steps": n_steps, "optimize": optimize} <NEW_LINE> benchmark_vqe(hyperparams) <NEW_LINE> <DEDENT> def peakmem_hydrogen(self, n_steps, optimize): <NEW_LINE> <INDENT> hyperparams = {"n_steps": n_steps, "optimize": optimize} <NEW_LINE> benchmark_vqe(hyperparams)
Benchmark the VQE algorithm using different numbers of optimization steps and grouping options.
6259904950485f2cf55dc340
class DocumentNotFound(Exception): <NEW_LINE> <INDENT> pass
Exception raised when a document cannot be found.
62599049009cb60464d028ec
class Artist: <NEW_LINE> <INDENT> def __init__(self, artist_name): <NEW_LINE> <INDENT> self.artist_name = artist_name <NEW_LINE> self.artist_id = MusicBrainzHandler.get_artist_id_from_artist_name(self.artist_name) <NEW_LINE> self.artist_songs = MusicBrainzHandler.get_artist_songs_from_artist_id(self.artist_id) <NEW_LINE> self.lyrics_dict = Artist.construct_lyrics_dict(self.artist_name, self.artist_songs) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def construct_lyrics_dict(artist_name, artist_songs): <NEW_LINE> <INDENT> print('Finding lyrics for {} {} songs...'.format(len(artist_songs), artist_name)) <NEW_LINE> lyrics_dict = {} <NEW_LINE> for song in artist_songs: <NEW_LINE> <INDENT> word_count = LyricsOvhHandler.find_number_of_words_in_song(artist_name, song) <NEW_LINE> if word_count is not None: <NEW_LINE> <INDENT> lyrics_dict[song] = word_count <NEW_LINE> <DEDENT> <DEDENT> print('Found lyrics for {} of the {} songs\n'.format(len(lyrics_dict), len(artist_songs))) <NEW_LINE> return lyrics_dict <NEW_LINE> <DEDENT> def print_artist_statistics(self): <NEW_LINE> <INDENT> if len(self.lyrics_dict) == 0: <NEW_LINE> <INDENT> print('No statistics to output for {}'.format(self.artist_name)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> song_lengths = list(self.lyrics_dict.values()) <NEW_LINE> mean_length = np.mean(song_lengths) <NEW_LINE> median_length = np.median(song_lengths) <NEW_LINE> length_stand_dev = np.std(song_lengths) <NEW_LINE> min_length = np.min(song_lengths) <NEW_LINE> max_length = np.max(song_lengths) <NEW_LINE> min_length_song = min(self.lyrics_dict, key=self.lyrics_dict.get) <NEW_LINE> max_length_song = max(self.lyrics_dict, key=self.lyrics_dict.get) <NEW_LINE> print('Outputting statistics for {}\n'.format(self.artist_name)) <NEW_LINE> print('Found lyrics for {} songs'.format(len(song_lengths))) <NEW_LINE> print('Mean song length = {:.2f} words'.format(mean_length)) <NEW_LINE> print('Median song length = {:.2f} words'.format(median_length)) <NEW_LINE> print('Standard deviation of song length = {:.2f} words'.format(length_stand_dev)) <NEW_LINE> print('Shortest song is {} with {} words'.format(min_length_song, min_length)) <NEW_LINE> print('Longest song is {} with {} words'.format(max_length_song, max_length)) <NEW_LINE> <DEDENT> <DEDENT> def plot_artist_histogram(self): <NEW_LINE> <INDENT> if len(self.lyrics_dict) == 0: <NEW_LINE> <INDENT> print('No songs to plot for {}'.format(self.artist_name)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> plt.figure() <NEW_LINE> plt.hist(self.lyrics_dict.values(), bins=50) <NEW_LINE> plt.title('Word count histogram for artist {}'.format(self.artist_name)) <NEW_LINE> plt.grid() <NEW_LINE> plt.xlabel('Word count') <NEW_LINE> plt.ylabel('Frequency') <NEW_LINE> plt.savefig('plots/{}.png'.format(self.artist_name))
A class representing an artist
62599049a79ad1619776b437
class PermissionHandler(BaseHandler): <NEW_LINE> <INDENT> def init_with_config(self, config): <NEW_LINE> <INDENT> super().init_with_config(config) <NEW_LINE> self.backend = DriveStorage(config) <NEW_LINE> self.db = DBStorage(config) <NEW_LINE> self.search = SearchHandler(config=config) <NEW_LINE> <DEDENT> def init_args(self, subparser) -> None: <NEW_LINE> <INDENT> parser = super().init_args(subparser) <NEW_LINE> parser.add_argument( "action", help="Define what action to take.", choices=["list", "add"] ) <NEW_LINE> parser.add_argument( "--for_tags", help="Define which tags to add permissions for ", nargs="+" ) <NEW_LINE> parser.add_argument( "--share_with", help="email id of the person to share with." ) <NEW_LINE> parser.add_argument( "--not_persistent", action="store_true", help="If provided, future uploads wont be shared.", ) <NEW_LINE> <DEDENT> def execute_command(self, id, email, role): <NEW_LINE> <INDENT> self.backend.add_permissions_user(fileid=id, email=email, role=role) <NEW_LINE> <DEDENT> def run(self, args): <NEW_LINE> <INDENT> if args.action == "add": <NEW_LINE> <INDENT> if not args.for_tags: <NEW_LINE> <INDENT> logger.critical("--for_tags is required. Try again.") <NEW_LINE> <DEDENT> elif not args.share_with: <NEW_LINE> <INDENT> logger.critical("--share_with is required. Try again.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tags = args.for_tags <NEW_LINE> email = args.share_with <NEW_LINE> role = "reader" <NEW_LINE> if not args.not_persistent: <NEW_LINE> <INDENT> self.db.add_permissions(tags, email, role) <NEW_LINE> <DEDENT> response = self.search.execute_command( name=None, tags=tags, do_and=False ) <NEW_LINE> for item in response: <NEW_LINE> <INDENT> id = item[1] <NEW_LINE> self.backend.add_permissions_user(fileid=id, email=email, role=role) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if args.action == "list": <NEW_LINE> <INDENT> if args.for_tags is None: <NEW_LINE> <INDENT> print(self.db.get_permissions()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for tag in args.for_tags: <NEW_LINE> <INDENT> print(self.db.get_permissions(tag))
This class handles adding permissions to uploaded files. It has two main drawbacks as of now: 1. No control over the tag types; the user can upload any data as a tag. 2. Updating tags is not implemented. NOTE: A mechanism to tag a file while uploading has not yet been implemented.
625990493cc13d1c6d466aef
class ExplicitValidationFormMetaclass(DeclarativeFieldsMetaclass): <NEW_LINE> <INDENT> def __new__(cls, name, bases, attrs): <NEW_LINE> <INDENT> new_class = super(ExplicitValidationFormMetaclass, cls).__new__(cls, name, bases, attrs) <NEW_LINE> add_validators_to_class_fields(new_class) <NEW_LINE> return new_class
Adds explicitly declared field validators to class fields
6259904930c21e258be99bbd
class RevokeLinkedAppStatus(bb.Struct): <NEW_LINE> <INDENT> __slots__ = [ '_success_value', '_error_type_value', ] <NEW_LINE> _has_required_fields = True <NEW_LINE> def __init__(self, success=None, error_type=None): <NEW_LINE> <INDENT> self._success_value = bb.NOT_SET <NEW_LINE> self._error_type_value = bb.NOT_SET <NEW_LINE> if success is not None: <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> if error_type is not None: <NEW_LINE> <INDENT> self.error_type = error_type <NEW_LINE> <DEDENT> <DEDENT> success = bb.Attribute("success") <NEW_LINE> error_type = bb.Attribute("error_type", nullable=True, user_defined=True) <NEW_LINE> def _process_custom_annotations(self, annotation_type, field_path, processor): <NEW_LINE> <INDENT> super(RevokeLinkedAppStatus, self)._process_custom_annotations(annotation_type, field_path, processor)
:ivar team.RevokeLinkedAppStatus.success: Result of the revoking request. :ivar team.RevokeLinkedAppStatus.error_type: The error cause in case of a failure.
625990498e71fb1e983bce7c
class VoteHandler(MainHandler): <NEW_LINE> <INDENT> @_check_user_or_login <NEW_LINE> def get(self, article_key): <NEW_LINE> <INDENT> user = self.check_user() <NEW_LINE> article = db.get(article_key) <NEW_LINE> if user and article and not user.name in article.votes: <NEW_LINE> <INDENT> article.votes.append(user.name) <NEW_LINE> article.put() <NEW_LINE> <DEDENT> return self.redirect('/')
Handle article vote requests.
625990498a43f66fc4bf354d
class JobManager(object): <NEW_LINE> <INDENT> def __init__(self, user, passwd, config): <NEW_LINE> <INDENT> self.user = user <NEW_LINE> self.passwd = passwd <NEW_LINE> self.config = config <NEW_LINE> self.jobs = Queue() <NEW_LINE> self.config["threads"] = max(min(self.config.get("threads", 5), 10), 1) <NEW_LINE> self.config["job"] = self.config.get("job", ScrapeJob()) <NEW_LINE> self.make_jobs() <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self.start_jobs() <NEW_LINE> <DEDENT> def make_jobs(self): <NEW_LINE> <INDENT> job = self.config["job"] <NEW_LINE> letters = job["letters"] <NEW_LINE> threads_per_letter = int((self.config["threads"] - 1)/len(letters) + 1) <NEW_LINE> for l in letters: <NEW_LINE> <INDENT> job_letter = ScrapeJob(job) <NEW_LINE> job_letter["letters"] = l <NEW_LINE> for s in range(0, threads_per_letter): <NEW_LINE> <INDENT> temp = ScrapeJob(job_letter) <NEW_LINE> temp["subject_start"] = s <NEW_LINE> temp["subject_step"] = threads_per_letter <NEW_LINE> logging.info("Made job: {0}".format(temp)) <NEW_LINE> self.jobs.put_nowait(temp) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def run_jobs(self, queue): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> session = SolusSession(self.user, self.passwd) <NEW_LINE> <DEDENT> except EnvironmentError as e: <NEW_LINE> <INDENT> logging.critical(e) <NEW_LINE> return <NEW_LINE> <DEDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> job = queue.get_nowait() <NEW_LINE> <DEDENT> except Empty as e: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if PROFILE: <NEW_LINE> <INDENT> import cProfile <NEW_LINE> cProfile.runctx("SolusScraper(session, job).start()", globals(), locals()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> SolusScraper(session, job).start() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def start_jobs(self): <NEW_LINE> <INDENT> threads = [] <NEW_LINE> for x in range(self.config["threads"]): <NEW_LINE> <INDENT> threads.append(Process(target=self.run_jobs, args=(self.jobs,))) <NEW_LINE> threads[-1].start() <NEW_LINE> <DEDENT> for t in threads: <NEW_LINE> <INDENT> t.join()
Handles dividing up the scraping work and starting the scraper threads
62599049e64d504609df9dac
class VariableDatum( object ): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.variableDatumID = 0 <NEW_LINE> self.variableDatumLength = 0 <NEW_LINE> self.variableData = [] <NEW_LINE> <DEDENT> def datumPaddingSizeInBits(self): <NEW_LINE> <INDENT> padding = 0 <NEW_LINE> remainder = self.variableDatumLength % 64 <NEW_LINE> if remainder != 0: <NEW_LINE> <INDENT> padding = 64 - remainder <NEW_LINE> <DEDENT> return padding <NEW_LINE> <DEDENT> def serialize(self, outputStream): <NEW_LINE> <INDENT> outputStream.write_unsigned_int(self.variableDatumID); <NEW_LINE> outputStream.write_unsigned_int(self.variableDatumLength); <NEW_LINE> for x in range(self.variableDatumLength // 8): <NEW_LINE> <INDENT> outputStream.write_byte(self.variableData[x]) <NEW_LINE> <DEDENT> for x in range(self.datumPaddingSizeInBits() // 8): <NEW_LINE> <INDENT> outputStream.write_byte(0) <NEW_LINE> <DEDENT> <DEDENT> def parse(self, inputStream): <NEW_LINE> <INDENT> self.variableDatumID = inputStream.read_unsigned_int(); <NEW_LINE> self.variableDatumLength = inputStream.read_unsigned_int(); <NEW_LINE> for x in range(self.variableDatumLength // 8): <NEW_LINE> <INDENT> self.variableData.append(inputStream.read_byte()); <NEW_LINE> <DEDENT> for x in range(self.datumPaddingSizeInBits() // 8): <NEW_LINE> <INDENT> inputStream.read_byte()
the variable datum type, the datum length, and the value for that variable datum type. NOT COMPLETE. Section 6.2.93
62599049d7e4931a7ef3d42f
class ApiClient: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.match_key = None <NEW_LINE> <DEDENT> def set_match_key(self): <NEW_LINE> <INDENT> req = requests.get(url = 'https://qkok.co.za/rps/auth.php') <NEW_LINE> response = req.json() <NEW_LINE> self.match_key = response['match_key'] <NEW_LINE> self.round = response['round'] <NEW_LINE> <DEDENT> def get_match_key(self): <NEW_LINE> <INDENT> return self.match_key <NEW_LINE> <DEDENT> def get_round(self): <NEW_LINE> <INDENT> return self.round <NEW_LINE> <DEDENT> def save_round(self, round_number, user_play, com_play, winner): <NEW_LINE> <INDENT> params = { 'match_key': self.match_key, 'round_number': round_number, 'user_play': user_play, 'com_play': com_play, 'winner': winner } <NEW_LINE> req = requests.post(url = 'https://qkok.co.za/rps/insert_round.php', data = params) <NEW_LINE> response = req.json() <NEW_LINE> return response['return_status']
Handles all API-calls to insert/select data
6259904973bcbd0ca4bcb646
class MetadataLogger(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.__metadata = defaultdict(list) <NEW_LINE> <DEDENT> def log_metadata(self, label, value): <NEW_LINE> <INDENT> self.__metadata[label].append(value) <NEW_LINE> <DEDENT> def get_metadata(self, label, only_first=False): <NEW_LINE> <INDENT> if not only_first: <NEW_LINE> <INDENT> return self.__metadata[label] if label in self.__metadata else [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.__metadata[label][0] if self.__metadata[label] else None
This class provides a simple interface for assigning additional metadata to any object in our data model. Examples: storing ANNOVAR columns like depth, base count, dbSNP id, quality information for variants, additional prediction information for peptides, etc. This functionality is not used by the core methods of FRED2. The saved values are stored via :meth:`~Fred2.Core.MetadataLogger.log_metadata` and accessed via :meth:`~Fred2.Core.MetadataLogger.get_metadata`.
62599049462c4b4f79dbcdb8
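A short usage sketch, assuming the MetadataLogger class above is in scope; the label and values are invented for illustration.

    logger = MetadataLogger()
    logger.log_metadata("dbsnp_id", "rs6311")
    logger.log_metadata("dbsnp_id", "rs6313")
    print(logger.get_metadata("dbsnp_id"))                   # ['rs6311', 'rs6313']
    print(logger.get_metadata("dbsnp_id", only_first=True))  # rs6311
    print(logger.get_metadata("unknown_label"))              # []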
class FirewallPolicyRule(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'rule_type': {'required': True}, 'priority': {'maximum': 65000, 'minimum': 100}, } <NEW_LINE> _attribute_map = { 'rule_type': {'key': 'ruleType', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'priority': {'key': 'priority', 'type': 'int'}, } <NEW_LINE> _subtype_map = { 'rule_type': {'FirewallPolicyFilterRule': 'FirewallPolicyFilterRule', 'FirewallPolicyNatRule': 'FirewallPolicyNatRule'} } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(FirewallPolicyRule, self).__init__(**kwargs) <NEW_LINE> self.rule_type = None <NEW_LINE> self.name = kwargs.get('name', None) <NEW_LINE> self.priority = kwargs.get('priority', None)
Properties of the rule. You probably want to use the sub-classes and not this class directly. Known sub-classes are: FirewallPolicyFilterRule, FirewallPolicyNatRule. All required parameters must be populated in order to send to Azure. :param rule_type: Required. The type of the rule.Constant filled by server. Possible values include: "FirewallPolicyNatRule", "FirewallPolicyFilterRule". :type rule_type: str or ~azure.mgmt.network.v2019_06_01.models.FirewallPolicyRuleType :param name: Name of the Rule. :type name: str :param priority: Priority of the Firewall Policy Rule resource. :type priority: int
62599049435de62698e9d1bf
class PublicTagApiTests(APITestCase): <NEW_LINE> <INDENT> def test_login_required(self): <NEW_LINE> <INDENT> res = self.client.get(TAGS_URL) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
Test the publicly available Tag API
6259904982261d6c527308a2
class IEditFinishedEvent(IObjectEvent): <NEW_LINE> <INDENT> pass
Base event signalling that an edit operation has completed
62599049a79ad1619776b439
class FineTuneSGD(Optimizer): <NEW_LINE> <INDENT> def __init__(self, exception_vars, multiplier=0.1, lr=0.01, momentum=0., decay=0., nesterov=False, **kwargs): <NEW_LINE> <INDENT> super(FineTuneSGD, self).__init__(**kwargs) <NEW_LINE> with K.name_scope(self.__class__.__name__): <NEW_LINE> <INDENT> self.iterations = K.variable(0, dtype='int64', name='iterations') <NEW_LINE> self.lr = K.variable(lr, name='lr') <NEW_LINE> self.momentum = K.variable(momentum, name='momentum') <NEW_LINE> self.decay = K.variable(decay, name='decay') <NEW_LINE> <DEDENT> self.initial_decay = decay <NEW_LINE> self.nesterov = nesterov <NEW_LINE> self.exception_vars = exception_vars <NEW_LINE> self.multiplier = multiplier <NEW_LINE> <DEDENT> @interfaces.legacy_get_updates_support <NEW_LINE> def get_updates(self, loss, params): <NEW_LINE> <INDENT> grads = self.get_gradients(loss, params) <NEW_LINE> self.updates = [K.update_add(self.iterations, 1)] <NEW_LINE> lr = self.lr <NEW_LINE> if self.initial_decay > 0: <NEW_LINE> <INDENT> lr = lr * (1. / (1. + self.decay * K.cast(self.iterations, K.dtype(self.decay)))) <NEW_LINE> <DEDENT> shapes = [K.int_shape(p) for p in params] <NEW_LINE> moments = [K.zeros(shape) for shape in shapes] <NEW_LINE> self.weights = [self.iterations] + moments <NEW_LINE> for p, g, m in zip(params, grads, moments): <NEW_LINE> <INDENT> if p not in self.exception_vars: <NEW_LINE> <INDENT> multiplied_lr = lr * self.multiplier <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> multiplied_lr = lr <NEW_LINE> <DEDENT> v = self.momentum * m - multiplied_lr * g <NEW_LINE> self.updates.append(K.update(m, v)) <NEW_LINE> if self.nesterov: <NEW_LINE> <INDENT> new_p = p + self.momentum * v - multiplied_lr * g <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_p = p + v <NEW_LINE> <DEDENT> if getattr(p, 'constraint', None) is not None: <NEW_LINE> <INDENT> new_p = p.constraint(new_p) <NEW_LINE> <DEDENT> self.updates.append(K.update(p, new_p)) <NEW_LINE> <DEDENT> return self.updates <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> config = {'lr': float(K.get_value(self.lr)), 'momentum': float(K.get_value(self.momentum)), 'decay': float(K.get_value(self.decay)), 'nesterov': self.nesterov} <NEW_LINE> base_config = super(SGD, self).get_config() <NEW_LINE> return dict(list(base_config.items()) + list(config.items()))
Stochastic gradient descent optimizer. Includes support for momentum, learning rate decay, and Nesterov momentum. # Arguments lr: float >= 0. Learning rate. momentum: float >= 0. Parameter updates momentum. decay: float >= 0. Learning rate decay over each update. nesterov: boolean. Whether to apply Nesterov momentum.
6259904926238365f5fadf16
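A hedged compile-time sketch, assuming a Keras model built elsewhere (build_model and the layer choice are hypothetical); parameters not listed in exception_vars are updated with lr * multiplier, so here only the final layer's weights keep the full learning rate.

    model = build_model()                     # hypothetical Keras model factory
    head_vars = model.layers[-1].weights      # weights that should keep the full lr
    opt = FineTuneSGD(exception_vars=head_vars, multiplier=0.1,
                      lr=0.01, momentum=0.9, nesterov=True)
    model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy'])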
class qf_mac_gen_yibao_result: <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.STRUCT, 'success', (EncryptorRet, EncryptorRet.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.success = EncryptorRet() <NEW_LINE> self.success.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('qf_mac_gen_yibao_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.STRUCT, 0) <NEW_LINE> self.success.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - success
62599049b57a9660fecd2e36
class BashCompletion(AutotoolsPackage): <NEW_LINE> <INDENT> homepage = "https://github.com/scop/bash-completion" <NEW_LINE> url = "https://github.com/scop/bash-completion/archive/2.3.tar.gz" <NEW_LINE> version('2.7', 'f72c9e2e877d188c3159956a3496a450e7279b76') <NEW_LINE> version('2.3', '67e50f5f3c804350b43f2b664c33dde811d24292') <NEW_LINE> version('develop', git='https://github.com/scop/bash-completion.git') <NEW_LINE> depends_on('automake', type='build') <NEW_LINE> depends_on('autoconf', type='build') <NEW_LINE> depends_on('libtool', type='build') <NEW_LINE> depends_on('[email protected]:', type='run') <NEW_LINE> @run_before('install') <NEW_LINE> def create_install_directory(self): <NEW_LINE> <INDENT> mkdirp(join_path(self.prefix.share, 'bash-completion', 'completions')) <NEW_LINE> <DEDENT> @run_after('install') <NEW_LINE> def show_message_to_user(self): <NEW_LINE> <INDENT> prefix = self.prefix <NEW_LINE> print('=====================================================') <NEW_LINE> print('Bash completion has been installed. To use it, please') <NEW_LINE> print('include the following lines in your ~/.bash_profile :') <NEW_LINE> print('') <NEW_LINE> print('# Use bash-completion, if available') <NEW_LINE> print('[[ $PS1 && -f %s/share/bash-completion/bash_completion ]] && \ ' % prefix) <NEW_LINE> print(' . %s/share/bash-completion/bash_completion' % prefix) <NEW_LINE> print('') <NEW_LINE> print('=====================================================')
Programmable completion functions for bash.
6259904996565a6dacd2d966
class HandDatset(Dataset): <NEW_LINE> <INDENT> def __init__(self, hand_root, transforms, train_set=True): <NEW_LINE> <INDENT> self.root = os.path.join(hand_root, 'Dataset') <NEW_LINE> if train_set: <NEW_LINE> <INDENT> txt_list = os.path.join(self.root, "train.csv") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> txt_list = os.path.join(self.root, "test.csv") <NEW_LINE> <DEDENT> self.list = txt_list <NEW_LINE> self.df = pd.read_csv(self.list) <NEW_LINE> try: <NEW_LINE> <INDENT> json_file = open('./hand_classes.json', 'r') <NEW_LINE> self.class_dict = json.load(json_file) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> exit(-1) <NEW_LINE> <DEDENT> self.transforms = transforms <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.df) <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> img_path = self.df.path.iloc[idx] <NEW_LINE> img_path = os.path.join('./', img_path) <NEW_LINE> image = Image.open(img_path) <NEW_LINE> if image.format != "JPEG": <NEW_LINE> <INDENT> raise ValueError("Image format not JPEG") <NEW_LINE> <DEDENT> boxes = [] <NEW_LINE> labels = [] <NEW_LINE> iscrowd = [] <NEW_LINE> xmin = float(self.df.tlx.iloc[idx]) <NEW_LINE> xmax = float(self.df.brx.iloc[idx]) <NEW_LINE> ymin = float(self.df.tly.iloc[idx]) <NEW_LINE> ymax = float(self.df.bry.iloc[idx]) <NEW_LINE> boxes.append([xmin, ymin, xmax, ymax]) <NEW_LINE> gest = img_path.split('/')[3] <NEW_LINE> labels.append(self.class_dict[gest]) <NEW_LINE> iscrowd.append(int(0)) <NEW_LINE> boxes = torch.as_tensor(boxes, dtype=torch.float32) <NEW_LINE> labels = torch.as_tensor(labels, dtype=torch.int64) <NEW_LINE> iscrowd = torch.as_tensor(iscrowd, dtype=torch.int64) <NEW_LINE> image_id = torch.tensor([idx]) <NEW_LINE> area = (boxes[:, 3] - boxes[:, 1]) * (boxes[:, 2] - boxes[:, 0]) <NEW_LINE> target = {} <NEW_LINE> target["boxes"] = boxes <NEW_LINE> target["labels"] = labels <NEW_LINE> target["image_id"] = image_id <NEW_LINE> target["area"] = area <NEW_LINE> target["iscrowd"] = iscrowd <NEW_LINE> if self.transforms is not None: <NEW_LINE> <INDENT> image, target = self.transforms(image, target) <NEW_LINE> <DEDENT> return image, target <NEW_LINE> <DEDENT> def get_height_and_width(self, idx): <NEW_LINE> <INDENT> data_height = int(480) <NEW_LINE> data_width = int(640) <NEW_LINE> return data_height, data_width <NEW_LINE> <DEDENT> def parse_xml_to_dict(self, xml): <NEW_LINE> <INDENT> if len(xml) == 0: <NEW_LINE> <INDENT> return {xml.tag: xml.text} <NEW_LINE> <DEDENT> result = {} <NEW_LINE> for child in xml: <NEW_LINE> <INDENT> child_result = self.parse_xml_to_dict(child) <NEW_LINE> if child.tag != 'object': <NEW_LINE> <INDENT> result[child.tag] = child_result[child.tag] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if child.tag not in result: <NEW_LINE> <INDENT> result[child.tag] = [] <NEW_LINE> <DEDENT> result[child.tag].append(child_result[child.tag]) <NEW_LINE> <DEDENT> <DEDENT> return {xml.tag: result}
Reads and parses the SCUT-Ego-Gesture Dataset (http://www.hcii-lab.net/data/scutegogesture/).
62599049a8ecb033258725cc
@StatusCodes.EPIPE <NEW_LINE> @StatusCodes.ESHUTDOWN <NEW_LINE> class BrokenPipeError(ConnectionError, _BrokenPipeError): <NEW_LINE> <INDENT> pass
Broken pipe.
62599049004d5f362081f9c4
class Device (_Ancestor_Essence) : <NEW_LINE> <INDENT> is_partial = True <NEW_LINE> class _Attributes (_Ancestor_Essence._Attributes) : <NEW_LINE> <INDENT> _Ancestor = _Ancestor_Essence._Attributes <NEW_LINE> class left (_Ancestor.left) : <NEW_LINE> <INDENT> role_type = CNDB.OMP.Device_Type <NEW_LINE> role_name = "type" <NEW_LINE> ui_allow_new = False <NEW_LINE> <DEDENT> class name (A_String) : <NEW_LINE> <INDENT> kind = Attr.Primary_Optional <NEW_LINE> max_length = 40 <NEW_LINE> ignore_case = True <NEW_LINE> completer = Attr.Completer_Spec (2, Attr.Selector.primary) <NEW_LINE> <DEDENT> class desc (A_Text) : <NEW_LINE> <INDENT> kind = Attr.Optional <NEW_LINE> ui_name = "Description"
Model a device used by a CNDB node.
6259904971ff763f4b5e8b5f
class ReplaceUuid(InvariantAwareCommand): <NEW_LINE> <INDENT> path = None <NEW_LINE> _path = None <NEW_LINE> class PathArg(PathInvariant): <NEW_LINE> <INDENT> _help = "path of file to replace UUIDs in" <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> out = self._out <NEW_LINE> options = self.options <NEW_LINE> verbose = self._verbose <NEW_LINE> path = self._path <NEW_LINE> import re <NEW_LINE> from uuid import uuid4 <NEW_LINE> import linecache <NEW_LINE> regex = re.compile(r'[0-9a-f]{8}(?:-[0-9a-f]{4}){4}[0-9a-f]{8}', re.I) <NEW_LINE> with open(path, 'r') as f: <NEW_LINE> <INDENT> text = f.read() <NEW_LINE> <DEDENT> uuid_map = { src_uuid: str(uuid4()).upper() for src_uuid in regex.findall(text) } <NEW_LINE> for (old_uuid, new_uuid) in uuid_map.items(): <NEW_LINE> <INDENT> out("Replacing %s -> %s." % (old_uuid, new_uuid)) <NEW_LINE> text = text.replace(old_uuid, new_uuid) <NEW_LINE> <DEDENT> if 'vcxproj' in path: <NEW_LINE> <INDENT> newline = '\r\n' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> newline = '\n' <NEW_LINE> <DEDENT> with open(path, 'w') as f: <NEW_LINE> <INDENT> f.write(text)
Replaces UUIDs in a file with new ones.
6259904924f1403a926862aa
class AddItem(Model): <NEW_LINE> <INDENT> def __init__(self, category: str=None, food_item_name: str=None, quantity: int=None, price: int=None, eta: int=None, description: str=None): <NEW_LINE> <INDENT> self.swagger_types = { 'category': str, 'food_item_name': str, 'quantity': int, 'price': int, 'eta': int, 'description': str } <NEW_LINE> self.attribute_map = { 'category': 'category', 'food_item_name': 'food_item_name', 'quantity': 'quantity', 'price': 'price', 'eta': 'ETA', 'description': 'description' } <NEW_LINE> self._category = category <NEW_LINE> self._food_item_name = food_item_name <NEW_LINE> self._quantity = quantity <NEW_LINE> self._price = price <NEW_LINE> self._eta = eta <NEW_LINE> self._description = description <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dikt) -> 'AddItem': <NEW_LINE> <INDENT> return util.deserialize_model(dikt, cls) <NEW_LINE> <DEDENT> @property <NEW_LINE> def category(self) -> str: <NEW_LINE> <INDENT> return self._category <NEW_LINE> <DEDENT> @category.setter <NEW_LINE> def category(self, category: str): <NEW_LINE> <INDENT> self._category = category <NEW_LINE> <DEDENT> @property <NEW_LINE> def food_item_name(self) -> str: <NEW_LINE> <INDENT> return self._food_item_name <NEW_LINE> <DEDENT> @food_item_name.setter <NEW_LINE> def food_item_name(self, food_item_name: str): <NEW_LINE> <INDENT> self._food_item_name = food_item_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def quantity(self) -> int: <NEW_LINE> <INDENT> return self._quantity <NEW_LINE> <DEDENT> @quantity.setter <NEW_LINE> def quantity(self, quantity: int): <NEW_LINE> <INDENT> self._quantity = quantity <NEW_LINE> <DEDENT> @property <NEW_LINE> def price(self) -> int: <NEW_LINE> <INDENT> return self._price <NEW_LINE> <DEDENT> @price.setter <NEW_LINE> def price(self, price: int): <NEW_LINE> <INDENT> self._price = price <NEW_LINE> <DEDENT> @property <NEW_LINE> def eta(self) -> int: <NEW_LINE> <INDENT> return self._eta <NEW_LINE> <DEDENT> @eta.setter <NEW_LINE> def eta(self, eta: int): <NEW_LINE> <INDENT> self._eta = eta <NEW_LINE> <DEDENT> @property <NEW_LINE> def description(self) -> str: <NEW_LINE> <INDENT> return self._description <NEW_LINE> <DEDENT> @description.setter <NEW_LINE> def description(self, description: str): <NEW_LINE> <INDENT> self._description = description
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
625990496e29344779b019fc
class LogisticRegression: <NEW_LINE> <INDENT> def __init__(self, input, n_in, n_out): <NEW_LINE> <INDENT> self.W = theano.shared(np.zeros((n_in, n_out), dtype = theano.config.floatX), name = 'W', borrow = True) <NEW_LINE> self.b = theano.shared(np.zeros((n_out,), dtype = theano.config.floatX), name = 'b', borrow = True) <NEW_LINE> self.p_y_given_x = T.nnet.softmax(T.dot(input, self.W) + self.b) <NEW_LINE> self.y_pred = T.argmax(self.p_y_given_x, axis = 1) <NEW_LINE> self.params = [self.W, self.b] <NEW_LINE> self.input = input <NEW_LINE> <DEDENT> def negative_log_likelihood(self, y): <NEW_LINE> <INDENT> return -T.mean(T.log(self.p_y_given_x)[T.arange(y.shape[0]), y]) <NEW_LINE> <DEDENT> def errors(self, y): <NEW_LINE> <INDENT> if y.ndim != self.y_pred.ndim: <NEW_LINE> <INDENT> raise TypeError("Wrong dimension:", ('y:',y.type,"y_pred:", self.y_pred.type)) <NEW_LINE> <DEDENT> if y.dtype.startswith('int'): <NEW_LINE> <INDENT> return T.mean(T.neq(self.y_pred, y)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError()
Multi-class Logistic Regression
6259904950485f2cf55dc344
class ConvTranspose1D(nn.Module): <NEW_LINE> <INDENT> def __init__(self, size_in, length, size, stride=1, padding=None): <NEW_LINE> <INDENT> super(ConvTranspose1D, self).__init__() <NEW_LINE> util.autoassign(locals()) <NEW_LINE> padding = padding if padding is not None else self.length <NEW_LINE> self.ConvT = nn.ConvTranspose1d(self.size_in, self.size, self.length, stride=self.stride, padding=padding, bias=False) <NEW_LINE> self.ConvT.weight.data = init.glorot_uniform((self.size, self.size_in, self.length, 1)).squeeze() <NEW_LINE> <DEDENT> def forward(self, signal): <NEW_LINE> <INDENT> out = self.ConvT(signal.permute(0, 2, 1)) <NEW_LINE> return out.permute(0, 2, 1)
A one-dimensional transposed convolutional layer.
62599049097d151d1a2c2428
class Actor(models.Model): <NEW_LINE> <INDENT> name = models.CharField('Имя', max_length=100) <NEW_LINE> age = models.PositiveSmallIntegerField('Возраст', default=0) <NEW_LINE> description = models.TextField('Описание') <NEW_LINE> image = models.ImageField('Изображение', upload_to='actors/') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('person_view_url', kwargs={'slug': self.name}) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Актер/Режиссер' <NEW_LINE> verbose_name_plural = 'Актеры/Режиссеры'
Actors and directors
62599049009cb60464d028f0
class Message(models.Model): <NEW_LINE> <INDENT> subject = models.CharField(_("Subject"), max_length=120) <NEW_LINE> body = models.TextField(_("Body")) <NEW_LINE> sender = models.ForeignKey(User, related_name='sent_messages', null=True, verbose_name=_("Sender")) <NEW_LINE> recipient = models.ForeignKey(User, related_name='received_messages', null=True, verbose_name=_("Recipient")) <NEW_LINE> parent_msg = models.ForeignKey('self', related_name='next_messages', null=True, blank=True, verbose_name=_("Parent message")) <NEW_LINE> conversation = models.ForeignKey('self', null=True, blank=True, verbose_name=_("Conversation")) <NEW_LINE> sent_at = models.DateTimeField(_("sent at"), null=True, blank=True) <NEW_LINE> read_at = models.DateTimeField(_("read at"), null=True, blank=True) <NEW_LINE> replied_at = models.DateTimeField(_("replied at"), null=True, blank=True) <NEW_LINE> sender_deleted_at = models.DateTimeField(_("Sender deleted at"), null=True, blank=True) <NEW_LINE> recipient_deleted_at = models.DateTimeField(_("Recipient deleted at"), null=True, blank=True) <NEW_LINE> objects = MessageManager() <NEW_LINE> def new(self): <NEW_LINE> <INDENT> if self.read_at is not None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def replied(self): <NEW_LINE> <INDENT> if self.replied_at is not None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.subject <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> conversation_id = self.conversation_id or self.pk <NEW_LINE> return ('messages_detail', [str(conversation_id)]) <NEW_LINE> <DEDENT> get_absolute_url = models.permalink(get_absolute_url) <NEW_LINE> def save(self, **kwargs): <NEW_LINE> <INDENT> if not self.id: <NEW_LINE> <INDENT> self.sent_at = datetime.datetime.now() <NEW_LINE> <DEDENT> super(Message, self).save(**kwargs) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ['-sent_at'] <NEW_LINE> verbose_name = _("Message") <NEW_LINE> verbose_name_plural = _("Messages")
A private message from user to user
625990491f5feb6acb163fb0
class BrokenTestCaseWarning(Warning): <NEW_LINE> <INDENT> pass
emitted as a warning when an exception occurs in one of setUp, tearDown, setUpClass, or tearDownClass
62599049d53ae8145f91981c
class LotteryGame(GameBase): <NEW_LINE> <INDENT> _BOOKIE_PERCENT = 0.10 <NEW_LINE> _MAX_BET_PERCENT = 0.25 <NEW_LINE> def __init__(self, bookie): <NEW_LINE> <INDENT> self._bookie = bookie <NEW_LINE> self._winning_item = inventory_lib.Create('CoinPurse', None, None, {}) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return 'lottery' <NEW_LINE> <DEDENT> def CapBet(self, user, amount, resolver): <NEW_LINE> <INDENT> pool = self._bookie.LookupBets(self.name, resolver=resolver) <NEW_LINE> if user in pool: <NEW_LINE> <INDENT> del pool[user] <NEW_LINE> <DEDENT> jackpot, item = self.ComputeCurrentJackpot(pool) <NEW_LINE> pool_value = jackpot + item.value <NEW_LINE> max_bet = int((self._MAX_BET_PERCENT * pool_value) / (1 - self._MAX_BET_PERCENT * (1 - self._BOOKIE_PERCENT))) <NEW_LINE> return min(amount, max_bet) <NEW_LINE> <DEDENT> def TakeBet(self, bet): <NEW_LINE> <INDENT> if re.match(r'(the )?(lottery|lotto|raffle|jackpot)', bet.target): <NEW_LINE> <INDENT> bet.target = bet.resolver <NEW_LINE> bet.amount = self.CapBet(bet.user, bet.amount, bet.resolver) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def FormatBet(self, bet): <NEW_LINE> <INDENT> return inflect_lib.Plural(bet.amount, '%s ticket' % self.name) <NEW_LINE> <DEDENT> def SettleBets(self, pool, msg_fn, *args, **kwargs): <NEW_LINE> <INDENT> pool_value = sum(user_bets[0].amount for user_bets in pool.values()) <NEW_LINE> if not pool_value: <NEW_LINE> <INDENT> return ([], {}) <NEW_LINE> <DEDENT> msg_fn(None, 'All 7-11s are closed! %s bet %s on the lottery' % (inflect_lib.Plural(len(pool), 'pleb'), util_lib.FormatHypecoins(pool_value))) <NEW_LINE> coins, item = self.ComputeCurrentJackpot(pool) <NEW_LINE> winning_number = random.randint(1, pool_value) <NEW_LINE> ticket_number = 0 <NEW_LINE> for user, user_bets in pool.items(): <NEW_LINE> <INDENT> num_tickets = user_bets[0].amount <NEW_LINE> ticket_number += num_tickets <NEW_LINE> if ticket_number >= winning_number: <NEW_LINE> <INDENT> msg_fn(user, [ 'You\'ve won %s in the lottery!' % util_lib.FormatHypecoins(coins), ('We\'ve always been such close friends. Can I borrow some money ' 'for rent?') ]) <NEW_LINE> msg_fn(None, '%s won %s and a(n) %s in the lottery!' % ( user, util_lib.FormatHypecoins(coins), item.human_name)) <NEW_LINE> return ([(user, coins), (user, item)], {}) <NEW_LINE> <DEDENT> <DEDENT> return ([], {}) <NEW_LINE> <DEDENT> def ComputeCurrentJackpot(self, pool): <NEW_LINE> <INDENT> pool_value = 0 <NEW_LINE> for user_bets in pool.values(): <NEW_LINE> <INDENT> pool_value += sum(bet.amount for bet in user_bets) <NEW_LINE> <DEDENT> return (int(math.floor(pool_value * (1 - self._BOOKIE_PERCENT))), self._winning_item)
Betting on the lottery! The more you bet, the greater your chance!
6259904923849d37ff852478
class PastMeetings(Client): <NEW_LINE> <INDENT> path = "/past_meetings/{meetingId}" <NEW_LINE> @validation <NEW_LINE> def instances(self, meetingId: int): <NEW_LINE> <INDENT> query = get_arguments(locals()) <NEW_LINE> path = self.path.format(**query) + "/instances" <NEW_LINE> return self.request(method="get", path=path) <NEW_LINE> <DEDENT> @validation <NEW_LINE> def participants( self, meetingId: int, page_size: int = 0, next_page_token: str = None ): <NEW_LINE> <INDENT> query = get_arguments(locals()) <NEW_LINE> path = self.path.format(**query) + "/participants" <NEW_LINE> del query["meetingId"] <NEW_LINE> return self.request(method="get", path=path, query=query) <NEW_LINE> <DEDENT> @validation <NEW_LINE> def details(self, meetingId: int): <NEW_LINE> <INDENT> query = get_arguments(locals()) <NEW_LINE> path = self.path.format(**query) <NEW_LINE> return self.request(method="get", path=path)
@namespace Zoom.Meetings.PastMeetings @class PastMeetings
6259904926238365f5fadf18
class GeneralizationMixin(object): <NEW_LINE> <INDENT> def __init__(self, general=None, generalizationSet=None, isSubstitutable=None, specific=None, **kwargs): <NEW_LINE> <INDENT> super(GeneralizationMixin, self).__init__(**kwargs)
User defined mixin class for Generalization.
625990498a43f66fc4bf3551
class AIMod(CoreService): <NEW_LINE> <INDENT> _name = "AdsbFirewall" <NEW_LINE> _group = "Security" <NEW_LINE> _depends = () <NEW_LINE> _dirs = () <NEW_LINE> _configs = ('aimod.cfg', 'aimod.sh') <NEW_LINE> _startindex = 50 <NEW_LINE> _startup = ('sh aimod.sh',) <NEW_LINE> _shutdown = ('pkill python',) <NEW_LINE> @classmethod <NEW_LINE> def generateconfig(cls, node, filename, services): <NEW_LINE> <INDENT> cfg = "" <NEW_LINE> if filename == "aimod.sh": <NEW_LINE> <INDENT> cfg += "#!/bin/sh\n" <NEW_LINE> cfg += "# auto-generated by AIMod (aimod.py)\n" <NEW_LINE> cfg += "sleep 30\n" <NEW_LINE> cfg += "python -m atn.surveillance.adsb.security.wmlat_filter\n" <NEW_LINE> <DEDENT> elif filename == "aimod.cfg": <NEW_LINE> <INDENT> cfg += "[General]\n" <NEW_LINE> cfg += "id = %s\n" % node.name <NEW_LINE> cfg += "\n" <NEW_LINE> cfg += "; DB where unprocessed messsages are stored\n" <NEW_LINE> cfg += "dbname = atn_sim\n" <NEW_LINE> cfg += "dbuser = atn_sim\n" <NEW_LINE> cfg += "dbpass = atn_sim\n" <NEW_LINE> cfg += "dbhost = 172.17.255.254\n" <NEW_LINE> cfg += "\n" <NEW_LINE> cfg += "; Destination(s) of reliable messages\n" <NEW_LINE> cfg += "destinations = Dump1090\n" <NEW_LINE> cfg += "\n" <NEW_LINE> cfg += "[Dump1090]\n" <NEW_LINE> cfg += "type = dump1090\n" <NEW_LINE> cfg += "server = 127.0.0.1\n" <NEW_LINE> cfg += "port = 30001\n" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cfg += "# %s not defined\n" % filename <NEW_LINE> <DEDENT> return cfg <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def subnetentry(x): <NEW_LINE> <INDENT> if x.find(":") >= 0: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> net = IPv4Prefix(x) <NEW_LINE> return 'echo " network %s"' % (net)
This is a sample user-defined service.
6259904945492302aabfd88e
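The generateconfig classmethod above only reads node.name, so it can be exercised outside CORE with a stub node (a sketch under that assumption; it also presumes the module's CoreService/IPv4Prefix imports are available):

from collections import namedtuple

Node = namedtuple("Node", "name")  # stand-in for a CORE node; only .name is used here
cfg_text = AIMod.generateconfig(Node(name="n1"), "aimod.cfg", services=None)
print(cfg_text)  # the [General]/[Dump1090] INI block with id = n1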
class SimpleClassifier(_BaseSimpleEstimator, ClassifierMixin): <NEW_LINE> <INDENT> def __init__(self, refit=True, random_state=None, verbose=1, type_hints=None, shuffle=True): <NEW_LINE> <INDENT> self.verbose = verbose <NEW_LINE> self.random_state = random_state <NEW_LINE> self.refit = refit <NEW_LINE> self.type_hints = type_hints <NEW_LINE> self.shuffle = shuffle <NEW_LINE> <DEDENT> def _get_estimators(self): <NEW_LINE> <INDENT> return get_fast_classifiers(n_classes=len(self.classes_)) <NEW_LINE> <DEDENT> def _preprocess_target(self, y): <NEW_LINE> <INDENT> target_type = type_of_target(y) <NEW_LINE> le = LabelEncoder().fit(y) <NEW_LINE> y = pd.Series(y) <NEW_LINE> self.classes_ = le.classes_ <NEW_LINE> if target_type == "binary": <NEW_LINE> <INDENT> minority_class = y.value_counts().index[1] <NEW_LINE> my_average_precision_scorer = make_scorer( average_precision_score, pos_label=minority_class, needs_threshold=True) <NEW_LINE> scoring = {'accuracy': 'accuracy', 'average_precision': my_average_precision_scorer, 'roc_auc': 'roc_auc', 'recall_macro': 'recall_macro', 'f1_macro': 'f1_macro' } <NEW_LINE> <DEDENT> elif target_type == "multiclass": <NEW_LINE> <INDENT> scoring = ['accuracy', 'recall_macro', 'precision_macro', 'f1_macro'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Unknown target type: {}".format(target_type)) <NEW_LINE> <DEDENT> return y, scoring <NEW_LINE> <DEDENT> def fit(self, X, y=None, *, target_col=None): <NEW_LINE> <INDENT> self._rank_scoring = "recall_macro" <NEW_LINE> return self._fit(X=X, y=y, target_col=target_col)
Automagic anytime classifier. Parameters ---------- refit : boolean, True Whether to refit the model on the full dataset. random_state : random state, int or None (default=None) Random state or seed. verbose : integer, default=1 Verbosity (higher is more output). type_hints : dict or None If dict, provide type information for columns. Keys are column names, values are types as provided by detect_types. shuffle : boolean, default=True Whether to shuffle the training set in cross-validation. Attributes ---------- est_ : sklearn estimator Best estimator found.
62599049a8ecb033258725ce
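A hedged usage sketch against the SimpleClassifier API shown in this record (the DataFrame and column names are illustrative, and it assumes the inherited _fit accepts a DataFrame plus a target_col keyword, as the fit signature suggests):

import pandas as pd

df = pd.DataFrame({
    "x1": [0.1, 0.4, 0.35, 0.8, 0.9, 0.2],
    "x2": [1, 0, 1, 0, 1, 0],
    "target": ["a", "b", "a", "b", "b", "a"],
})

clf = SimpleClassifier(random_state=0, verbose=0)
clf.fit(df, target_col="target")  # binary target: average_precision and roc_auc scoring apply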
class BeeWander: <NEW_LINE> <INDENT> def __init__(self, bee_name): <NEW_LINE> <INDENT> self.__bee = bee.Bee(name = bee_name) <NEW_LINE> <DEDENT> def go_straight(self): <NEW_LINE> <INDENT> self.__bee.set_vel(0.5,0.5) <NEW_LINE> <DEDENT> def turn_left(self): <NEW_LINE> <INDENT> self.__bee.set_vel(-0.1,0.1) <NEW_LINE> <DEDENT> def turn_right(self): <NEW_LINE> <INDENT> self.__bee.set_vel(0.1,-0.1) <NEW_LINE> <DEDENT> def wander(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> self.go_straight() <NEW_LINE> while ((self.__bee.get_range(bee.OBJECT_FRONT) < 3) and (self.__bee.get_range(bee.OBJECT_RIGHT_FRONT) < 4)): <NEW_LINE> <INDENT> self.turn_left() <NEW_LINE> <DEDENT> while ((self.__bee.get_range(bee.OBJECT_FRONT) < 3) and (self.__bee.get_range(bee.OBJECT_LEFT_FRONT) < 4)): <NEW_LINE> <INDENT> self.turn_right()
A demo bee controller. A simple example of using the Bee-API.
625990493617ad0b5ee074f9
class TrackContainmentMode: <NEW_LINE> <INDENT> def __init__(self, orb): <NEW_LINE> <INDENT> self.orb = orb <NEW_LINE> self.b_in_menu = True <NEW_LINE> <DEDENT> def on_menu_focus(self): <NEW_LINE> <INDENT> self.b_in_menu = True <NEW_LINE> <DEDENT> def on_o_game_focus(self): <NEW_LINE> <INDENT> self.b_in_menu = False <NEW_LINE> <DEDENT> def is_focus_on_menu(self): <NEW_LINE> <INDENT> return self.b_in_menu <NEW_LINE> <DEDENT> def is_focus_on_game(self): <NEW_LINE> <INDENT> return not self.b_in_menu
Tracks whether we are in the menu or not.
62599049d7e4931a7ef3d433
class TestCompareXLSXFiles(ExcelComparisonTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.set_filename('image21.xlsx') <NEW_LINE> <DEDENT> def test_create_file(self): <NEW_LINE> <INDENT> workbook = Workbook(self.got_filename) <NEW_LINE> worksheet = workbook.add_worksheet() <NEW_LINE> worksheet.write(2000, 0, "Here") <NEW_LINE> worksheet.insert_image(2000, 1, self.image_dir + 'logo.png') <NEW_LINE> workbook.close() <NEW_LINE> self.assertExcelEqual()
Test file created by XlsxWriter against a file created by Excel.
62599049cad5886f8bdc5a5d
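The regression test above writes a cell at row 2000 and anchors an image next to it; a minimal standalone sketch of the same XlsxWriter calls (output filename and image path are illustrative):

import xlsxwriter

workbook = xlsxwriter.Workbook("image_demo.xlsx")
worksheet = workbook.add_worksheet()
worksheet.write(2000, 0, "Here")             # row 2000, column A
worksheet.insert_image(2000, 1, "logo.png")  # anchor the image at column B
workbook.close()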
class FlexResourceSchedulingGoalValueValuesEnum(_messages.Enum): <NEW_LINE> <INDENT> FLEXRS_UNSPECIFIED = 0 <NEW_LINE> FLEXRS_SPEED_OPTIMIZED = 1 <NEW_LINE> FLEXRS_COST_OPTIMIZED = 2
Which Flexible Resource Scheduling mode to run in. Values: FLEXRS_UNSPECIFIED: Run in the default mode. FLEXRS_SPEED_OPTIMIZED: Optimize for lower execution time. FLEXRS_COST_OPTIMIZED: Optimize for lower cost.
625990497cff6e4e811b6df7