Columns: code (string, lengths 4–4.48k), docstring (string, lengths 1–6.45k), _id (string, length 24)
@view_auth_classes() <NEW_LINE> class CourseGradingPolicy(GradeViewMixin, ListAPIView): <NEW_LINE> <INDENT> allow_empty = False <NEW_LINE> def get(self, request, course_id, **kwargs): <NEW_LINE> <INDENT> course = self._get_course(request, course_id, request.user, 'staff') <NEW_LINE> if isinstance(course, Response): <NEW_LINE> <INDENT> return course <NEW_LINE> <DEDENT> return Response(GradingPolicySerializer(course.raw_grader, many=True).data)
**Use Case** Get the course grading policy. **Example requests**: GET /api/grades/v0/policy/{course_id}/ **Response Values** * assignment_type: The type of the assignment, as configured by course staff. For example, course staff might make the assignment types Homework, Quiz, and Exam. * count: The number of assignments of the type. * dropped: Number of assignments of the type that are dropped. * weight: The weight, or effect, of the assignment type on the learner's final grade.
62599076379a373c97d9a982
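A minimal sketch of calling the endpoint described above with the requests library; the host, course id, and authentication header are placeholder assumptions, not values from the source.

import requests

# Hypothetical host and course id; staff credentials are assumed.
url = "https://lms.example.com/api/grades/v0/policy/course-v1:OrgX+Demo+2024/"
resp = requests.get(url, headers={"Authorization": "JWT <access-token>"})
for entry in resp.json():
    # Fields per the response values listed in the docstring.
    print(entry["assignment_type"], entry["count"], entry["dropped"], entry["weight"])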
class GetMaintenanceScheduleInfo(Base): <NEW_LINE> <INDENT> def __init__(self, ): <NEW_LINE> <INDENT> super(GetMaintenanceScheduleInfo, self).__init__( module='admin', cls='GetMaintenanceScheduleInfo', fn='get', args={ })
class: veracode.SDK.admin.GetMaintenanceScheduleInfo params: returns: A python object that represents the returned API data.
62599076167d2b6e312b8241
class Exploit(BaseExploit): <NEW_LINE> <INDENT> target_protocol = Protocol.CUSTOM <NEW_LINE> def run(self): <NEW_LINE> <INDENT> raise NotImplementedError("You have to define your own 'run' method.") <NEW_LINE> <DEDENT> def check(self): <NEW_LINE> <INDENT> raise NotImplementedError("You have to define your own 'check' method.") <NEW_LINE> <DEDENT> def run_threads(self, threads_number, target_function, *args, **kwargs): <NEW_LINE> <INDENT> threads = [] <NEW_LINE> threads_running = threading.Event() <NEW_LINE> threads_running.set() <NEW_LINE> for thread_id in range(int(threads_number)): <NEW_LINE> <INDENT> thread = threading.Thread( target=target_function, args=chain((threads_running,), args), kwargs=kwargs, name="thread-{}".format(thread_id), ) <NEW_LINE> threads.append(thread) <NEW_LINE> thread.start() <NEW_LINE> <DEDENT> start = time.time() <NEW_LINE> try: <NEW_LINE> <INDENT> while thread.isAlive(): <NEW_LINE> <INDENT> thread.join(1) <NEW_LINE> <DEDENT> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> threads_running.clear() <NEW_LINE> <DEDENT> for thread in threads: <NEW_LINE> <INDENT> thread.join() <NEW_LINE> <DEDENT> print_status("Elapsed time: {0:.4f} seconds".format(round(time.time() - start, 2)))
Base class for exploits
6259907621bff66bcd7245c9
class ofdpaDropStatusEntry_t(_object): <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> __setattr__ = lambda self, name, value: _swig_setattr(self, ofdpaDropStatusEntry_t, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> __getattr__ = lambda self, name: _swig_getattr(self, ofdpaDropStatusEntry_t, name) <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> __swig_setmethods__["lmepId"] = _OFDPA_python.ofdpaDropStatusEntry_t_lmepId_set <NEW_LINE> __swig_getmethods__["lmepId"] = _OFDPA_python.ofdpaDropStatusEntry_t_lmepId_get <NEW_LINE> if _newclass:lmepId = _swig_property(_OFDPA_python.ofdpaDropStatusEntry_t_lmepId_get, _OFDPA_python.ofdpaDropStatusEntry_t_lmepId_set) <NEW_LINE> __swig_setmethods__["type"] = _OFDPA_python.ofdpaDropStatusEntry_t_type_set <NEW_LINE> __swig_getmethods__["type"] = _OFDPA_python.ofdpaDropStatusEntry_t_type_get <NEW_LINE> if _newclass:type = _swig_property(_OFDPA_python.ofdpaDropStatusEntry_t_type_get, _OFDPA_python.ofdpaDropStatusEntry_t_type_set) <NEW_LINE> __swig_setmethods__["dropAction"] = _OFDPA_python.ofdpaDropStatusEntry_t_dropAction_set <NEW_LINE> __swig_getmethods__["dropAction"] = _OFDPA_python.ofdpaDropStatusEntry_t_dropAction_get <NEW_LINE> if _newclass:dropAction = _swig_property(_OFDPA_python.ofdpaDropStatusEntry_t_dropAction_get, _OFDPA_python.ofdpaDropStatusEntry_t_dropAction_set) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> this = _OFDPA_python.new_ofdpaDropStatusEntry_t() <NEW_LINE> try: self.this.append(this) <NEW_LINE> except: self.this = this <NEW_LINE> <DEDENT> __swig_destroy__ = _OFDPA_python.delete_ofdpaDropStatusEntry_t <NEW_LINE> __del__ = lambda self : None;
Proxy of C ofdpaDropStatusEntry_s struct
625990764e4d562566373d65
class GridMDP(MDP): <NEW_LINE> <INDENT> def __init__(self, grid, terminals, init=(0, 0), gamma=.9): <NEW_LINE> <INDENT> grid.reverse() <NEW_LINE> MDP.__init__(self, init, actlist=orientations, terminals=terminals, gamma=gamma) <NEW_LINE> update(self, grid=grid, rows=len(grid), cols=len(grid[0])) <NEW_LINE> for x in range(self.cols): <NEW_LINE> <INDENT> for y in range(self.rows): <NEW_LINE> <INDENT> self.reward[x, y] = grid[y][x] <NEW_LINE> if grid[y][x] is not None: <NEW_LINE> <INDENT> self.states.add((x, y)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def T(self, state, action): <NEW_LINE> <INDENT> jumps = [(2,0), (-2,0), (0,2), (0,-2)] <NEW_LINE> if action == None: <NEW_LINE> <INDENT> return [(0.0, state)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if action in jumps: <NEW_LINE> <INDENT> return [(0.5, self.go(state, action)), (0.5, state)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [(0.8, self.go(state, action)), (0.1, self.go(state, turn_right(action))), (0.1, self.go(state, turn_left(action)))] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def go(self, state, direction): <NEW_LINE> <INDENT> if direction is not None: <NEW_LINE> <INDENT> state1 = vector_add(state, direction) <NEW_LINE> return if_(state1 in self.states, state1, state) <NEW_LINE> <DEDENT> <DEDENT> def to_grid(self, mapping): <NEW_LINE> <INDENT> return list(reversed([[mapping.get((x,y), None) for x in range(self.cols)] for y in range(self.rows)])) <NEW_LINE> <DEDENT> def to_arrows(self, policy): <NEW_LINE> <INDENT> chars = {(1, 0):'>', (0, 1):'^', (-1, 0):'<', (0, -1):'v', None: '.'} <NEW_LINE> return self.to_grid(dict([(s, chars[a]) for (s, a) in policy.items()]))
A two-dimensional grid MDP, as in [Figure 17.1]. All you have to do is specify the grid as a list of lists of rewards; use None for an obstacle (unreachable state). Also, you should specify the terminal states. An action is an (x, y) unit vector; e.g. (1, 0) means move east.
625990765fdd1c0f98e5f8de
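A usage sketch for the docstring above, assuming the surrounding module provides the helpers the class relies on (orientations, turn_left/turn_right, vector_add); the 4x3 layout is only an illustration.

# Rewards grid: None is an obstacle, terminals are given as (x, y) coordinates.
grid = [[-0.04, -0.04, -0.04, +1],
        [-0.04, None,  -0.04, -1],
        [-0.04, -0.04, -0.04, -0.04]]
mdp = GridMDP(grid, terminals=[(3, 2), (3, 1)])
# Moving east from (0, 0): 0.8 intended direction, 0.1 for each perpendicular slip.
print(mdp.T((0, 0), (1, 0)))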
class CreateHTMLConfigData(_CreateBasicHTMLData): <NEW_LINE> <INDENT> def __init__(self, current_config): <NEW_LINE> <INDENT> super().__init__("Config", current_config) <NEW_LINE> <DEDENT> def get_sensor_data(self, ip): <NEW_LINE> <INDENT> command_data = sensor_commands.CreateSensorNetworkCommand(ip, self.network_timeout, network_get_commands.system_data) <NEW_LINE> sensor_system = sensor_commands.get_data(command_data).split(",") <NEW_LINE> try: <NEW_LINE> <INDENT> sensor_system[3] = app_useful_functions.convert_minutes_string(sensor_system[3]) <NEW_LINE> <DEDENT> except Exception as error: <NEW_LINE> <INDENT> app_logger.app_logger.error("Sensor Config Report: " + str(error)) <NEW_LINE> <DEDENT> command_data.command = network_get_commands.sensor_configuration <NEW_LINE> sensor_config = sensor_commands.get_data(command_data).split(",") <NEW_LINE> final_sensor_config = [str(sensor_system[0]), str(sensor_system[1]), str(sensor_system[2]), str(sensor_config[0]), str(sensor_config[1]), str(sensor_config[2]), str(sensor_config[3]), str(sensor_config[4]), str(sensor_config[5])] <NEW_LINE> self.data_queue.put([ip, final_sensor_config])
Create a Configuration HTML Report data object.
625990763d592f4c4edbc80d
class Order(BaseOrderInfo): <NEW_LINE> <INDENT> SUBMITTED = 1 <NEW_LINE> PROCESSED = 2 <NEW_LINE> SHIPPED = 3 <NEW_LINE> CANCELLED = 4 <NEW_LINE> ORDER_STATUSES = ((SUBMITTED, _(u'Submitted')), (PROCESSED, _(u'Processed')), (SHIPPED, _(u'Shipped')), (CANCELLED, _(u'Cancelled')),) <NEW_LINE> date = models.DateTimeField(auto_now_add=True) <NEW_LINE> status = models.IntegerField(choices=ORDER_STATUSES, default=SUBMITTED) <NEW_LINE> ip_address = models.IPAddressField() <NEW_LINE> last_updated = models.DateTimeField(auto_now=True) <NEW_LINE> user = models.ForeignKey(User, null=True) <NEW_LINE> transaction_id = models.CharField(max_length=20) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return _(u'Order #') + str(self.id) <NEW_LINE> <DEDENT> @property <NEW_LINE> def total(self): <NEW_LINE> <INDENT> total = decimal.Decimal('0.00') <NEW_LINE> order_items = OrderItem.objects.filter(order=self) <NEW_LINE> for item in order_items: <NEW_LINE> <INDENT> total += item.total <NEW_LINE> <DEDENT> return total <NEW_LINE> <DEDENT> @permalink <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return ('order_details', (), { 'order_id': self.id })
Class for an order
6259907644b2445a339b760e
class XIVDS8KDriver(san.SanDriver): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(XIVDS8KDriver, self).__init__(*args, **kwargs) <NEW_LINE> self.configuration.append_config_values(xiv_ds8k_opts) <NEW_LINE> proxy = importutils.import_class(self.configuration.xiv_ds8k_proxy) <NEW_LINE> self.xiv_ds8k_proxy = proxy( { "xiv_ds8k_user": self.configuration.san_login, "xiv_ds8k_pass": self.configuration.san_password, "xiv_ds8k_address": self.configuration.san_ip, "xiv_ds8k_vol_pool": self.configuration.san_clustername, "xiv_ds8k_connection_type": self.configuration.xiv_ds8k_connection_type }, LOG, exception, driver=self) <NEW_LINE> <DEDENT> def do_setup(self, context): <NEW_LINE> <INDENT> self.xiv_ds8k_proxy.setup(context) <NEW_LINE> <DEDENT> def ensure_export(self, context, volume): <NEW_LINE> <INDENT> return self.xiv_ds8k_proxy.ensure_export(context, volume) <NEW_LINE> <DEDENT> def create_export(self, context, volume): <NEW_LINE> <INDENT> return self.xiv_ds8k_proxy.create_export(context, volume) <NEW_LINE> <DEDENT> def create_volume(self, volume): <NEW_LINE> <INDENT> return self.xiv_ds8k_proxy.create_volume(volume) <NEW_LINE> <DEDENT> def delete_volume(self, volume): <NEW_LINE> <INDENT> self.xiv_ds8k_proxy.delete_volume(volume) <NEW_LINE> <DEDENT> def remove_export(self, context, volume): <NEW_LINE> <INDENT> return self.xiv_ds8k_proxy.remove_export(context, volume) <NEW_LINE> <DEDENT> def initialize_connection(self, volume, connector): <NEW_LINE> <INDENT> return self.xiv_ds8k_proxy.initialize_connection(volume, connector) <NEW_LINE> <DEDENT> def terminate_connection(self, volume, connector, **kwargs): <NEW_LINE> <INDENT> return self.xiv_ds8k_proxy.terminate_connection(volume, connector) <NEW_LINE> <DEDENT> def create_volume_from_snapshot(self, volume, snapshot): <NEW_LINE> <INDENT> return self.xiv_ds8k_proxy.create_volume_from_snapshot( volume, snapshot) <NEW_LINE> <DEDENT> def create_snapshot(self, snapshot): <NEW_LINE> <INDENT> return self.xiv_ds8k_proxy.create_snapshot(snapshot) <NEW_LINE> <DEDENT> def delete_snapshot(self, snapshot): <NEW_LINE> <INDENT> return self.xiv_ds8k_proxy.delete_snapshot(snapshot) <NEW_LINE> <DEDENT> def get_volume_stats(self, refresh=False): <NEW_LINE> <INDENT> return self.xiv_ds8k_proxy.get_volume_stats(refresh) <NEW_LINE> <DEDENT> def create_cloned_volume(self, tgt_volume, src_volume): <NEW_LINE> <INDENT> return self.xiv_ds8k_proxy.create_cloned_volume(tgt_volume, src_volume) <NEW_LINE> <DEDENT> def extend_volume(self, volume, new_size): <NEW_LINE> <INDENT> self.xiv_ds8k_proxy.extend_volume(volume, new_size) <NEW_LINE> <DEDENT> def migrate_volume(self, context, volume, host): <NEW_LINE> <INDENT> return self.xiv_ds8k_proxy.migrate_volume(context, volume, host)
Unified IBM XIV and DS8K volume driver.
625990767cff6e4e811b73a0
class CreateNotificationConfigurationResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.AutoScalingNotificationId = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.AutoScalingNotificationId = params.get("AutoScalingNotificationId") <NEW_LINE> self.RequestId = params.get("RequestId")
CreateNotificationConfiguration response structure.
625990761f5feb6acb164556
class BlockQuote(WebElement): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> tagName = "blockquote" <NEW_LINE> properties = WebElement.properties.copy() <NEW_LINE> properties['cite'] = {'action':'attribute'}
Defines a section that is quoted from another source
625990768a43f66fc4bf3af6
class Encode(Transform): <NEW_LINE> <INDENT> function = 'ENCODE' <NEW_LINE> arity = 2 <NEW_LINE> def __init__(self, *expressions, **extra): <NEW_LINE> <INDENT> raise NotSupportedError('This function is not implemented in ' 'the current version.')
Encode(data bytea, format text) :return text:
6259907663b5f9789fe86ac6
class DeckGeoJson(BaseDeckGLViz): <NEW_LINE> <INDENT> viz_type = "deck_geojson" <NEW_LINE> verbose_name = _("Deck.gl - GeoJSON") <NEW_LINE> def query_obj(self) -> QueryObjectDict: <NEW_LINE> <INDENT> query_obj = super().query_obj() <NEW_LINE> query_obj["columns"] += [self.form_data.get("geojson")] <NEW_LINE> query_obj["metrics"] = [] <NEW_LINE> query_obj["groupby"] = [] <NEW_LINE> return query_obj <NEW_LINE> <DEDENT> def get_properties(self, data: Dict[str, Any]) -> Dict[str, Any]: <NEW_LINE> <INDENT> geojson = data[get_column_name(self.form_data["geojson"])] <NEW_LINE> return json.loads(geojson)
deck.gl's GeoJSONLayer
6259907697e22403b383c863
class CTD_ANON_35 (pyxb.binding.basis.complexTypeDefinition): <NEW_LINE> <INDENT> _TypeDefinition = None <NEW_LINE> _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY <NEW_LINE> _Abstract = False <NEW_LINE> _ExpandedName = None <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/home/user/WORK/NRP/RobotDesigner/dev/BlenderRobotDesigner/robot_designer_plugin/resources/xsd_sdf/imu.xsd', 50, 16) <NEW_LINE> _ElementMap = {} <NEW_LINE> _AttributeMap = {} <NEW_LINE> _ElementMap.update({ }) <NEW_LINE> _AttributeMap.update({ })
Angular velocity about the Y axis
62599076097d151d1a2c29d5
class IdChecker: <NEW_LINE> <INDENT> name = "flake8_oist_steps" <NEW_LINE> version = "0.0.1" <NEW_LINE> tet_id_mesg = ( "E421 consider using steps.geom.UNKNOWN_TET" " constant instead of -1." ) <NEW_LINE> tri_id_mesg = ( "E422 consider using steps.geom.UNKNOWN_TRI" " constant instead of -1." ) <NEW_LINE> def __init__(self, tree, filename): <NEW_LINE> <INDENT> self._tree = tree <NEW_LINE> self._filename = filename <NEW_LINE> <DEDENT> @property <NEW_LINE> def tree(self): <NEW_LINE> <INDENT> return self._tree <NEW_LINE> <DEDENT> @property <NEW_LINE> def filename(self): <NEW_LINE> <INDENT> return self._filename <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> tree = self.tree <NEW_LINE> if self.filename == "stdin": <NEW_LINE> <INDENT> lines = stdin_utils.stdin_get_value() <NEW_LINE> tree = ast.parse(lines) <NEW_LINE> <DEDENT> visitor = CheckVisitor() <NEW_LINE> visitor.visit(tree) <NEW_LINE> for error in visitor.errors: <NEW_LINE> <INDENT> yield error
Flake8 checker to enforce usage of STEPS simulator
6259907697e22403b383c864
class BeliefUpdateNodeModel(BayesianModel): <NEW_LINE> <INDENT> def __init__(self, nodes_dict): <NEW_LINE> <INDENT> super().__init__(edges=self._get_edges_from_nodes(nodes_dict.values()), variables=list(nodes_dict.keys()), cpds=[node.cpd for node in nodes_dict.values()]) <NEW_LINE> self.nodes_dict = nodes_dict <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def init_from_edges(cls, edges, node_class): <NEW_LINE> <INDENT> nodes = set() <NEW_LINE> g = nx.DiGraph(edges) <NEW_LINE> for label in set(itertools.chain(*edges)): <NEW_LINE> <INDENT> node = node_class(label_id=label, children=list(g.successors(label)), parents=list(g.predecessors(label))) <NEW_LINE> nodes.add(node) <NEW_LINE> <DEDENT> nodes_dict = {node.label_id: node for node in nodes} <NEW_LINE> return cls(nodes_dict) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _get_edges_from_nodes(nodes): <NEW_LINE> <INDENT> edges = set() <NEW_LINE> for node in nodes: <NEW_LINE> <INDENT> if node.parents: <NEW_LINE> <INDENT> edge_tuples = zip(node.parents, [node.label_id]*len(node.parents)) <NEW_LINE> edges.update(edge_tuples) <NEW_LINE> <DEDENT> <DEDENT> return list(edges) <NEW_LINE> <DEDENT> def set_boundary_conditions(self): <NEW_LINE> <INDENT> for root in self.get_roots(): <NEW_LINE> <INDENT> self.nodes_dict[root].update_pi_agg(self.nodes_dict[root].cpd.values) <NEW_LINE> <DEDENT> for leaf in self.get_leaves(): <NEW_LINE> <INDENT> self.nodes_dict[leaf].update_lambda_agg(np.ones([self.nodes_dict[leaf].cardinality])) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def all_nodes_are_fully_initialized(self): <NEW_LINE> <INDENT> for node in self.nodes_dict.values(): <NEW_LINE> <INDENT> if not node.is_fully_initialized: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> copy_nodes = copy.deepcopy(self.nodes_dict) <NEW_LINE> copy_model = self.__class__(nodes_dict=copy_nodes) <NEW_LINE> return copy_model
A Bayesian model storing nodes (e.g. Node or BernoulliOrNode) implementing properties and methods for Pearl's belief update algorithm. ref: "Fusion, Propagation, and Structuring in Belief Networks" Artificial Intelligence 29 (1986) 241-288
62599076be8e80087fbc09f4
class User(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> email = models.EmailField(max_length=255, unique=True) <NEW_LINE> name = models.CharField(max_length=255) <NEW_LINE> is_active = models.BooleanField(default=True) <NEW_LINE> is_staff = models.BooleanField(default=False) <NEW_LINE> objects = UserManager() <NEW_LINE> USERNAME_FIELD = 'email'
Custom user model that supports using email instead of username
62599076fff4ab517ebcf178
class ZoomApiService(CallService): <NEW_LINE> <INDENT> ZOOM_API_URL = "https://api.zoom.us/v2/users/" <NEW_LINE> ZOOM_GET_USER_API = "https://api.zoom.us/v2/users/me" <NEW_LINE> def __init__(self, team_id): <NEW_LINE> <INDENT> self.team_id = team_id <NEW_LINE> self.zoom: Zoom = DynamoUtils.get_zoom_data(team_id) <NEW_LINE> if not self.zoom.is_valid(): <NEW_LINE> <INDENT> raise Exception("No call integration for team") <NEW_LINE> <DEDENT> <DEDENT> def __create_request_header(self): <NEW_LINE> <INDENT> headers = { "Content-Type": "application/json", "Authorization": "Bearer " + self.__get_access_token(), } <NEW_LINE> return headers <NEW_LINE> <DEDENT> def create_call(self) -> Integration: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> headers = self.__create_request_header() <NEW_LINE> user_id = self.__get_user_id(headers) <NEW_LINE> if user_id is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> request_body = { "type": 2, "settings": {"join_before_host": True, "jbh_time": 0}, } <NEW_LINE> r = requests.post( self.ZOOM_API_URL + user_id + "/meetings", headers=headers, data=json.dumps(request_body), ) <NEW_LINE> response = {"response_code": r.status_code} <NEW_LINE> if r.status_code == 201: <NEW_LINE> <INDENT> call_link = r.json().get("join_url") <NEW_LINE> self.zoom.link = call_link <NEW_LINE> return self.zoom <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.error(f"Zoom meeting could not be created {e}") <NEW_LINE> return None <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.error(f"Zoom meeting could not be created {e}") <NEW_LINE> return None <NEW_LINE> <DEDENT> <DEDENT> def __get_user_id(self, headers): <NEW_LINE> <INDENT> r = requests.get(self.ZOOM_GET_USER_API, headers=headers) <NEW_LINE> return r.json().get("id") <NEW_LINE> <DEDENT> def __get_access_token(self) -> str: <NEW_LINE> <INDENT> if ( self.zoom.token_data.expiry_date is not None and self.zoom.token_data.is_access_token_expired() is False ): <NEW_LINE> <INDENT> return self.zoom.token_data.access_token <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.zoom.token_data.refresh_token is not None: <NEW_LINE> <INDENT> logger.info("Token expired, refreshing...") <NEW_LINE> self.zoom.token_data = ZoomOauthService.refresh_access_token( self.zoom.token_data ) <NEW_LINE> DynamoUtils.save_zoom_data(self.team_id, self.zoom) <NEW_LINE> logger.info("Token refreshed and saved") <NEW_LINE> return self.zoom.token_data.access_token <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception()
Service to handle zoom api calls
625990763317a56b869bf1f6
class ConnectorError(MongoConnectorError): <NEW_LINE> <INDENT> pass
Raised when creating a mongo_connector.Connector object with nonsensical parameters
6259907623849d37ff852a19
class RHCreateReferenceMixin: <NEW_LINE> <INDENT> def _process_args(self): <NEW_LINE> <INDENT> self.reference_value = request.form['value'] <NEW_LINE> reference_type_name = request.form['type'] <NEW_LINE> self.reference_type = (ReferenceType.query .filter(db.func.lower(ReferenceType.name) == reference_type_name.lower()) .one()) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def jsonify_reference(reference): <NEW_LINE> <INDENT> return jsonify(id=reference.id)
Common methods for RH classes creating a ContributionReference or SubContributionReference.
625990764527f215b58eb651
class TreeItemAttr: <NEW_LINE> <INDENT> def __init__(self, colText=wx.NullColour, colBack=wx.NullColour, font=wx.NullFont): <NEW_LINE> <INDENT> self._colText = colText <NEW_LINE> self._colBack = colBack <NEW_LINE> self._font = font <NEW_LINE> <DEDENT> def SetTextColour(self, colText): <NEW_LINE> <INDENT> self._colText = colText <NEW_LINE> <DEDENT> def SetBackgroundColour(self, colBack): <NEW_LINE> <INDENT> self._colBack = colBack <NEW_LINE> <DEDENT> def SetFont(self, font): <NEW_LINE> <INDENT> self._font = font <NEW_LINE> <DEDENT> def HasTextColour(self): <NEW_LINE> <INDENT> return self._colText.IsOk() <NEW_LINE> <DEDENT> def HasBackgroundColour(self): <NEW_LINE> <INDENT> return self._colBack.IsOk() <NEW_LINE> <DEDENT> def HasFont(self): <NEW_LINE> <INDENT> return self._font.IsOk() <NEW_LINE> <DEDENT> def GetTextColour(self): <NEW_LINE> <INDENT> return self._colText <NEW_LINE> <DEDENT> def GetBackgroundColour(self): <NEW_LINE> <INDENT> return self._colBack <NEW_LINE> <DEDENT> def GetFont(self): <NEW_LINE> <INDENT> return self._font
Creates the item attributes (text colour, background colour and font).
625990762c8b7c6e89bd514a
class CustomBlockTreeNode(TreeNode): <NEW_LINE> <INDENT> canonical_tag_name = 'test' <NEW_LINE> alias_tag_names = ()
Custom subclass of ``TreeNode`` for tests.
625990764e4d562566373d66
class DecisionCreateUpdateNestedSerializer( UpdateNestedMixin, FieldPermissionsSerializerMixin, serializers.ModelSerializer ): <NEW_LINE> <INDENT> id = serializers.IntegerField(required=False) <NEW_LINE> type = InstanceDictPrimaryKeyRelatedField( instance_class=DecisionType, queryset=DecisionType.objects.filter(), related_serializer=DecisionTypeSerializer, required=False, allow_null=True, ) <NEW_LINE> conditions = ConditionCreateUpdateSerializer( many=True, required=False, allow_null=True ) <NEW_LINE> decision_maker = InstanceDictPrimaryKeyRelatedField( instance_class=DecisionMaker, queryset=DecisionMaker.objects.filter(), related_serializer=DecisionMakerSerializer, required=False, allow_null=True, ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Decision <NEW_LINE> fields = ( "id", "reference_number", "decision_maker", "decision_date", "section", "type", "description", "conditions", )
This is used when the decision is added or updated inside a lease. The lease is not included in this serializer, but set via the UpdateNestedMixin in LeaseCreateUpdateSerializer.
6259907644b2445a339b760f
@dataclass(frozen=True) <NEW_LINE> class CorosyncRunningOnNode(ReportItemMessage): <NEW_LINE> <INDENT> node: str <NEW_LINE> _code = codes.COROSYNC_RUNNING_ON_NODE <NEW_LINE> @property <NEW_LINE> def message(self) -> str: <NEW_LINE> <INDENT> return f"{self.node}: corosync is running"
Corosync is running on a node, which is not ok node -- node address / name
62599076283ffb24f3cf520d
class MyCustomUser(AbstractUser, PermissionsMixin): <NEW_LINE> <INDENT> username = models.CharField(_('username'), max_length=254, unique=False, blank=True) <NEW_LINE> email = models.EmailField( _('Email Address'), unique=True, error_messages={ 'unique': _("A user with that email already exists."), } ) <NEW_LINE> first_name = models.CharField(_('first name'), max_length=30, blank=True) <NEW_LINE> last_name = models.CharField(_('last name'), max_length=30, blank=True) <NEW_LINE> is_staff = models.BooleanField(default=False) <NEW_LINE> is_moderator = models.BooleanField(_('moderator'), default=False) <NEW_LINE> is_superuser = models.BooleanField(default=False) <NEW_LINE> is_active = models.BooleanField(_('active'), default=True, help_text=_('Designates whether this user should be treated as ' 'active. Unselect this instead of deleting accounts.')) <NEW_LINE> birth_day = models.DateField(_('birthday'), blank=True, null=True) <NEW_LINE> location_user = models.CharField(_('location'), default='', blank=True, null=True, max_length=512) <NEW_LINE> phone_regex = RegexValidator(regex=r'^\+?1?\d{9,15}$', message="Phone number must be entered in the format: '+9999999999'. Up to 15 digits allowed.") <NEW_LINE> phone_number_user = models.CharField(_('Phone number user'), validators=[phone_regex], max_length=15, blank=True) <NEW_LINE> date_joined = models.DateTimeField(_('date joined'), default=timezone.now) <NEW_LINE> USERNAME_FIELD = 'email' <NEW_LINE> REQUIRED_FIELDS = ['username', 'first_name', 'last_name'] <NEW_LINE> objects = MyUserManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> swappable = "AUTH_USER_MODEL" <NEW_LINE> verbose_name = _('user') <NEW_LINE> verbose_name_plural = _('users') <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.email <NEW_LINE> <DEDENT> def get_full_name(self): <NEW_LINE> <INDENT> full_name = '%s %s' % (self.first_name, self.last_name) <NEW_LINE> return full_name.strip() <NEW_LINE> <DEDENT> def get_short_name(self): <NEW_LINE> <INDENT> return self.first_name
An abstract base class implementing a fully featured User model with admin-compliant permissions. Username, password and email are required. Other fields are optional.
62599076009cb60464d02e9e
class BatchTreeOperator(Operator): <NEW_LINE> <INDENT> bl_idname = "mod_tree.batch_tree" <NEW_LINE> bl_label = "Batch Tree Generation" <NEW_LINE> bl_options = {"REGISTER", "UNDO"} <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> print(LOGO) <NEW_LINE> messages, message_lvls, status = save_everything() <NEW_LINE> for i, message in enumerate(messages): <NEW_LINE> <INDENT> self.report({message_lvls[i]}, message) <NEW_LINE> return {status} <NEW_LINE> <DEDENT> scene = context.scene <NEW_LINE> trees = [] <NEW_LINE> save_radius = scene.radius <NEW_LINE> space = scene.batch_space <NEW_LINE> seeds = [] <NEW_LINE> if scene.batch_group_name != "": <NEW_LINE> <INDENT> if scene.batch_group_name not in bpy.data.groups: <NEW_LINE> <INDENT> bpy.ops.group.create(name=scene.batch_group_name) <NEW_LINE> <DEDENT> <DEDENT> for i in range(scene.tree_number): <NEW_LINE> <INDENT> new_seed = randint(0, 1000) <NEW_LINE> while new_seed in seeds: <NEW_LINE> <INDENT> new_seed = randint(0, 1000) <NEW_LINE> <DEDENT> pointer = int(sqrt(scene.tree_number)) <NEW_LINE> pos_x = i % pointer <NEW_LINE> pos_y = i//pointer <NEW_LINE> seed(new_seed) <NEW_LINE> scene.radius = save_radius*(1 + scene.batch_radius_randomness*(.5 - random())*2) <NEW_LINE> create_tree(Vector((-space*pointer/2, -space*pointer/2, 0)) + Vector((pos_x, pos_y, 0))*space) <NEW_LINE> trees.append(bpy.context.active_object) <NEW_LINE> if scene.batch_group_name != "": <NEW_LINE> <INDENT> bpy.ops.object.group_link(group=scene.batch_group_name) <NEW_LINE> <DEDENT> <DEDENT> for tree in trees: <NEW_LINE> <INDENT> tree.select = True <NEW_LINE> <DEDENT> scene.radius = save_radius <NEW_LINE> return {'FINISHED'}
Batch trees
62599076f548e778e596cef2
class UserProfileSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = models.UserProfile <NEW_LINE> fields = ('id', 'name', 'email', 'password') <NEW_LINE> extra_kwargs = { 'password': { 'write_only': True, 'style': {'input_type': 'password'} } } <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> user = models.UserProfile.objects.create_user( email=validated_data['email'], name=validated_data['name'], password=validated_data['password'], ) <NEW_LINE> return user
Serializes a user profile object
625990767047854f46340d1b
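A short usage sketch following the standard Django REST Framework serializer flow; the field values are placeholders.

serializer = UserProfileSerializer(data={
    'name': 'Ada Lovelace',
    'email': 'ada@example.com',
    'password': 's3cret-pass',
})
if serializer.is_valid():
    user = serializer.save()   # dispatches to create(), which calls create_user()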
class OpenfoodfactsClient: <NEW_LINE> <INDENT> def __init__(self, country="fr"): <NEW_LINE> <INDENT> if country not in ("fr", "en", "world"): <NEW_LINE> <INDENT> raise ValueError("Country must be fr, en or world") <NEW_LINE> <DEDENT> self.url = f"https://{country}.openfoodfacts.org/cgi/search.pl" <NEW_LINE> <DEDENT> def get_products_by_popularity(self, page_size, number_of_pages): <NEW_LINE> <INDENT> products = [] <NEW_LINE> for page in range(1, number_of_pages + 1): <NEW_LINE> <INDENT> params = { "action": "process", "sort_by": "unique_scans_n", "page_size": page_size, "page": page, "json": True, } <NEW_LINE> try: <NEW_LINE> <INDENT> response = requests.get(self.url, params=params) <NEW_LINE> response.raise_for_status() <NEW_LINE> <DEDENT> except requests.HTTPError: <NEW_LINE> <INDENT> print("Un code d'erreur HTTP a été retourné par l'API") <NEW_LINE> continue <NEW_LINE> <DEDENT> except requests.exceptions.RequestException: <NEW_LINE> <INDENT> print("Une erreur de connection réseau a eu lieu") <NEW_LINE> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> data = response.json() <NEW_LINE> <DEDENT> except json.JSONDecodeError: <NEW_LINE> <INDENT> print("Une erreur de décodage à eu lieu") <NEW_LINE> continue <NEW_LINE> <DEDENT> products.extend(data['products']) <NEW_LINE> <DEDENT> return products
Attributes and methods to do things with Open Food Facts API
62599076f9cc0f698b1c5f7d
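A minimal usage sketch, assuming network access to the Open Food Facts search API:

client = OpenfoodfactsClient(country="fr")
products = client.get_products_by_popularity(page_size=20, number_of_pages=2)
print(len(products))                      # up to 40 products, fewer if a page failed
print(products[0].get("product_name"))    # field name assumed from the public API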
class IngeschrevenPersoonHalAllOf(ModelNormal): <NEW_LINE> <INDENT> allowed_values = { } <NEW_LINE> validations = { } <NEW_LINE> additional_properties_type = None <NEW_LINE> _nullable = False <NEW_LINE> @cached_property <NEW_LINE> def openapi_types(): <NEW_LINE> <INDENT> lazy_import() <NEW_LINE> return { 'embedded': (IngeschrevenPersoonEmbedded,), } <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def discriminator(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> attribute_map = { 'embedded': '_embedded', } <NEW_LINE> _composed_schemas = {} <NEW_LINE> required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) <NEW_LINE> @convert_js_args_to_python_args <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> _check_type = kwargs.pop('_check_type', True) <NEW_LINE> _spec_property_naming = kwargs.pop('_spec_property_naming', False) <NEW_LINE> _path_to_item = kwargs.pop('_path_to_item', ()) <NEW_LINE> _configuration = kwargs.pop('_configuration', None) <NEW_LINE> _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) <NEW_LINE> if args: <NEW_LINE> <INDENT> raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) <NEW_LINE> <DEDENT> self._data_store = {} <NEW_LINE> self._check_type = _check_type <NEW_LINE> self._spec_property_naming = _spec_property_naming <NEW_LINE> self._path_to_item = _path_to_item <NEW_LINE> self._configuration = _configuration <NEW_LINE> self._visited_composed_classes = _visited_composed_classes + (self.__class__,) <NEW_LINE> for var_name, var_value in kwargs.items(): <NEW_LINE> <INDENT> if var_name not in self.attribute_map and self._configuration is not None and self._configuration.discard_unknown_keys and self.additional_properties_type is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> setattr(self, var_name, var_value)
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values.
6259907663b5f9789fe86ac8
class Soul: <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> pool = random.randint(3, 10) <NEW_LINE> self.ar_boost = random.randint(1, pool-2) <NEW_LINE> pool -= self.ar_boost <NEW_LINE> self.df_boost = random.randint(1, pool-1) <NEW_LINE> pool -= self.df_boost <NEW_LINE> self.spd_boost = random.randint(1, pool)
Soul class
62599076e1aae11d1e7cf4c0
@register_response('goto') <NEW_LINE> @register <NEW_LINE> class GotoResponse(BaseSchema): <NEW_LINE> <INDENT> __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Contains request result if success is true and optional error details if success is false." } } <NEW_LINE> __refs__ = set() <NEW_LINE> __slots__ = list(__props__.keys()) + ['kwargs'] <NEW_LINE> def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, update_ids_from_dap=False, **kwargs): <NEW_LINE> <INDENT> self.type = 'response' <NEW_LINE> self.request_seq = request_seq <NEW_LINE> self.success = success <NEW_LINE> self.command = command <NEW_LINE> self.seq = seq <NEW_LINE> self.message = message <NEW_LINE> self.body = body <NEW_LINE> self.kwargs = kwargs <NEW_LINE> <DEDENT> def to_dict(self, update_ids_to_dap=False): <NEW_LINE> <INDENT> type = self.type <NEW_LINE> request_seq = self.request_seq <NEW_LINE> success = self.success <NEW_LINE> command = self.command <NEW_LINE> seq = self.seq <NEW_LINE> message = self.message <NEW_LINE> body = self.body <NEW_LINE> dct = { 'type': type, 'request_seq': request_seq, 'success': success, 'command': command, 'seq': seq, } <NEW_LINE> if message is not None: <NEW_LINE> <INDENT> dct['message'] = message <NEW_LINE> <DEDENT> if body is not None: <NEW_LINE> <INDENT> dct['body'] = body <NEW_LINE> <DEDENT> dct.update(self.kwargs) <NEW_LINE> return dct
Response to 'goto' request. This is just an acknowledgement, so no body field is required. Note: automatically generated code. Do not edit manually.
62599076adb09d7d5dc0becd
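A sketch of building the acknowledgement and serializing it, using only the constructor arguments shown above:

response = GotoResponse(request_seq=12, success=True, command='goto')
print(response.to_dict())
# {'type': 'response', 'request_seq': 12, 'success': True, 'command': 'goto', 'seq': -1}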
class ClusterUtilizationModel(GenericChartModel): <NEW_LINE> <INDENT> _title = "Cluster Utilization"
Cluster Utilization Model
6259907691f36d47f2231b40
class ConfigBase(object): <NEW_LINE> <INDENT> config_subdir = None <NEW_LINE> config_suffix = ".json" <NEW_LINE> def __init__(self, name, config_dirs=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self._set_config_dirs(config_dirs) <NEW_LINE> self._set_config_path() <NEW_LINE> self._read_config() <NEW_LINE> <DEDENT> def _set_config_dirs(self, config_dirs=None): <NEW_LINE> <INDENT> self.config_dirs = [] <NEW_LINE> if config_dirs: <NEW_LINE> <INDENT> self.config_dirs += config_dirs <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.config_dirs.append(os.path.join(xdg.BaseDirectory.xdg_config_home, "releng-sop", self.config_subdir)) <NEW_LINE> self.config_dirs.append(os.path.join("/etc", "releng-sop", self.config_subdir)) <NEW_LINE> <DEDENT> <DEDENT> def _set_config_path(self): <NEW_LINE> <INDENT> filename = "%s%s" % (self.name, self.config_suffix) <NEW_LINE> for dirname in self.config_dirs: <NEW_LINE> <INDENT> path = os.path.realpath(os.path.join(dirname, filename)) <NEW_LINE> if os.path.exists(path): <NEW_LINE> <INDENT> self.config_path = path <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> message = "Couldn't find config file '%s' in following locations:\n%s" <NEW_LINE> raise ConfigError(message % (filename, "\n".join(self.config_dirs))) <NEW_LINE> <DEDENT> def _read_config(self): <NEW_LINE> <INDENT> with open(self.config_path, "r") as f: <NEW_LINE> <INDENT> self.config_data = json.load(f) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, name): <NEW_LINE> <INDENT> return self.config_data[name] <NEW_LINE> <DEDENT> def __contains__(self, name): <NEW_LINE> <INDENT> return name in self.config_data <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for key in self.config_data: <NEW_LINE> <INDENT> yield key
Base class for configurations.
625990768a349b6b43687bbd
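A hedged sketch of a concrete subclass; the subdirectory, config name, and key are hypothetical, and a matching production.json must exist in one of the searched directories.

class EnvironmentConfig(ConfigBase):
    # Resolves <name>.json under ~/.config/releng-sop/environments/ or /etc/releng-sop/environments/
    config_subdir = "environments"

cfg = EnvironmentConfig("production")
if "koji_profile" in cfg:          # __contains__
    print(cfg["koji_profile"])     # __getitem__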
class ForgotForm(Form): <NEW_LINE> <INDENT> email = TextField('Email', [validators.Required(), validators.Length(min=3, max=50)]) <NEW_LINE> security_question = TextField('Security Question', [validators.Required(), validators.Length(min=10, max=40)]) <NEW_LINE> security_answer = TextField('Security Answer', [validators.Required(), validators.Length(min=6, max=40)])
Forgot password form of the user
6259907655399d3f05627e7c
class ListTree: <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> self.__visited = {} <NEW_LINE> return '<Instance of {0}, address {1}:\n{2}{3}>'.format( self.__class__.__name__, id(self), self.__attrnames(self, 0), self.__listclass(self.__class__, 4) ) <NEW_LINE> <DEDENT> def __listclass(self, aClass, indent): <NEW_LINE> <INDENT> dots = '.' * indent <NEW_LINE> if aClass in self.__visited: <NEW_LINE> <INDENT> return '\n{0}<Class {1}, address {2}: (see above)>\n'.format( dots, aClass.__name__, id(aClass) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__visited[aClass] = True <NEW_LINE> genabove = (self.__listclass(c, indent+4) for c in aClass.__bases__) <NEW_LINE> return '\n{0}<Class {1}, address {2}:\n{3}{4}{5}>\n'.format( dots, aClass.__name__, id(aClass), self.__attrnames(aClass, indent), ''.join(genabove), dots ) <NEW_LINE> <DEDENT> <DEDENT> def __attrnames(self, obj, indent): <NEW_LINE> <INDENT> spaces = ' ' * (indent + 4) <NEW_LINE> result = '' <NEW_LINE> for attr in sorted(obj.__dict__): <NEW_LINE> <INDENT> if attr.startswith('__') and attr.endswith('__'): <NEW_LINE> <INDENT> result += spaces + '{0}=<>\n'.format(attr) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result += spaces + '{0}={1}\n'.format(attr, getattr(obj, attr)) <NEW_LINE> <DEDENT> <DEDENT> return result
Mix-in that returns an __str__ trace of the entire class tree and all its objects' attrs at and above self; run by print(), str() returns constructed string; uses __X attr names to avoid impacting clients; uses generator expr to recurse to superclasses
625990767d847024c075dd3d
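A small sketch of the intended usage: mix the class into a hierarchy and print an instance to get the trace.

class Super(ListTree):
    def __init__(self):
        self.data1 = 'spam'

class Sub(Super):
    def __init__(self):
        Super.__init__(self)
        self.data2 = 'eggs'

print(Sub())   # instance attrs, then the Sub -> Super -> ListTree/object class tree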
class AutoTarget: <NEW_LINE> <INDENT> pass
Use this as target value in clients for them to automatically connect to the target exposed by the server. Servers must have only one target.
6259907697e22403b383c865
class NameGenerator(object): <NEW_LINE> <INDENT> implements(INameGenerator) <NEW_LINE> def __init__(self, context): <NEW_LINE> <INDENT> self.context = context <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self): <NEW_LINE> <INDENT> folder = self.context.aq_parent <NEW_LINE> ids = [i[0] for i in folder.contentItems( filter={'portal_type': ('wt.testrig.foo',)})] <NEW_LINE> ids.remove(self.context.id) <NEW_LINE> if len(ids) > 0: <NEW_LINE> <INDENT> ids.sort() <NEW_LINE> new_id = ids[-1] <NEW_LINE> new_id = int(new_id) + 1 <NEW_LINE> new_id = '%d' % new_id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_id = '1' <NEW_LINE> <DEDENT> title = new_id <NEW_LINE> return title
Customized name from title behavior.
62599076d268445f2663a80f
class Floppy(object): <NEW_LINE> <INDENT> def __init__(self, id, control_pin, direction_pin, arduino): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.control_pin = control_pin <NEW_LINE> self.direction_pin = direction_pin <NEW_LINE> self.arduino = arduino <NEW_LINE> <DEDENT> def play_note(self, note, is_midi=False): <NEW_LINE> <INDENT> if is_midi: <NEW_LINE> <INDENT> note = settings.SEMITONE_SCALE[note] <NEW_LINE> <DEDENT> note = note / settings.ARDUINO_RESOLUTION <NEW_LINE> self.arduino.write(chr(self.control_pin)) <NEW_LINE> self.arduino.write(chr(note >> 8)) <NEW_LINE> self.arduino.write(chr(note % 256)) <NEW_LINE> <DEDENT> def rest(self): <NEW_LINE> <INDENT> self.arduino.write(chr(self.control_pin)) <NEW_LINE> self.arduino.write(chr(0)) <NEW_LINE> self.arduino.write(chr(0))
Represents a floppy drive connected to the Arduino
625990767b180e01f3e49d16
class RandomSampler(Sampler): <NEW_LINE> <INDENT> def __init__(self, dataset: Dataset): <NEW_LINE> <INDENT> self._dataset = dataset <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> self._dataset.shuffle() <NEW_LINE> return iter(self._dataset.index_pool) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._dataset)
Sample elements randomly
6259907660cbc95b06365a1f
class ElementViewForFakeModelWithLocateChild(_HasLocateChild, Element): <NEW_LINE> <INDENT> pass
Non-live element with a locateChild.
62599076fff4ab517ebcf17a
class Patient(object): <NEW_LINE> <INDENT> def __init__(self, viruses, maxPop): <NEW_LINE> <INDENT> self.viruses = viruses <NEW_LINE> self.maxPop = maxPop <NEW_LINE> <DEDENT> def getViruses(self): <NEW_LINE> <INDENT> return self.viruses <NEW_LINE> <DEDENT> def getMaxPop(self): <NEW_LINE> <INDENT> return self.maxPop <NEW_LINE> <DEDENT> def getTotalPop(self): <NEW_LINE> <INDENT> return len(self.getViruses()) <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> surviveViruses = [e for e in self.getViruses() if not e.doesClear()] <NEW_LINE> density = len(surviveViruses) / self.getMaxPop() <NEW_LINE> offSprings = [] <NEW_LINE> for e in surviveViruses: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> offSprings.append(e.reproduce(density)) <NEW_LINE> <DEDENT> except NoChildException: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> self.viruses = surviveViruses + offSprings <NEW_LINE> return self.getTotalPop()
Representation of a simplified patient. The patient does not take any drugs and his/her virus populations have no drug resistance.
6259907632920d7e50bc79ab
class GenericHandler: <NEW_LINE> <INDENT> def __init__(self, repo: publishing.GenericRepo): <NEW_LINE> <INDENT> self.repo = repo <NEW_LINE> <DEDENT> async def handle_request(self, req: request) -> str: <NEW_LINE> <INDENT> secret = req.args.get('secret') <NEW_LINE> if secret != self.repo.secret: <NEW_LINE> <INDENT> logger.warning(f'Request for generic repo \'{self.repo.name}\' did not have valid secret parameter!') <NEW_LINE> abort(403) <NEW_LINE> <DEDENT> loop = asyncio.get_event_loop() <NEW_LINE> loop.run_in_executor(None, self.repo.publish_repo) <NEW_LINE> return 'OK'
Handler that serves requests for Generic repos. It verifies that the repo's secret is passed as a GET argument of the request
625990765fc7496912d48f1b
class PlaceHolder: <NEW_LINE> <INDENT> def __init__(self, alogger): <NEW_LINE> <INDENT> self.loggerMap = { alogger : None } <NEW_LINE> <DEDENT> def append(self, alogger): <NEW_LINE> <INDENT> if alogger not in self.loggerMap: <NEW_LINE> <INDENT> self.loggerMap[alogger] = None
PlaceHolder instances are used in the Manager logger hierarchy to take the place of nodes for which no loggers have been defined. This class is intended for internal use only and not as part of the public API.
625990765fdd1c0f98e5f8e1
class TweetsFromFiles(Pipeline): <NEW_LINE> <INDENT> def __init__(self, *filepaths): <NEW_LINE> <INDENT> lines = LinesFromGzipFiles(filepaths) <NEW_LINE> steps = [(map, tweet_info.tweet_from_json_line)] <NEW_LINE> super().__init__(lines, steps)
Iterate over tweets from the given .jsonl.gz files. Args: filepaths (iterable of str): Paths to gzip files of tweets. Yields: dict: Tweet object. Notes: See `twitter_analysis_tools.utils.Pipeline` for more details.
6259907676e4537e8c3f0ee3
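A hedged usage sketch; the file paths are placeholders and the pipeline is assumed to be directly iterable, as the Yields section of the docstring suggests.

tweets = TweetsFromFiles("tweets-2020-01.jsonl.gz", "tweets-2020-02.jsonl.gz")
for tweet in tweets:
    print(tweet.get("id"), tweet.get("text"))   # keys assumed from the Twitter JSON format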
class BlogDetails(APIView): <NEW_LINE> <INDENT> def get_object(self, pk): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return Blog.objects.get(pk=pk) <NEW_LINE> <DEDENT> except Blog.DoesNotExist: <NEW_LINE> <INDENT> raise Http404 <NEW_LINE> <DEDENT> <DEDENT> def get(self, request, pk, format=None): <NEW_LINE> <INDENT> blog = self.get_object(pk) <NEW_LINE> blog = BlogSerializer(blog) <NEW_LINE> return Response(blog.data)
Retrieve a Blog instance.
625990764e4d562566373d67
class Sprint(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=100, blank=True, default='') <NEW_LINE> description = models.TextField(blank=True, default='') <NEW_LINE> end = models.DateField() <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> if self.name != '': <NEW_LINE> <INDENT> return "<Sprint: %s>" % self.name <NEW_LINE> <DEDENT> return '<Sprint ending %s>' % self.end
a Dev iteration period
625990763539df3088ecdbfc
class LutronScene(LutronDevice, Scene): <NEW_LINE> <INDENT> def __init__(self, area_name, keypad_name, lutron_device, lutron_led, controller): <NEW_LINE> <INDENT> super().__init__(area_name, lutron_device, controller) <NEW_LINE> self._keypad_name = keypad_name <NEW_LINE> self._led = lutron_led <NEW_LINE> <DEDENT> def activate(self): <NEW_LINE> <INDENT> self._lutron_device.press() <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return "{} {}: {}".format( self._area_name, self._keypad_name, self._lutron_device.name )
Representation of a Lutron Scene.
625990767c178a314d78e89d
class CmdBase(Command): <NEW_LINE> <INDENT> def execute(self): <NEW_LINE> <INDENT> raise Exception("virtual, not implemented") <NEW_LINE> <DEDENT> def undo(self): <NEW_LINE> <INDENT> raise Exception("virtual, not implemented") <NEW_LINE> <DEDENT> def redo(self): <NEW_LINE> <INDENT> self.execute()
Base command
62599076f548e778e596cef4
class AutoModelForPreTraining: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> raise EnvironmentError( "AutoModelForPreTraining is designed to be instantiated " "using the `AutoModelForPreTraining.from_pretrained(pretrained_model_name_or_path)` or " "`AutoModelForPreTraining.from_config(config)` methods." ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_config(cls, config): <NEW_LINE> <INDENT> for config_class, model_class in MODEL_FOR_PRETRAINING_MAPPING.items(): <NEW_LINE> <INDENT> if isinstance(config, config_class): <NEW_LINE> <INDENT> return model_class(config) <NEW_LINE> <DEDENT> <DEDENT> raise ValueError( "Unrecognized configuration class {} for this kind of AutoModel: {}.\n" "Model type should be one of {}.".format( config.__class__, cls.__name__, ", ".join(c.__name__ for c in MODEL_FOR_PRETRAINING_MAPPING.keys()) ) ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): <NEW_LINE> <INDENT> config = kwargs.pop("config", None) <NEW_LINE> if not isinstance(config, PretrainedConfig): <NEW_LINE> <INDENT> config = AutoConfig.from_pretrained(pretrained_model_name_or_path, **kwargs) <NEW_LINE> <DEDENT> for config_class, model_class in MODEL_FOR_PRETRAINING_MAPPING.items(): <NEW_LINE> <INDENT> if isinstance(config, config_class): <NEW_LINE> <INDENT> return model_class.from_pretrained(pretrained_model_name_or_path, *model_args, config=config, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> raise ValueError( "Unrecognized configuration class {} for this kind of AutoModel: {}.\n" "Model type should be one of {}.".format( config.__class__, cls.__name__, ", ".join(c.__name__ for c in MODEL_FOR_PRETRAINING_MAPPING.keys()) ) )
:class:`~transformers.AutoModelForPreTraining` is a generic model class that will be instantiated as one of the model classes of the library -with the architecture used for pretraining this model– when created with the `AutoModelForPreTraining.from_pretrained(pretrained_model_name_or_path)` class method. This class cannot be instantiated using `__init__()` (throws an error).
62599076f9cc0f698b1c5f7e
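The two construction paths named in the docstring, sketched with an assumed checkpoint name:

model = AutoModelForPreTraining.from_pretrained("bert-base-uncased")

# or instantiate from a configuration object (weights are not loaded this way)
config = AutoConfig.from_pretrained("bert-base-uncased")
model = AutoModelForPreTraining.from_config(config)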
class BreakRop(Op): <NEW_LINE> <INDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(type(self)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return type(self) == type(other) <NEW_LINE> <DEDENT> def make_node(self, x): <NEW_LINE> <INDENT> return Apply(self, [x], [x.type()]) <NEW_LINE> <DEDENT> def perform(self, node, inp, out_): <NEW_LINE> <INDENT> x, = inp <NEW_LINE> out, = out_ <NEW_LINE> out[0] = x <NEW_LINE> <DEDENT> def grad(self, inp, grads): <NEW_LINE> <INDENT> return [grad_undefined(self, 0, inp[0])] <NEW_LINE> <DEDENT> def R_op(self, inputs, eval_points): <NEW_LINE> <INDENT> return [None]
@note: Non-differentiable.
62599076097d151d1a2c29d9
class UKAINodeErrorState(object): <NEW_LINE> <INDENT> def __init__(self, address, reason): <NEW_LINE> <INDENT> self._suspend_time = 60 <NEW_LINE> self._address = address <NEW_LINE> self._reason = reason <NEW_LINE> self._retry_after = time.time() + self._suspend_time <NEW_LINE> <DEDENT> @property <NEW_LINE> def address(self): <NEW_LINE> <INDENT> return (self._address) <NEW_LINE> <DEDENT> @property <NEW_LINE> def reason(self): <NEW_LINE> <INDENT> return (self._reason) <NEW_LINE> <DEDENT> @property <NEW_LINE> def retry_after(self): <NEW_LINE> <INDENT> return (self._retry_after) <NEW_LINE> <DEDENT> def is_expired(self): <NEW_LINE> <INDENT> return (self._retry_after < time.time()) <NEW_LINE> <DEDENT> def extend(self, reason): <NEW_LINE> <INDENT> self._reason = reason <NEW_LINE> self._retry_after = time.time() + self._suspend_time
The UKAINodeErrorState class represents information about a node that is considered to be in a failure state.
6259907691f36d47f2231b41
class MapValue(Record): <NEW_LINE> <INDENT> map = models.ForeignKey(Map) <NEW_LINE> key = models.CharField(max_length=255) <NEW_LINE> v1 = models.TextField(verbose_name='Value 1', blank=True) <NEW_LINE> v2 = models.TextField(verbose_name='Value 2', blank=True, null=True) <NEW_LINE> v3 = models.TextField(verbose_name='Value 3', blank=True, null=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{0}:{1}={2}".format(self.map, self.key, ':'.join([ x for x in (self.v1, self.v2, self.v3) if x is not None ])) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> db_table = 'mapvalue' <NEW_LINE> unique_together = ('map', 'key',)
Generic key->value mappings for maps
625990765fc7496912d48f1c
class DictMatch(ListMatch): <NEW_LINE> <INDENT> MATCH = 'dict' <NEW_LINE> def __init__(self, value, option): <NEW_LINE> <INDENT> ListMatch.__init__(self, value, '', option) <NEW_LINE> self.match = value <NEW_LINE> self.reason = '' <NEW_LINE> self.option = option <NEW_LINE> <DEDENT> def GetValidMatches(self, command, index): <NEW_LINE> <INDENT> matches = {} <NEW_LINE> for item, helptext in self.match.items(): <NEW_LINE> <INDENT> if index is None or command[index] == ' ': <NEW_LINE> <INDENT> matches[item] = helptext <NEW_LINE> if self.option.default == item: <NEW_LINE> <INDENT> matches[item] += ' [Default]' <NEW_LINE> <DEDENT> continue <NEW_LINE> <DEDENT> token = command[index] <NEW_LINE> if self._GetRegex(item) and (not token): <NEW_LINE> <INDENT> matches[item] = helptext <NEW_LINE> <DEDENT> elif not token or item.startswith(token): <NEW_LINE> <INDENT> matches[item] = helptext <NEW_LINE> if self.option.default == item: <NEW_LINE> <INDENT> matches[item] += ' [Default]' <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return matches
An option that matches on a dict.
62599076fff4ab517ebcf17c
class TLStagingNotice(TLChangeNotice): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> content_template = "{} has approved a change for staging to {}" <NEW_LINE> super(TLStagingNotice, self).__init__( content_template=content_template, )
Notification when entry has been moved from draft to staging.
62599076091ae3566870659f
class SwaApplicationSettingsApplication( ApplicationSettingsApplication ): <NEW_LINE> <INDENT> def __init__(self, config=None): <NEW_LINE> <INDENT> super().__init__(config) <NEW_LINE> if config: <NEW_LINE> <INDENT> self.button_field = config["buttonField"] if "buttonField" in config else None <NEW_LINE> self.checkbox = config["checkbox"] if "checkbox" in config else None <NEW_LINE> self.login_url_regex = config["loginUrlRegex"] if "loginUrlRegex" in config else None <NEW_LINE> self.password_field = config["passwordField"] if "passwordField" in config else None <NEW_LINE> self.redirect_url = config["redirectUrl"] if "redirectUrl" in config else None <NEW_LINE> self.url = config["url"] if "url" in config else None <NEW_LINE> self.username_field = config["usernameField"] if "usernameField" in config else None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.button_field = None <NEW_LINE> self.checkbox = None <NEW_LINE> self.login_url_regex = None <NEW_LINE> self.password_field = None <NEW_LINE> self.redirect_url = None <NEW_LINE> self.url = None <NEW_LINE> self.username_field = None <NEW_LINE> <DEDENT> <DEDENT> def request_format(self): <NEW_LINE> <INDENT> parent_req_format = super().request_format() <NEW_LINE> current_obj_format = { "buttonField": self.button_field, "checkbox": self.checkbox, "loginUrlRegex": self.login_url_regex, "passwordField": self.password_field, "redirectUrl": self.redirect_url, "url": self.url, "usernameField": self.username_field } <NEW_LINE> parent_req_format.update(current_obj_format) <NEW_LINE> return parent_req_format
A class for SwaApplicationSettingsApplication objects.
625990764c3428357761bc1c
class ApplicationRuleCondition(FirewallPolicyRuleCondition): <NEW_LINE> <INDENT> _validation = { 'rule_condition_type': {'required': True}, } <NEW_LINE> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'rule_condition_type': {'key': 'ruleConditionType', 'type': 'str'}, 'source_addresses': {'key': 'sourceAddresses', 'type': '[str]'}, 'destination_addresses': {'key': 'destinationAddresses', 'type': '[str]'}, 'protocols': {'key': 'protocols', 'type': '[FirewallPolicyRuleConditionApplicationProtocol]'}, 'target_fqdns': {'key': 'targetFqdns', 'type': '[str]'}, 'fqdn_tags': {'key': 'fqdnTags', 'type': '[str]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(ApplicationRuleCondition, self).__init__(**kwargs) <NEW_LINE> self.rule_condition_type = 'ApplicationRuleCondition' <NEW_LINE> self.source_addresses = kwargs.get('source_addresses', None) <NEW_LINE> self.destination_addresses = kwargs.get('destination_addresses', None) <NEW_LINE> self.protocols = kwargs.get('protocols', None) <NEW_LINE> self.target_fqdns = kwargs.get('target_fqdns', None) <NEW_LINE> self.fqdn_tags = kwargs.get('fqdn_tags', None)
Rule condition of type application. All required parameters must be populated in order to send to Azure. :param name: Name of the rule condition. :type name: str :param description: Description of the rule condition. :type description: str :param rule_condition_type: Required. Rule Condition Type.Constant filled by server. Possible values include: "ApplicationRuleCondition", "NetworkRuleCondition". :type rule_condition_type: str or ~azure.mgmt.network.v2019_06_01.models.FirewallPolicyRuleConditionType :param source_addresses: List of source IP addresses for this rule. :type source_addresses: list[str] :param destination_addresses: List of destination IP addresses or Service Tags. :type destination_addresses: list[str] :param protocols: Array of Application Protocols. :type protocols: list[~azure.mgmt.network.v2019_06_01.models.FirewallPolicyRuleConditionApplicationProtocol] :param target_fqdns: List of FQDNs for this rule condition. :type target_fqdns: list[str] :param fqdn_tags: List of FQDN Tags for this rule condition. :type fqdn_tags: list[str]
62599076dc8b845886d54f20
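A hedged construction sketch using keyword arguments from the parameter list above; the name, addresses, and FQDN are placeholders.

condition = ApplicationRuleCondition(
    name="allow-web",
    description="Allow outbound web traffic",
    source_addresses=["10.0.0.0/24"],
    target_fqdns=["www.example.com"],
)
print(condition.rule_condition_type)   # 'ApplicationRuleCondition'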
class Trackpoints(_SegWrap): <NEW_LINE> <INDENT> def import_locations(self, gpx_file): <NEW_LINE> <INDENT> self._gpx_file = gpx_file <NEW_LINE> data = utils.prepare_xml_read(gpx_file, objectify=True) <NEW_LINE> with suppress(AttributeError): <NEW_LINE> <INDENT> self.metadata.import_metadata(data.metadata) <NEW_LINE> <DEDENT> for segment in data.trk.trkseg: <NEW_LINE> <INDENT> points = point.TimedPoints() <NEW_LINE> for trackpoint in segment.trkpt: <NEW_LINE> <INDENT> latitude = trackpoint.get('lat') <NEW_LINE> longitude = trackpoint.get('lon') <NEW_LINE> try: <NEW_LINE> <INDENT> name = trackpoint.name.text <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> name = None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> description = trackpoint.desc.text <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> description = None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> elevation = float(trackpoint.ele.text) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> elevation = None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> time = utils.Timestamp.parse_isoformat( trackpoint.time.text ) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> time = None <NEW_LINE> <DEDENT> points.append( Trackpoint( latitude, longitude, name, description, elevation, time ) ) <NEW_LINE> <DEDENT> self.append(points) <NEW_LINE> <DEDENT> <DEDENT> def export_gpx_file(self): <NEW_LINE> <INDENT> gpx = create_elem('gpx', GPX_ELEM_ATTRIB) <NEW_LINE> if not self.metadata.bounds: <NEW_LINE> <INDENT> self.metadata.bounds = [j for i in self for j in i] <NEW_LINE> <DEDENT> gpx.append(self.metadata.togpx()) <NEW_LINE> track = create_elem('trk') <NEW_LINE> gpx.append(track) <NEW_LINE> for segment in self: <NEW_LINE> <INDENT> chunk = create_elem('trkseg') <NEW_LINE> track.append(chunk) <NEW_LINE> for place in segment: <NEW_LINE> <INDENT> chunk.append(place.togpx()) <NEW_LINE> <DEDENT> <DEDENT> return etree.ElementTree(gpx)
Class for representing a group of :class:`Trackpoint` objects. .. versionadded:: 0.10.0
62599076f548e778e596cef5
@attr.attrs(auto_attribs=True) <NEW_LINE> class FileStream(abc.MappingFieldset[OffsetType, Message]): <NEW_LINE> <INDENT> path: str <NEW_LINE> errors: str = attr.attrib( default="warn", validator=attr.validators.in_(["ignore", "warn", "raise"]) ) <NEW_LINE> def items(self) -> T.ItemsView[OffsetType, Message]: <NEW_LINE> <INDENT> return FileStreamItems(self) <NEW_LINE> <DEDENT> def __iter__(self) -> T.Iterator[OffsetType]: <NEW_LINE> <INDENT> raise NotImplementedError("use `.items()` instead") <NEW_LINE> <DEDENT> def message_from_file(self, file, offset=None, **kwargs): <NEW_LINE> <INDENT> return Message.from_file(file, offset, **kwargs) <NEW_LINE> <DEDENT> def __getitem__(self, item: T.Optional[OffsetType]) -> Message: <NEW_LINE> <INDENT> with open(self.path, "rb") as file: <NEW_LINE> <INDENT> return self.message_from_file(file, offset=item) <NEW_LINE> <DEDENT> <DEDENT> def __len__(self) -> int: <NEW_LINE> <INDENT> return sum(1 for _ in self.items())
Mapping-like access to a filestream of Messages. Sample usage: >>> filestream = FileStream("era5-levels-members.grib") >>> message1 = filestream[None] >>> message1["offset"] 0.0 >>> message2 = filestream[14760] >>> message2["offset"] 14760.0 Note that any offset returns the first message found _after_ that offset: >>> message2_again = filestream[1] >>> message2_again["offset"] 14760.0
6259907632920d7e50bc79ae
class ReturnGameState(messages.Message): <NEW_LINE> <INDENT> user_states = messages.MessageField(StringMessage, 1, repeated=True)
Outbound response to return the state of a game for a user.
62599076460517430c432d0c
class SnifferBase(node.NodeBase): <NEW_LINE> <INDENT> def __init__(self, comm): <NEW_LINE> <INDENT> node.NodeBase.__init__(self, comm=comm)
Sniffer STA. This represents a platform-independent monitor STA that should be used by tests. Real Sniffer STAs should extend this class and implement the actual AP functions.
625990761f5feb6acb16455c
class MetaStratusLabXml(MetaDataBase): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> super(MetaStratusLabXml, self).__init__() <NEW_LINE> try: <NEW_LINE> <INDENT> self.xml_obj = et.parse(filename) <NEW_LINE> <DEDENT> except et.ParseError: <NEW_LINE> <INDENT> vprint('XML parse error') <NEW_LINE> self.xml_obj = None <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> vprint('Parsed OK, but still no XML object from:' + str(filename)) <NEW_LINE> <DEDENT> self.data = {'checksums': {}} <NEW_LINE> <DEDENT> def get_metadata(self): <NEW_LINE> <INDENT> if not self.xml_obj: <NEW_LINE> <INDENT> vprint('No XML object') <NEW_LINE> return None <NEW_LINE> <DEDENT> nsp = StratusLabNS._NS_TO_URL_PREFIXES <NEW_LINE> ret = self.data <NEW_LINE> root = self.xml_obj.getroot() <NEW_LINE> if root.tag == 'metadata': <NEW_LINE> <INDENT> rdf = root.find('rdf:RDF', nsp) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rdf = root <NEW_LINE> <DEDENT> desc = rdf.find('rdf:Description', nsp) <NEW_LINE> for cksum in desc.findall('slreq:checksum', nsp): <NEW_LINE> <INDENT> algo = cksum.find('slreq:algorithm', nsp) <NEW_LINE> val = cksum.find('slreq:value', nsp) <NEW_LINE> ret['checksums'][sl_to_hashlib(algo.text)] = val.text <NEW_LINE> <DEDENT> for key, val in StratusLabNS._RETKEY_TO_NS_PREFIXES.items(): <NEW_LINE> <INDENT> if key == 'algorithm': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> mdkey = val + ':' + key <NEW_LINE> node = desc.find(mdkey, nsp) <NEW_LINE> if node is not None: <NEW_LINE> <INDENT> ret[key] = node.text <NEW_LINE> <DEDENT> <DEDENT> return ret
Parse the metadata .xml file from the StratusLab marketplace. Extract interesting data: URL and message digests.
625990768a43f66fc4bf3afc
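A usage sketch for the MetaStratusLabXml record above; the filename is hypothetical, and the printed keys follow the dict layout built in get_metadata().

```python
# Hedged sketch: 'image-metadata.xml' is a placeholder marketplace metadata file.
meta = MetaStratusLabXml('image-metadata.xml')
data = meta.get_metadata()           # None if the XML could not be parsed
if data is not None:
    print(data['checksums'])         # e.g. {'md5': '...', 'sha1': '...'}, keyed by hashlib name
```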
class StateTest(object): <NEW_LINE> <INDENT> def __init__(self, func, msg): <NEW_LINE> <INDENT> self.func = func <NEW_LINE> self.msg = msg <NEW_LINE> <DEDENT> def __call__(self, token): <NEW_LINE> <INDENT> return self.func(token) <NEW_LINE> <DEDENT> def expect_token(*types, **kw): <NEW_LINE> <INDENT> msg = kw.pop('msg', None) <NEW_LINE> if kw: <NEW_LINE> <INDENT> raise TypeError('unexpected keyword argument %r' % iter(kw).next()) <NEW_LINE> <DEDENT> if len(types) == 1: <NEW_LINE> <INDENT> if msg is None: <NEW_LINE> <INDENT> msg = "expected '%s'" % types[0] <NEW_LINE> <DEDENT> return StateTest(lambda t: t.type == types[0], msg) <NEW_LINE> <DEDENT> if msg is None: <NEW_LINE> <INDENT> msg = 'expected one of %s' % ', '.join(["'%s'" % type for type in types]) <NEW_LINE> <DEDENT> return StateTest(lambda t: t.type in types, msg) <NEW_LINE> <DEDENT> expect_token = staticmethod(expect_token)
Wrapper class for basic lambdas in order to simplify debugging in the parser. It also provides static helper functions that replace some lambda expressions
62599076aad79263cf43011f
class ManagementLinkClient(object): <NEW_LINE> <INDENT> def __init__( self, credentials, subscription_id, api_version='2016-09-01', accept_language='en-US', long_running_operation_retry_timeout=30, generate_client_request_id=True, base_url=None, filepath=None): <NEW_LINE> <INDENT> self.config = ManagementLinkClientConfiguration(credentials, subscription_id, api_version, accept_language, long_running_operation_retry_timeout, generate_client_request_id, base_url, filepath) <NEW_LINE> self._client = ServiceClient(self.config.credentials, self.config) <NEW_LINE> client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} <NEW_LINE> self._serialize = Serializer(client_models) <NEW_LINE> self._deserialize = Deserializer(client_models) <NEW_LINE> self.resource_links = ResourceLinksOperations( self._client, self.config, self._serialize, self._deserialize)
Azure resources can be linked together to form logical relationships. You can establish links between resources belonging to different resource groups. However, all the linked resources must belong to the same subscription. Each resource can be linked to 50 other resources. If any of the linked resources are deleted or moved, the link owner must clean up the remaining link. :ivar config: Configuration for client. :vartype config: ManagementLinkClientConfiguration :ivar resource_links: ResourceLinks operations :vartype resource_links: .operations.ResourceLinksOperations :param credentials: Credentials needed for the client to connect to Azure. :type credentials: :mod:`A msrestazure Credentials object<msrestazure.azure_active_directory>` :param subscription_id: The ID of the target subscription. :type subscription_id: str :param api_version: The API version to use for the operation. :type api_version: str :param accept_language: Gets or sets the preferred language for the response. :type accept_language: str :param long_running_operation_retry_timeout: Gets or sets the retry timeout in seconds for Long Running Operations. Default value is 30. :type long_running_operation_retry_timeout: int :param generate_client_request_id: When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true. :type generate_client_request_id: bool :param str base_url: Service URL :param str filepath: Existing config
62599076097d151d1a2c29db
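An instantiation sketch for the ManagementLinkClient record above; the credentials class is the usual msrestazure service-principal helper and the identifiers are placeholders, neither taken from the record.

```python
# Hedged sketch: credential values and subscription id are placeholders.
from msrestazure.azure_active_directory import ServicePrincipalCredentials

credentials = ServicePrincipalCredentials(
    client_id='CLIENT_ID', secret='SECRET', tenant='TENANT_ID')
client = ManagementLinkClient(credentials, subscription_id='SUBSCRIPTION_ID')
resource_links = client.resource_links  # ResourceLinksOperations, per the docstring
```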
class RpcProxy(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.url = "None" <NEW_LINE> self.flavour = "Base" <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<{flavour} Client for {url}>".format( url=self.url, flavour=self.flavour) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc, type, stack): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __getattr__(self, key): <NEW_LINE> <INDENT> if key in self.__dict__: <NEW_LINE> <INDENT> return self.__dict__[key] <NEW_LINE> <DEDENT> return lambda *a, **kw: self._apicall(self, key, *a, **kw) <NEW_LINE> <DEDENT> def _apicall(self, *args, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError("No proxy dispatch method!")
A base implementation of the proxy pattern for RPC clients.
62599076a8370b77170f1d33
class UHostInstanceSetSchema(schema.ResponseSchema): <NEW_LINE> <INDENT> fields = { "AutoRenew": fields.Str(required=False, load_from="AutoRenew"), "BasicImageId": fields.Str(required=False, load_from="BasicImageId"), "BasicImageName": fields.Str( required=False, load_from="BasicImageName" ), "BootDiskState": fields.Str(required=False, load_from="BootDiskState"), "CPU": fields.Int(required=False, load_from="CPU"), "ChargeType": fields.Str(required=False, load_from="ChargeType"), "CreateTime": fields.Int(required=False, load_from="CreateTime"), "DiskSet": fields.List(UHostDiskSetSchema()), "ExpireTime": fields.Int(required=False, load_from="ExpireTime"), "GPU": fields.Int(required=False, load_from="GPU"), "HostType": fields.Str(required=False, load_from="HostType"), "HotplugFeature": fields.Bool( required=False, load_from="HotplugFeature" ), "IPSet": fields.List(UHostIPSetSchema()), "ImageId": fields.Str(required=False, load_from="ImageId"), "IsolationGroup": fields.Str( required=False, load_from="IsolationGroup" ), "LifeCycle": fields.Str(required=False, load_from="LifeCycle"), "MachineType": fields.Str(required=False, load_from="MachineType"), "Memory": fields.Int(required=False, load_from="Memory"), "Name": fields.Str(required=False, load_from="Name"), "NetCapability": fields.Str(required=False, load_from="NetCapability"), "NetworkState": fields.Str(required=False, load_from="NetworkState"), "OsName": fields.Str(required=False, load_from="OsName"), "OsType": fields.Str(required=False, load_from="OsType"), "Remark": fields.Str(required=False, load_from="Remark"), "State": fields.Str(required=False, load_from="State"), "StorageType": fields.Str(required=False, load_from="StorageType"), "SubnetType": fields.Str(required=False, load_from="SubnetType"), "Tag": fields.Str(required=False, load_from="Tag"), "TimemachineFeature": fields.Str( required=False, load_from="TimemachineFeature" ), "TotalDiskSpace": fields.Int( required=False, load_from="TotalDiskSpace" ), "UHostId": fields.Str(required=False, load_from="UHostId"), "UHostType": fields.Str(required=False, load_from="UHostType"), "Zone": fields.Str(required=False, load_from="Zone"), }
UHostInstanceSet - DescribeUHostInstance
6259907660cbc95b06365a21
class Institution(QObject): <NEW_LINE> <INDENT> populationChanged = pyqtSignal() <NEW_LINE> def __init__(self, parent=None): <NEW_LINE> <INDENT> QObject.__init__(self, parent) <NEW_LINE> self.name = 'Institution' <NEW_LINE> self.people = [] <NEW_LINE> self.templates = [] <NEW_LINE> self.durations = []
Storage of all people. Storage of a list of templates used to create events. Storage of a list of durations used to sort events.
62599076627d3e7fe0e087ef
class BroadcastResponse(object): <NEW_LINE> <INDENT> def __init__(self, request_id=None): <NEW_LINE> <INDENT> self.request_id = request_id
BroadcastResponse. https://developers.line.biz/en/reference/messaging-api/#send-broadcast-message
6259907623849d37ff852a1f
class SettingsStringUpdater(object): <NEW_LINE> <INDENT> def __init__(self, source): <NEW_LINE> <INDENT> drv = driver.Driver(pygram.python_grammar, pytree.convert) <NEW_LINE> tree = self._parse_func(drv)(source) <NEW_LINE> if not isinstance(tree, Node): <NEW_LINE> <INDENT> raise SettingsError('Invalid settings: no nodes') <NEW_LINE> <DEDENT> self.root = tree <NEW_LINE> self.changed = False <NEW_LINE> <DEDENT> @property <NEW_LINE> def result(self): <NEW_LINE> <INDENT> return str(self.root) <NEW_LINE> <DEDENT> def update(self, new_settings={}, append_settings={}, create_if_missing=False): <NEW_LINE> <INDENT> node_names = new_settings.keys() + append_settings.keys() <NEW_LINE> node_dict = find_assignment_nodes(self.root, node_names) <NEW_LINE> for name, value in new_settings.iteritems(): <NEW_LINE> <INDENT> if name in node_dict: <NEW_LINE> <INDENT> raise SettingsError("Variable '%s' already present in settings" % name) <NEW_LINE> <DEDENT> self.root.append_child(AssignStatement(name, value)) <NEW_LINE> self.changed = True <NEW_LINE> <DEDENT> for name, value in append_settings.iteritems(): <NEW_LINE> <INDENT> if name not in node_dict: <NEW_LINE> <INDENT> if create_if_missing: <NEW_LINE> <INDENT> self.root.append_child(AssignStatement(name, value)) <NEW_LINE> self.changed = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise SettingsError("Variable '%s' missing from settings" % name) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> append_to_assignment_node(node_dict[name], value) <NEW_LINE> self.changed = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _parse_func(self, drv): <NEW_LINE> <INDENT> return drv.parse_string
Base class that implements node tree parsing and updating. Handles strings.
6259907616aa5153ce401e41
class DeletionRobot: <NEW_LINE> <INDENT> def __init__(self, generator, summary, always=False, undelete=True): <NEW_LINE> <INDENT> self.generator = generator <NEW_LINE> self.summary = summary <NEW_LINE> self.prompt = not always <NEW_LINE> self.undelete = undelete <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> for page in self.generator: <NEW_LINE> <INDENT> pywikibot.output(u'Processing page %s' % page.title()) <NEW_LINE> if self.undelete: <NEW_LINE> <INDENT> page.undelete(self.summary) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> page.delete(self.summary, self.prompt)
This robot allows deletion (or undeletion) of pages en masse.
6259907656ac1b37e6303996
class DataViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Data.objects.all() <NEW_LINE> serializer_class = DataSerializer
API endpoint that allows data to be viewed or edited.
625990764a966d76dd5f0852
class LutronCasetaFan(LutronCasetaDevice, FanEntity): <NEW_LINE> <INDENT> @property <NEW_LINE> def speed(self) -> str: <NEW_LINE> <INDENT> return VALUE_TO_SPEED[self._device["fan_speed"]] <NEW_LINE> <DEDENT> @property <NEW_LINE> def speed_list(self) -> list: <NEW_LINE> <INDENT> return FAN_SPEEDS <NEW_LINE> <DEDENT> @property <NEW_LINE> def supported_features(self) -> int: <NEW_LINE> <INDENT> return SUPPORT_SET_SPEED <NEW_LINE> <DEDENT> async def async_turn_on(self, speed: str = None, **kwargs): <NEW_LINE> <INDENT> if speed is None: <NEW_LINE> <INDENT> speed = SPEED_MEDIUM <NEW_LINE> <DEDENT> await self.async_set_speed(speed) <NEW_LINE> <DEDENT> async def async_turn_off(self, **kwargs): <NEW_LINE> <INDENT> await self.async_set_speed(SPEED_OFF) <NEW_LINE> <DEDENT> async def async_set_speed(self, speed: str) -> None: <NEW_LINE> <INDENT> await self._smartbridge.set_fan(self.device_id, SPEED_TO_VALUE[speed]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return VALUE_TO_SPEED[self._device["fan_speed"]] in [ SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ] <NEW_LINE> <DEDENT> async def async_update(self): <NEW_LINE> <INDENT> self._device = self._smartbridge.get_device_by_id(self.device_id) <NEW_LINE> _LOGGER.debug("State of this lutron fan device is %s", self._device)
Representation of a Lutron Caseta fan, including fan speed.
6259907655399d3f05627e80
class TestExpirable(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testExpirable(self): <NEW_LINE> <INDENT> pass
Expirable unit test stubs
625990762ae34c7f260aca4d
@base <NEW_LINE> class Layer3D(VariableTree): <NEW_LINE> <INDENT> thickness = Array(units='m', desc='layer thickness') <NEW_LINE> angle = Array(units='deg', desc='layup angle')
Same as Layer, except for being a function of span. A layer thickness can go to zero if material disappears at a certain spanwise location.
625990768e7ae83300eea9fa
class Acl(object): <NEW_LINE> <INDENT> def is_authorized(self, user_info=None, action='viewrestr'): <NEW_LINE> <INDENT> if user_info is None: <NEW_LINE> <INDENT> user_info = current_user <NEW_LINE> <DEDENT> restriction = self.get('restriction') <NEW_LINE> if restriction is None: <NEW_LINE> <INDENT> return (1, 'Missing restriction') <NEW_LINE> <DEDENT> if acc_is_user_in_role(user_info, acc_get_role_id(SUPERADMINROLE)): <NEW_LINE> <INDENT> return (0, CFG_WEBACCESS_WARNING_MSGS[0]) <NEW_LINE> <DEDENT> is_authorized = (0, CFG_WEBACCESS_WARNING_MSGS[0]) <NEW_LINE> try: <NEW_LINE> <INDENT> is_authorized = self.acl_pre_authorized_hook( user_info, action, is_authorized) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if is_authorized[0] != 0 and not any(restriction.values()): <NEW_LINE> <INDENT> return is_authorized <NEW_LINE> <DEDENT> for auth_type, auth_value in six.iteritems(restriction): <NEW_LINE> <INDENT> if auth_type == 'status': <NEW_LINE> <INDENT> is_authorized = acc_authorize_action(user_info, action+obj, status=auth_value) <NEW_LINE> <DEDENT> elif auth_type == 'email': <NEW_LINE> <INDENT> if auth_value.lower().strip() != user_info['email'].lower().strip(): <NEW_LINE> <INDENT> is_authorized = (1, 'You must be member of the group' '%s in order to access this document' % repr(auth_value)) <NEW_LINE> <DEDENT> <DEDENT> elif auth_type == 'group': <NEW_LINE> <INDENT> if auth_value not in user_info['group']: <NEW_LINE> <INDENT> is_authorized = (1, 'You must be member of the group' '%s in order to access this document' % repr(auth_value)) <NEW_LINE> <DEDENT> <DEDENT> elif auth_type == 'role': <NEW_LINE> <INDENT> if not acc_is_user_in_role(user_info, acc_get_role_id(auth_value)): <NEW_LINE> <INDENT> is_authorized = (1, 'You must be member in the role %s' ' in order to access this document' % repr(auth_value)) <NEW_LINE> <DEDENT> <DEDENT> elif auth_type == 'firerole': <NEW_LINE> <INDENT> if not acc_firerole_check_user( user_info, compile_role_definition(auth_value)): <NEW_LINE> <INDENT> is_authorized = (1, 'You must be authorized in ' 'order to access this document') <NEW_LINE> <DEDENT> <DEDENT> if is_authorized[0] != 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> is_authorized = self.acl_post_authorized_hook( user_info, action, is_authorized) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return is_authorized
Access-controlled behavior for JSONAlchemy models.
62599076be7bc26dc9252b09
class EntityAssociation( object ): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.recordType = 4 <NEW_LINE> self.changeIndicator = 0 <NEW_LINE> self.associationStatus = 0 <NEW_LINE> self.associationType = 0 <NEW_LINE> self.entityID = EntityID(); <NEW_LINE> self.ownStationLocation = 0 <NEW_LINE> self.physicalConnectionType = 0 <NEW_LINE> self.groupMemberType = 0 <NEW_LINE> self.groupNumber = 0 <NEW_LINE> <DEDENT> def serialize(self, outputStream): <NEW_LINE> <INDENT> outputStream.write_unsigned_byte(self.recordType); <NEW_LINE> outputStream.write_unsigned_byte(self.changeIndicator); <NEW_LINE> outputStream.write_unsigned_byte(self.associationStatus); <NEW_LINE> outputStream.write_unsigned_byte(self.associationType); <NEW_LINE> self.entityID.serialize(outputStream) <NEW_LINE> outputStream.write_unsigned_short(self.ownStationLocation); <NEW_LINE> outputStream.write_unsigned_byte(self.physicalConnectionType); <NEW_LINE> outputStream.write_unsigned_byte(self.groupMemberType); <NEW_LINE> outputStream.write_unsigned_short(self.groupNumber); <NEW_LINE> <DEDENT> def parse(self, inputStream): <NEW_LINE> <INDENT> self.recordType = inputStream.read_unsigned_byte(); <NEW_LINE> self.changeIndicator = inputStream.read_unsigned_byte(); <NEW_LINE> self.associationStatus = inputStream.read_unsigned_byte(); <NEW_LINE> self.associationType = inputStream.read_unsigned_byte(); <NEW_LINE> self.entityID.parse(inputStream) <NEW_LINE> self.ownStationLocation = inputStream.read_unsigned_short(); <NEW_LINE> self.physicalConnectionType = inputStream.read_unsigned_byte(); <NEW_LINE> self.groupMemberType = inputStream.read_unsigned_byte(); <NEW_LINE> self.groupNumber = inputStream.read_unsigned_short();
Association or disassociation of two entities. Section 6.2.94.4.3
625990767cff6e4e811b73a8
class Breadcrumb(models.Model): <NEW_LINE> <INDENT> spot_id = models.BigIntegerField(unique=True) <NEW_LINE> timestamp = models.DateTimeField() <NEW_LINE> point = PointField() <NEW_LINE> raw = JSONField() <NEW_LINE> post = models.ForeignKey( Post, blank=True, null=True, related_name="breadcrumbs", on_delete=models.SET_NULL, ) <NEW_LINE> @property <NEW_LINE> def latitude(self): <NEW_LINE> <INDENT> return self.point.y if self.point else None <NEW_LINE> <DEDENT> @property <NEW_LINE> def longitude(self): <NEW_LINE> <INDENT> return self.point.x if self.point else None <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"({self.latitude}, {self.longitude}) at {self.timestamp}"
A raw track from the SPOT tracker. Store this separately from the Location to prevent clutter, and possibly use it to display a live-ish track.
625990767c178a314d78e89f
class CountryMeta(type): <NEW_LINE> <INDENT> def __getattr__(cls, name): <NEW_LINE> <INDENT> if name.startswith('from'): <NEW_LINE> <INDENT> return partial(cls.fromcode, converter=name[4:]) <NEW_LINE> <DEDENT> return getattr(cls, name)
The :class:`Country` metaclass. Dynamically redirects :meth:`Country.frommycode` to :meth:`Country.fromcode` with the ``mycode`` `converter`.
6259907663b5f9789fe86ace
@pytest.mark.usefixtures('settings', 'import_realm', 'login_broker_sso_form', 'import_realm_external') <NEW_LINE> class Test_CT_TC_SAML_SSO_BROKER_LOGOUT_SIMPLE(): <NEW_LINE> <INDENT> def test_CT_TC_SAML_SSO_BROKER_LOGOUT_SIMPLE(self, settings, login_broker_sso_form): <NEW_LINE> <INDENT> s = Session() <NEW_LINE> sp = settings["sps_saml"][0] <NEW_LINE> sp_ip = sp["ip"] <NEW_LINE> sp_port = sp["port"] <NEW_LINE> sp_scheme = sp["http_scheme"] <NEW_LINE> sp_path = sp["path"] <NEW_LINE> sp_message = sp["logged_in_message"] <NEW_LINE> idp_ip = settings["idp"]["ip"] <NEW_LINE> idp_port = settings["idp"]["port"] <NEW_LINE> idp_scheme = settings["idp"]["http_scheme"] <NEW_LINE> idp_username = settings["idp_external"]["test_realm"]["username"] <NEW_LINE> idp_password = settings["idp_external"]["test_realm"]["password"] <NEW_LINE> idp2_ip = settings["idp_external"]["ip"] <NEW_LINE> idp2_port = settings["idp_external"]["port"] <NEW_LINE> idp2_scheme = settings["idp_external"]["http_scheme"] <NEW_LINE> keycloak_login_form_id = settings["idp"]["login_form_id"] <NEW_LINE> header = req.get_header()
Class to test the CT_TC_SAML_SSO_BROKER_LOGOUT_SIMPLE use case: As a resource owner I need the solution to ensure that all access tokens/sessions are invalidated and not usable anymore after a user of company B, who authenticated on company B IDP, has proceeded to a logout on the target application. Company A applications are protected by Cloudtrust, which acts as a broker.
625990767d847024c075dd43
class TRPO(PPO): <NEW_LINE> <INDENT> def __init__(self, env, monitor_path: str, video=False, **usercfg) -> None: <NEW_LINE> <INDENT> usercfg["kl_coef"] = 1.0 <NEW_LINE> super().__init__(env, monitor_path, video=video, **usercfg) <NEW_LINE> <DEDENT> def _actor_loss(self, old_logprob, new_logprob, advantage): <NEW_LINE> <INDENT> return trpo_loss(old_logprob, new_logprob, self.config["kl_coef"], advantage)
Trust Region Policy Optimization agent.
6259907601c39578d7f143e9
class RenameRecipientList(Choreography): <NEW_LINE> <INDENT> def __init__(self, temboo_session): <NEW_LINE> <INDENT> Choreography.__init__(self, temboo_session, '/Library/SendGrid/NewsletterAPI/Lists/RenameRecipientList') <NEW_LINE> <DEDENT> def new_input_set(self): <NEW_LINE> <INDENT> return RenameRecipientListInputSet() <NEW_LINE> <DEDENT> def _make_result_set(self, result, path): <NEW_LINE> <INDENT> return RenameRecipientListResultSet(result, path) <NEW_LINE> <DEDENT> def _make_execution(self, session, exec_id, path): <NEW_LINE> <INDENT> return RenameRecipientListChoreographyExecution(session, exec_id, path)
Create a new instance of the RenameRecipientList Choreography. A TembooSession object, containing a valid set of Temboo credentials, must be supplied.
6259907699cbb53fe683284e
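A usage sketch for the RenameRecipientList record above; TembooSession, the input setters, and execute_with_results() follow the usual Temboo SDK pattern and are assumptions, since only new_input_set() appears in the record.

```python
# Hedged sketch: session credentials are placeholders; execute_with_results() and the
# input setters are assumed from the standard Temboo SDK pattern, not from the record.
from temboo.core.session import TembooSession

session = TembooSession('ACCOUNT_NAME', 'APP_NAME', 'APP_KEY_VALUE')
choreo = RenameRecipientList(session)
inputs = choreo.new_input_set()          # shown in the record above
# inputs.set_APIKey(...), inputs.set_ListName(...), etc. would be set here (assumed setters)
results = choreo.execute_with_results(inputs)
```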
class VrfSvcsRuleAction(IntEnum): <NEW_LINE> <INDENT> RULE_ACTION_ALLOW = 1 <NEW_LINE> RULE_ACTION_DENY = 2 <NEW_LINE> RULE_ACTION_DNAT = 3 <NEW_LINE> RULE_ACTION_REJECT = 4 <NEW_LINE> RULE_ACTION_SNAT = 5
Actions defined in the model; insert new action values per the model before the internal actions.
6259907656ac1b37e6303997
class Account: <NEW_LINE> <INDENT> def __init__(self, nAccountNo, nCustomerName, nBalance): <NEW_LINE> <INDENT> self.__AccountNo = nAccountNo <NEW_LINE> self.__CustomerName = nCustomerName <NEW_LINE> self.__Balance = nBalance <NEW_LINE> <DEDENT> def getAccountNo(self): <NEW_LINE> <INDENT> return self.__AccountNo <NEW_LINE> <DEDENT> def getCustomerName(self): <NEW_LINE> <INDENT> return self.__CustomerName <NEW_LINE> <DEDENT> def getBalance(self): <NEW_LINE> <INDENT> return self.__Balance <NEW_LINE> <DEDENT> def setBalance(self, newBalance): <NEW_LINE> <INDENT> self.__Balance = newBalance <NEW_LINE> <DEDENT> def deposit(self, amount): <NEW_LINE> <INDENT> self.__Balance = self.__Balance + amount <NEW_LINE> <DEDENT> def withdraw(self, amount): <NEW_LINE> <INDENT> self.__Balance = self.__Balance - amount <NEW_LINE> <DEDENT> def display(self): <NEW_LINE> <INDENT> print("Account No:", self.__AccountNo) <NEW_LINE> print("Customer Name:", self.__CustomerName) <NEW_LINE> print("Balance: ${0:.2f}".format(self.__Balance))
superclass for bank account
62599076cc0a2c111447c786
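A short worked example using only the methods shown in the Account record above.

```python
acct = Account(1001, "Alice", 250.0)
acct.deposit(50.0)     # balance -> 300.0
acct.withdraw(120.0)   # balance -> 180.0
acct.display()         # prints the account number, customer name and "Balance: $180.00"
```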
class FirewallPolicyListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[FirewallPolicy]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["FirewallPolicy"]] = None, next_link: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(FirewallPolicyListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = next_link
Response for ListFirewallPolicies API service call. :param value: List of Firewall Policies in a resource group. :type value: list[~azure.mgmt.network.v2019_11_01.models.FirewallPolicy] :param next_link: URL to get the next set of results. :type next_link: str
625990764a966d76dd5f0855
class random_read_generator: <NEW_LINE> <INDENT> def __init__(self, bamfile, chrom, barcode_getter): <NEW_LINE> <INDENT> inbam = pysam.Samfile(bamfile) <NEW_LINE> if chrom: <NEW_LINE> <INDENT> self.inbam = inbam.fetch(reference=chrom) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.inbam = inbam.fetch() <NEW_LINE> <DEDENT> self.umis = collections.defaultdict(int) <NEW_LINE> self.barcode_getter = barcode_getter <NEW_LINE> self.random_fill_size = 100000 <NEW_LINE> self.fill() <NEW_LINE> <DEDENT> def refill_random(self): <NEW_LINE> <INDENT> self.random_umis = np.random.choice( list(self.umis.keys()), self.random_fill_size, p=self.prob) <NEW_LINE> self.random_ix = 0 <NEW_LINE> <DEDENT> def fill(self): <NEW_LINE> <INDENT> self.frequency2umis = collections.defaultdict(list) <NEW_LINE> for read in self.inbam: <NEW_LINE> <INDENT> if read.is_unmapped: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if read.is_read2: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.umis[self.barcode_getter(read)[0]] += 1 <NEW_LINE> <DEDENT> self.umis_counter = collections.Counter(self.umis) <NEW_LINE> total_umis = sum(self.umis_counter.values()) <NEW_LINE> U.info("total_umis %i" % total_umis) <NEW_LINE> U.info("#umis %i" % len(self.umis_counter)) <NEW_LINE> self.prob = self.umis_counter.values() <NEW_LINE> sum_prob = sum(self.prob) <NEW_LINE> self.prob = [float(x) / sum_prob for x in self.prob] <NEW_LINE> self.refill_random() <NEW_LINE> <DEDENT> def getUmis(self, n): <NEW_LINE> <INDENT> if n < (self.random_fill_size - self.random_ix): <NEW_LINE> <INDENT> barcodes = self.random_umis[self.random_ix: self.random_ix+n] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if n > self.random_fill_size: <NEW_LINE> <INDENT> self.random_fill_size = n * 2 <NEW_LINE> <DEDENT> self.refill_random() <NEW_LINE> barcodes = self.random_umis[self.random_ix: self.random_ix+n] <NEW_LINE> <DEDENT> self.random_ix += n <NEW_LINE> return barcodes
Class to generate UMIs at random based on the distribution of UMIs in a BAM file.
6259907676e4537e8c3f0ee9
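A usage sketch for the random_read_generator record above; the BAM path is a placeholder and the barcode getter is a hypothetical stand-in that treats the last underscore-separated field of the read name as the UMI, matching the barcode_getter(read)[0] call in the record.

```python
# Hedged sketch: 'sample.bam' and the read-name UMI convention are assumptions.
def barcode_getter(read):
    return (read.query_name.split("_")[-1],)

gen = random_read_generator("sample.bam", chrom=None, barcode_getter=barcode_getter)
umis = gen.getUmis(1000)  # 1000 UMIs drawn according to the observed UMI frequencies
```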
class PHPTravelsPage(BasePageObject): <NEW_LINE> <INDENT> def get_header(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_footer(self): <NEW_LINE> <INDENT> pass
This is the base page of the PHP Travels site, which contains methods to reach the header and footer.
625990763d592f4c4edbc812
class ArgumentError(NelkitException): <NEW_LINE> <INDENT> pass
Raised when passing invalid arguments using CLI tools.
625990768e7ae83300eea9fd
class CreateCommand(QuantumCommand, show.ShowOne): <NEW_LINE> <INDENT> api = 'network' <NEW_LINE> resource = None <NEW_LINE> log = None <NEW_LINE> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(CreateCommand, self).get_parser(prog_name) <NEW_LINE> parser.add_argument( '--tenant-id', metavar='TENANT_ID', help=_('the owner tenant ID'), ) <NEW_LINE> parser.add_argument( '--tenant_id', help=argparse.SUPPRESS) <NEW_LINE> self.add_known_arguments(parser) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def get_data(self, parsed_args): <NEW_LINE> <INDENT> self.log.debug('get_data(%s)' % parsed_args) <NEW_LINE> quantum_client = self.get_client() <NEW_LINE> quantum_client.format = parsed_args.request_format <NEW_LINE> _extra_values = parse_args_to_dict(self.values_specs) <NEW_LINE> _merge_args(self, parsed_args, _extra_values, self.values_specs) <NEW_LINE> body = self.args2body(parsed_args) <NEW_LINE> body[self.resource].update(_extra_values) <NEW_LINE> obj_creator = getattr(quantum_client, "create_%s" % self.resource) <NEW_LINE> data = obj_creator(body) <NEW_LINE> self.format_output_data(data) <NEW_LINE> info = self.resource in data and data[self.resource] or None <NEW_LINE> if info: <NEW_LINE> <INDENT> print >>self.app.stdout, _('Created a new %s:' % self.resource) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> info = {'': ''} <NEW_LINE> <DEDENT> return zip(*sorted(info.iteritems()))
Create a resource for a given tenant
625990767047854f46340d23
class MarginalInference(InferenceModule): <NEW_LINE> <INDENT> def initializeUniformly(self, gameState): <NEW_LINE> <INDENT> if self.index == 1: <NEW_LINE> <INDENT> jointInference.initialize(gameState, self.legalPositions) <NEW_LINE> <DEDENT> jointInference.addGhostAgent(self.ghostAgent) <NEW_LINE> <DEDENT> def observe(self, gameState): <NEW_LINE> <INDENT> if self.index == 1: <NEW_LINE> <INDENT> jointInference.observe(gameState) <NEW_LINE> <DEDENT> <DEDENT> def predict(self, gameState): <NEW_LINE> <INDENT> if self.index == 1: <NEW_LINE> <INDENT> jointInference.predict(gameState) <NEW_LINE> <DEDENT> <DEDENT> def getBeliefDistribution(self): <NEW_LINE> <INDENT> jointDistribution = jointInference.getBeliefDistribution() <NEW_LINE> dist = DiscreteDistribution() <NEW_LINE> for t, prob in jointDistribution.items(): <NEW_LINE> <INDENT> dist[t[self.index - 1]] += prob <NEW_LINE> <DEDENT> return dist
A wrapper around the JointInference module that returns marginal beliefs about ghosts.
625990764f6381625f19a160
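A standalone sketch of the marginalization performed in getBeliefDistribution() above, using a plain dict in place of the framework's DiscreteDistribution; the joint belief values are made up for illustration.

```python
from collections import defaultdict

# Joint belief over tuples of ghost positions (illustrative numbers).
joint = {((1, 1), (2, 3)): 0.4, ((1, 1), (4, 4)): 0.1, ((5, 5), (2, 3)): 0.5}
ghost_index = 0  # plays the role of self.index - 1 in the record above

marginal = defaultdict(float)
for positions, prob in joint.items():
    marginal[positions[ghost_index]] += prob

print(dict(marginal))  # {(1, 1): 0.5, (5, 5): 0.5}
```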
class OutputUrl(models.Model): <NEW_LINE> <INDENT> unique_inputs = models.ForeignKey(TaxSaveInputs, default=None) <NEW_LINE> user = models.ForeignKey(User, null=True, default=None) <NEW_LINE> model_pk = models.IntegerField(default=None, null=True) <NEW_LINE> uuid = UUIDField(auto=True, default=None, null=True) <NEW_LINE> taxcalc_vers = models.CharField(blank=True, default=None, null=True, max_length=50) <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> kwargs = { 'pk': self.pk } <NEW_LINE> return reverse('output_detail', kwargs=kwargs)
This model creates a unique URL for each calculation.
6259907671ff763f4b5e9116
class LinearAdstockTransformation(object): <NEW_LINE> <INDENT> def __init__(self, variables, week_to_peak, length, retention_rate): <NEW_LINE> <INDENT> self.week_to_peak = week_to_peak <NEW_LINE> self.length = length <NEW_LINE> self.retention_rate = retention_rate <NEW_LINE> self.variables = np.matrix(variables) <NEW_LINE> if self.variables.shape[0] < self.length: <NEW_LINE> <INDENT> sys.exit('Number of weeks is less than adstock length. Program Stops') <NEW_LINE> <DEDENT> <DEDENT> def linear_build_adstock_transform(self): <NEW_LINE> <INDENT> adstock_build = np.arange(1, self.week_to_peak + 1) / self.week_to_peak <NEW_LINE> adstock_decay = self.retention_rate ** np.arange(1, self.length - self.week_to_peak + 1) <NEW_LINE> adstock_coeff = np.concatenate([adstock_build, adstock_decay])[:, np.newaxis] <NEW_LINE> adstock_coeff_normalized = adstock_coeff / sum(adstock_coeff) <NEW_LINE> final_adstock = np.zeros(self.variables.shape) <NEW_LINE> for week in range(len(self.variables)): <NEW_LINE> <INDENT> adstock = np.squeeze(np.asarray(self.variables[week, :]), axis=0) * adstock_coeff_normalized <NEW_LINE> adstock = np.concatenate([np.zeros((week, self.variables.shape[1])), adstock], axis=0) <NEW_LINE> if len(adstock) < len(final_adstock): <NEW_LINE> <INDENT> final_adstock[:len(adstock), :] += adstock <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> final_adstock += adstock[:len(final_adstock), :] <NEW_LINE> <DEDENT> <DEDENT> return final_adstock
This class performs adstock transformation on variables with the same adstock transformation parameters. It implements a linear adstock build.
62599076e1aae11d1e7cf4c4
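A worked example for the LinearAdstockTransformation record above: ten weeks of spend on two channels, a two-week build to peak, a five-week adstock window and 50% weekly retention; the numbers are illustrative.

```python
import numpy as np

spend = np.zeros((10, 2))
spend[0, 0] = 100.0   # single burst on channel 0 in week 0
spend[3, 1] = 50.0    # single burst on channel 1 in week 3

transform = LinearAdstockTransformation(
    variables=spend, week_to_peak=2, length=5, retention_rate=0.5)
adstocked = transform.linear_build_adstock_transform()
print(adstocked.shape)  # (10, 2): each burst is spread over its 5-week adstock window
```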
class VerificationDeadline(TimeStampedModel): <NEW_LINE> <INDENT> class Meta(object): <NEW_LINE> <INDENT> app_label = "verify_student" <NEW_LINE> <DEDENT> course_key = CourseKeyField( max_length=255, db_index=True, unique=True, help_text=ugettext_lazy(u"The course for which this deadline applies"), ) <NEW_LINE> deadline = models.DateTimeField( help_text=ugettext_lazy( u"The datetime after which users are no longer allowed " u"to submit photos for verification." ) ) <NEW_LINE> deadline_is_explicit = models.BooleanField(default=False) <NEW_LINE> ALL_DEADLINES_CACHE_KEY = "verify_student.all_verification_deadlines" <NEW_LINE> @classmethod <NEW_LINE> def set_deadline(cls, course_key, deadline, is_explicit=False): <NEW_LINE> <INDENT> if deadline is None: <NEW_LINE> <INDENT> VerificationDeadline.objects.filter(course_key=course_key).delete() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> record, created = VerificationDeadline.objects.get_or_create( course_key=course_key, defaults={"deadline": deadline, "deadline_is_explicit": is_explicit} ) <NEW_LINE> if not created: <NEW_LINE> <INDENT> record.deadline = deadline <NEW_LINE> record.deadline_is_explicit = is_explicit <NEW_LINE> record.save() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def deadlines_for_courses(cls, course_keys): <NEW_LINE> <INDENT> all_deadlines = cache.get(cls.ALL_DEADLINES_CACHE_KEY) <NEW_LINE> if all_deadlines is None: <NEW_LINE> <INDENT> all_deadlines = { deadline.course_key: deadline.deadline for deadline in VerificationDeadline.objects.all() } <NEW_LINE> cache.set(cls.ALL_DEADLINES_CACHE_KEY, all_deadlines) <NEW_LINE> <DEDENT> return { course_key: all_deadlines[course_key] for course_key in course_keys if course_key in all_deadlines } <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def deadline_for_course(cls, course_key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> deadline = cls.objects.get(course_key=course_key) <NEW_LINE> return deadline.deadline <NEW_LINE> <DEDENT> except cls.DoesNotExist: <NEW_LINE> <INDENT> return None
Represent a verification deadline for a particular course. The verification deadline is the datetime after which users are no longer allowed to submit photos for initial verification in a course. Note that this is NOT the same as the "upgrade" deadline, after which a user is no longer allowed to upgrade to a verified enrollment. If no verification deadline record exists for a course, then that course does not have a deadline. This means that users can submit photos at any time.
62599076f9cc0f698b1c5f81
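A usage sketch for the VerificationDeadline record above; it assumes a configured edx-platform Django environment, and the course key string and deadline are placeholders.

```python
# Hedged sketch: assumes a configured Django/edx-platform environment.
from datetime import datetime, timedelta
from opaque_keys.edx.keys import CourseKey

course_key = CourseKey.from_string("course-v1:TestX+VER101+2024")  # hypothetical course
VerificationDeadline.set_deadline(
    course_key, datetime.utcnow() + timedelta(days=30), is_explicit=True)
deadline = VerificationDeadline.deadline_for_course(course_key)  # datetime or None
```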
class JSONLogRotator(BaseLogRotator): <NEW_LINE> <INDENT> def _make_writer(self): <NEW_LINE> <INDENT> self._buffer = StringIO() <NEW_LINE> self._bytes_written = 0 <NEW_LINE> now = datetime.now() <NEW_LINE> self.fname = self.log_folder + '/' + now.strftime('%Y%m%d_%H%M%S_{}.json'.format(self.make_random(6))) <NEW_LINE> self.fname = str(pathlib.Path(self.fname)) <NEW_LINE> self._out_fh = open(self.fname, 'w') <NEW_LINE> self.write_pid() <NEW_LINE> logging.warning("Writing to {} ({} bytes)".format(self._out_fh.name, self.max_bytes)) <NEW_LINE> for fname in glob(self.log_folder+"/*.json"): <NEW_LINE> <INDENT> if fname != self.fname: <NEW_LINE> <INDENT> self._compress(fname) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def write_pid(self): <NEW_LINE> <INDENT> with open(self.pid_file, 'w') as pid_out: <NEW_LINE> <INDENT> pid_out.write(self.fname) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def make_random(nchars): <NEW_LINE> <INDENT> alphabet = string.ascii_letters + string.digits <NEW_LINE> return ''.join(random.choice(alphabet) for _ in range(nchars)) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self._out_fh.close() <NEW_LINE> <DEDENT> def _compress(self, fname): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(fname, 'rb') as f_in: <NEW_LINE> <INDENT> logging.warning("Compressing {0} into {0}.gz".format(fname)) <NEW_LINE> with gzip.open(fname + '.gz', 'wb') as f_out: <NEW_LINE> <INDENT> shutil.copyfileobj(f_in, f_out) <NEW_LINE> <DEDENT> <DEDENT> logging.warning("Removing {}".format(fname)) <NEW_LINE> os.remove(fname) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error(e) <NEW_LINE> <DEDENT> <DEDENT> def writerow(self, row): <NEW_LINE> <INDENT> out_txt = json.dumps(row) + "\n" <NEW_LINE> self._bytes_written += self._out_fh.write(out_txt) <NEW_LINE> self._out_fh.flush() <NEW_LINE> if self._bytes_written > self.max_bytes: <NEW_LINE> <INDENT> self.close() <NEW_LINE> self._compress(self.fname) <NEW_LINE> self._make_writer() <NEW_LINE> <DEDENT> return out_txt
A JSON Log Rotator
625990768a349b6b43687bc5
class TaskContainerExecutionInformation(Model): <NEW_LINE> <INDENT> _attribute_map = { 'container_id': {'key': 'containerId', 'type': 'str'}, 'state': {'key': 'state', 'type': 'str'}, 'error': {'key': 'error', 'type': 'str'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(TaskContainerExecutionInformation, self).__init__(**kwargs) <NEW_LINE> self.container_id = kwargs.get('container_id', None) <NEW_LINE> self.state = kwargs.get('state', None) <NEW_LINE> self.error = kwargs.get('error', None)
Contains information about the container which a Task is executing. :param container_id: :type container_id: str :param state: This is the state of the container according to the Docker service. It is equivalent to the status field returned by "docker inspect". :type state: str :param error: This is the detailed error string from the Docker service, if available. It is equivalent to the error field returned by "docker inspect". :type error: str
625990767d43ff24874280c9
class BinaryEdgeException(Exception): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> self.message = message <NEW_LINE> Exception.__init__(self, message)
Exception raised if a request to BinaryEdge returns anything other than 200
6259907632920d7e50bc79b3
class settings(object): <NEW_LINE> <INDENT> def __init__(self, ): <NEW_LINE> <INDENT> self._config_data = None <NEW_LINE> self._default_cinfig_data = {'database' : {'path' : ''}, 'behavior' : {'load_last_database' : True}, 'interface' : {'odd_color' : '#FFFFFF', 'even_color' : '#FFFFFF'}, 'chart' : {'legend': {'position' : 'bottom', 'font' : 'Sans 14', 'color' : '#FFFFFF'}, 'mesh' : {'line_width' : 1.5, 'color' : '#FFFFFF', 'font' : {'name' : 'Sans 14'}}, 'background' : {'color' : '#FFFFFF'}, 'top_indent' : 5, 'bottom_indent' : 5}} <NEW_LINE> if os.name in ['posix', 'nt']: <NEW_LINE> <INDENT> if os.name == 'nt': <NEW_LINE> <INDENT> os.environ['HOME'] = os.path.join(os.environ['HOMEDRIVE'], os.environ['HOMEPATH']) <NEW_LINE> <DEDENT> config_dir = None <NEW_LINE> if not is_null_or_empty(gethash(os.environ, 'TD_CONFIG_DIR')): <NEW_LINE> <INDENT> config_dir = os.environ['TD_CONFIG_DIR'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not is_null_or_empty(gethash(os.environ, 'HOME')): <NEW_LINE> <INDENT> config_dir = os.path.join(os.environ['HOME'], '.track-deals') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise EnvironmentError('There is not HOME environment specified') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError('There is no code for windoze yet') <NEW_LINE> <DEDENT> self.config_file = os.path.join(config_dir, 'gtk-view.cfg') <NEW_LINE> if not os.path.exists(config_dir): <NEW_LINE> <INDENT> os.makedirs(config_dir) <NEW_LINE> <DEDENT> if not os.path.exists(self.config_file): <NEW_LINE> <INDENT> self.make_default_config() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.read_config() <NEW_LINE> <DEDENT> <DEDENT> def read_config(self, ): <NEW_LINE> <INDENT> with open(self.config_file) as f: <NEW_LINE> <INDENT> self._config_data = json.load(f) <NEW_LINE> <DEDENT> <DEDENT> def make_default_config(self, ): <NEW_LINE> <INDENT> self._config_data = self._default_cinfig_data <NEW_LINE> self.save_config() <NEW_LINE> <DEDENT> def save_config(self, ): <NEW_LINE> <INDENT> with open(self.config_file, 'w') as f: <NEW_LINE> <INDENT> json.dump(self._config_data, f, indent = 4) <NEW_LINE> <DEDENT> <DEDENT> def get_key(self, key_name): <NEW_LINE> <INDENT> val = self._config_data <NEW_LINE> try: <NEW_LINE> <INDENT> for key in map(lambda a: a.strip(), split('\.', key_name)): <NEW_LINE> <INDENT> val = val[key] <NEW_LINE> <DEDENT> <DEDENT> except KeyError as e: <NEW_LINE> <INDENT> raise od_exceptions.od_exception_config_key_error('There is no such key in config "{0}"'.format(key_name)) <NEW_LINE> <DEDENT> return val <NEW_LINE> <DEDENT> def set_key(self, key_name, value): <NEW_LINE> <INDENT> val = self._config_data <NEW_LINE> keys = map(lambda a: a.strip(), split('\.', key_name)) <NEW_LINE> for key in keys[:-1]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if not isinstance(val[key], dict): <NEW_LINE> <INDENT> val[key] = {} <NEW_LINE> <DEDENT> val = val[key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> val[key] = {} <NEW_LINE> val = val[key] <NEW_LINE> <DEDENT> <DEDENT> val[keys[-1]] = value
Settings saving class.
62599076091ae356687065a5
class HuggingFaceFellowship(_BaseScenario): <NEW_LINE> <INDENT> def __init__( self, hf_datasets: Union[List[HFDataset], List[str], List[Tuple]], lazy: bool = False, train: bool = True ): <NEW_LINE> <INDENT> self.hf_datasets = hf_datasets <NEW_LINE> self.lazy = lazy <NEW_LINE> self.split = "train" if train else "test" <NEW_LINE> <DEDENT> def _setup(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> def train(self) -> bool: <NEW_LINE> <INDENT> return self.split == "train" <NEW_LINE> <DEDENT> @property <NEW_LINE> def nb_samples(self) -> int: <NEW_LINE> <INDENT> if self.lazy: <NEW_LINE> <INDENT> raise Exception("Cannot tell the number of samples if datasets are lazyly loaded.") <NEW_LINE> <DEDENT> return sum(len(dataset) for dataset in self.hf_datasets) <NEW_LINE> <DEDENT> @property <NEW_LINE> def nb_classes(self) -> int: <NEW_LINE> <INDENT> raise NotImplementedError("Not available for this kind of scenario.") <NEW_LINE> <DEDENT> @property <NEW_LINE> def classes(self) -> List: <NEW_LINE> <INDENT> raise NotImplementedError("Not available for this kind of scenario.") <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.hf_datasets) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> if self.lazy: <NEW_LINE> <INDENT> if isinstance(self.hf_datasets[index], tuple): <NEW_LINE> <INDENT> return datasets.load_dataset(*self.hf_datasets[index], split=self.split) <NEW_LINE> <DEDENT> return datasets.load_dataset(self.hf_datasets[index], split=self.split) <NEW_LINE> <DEDENT> return self.hf_datasets[index]
A scenario for a collection of HuggingFace (HF) datasets. It simply wraps multiple datasets and returns them one by one. For a full list of the available datasets (HuggingFace only!), see: https://huggingface.co/datasets :param hf_datasets: A list of HF dataset instances or a list of HF dataset string ids. :param lazy: Load datasets on-the-fly when needed. :param train: Train split vs test split.
625990765fdd1c0f98e5f8e9
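A usage sketch for the HuggingFaceFellowship record above; the dataset ids are placeholder HuggingFace hub names, and with lazy=True each dataset is only downloaded when indexed.

```python
fellowship = HuggingFaceFellowship(["mnist", "cifar10"], lazy=True, train=True)
print(len(fellowship))      # 2 wrapped datasets
first_task = fellowship[0]  # triggers datasets.load_dataset("mnist", split="train")
```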
class BaseSessionTest(BaseEnvVar): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(BaseSessionTest, self).setUp() <NEW_LINE> self.environ['AWS_ACCESS_KEY_ID'] = 'access_key' <NEW_LINE> self.environ['AWS_SECRET_ACCESS_KEY'] = 'secret_key' <NEW_LINE> self.environ['AWS_CONFIG_FILE'] = 'no-exist-foo' <NEW_LINE> self.session = botocore.session.get_session() <NEW_LINE> patch_session(self.session)
Base class used to provide credentials. This class can be used as a base class by tests that want to use a real session class but need to be completely isolated from the external environment (including environment variables). This class will also set credential vars so you can make fake requests to services.
625990764a966d76dd5f0857
class CouchDbContext(DbContext): <NEW_LINE> <INDENT> DOCUMENT_RESERVED_KEYS = {'_id', '_rev'} <NEW_LINE> def __init__(self, host, db_name): <NEW_LINE> <INDENT> server = couchdb.Server(host) <NEW_LINE> if db_name not in server: <NEW_LINE> <INDENT> raise ValueError('Database \'{}\' doesn\'t exist'.format(db_name)) <NEW_LINE> <DEDENT> self.__db = server[db_name] <NEW_LINE> <DEDENT> def get(self, id): <NEW_LINE> <INDENT> doc = self.__db.get(id) <NEW_LINE> if doc: <NEW_LINE> <INDENT> for key in self.DOCUMENT_RESERVED_KEYS: <NEW_LINE> <INDENT> doc.pop(key) <NEW_LINE> <DEDENT> <DEDENT> return doc or {} <NEW_LINE> <DEDENT> def delete(self, id): <NEW_LINE> <INDENT> conflict = True <NEW_LINE> while conflict: <NEW_LINE> <INDENT> doc = self.__db.get(id) <NEW_LINE> if doc: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.__db.delete(doc) <NEW_LINE> conflict = False <NEW_LINE> <DEDENT> except couchdb.ResourceConflict as e: <NEW_LINE> <INDENT> conflict = True <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> conflict = False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def upsert(self, id, obj): <NEW_LINE> <INDENT> doc = self.__db.get(id) <NEW_LINE> if doc: <NEW_LINE> <INDENT> obj['_rev'] = doc.rev <NEW_LINE> self.__db[id] = obj <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> obj['_id'] = id <NEW_LINE> self.__db.save(obj) <NEW_LINE> <DEDENT> return 1 if doc else 0
A DbContext that wraps a CouchDb server.
62599076bf627c535bcb2e39
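A usage sketch exercising only the methods shown in the CouchDbContext record above; the server URL and database name are placeholders, and the database must already exist.

```python
ctx = CouchDbContext("http://localhost:5984/", "example_db")
ctx.upsert("doc-1", {"name": "alpha", "count": 1})
ctx.upsert("doc-1", {"name": "alpha", "count": 2})  # updates the existing revision
print(ctx.get("doc-1"))   # {'name': 'alpha', 'count': 2}
ctx.delete("doc-1")
print(ctx.get("doc-1"))   # {}
```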
class VppGreInterface(VppInterface): <NEW_LINE> <INDENT> def __init__(self, test, src_ip, dst_ip, outer_fib_id=0, is_teb=0): <NEW_LINE> <INDENT> self._sw_if_index = 0 <NEW_LINE> super(VppGreInterface, self).__init__(test) <NEW_LINE> self._test = test <NEW_LINE> self.t_src = src_ip <NEW_LINE> self.t_dst = dst_ip <NEW_LINE> self.t_outer_fib = outer_fib_id <NEW_LINE> self.t_is_teb = is_teb <NEW_LINE> <DEDENT> def add_vpp_config(self): <NEW_LINE> <INDENT> s = socket.inet_pton(socket.AF_INET, self.t_src) <NEW_LINE> d = socket.inet_pton(socket.AF_INET, self.t_dst) <NEW_LINE> r = self.test.vapi.gre_tunnel_add_del(s, d, outer_fib_id=self.t_outer_fib, is_teb=self.t_is_teb) <NEW_LINE> self._sw_if_index = r.sw_if_index <NEW_LINE> self.generate_remote_hosts() <NEW_LINE> <DEDENT> def remove_vpp_config(self): <NEW_LINE> <INDENT> s = socket.inet_pton(socket.AF_INET, self.t_src) <NEW_LINE> d = socket.inet_pton(socket.AF_INET, self.t_dst) <NEW_LINE> self.unconfig() <NEW_LINE> r = self.test.vapi.gre_tunnel_add_del(s, d, outer_fib_id=self.t_outer_fib, is_add=0)
VPP GRE interface
625990765fdd1c0f98e5f8ea
class IdHasherTests(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.random_string = str(random.randint(-1e6, 1e6)) <NEW_LINE> self.random_int = random.randint(-1e6, 1e6) <NEW_LINE> self.test_string = "ldlaldfd_dfdfldfdf_ssdfdf" <NEW_LINE> self.test_empty = "" <NEW_LINE> self.test_none = None <NEW_LINE> self.rand_32string = str(random.randint(1e31, 1e32-1)) <NEW_LINE> <DEDENT> def testIdenticalHash(self): <NEW_LINE> <INDENT> self.assertEqual(hash_string(self.random_string), hash_string(self.random_string)) <NEW_LINE> <DEDENT> def testIntegerHash(self): <NEW_LINE> <INDENT> self.assertEqual(hash_string(self.random_int), hash_string(self.random_int)) <NEW_LINE> <DEDENT> def testValueBetweenOneZero(self): <NEW_LINE> <INDENT> t1 = assign_hash_to_zero_one(self.rand_32string) <NEW_LINE> self.assertTrue(t1 >= 0 and t1 <= 1) <NEW_LINE> <DEDENT> def testValueType(self): <NEW_LINE> <INDENT> t2 = assign_hash_to_zero_one(self.rand_32string) <NEW_LINE> self.assertTrue(type(t2) is float)
Test Hash Function
625990769c8ee82313040e3d
class VIEW3D_TP_X_Mod_Mirror(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "tp_ops.mod_mirror_x" <NEW_LINE> bl_label = "Mirror X" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> scene = bpy.context.scene <NEW_LINE> selected = bpy.context.selected_objects <NEW_LINE> object = bpy.ops.object <NEW_LINE> for obj in selected: <NEW_LINE> <INDENT> scene.objects.active = obj <NEW_LINE> object.modifier_add(type = "MIRROR") <NEW_LINE> for mod in obj.modifiers: <NEW_LINE> <INDENT> if mod.type == "MIRROR": <NEW_LINE> <INDENT> bpy.context.object.modifiers["Mirror"].use_x = True <NEW_LINE> bpy.context.object.modifiers["Mirror"].use_y = False <NEW_LINE> bpy.context.object.modifiers["Mirror"].use_z = False <NEW_LINE> bpy.context.object.modifiers["Mirror"].show_on_cage = True <NEW_LINE> bpy.context.object.modifiers["Mirror"].use_clip = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return {'FINISHED'}
Add an X mirror modifier with cage and clipping
62599076a05bb46b3848bde1