code (stringlengths 4 to 4.48k) | docstring (stringlengths 1 to 6.45k) | _id (stringlengths 24 to 24) |
---|---|---|
class BgpPeerStatus(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'local_address': {'readonly': True}, 'neighbor': {'readonly': True}, 'asn': {'readonly': True}, 'state': {'readonly': True}, 'connected_duration': {'readonly': True}, 'routes_received': {'readonly': True}, 'messages_sent': {'readonly': True}, 'messages_received': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'local_address': {'key': 'localAddress', 'type': 'str'}, 'neighbor': {'key': 'neighbor', 'type': 'str'}, 'asn': {'key': 'asn', 'type': 'int'}, 'state': {'key': 'state', 'type': 'str'}, 'connected_duration': {'key': 'connectedDuration', 'type': 'str'}, 'routes_received': {'key': 'routesReceived', 'type': 'long'}, 'messages_sent': {'key': 'messagesSent', 'type': 'long'}, 'messages_received': {'key': 'messagesReceived', 'type': 'long'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(BgpPeerStatus, self).__init__(**kwargs) <NEW_LINE> self.local_address = None <NEW_LINE> self.neighbor = None <NEW_LINE> self.asn = None <NEW_LINE> self.state = None <NEW_LINE> self.connected_duration = None <NEW_LINE> self.routes_received = None <NEW_LINE> self.messages_sent = None <NEW_LINE> self.messages_received = None | BGP peer status details.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar local_address: The virtual network gateway's local address.
:vartype local_address: str
:ivar neighbor: The remote BGP peer.
:vartype neighbor: str
:ivar asn: The autonomous system number of the remote BGP peer.
:vartype asn: int
:ivar state: The BGP peer state. Possible values include: "Unknown", "Stopped", "Idle",
"Connecting", "Connected".
:vartype state: str or ~azure.mgmt.network.v2019_11_01.models.BgpPeerState
:ivar connected_duration: For how long the peering has been up.
:vartype connected_duration: str
:ivar routes_received: The number of routes learned from this peer.
:vartype routes_received: long
:ivar messages_sent: The number of BGP messages sent.
:vartype messages_sent: long
:ivar messages_received: The number of BGP messages received.
:vartype messages_received: long | 6259904a3c8af77a43b68923 |
class LoginForm(Form): <NEW_LINE> <INDENT> email = StringField("email", [ validators.DataRequired(), validators.Email(), ]) <NEW_LINE> password = PasswordField("password", [validators.DataRequired()]) | This is the old (v1) login form and is now deprecated | 6259904a76d4e153a661dc5c |
@register_command <NEW_LINE> class MemoryWatchResetCommand(GenericCommand): <NEW_LINE> <INDENT> _cmdline_ = "memory reset" <NEW_LINE> _syntax_ = f"{_cmdline_}" <NEW_LINE> @only_if_gdb_running <NEW_LINE> def do_invoke(self, _: List[str]) -> None: <NEW_LINE> <INDENT> gef.ui.watches.clear() <NEW_LINE> ok("Memory watches cleared") <NEW_LINE> return | Removes all watchpoints. | 6259904a91af0d3eaad3b1f1 |
class OrganizationMixin(CheckOrganizationsEnabled): <NEW_LINE> <INDENT> org_url_field = 'slug' <NEW_LINE> admin_only = True <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return self.get_organization_queryset() <NEW_LINE> <DEDENT> def get_organization_queryset(self): <NEW_LINE> <INDENT> if self.admin_only: <NEW_LINE> <INDENT> return Organization.objects.for_admin_user(user=self.request.user) <NEW_LINE> <DEDENT> return Organization.objects.for_user(user=self.request.user) <NEW_LINE> <DEDENT> @lru_cache(maxsize=1) <NEW_LINE> def get_organization(self): <NEW_LINE> <INDENT> if self.org_url_field not in self.kwargs: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return get_object_or_404( self.get_organization_queryset(), slug=self.kwargs[self.org_url_field], ) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super().get_context_data(**kwargs) <NEW_LINE> organization = self.get_organization() <NEW_LINE> context['organization'] = organization <NEW_LINE> return context | Mixin class that provides organization sublevel objects.
This mixin uses several class-level variables:
org_url_field
The URL kwarg name for the organization slug
admin_only
Boolean that dictates access for organization owners only or just member
access | 6259904ab5575c28eb7136af |
class SceneOperation(Hook): <NEW_LINE> <INDENT> def execute(self, operation, file_path, **kwargs): <NEW_LINE> <INDENT> if operation == "current_path": <NEW_LINE> <INDENT> scene_filepath = Application.ActiveProject.ActiveScene.filename.value <NEW_LINE> scene_name = Application.ActiveProject.ActiveScene.Name <NEW_LINE> if scene_name == "Scene" and os.path.basename(scene_filepath) == "Untitled.scn": <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> return scene_filepath <NEW_LINE> <DEDENT> elif operation == "open": <NEW_LINE> <INDENT> Application.Desktop.RedrawUI() <NEW_LINE> Application.OpenScene(file_path, False, False) <NEW_LINE> <DEDENT> elif operation == "save": <NEW_LINE> <INDENT> Application.SaveScene() <NEW_LINE> <DEDENT> elif operation == "reset": <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> Application.NewScene("", True) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True | Hook called to perform an operation with the
current scene | 6259904ad7e4931a7ef3d443 |
class Device(MongoDB): <NEW_LINE> <INDENT> __collectionname__ = 'devices' <NEW_LINE> rid = AnyField() <NEW_LINE> report = AnyField() <NEW_LINE> report_data_source = AnyField() <NEW_LINE> device_id = AnyField() <NEW_LINE> report_subject = AnyField() <NEW_LINE> account_id = AnyField() <NEW_LINE> aggregator_id = AnyField() <NEW_LINE> availability = AnyField() <NEW_LINE> status = AnyField() <NEW_LINE> spaces = AnyField() <NEW_LINE> def __init__(self, report, device_id, rid, spaces, report_subject, report_data_source, status_item): <NEW_LINE> <INDENT> self.device_id = device_id <NEW_LINE> self.status = status_item <NEW_LINE> self.report = report <NEW_LINE> self.rid = rid <NEW_LINE> try: <NEW_LINE> <INDENT> self.account_id = request.cert['CN'] if hasattr(request, "cert") and 'CN' in request.cert else None <NEW_LINE> self.aggregator_id = request.cert['O'] if hasattr(request, "cert") and 'O' in request.cert else None <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.account_id = None <NEW_LINE> self.aggregator_id = None <NEW_LINE> <DEDENT> self.availability = "" <NEW_LINE> self.spaces = spaces <NEW_LINE> self.report_subject = report_subject <NEW_LINE> self.report_data_source = report_data_source <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_or_create(report, device_id, rid, spaces, report_subject, report_data_source, status_item): <NEW_LINE> <INDENT> dev_test = Device.find_one({Device.device_id(): device_id}) <NEW_LINE> if dev_test: <NEW_LINE> <INDENT> dev_test.status.update(status_item) <NEW_LINE> dev_test.report = report <NEW_LINE> dev_test.rid = rid <NEW_LINE> dev_test.availability = "" <NEW_LINE> dev_test.spaces = spaces <NEW_LINE> dev_test.report_subject = report_subject <NEW_LINE> dev_test.report_data_source = report_data_source <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dev_test = Device(report, device_id, rid, spaces, report_subject, report_data_source, status_item) <NEW_LINE> <DEDENT> return dev_test | An openadr Metadata_report device | 6259904a07d97122c421806f |
class TokenMetadataTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_token(self): <NEW_LINE> <INDENT> expected_node_count = len(get_cluster().nodes) <NEW_LINE> cluster = Cluster(protocol_version=PROTOCOL_VERSION) <NEW_LINE> cluster.connect() <NEW_LINE> tmap = cluster.metadata.token_map <NEW_LINE> self.assertTrue(issubclass(tmap.token_class, Token)) <NEW_LINE> self.assertEqual(expected_node_count, len(tmap.ring)) <NEW_LINE> cluster.shutdown() <NEW_LINE> <DEDENT> def test_getting_replicas(self): <NEW_LINE> <INDENT> tokens = [MD5Token(str(i)) for i in range(0, (2 ** 127 - 1), 2 ** 125)] <NEW_LINE> hosts = [Host("ip%d" % i, SimpleConvictionPolicy) for i in range(len(tokens))] <NEW_LINE> token_to_primary_replica = dict(zip(tokens, hosts)) <NEW_LINE> keyspace = KeyspaceMetadata("ks", True, "SimpleStrategy", {"replication_factor": "1"}) <NEW_LINE> metadata = Mock(spec=Metadata, keyspaces={'ks': keyspace}) <NEW_LINE> token_map = TokenMap(MD5Token, token_to_primary_replica, tokens, metadata) <NEW_LINE> for i, token in enumerate(tokens): <NEW_LINE> <INDENT> expected_host = hosts[(i + 1) % len(hosts)] <NEW_LINE> replicas = token_map.get_replicas("ks", token) <NEW_LINE> self.assertEqual(set(replicas), set([expected_host])) <NEW_LINE> <DEDENT> for token, expected_host in zip(tokens, hosts): <NEW_LINE> <INDENT> replicas = token_map.get_replicas("ks", MD5Token(str(token.value - 1))) <NEW_LINE> self.assertEqual(set(replicas), set([expected_host])) <NEW_LINE> <DEDENT> for i, token in enumerate(tokens): <NEW_LINE> <INDENT> replicas = token_map.get_replicas("ks", MD5Token(str(token.value + 1))) <NEW_LINE> expected_host = hosts[(i + 1) % len(hosts)] <NEW_LINE> self.assertEqual(set(replicas), set([expected_host])) | Test of TokenMap creation and other behavior. | 6259904a7cff6e4e811b6e07 |
class Baker (ChaosGenerator) : <NEW_LINE> <INDENT> def __init__ (self, oshape, mu=0.49999, cascade=True, comp=0, gens=2) : <NEW_LINE> <INDENT> super().__init__ (oshape, oshape+(2,), cascade, gens) <NEW_LINE> self.mu = mu <NEW_LINE> self.comp = comp <NEW_LINE> <DEDENT> def evolve (self, gind) : <NEW_LINE> <INDENT> ret = np.copy(self.cgens[gind,...,self.comp]) <NEW_LINE> x, y = np.copy(self.cgens[gind,...,0]), np.copy(self.cgens[gind,...,1]) <NEW_LINE> less = x < self.mu <NEW_LINE> more = np.invert(less) <NEW_LINE> self.cgens[gind,less,0] = 2*x[less] <NEW_LINE> self.cgens[gind,less,1] = y[less]/2 <NEW_LINE> self.cgens[gind,more,0] = 2 - 2*x[more] <NEW_LINE> self.cgens[gind,more,1] = 1 - y[more]/2 <NEW_LINE> return ret | Baker map --> (2x, y/2) if 0 <= x < 1/2
(2-2x, 1-y/2) 1/2 <= x < 1 | 6259904ae76e3b2f99fd9dd8 |
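The piecewise map in the docstring above can be exercised on its own. A minimal NumPy sketch of one Baker-map iteration over a batch of points, using the class's default `mu` threshold; the `baker_step` helper name is illustrative and not part of the row's code:

```python
import numpy as np

def baker_step(x, y, mu=0.49999):
    """One Baker-map iteration over arrays of points (x, y) in [0, 1)^2."""
    x = np.asarray(x, dtype=float)
    y = np.asarray(y, dtype=float)
    nx, ny = np.empty_like(x), np.empty_like(y)
    less = x < mu                      # branch (2x, y/2)
    more = ~less                       # branch (2 - 2x, 1 - y/2)
    nx[less], ny[less] = 2 * x[less], y[less] / 2
    nx[more], ny[more] = 2 - 2 * x[more], 1 - y[more] / 2
    return nx, ny

# Iterating a small cloud of points shows the stretch-and-fold behaviour.
xs, ys = np.random.rand(5), np.random.rand(5)
for _ in range(3):
    xs, ys = baker_step(xs, ys)
```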
class TestCoLangCommand(unittest.TestCase): <NEW_LINE> <INDENT> def test_backend_class(self): <NEW_LINE> <INDENT> self.assertIs(CoLangCommand.BACKEND, CoLang) <NEW_LINE> <DEDENT> def test_setup_cmd_parser(self): <NEW_LINE> <INDENT> parser = CoLangCommand.setup_cmd_parser() <NEW_LINE> self.assertIsInstance(parser, GraalCommandArgumentParser) <NEW_LINE> self.assertEqual(parser._backend, CoLang) <NEW_LINE> args = ['http://example.com/', '--git-path', '/tmp/gitpath', '--tag', 'test', '--from-date', '1970-01-01'] <NEW_LINE> parsed_args = parser.parse(*args) <NEW_LINE> self.assertEqual(parsed_args.uri, 'http://example.com/') <NEW_LINE> self.assertEqual(parsed_args.git_path, '/tmp/gitpath') <NEW_LINE> self.assertEqual(parsed_args.tag, 'test') <NEW_LINE> self.assertEqual(parsed_args.from_date, DEFAULT_DATETIME) | CoLangCommand tests | 6259904ab830903b9686ee61 |
class Context: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass | Class used to trace assumes, asserts and dumps. | 6259904a63d6d428bbee3b97 |
class NVMeoFQueueStatistics(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swagger_types = { 'successful_connect_requests': 'int', 'connection_failures': 'int', 'disconnects': 'int', 'commands_good_status': 'int', 'commands_error_status': 'int' } <NEW_LINE> self.attribute_map = { 'successful_connect_requests': 'successfulConnectRequests', 'connection_failures': 'connectionFailures', 'disconnects': 'disconnects', 'commands_good_status': 'commandsGoodStatus', 'commands_error_status': 'commandsErrorStatus' } <NEW_LINE> self._successful_connect_requests = None <NEW_LINE> self._connection_failures = None <NEW_LINE> self._disconnects = None <NEW_LINE> self._commands_good_status = None <NEW_LINE> self._commands_error_status = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def successful_connect_requests(self): <NEW_LINE> <INDENT> return self._successful_connect_requests <NEW_LINE> <DEDENT> @successful_connect_requests.setter <NEW_LINE> def successful_connect_requests(self, successful_connect_requests): <NEW_LINE> <INDENT> self._successful_connect_requests = successful_connect_requests <NEW_LINE> <DEDENT> @property <NEW_LINE> def connection_failures(self): <NEW_LINE> <INDENT> return self._connection_failures <NEW_LINE> <DEDENT> @connection_failures.setter <NEW_LINE> def connection_failures(self, connection_failures): <NEW_LINE> <INDENT> self._connection_failures = connection_failures <NEW_LINE> <DEDENT> @property <NEW_LINE> def disconnects(self): <NEW_LINE> <INDENT> return self._disconnects <NEW_LINE> <DEDENT> @disconnects.setter <NEW_LINE> def disconnects(self, disconnects): <NEW_LINE> <INDENT> self._disconnects = disconnects <NEW_LINE> <DEDENT> @property <NEW_LINE> def commands_good_status(self): <NEW_LINE> <INDENT> return self._commands_good_status <NEW_LINE> <DEDENT> @commands_good_status.setter <NEW_LINE> def commands_good_status(self, commands_good_status): <NEW_LINE> <INDENT> self._commands_good_status = commands_good_status <NEW_LINE> <DEDENT> @property <NEW_LINE> def commands_error_status(self): <NEW_LINE> <INDENT> return self._commands_error_status <NEW_LINE> <DEDENT> @commands_error_status.setter <NEW_LINE> def commands_error_status(self, commands_error_status): <NEW_LINE> <INDENT> self._commands_error_status = commands_error_status <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if self is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if self is None or other is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other 
| NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259904ab57a9660fecd2e4a |
class DeleteWantsToWatchInputSet(InputSet): <NEW_LINE> <INDENT> def set_AccessToken(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'AccessToken', value) <NEW_LINE> <DEDENT> def set_ActionID(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'ActionID', value) | An InputSet with methods appropriate for specifying the inputs to the DeleteWantsToWatch
Choreo. The InputSet object is used to specify input parameters when executing this Choreo. | 6259904a3c8af77a43b68924 |
class PickupLocation(TimeStampedModel): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = "Ophaallocatie" <NEW_LINE> <DEDENT> name = models.CharField(max_length=100, unique=True) <NEW_LINE> description = models.CharField(max_length=255) <NEW_LINE> address = models.ForeignKey(Address, null=True, blank=True) <NEW_LINE> is_default = models.BooleanField(default=False) <NEW_LINE> def save(self, **kwargs): <NEW_LINE> <INDENT> if self.is_default: <NEW_LINE> <INDENT> PickupLocation.objects.update(is_default=False) <NEW_LINE> <DEDENT> super(PickupLocation, self).save(**kwargs) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name | Pickup Location for an order round | 6259904a15baa7234946335f |
class DerivedComponent(Component): <NEW_LINE> <INDENT> def __init__(self, data, link, units=None): <NEW_LINE> <INDENT> super(DerivedComponent, self).__init__(data, units=units) <NEW_LINE> self._link = link <NEW_LINE> <DEDENT> def set_parent(self, data): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._link.compute(self._data) <NEW_LINE> <DEDENT> @property <NEW_LINE> def link(self): <NEW_LINE> <INDENT> return self._link <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self._link.compute(self._data, key) | A component which derives its data from a function | 6259904adc8b845886d54989 |
class EasyCheckbutton(Tkinter.Checkbutton): <NEW_LINE> <INDENT> def __init__(self, parent, text, command): <NEW_LINE> <INDENT> self._variable = Tkinter.IntVar() <NEW_LINE> Tkinter.Checkbutton.__init__(self, parent, text = text, variable = self._variable, command = command) <NEW_LINE> <DEDENT> def isChecked(self): <NEW_LINE> <INDENT> return self._variable.get() != 0 | Represents a check button. | 6259904a4e696a045264e807 |
class passwordLengthTest(TestCase): <NEW_LINE> <INDENT> @override_settings(PASSWORD_MIN_LENGTH=1) <NEW_LINE> @override_settings(PASSWORD_MAX_LENGTH=5) <NEW_LINE> def test_validate_password_min_length(self): <NEW_LINE> <INDENT> self.password = '' <NEW_LINE> with self.assertRaisesRegexp(ValidationError,"[u'Invalid Length (must be 1 characters or more)']"): <NEW_LINE> <INDENT> validate_password_length(self.password) <NEW_LINE> <DEDENT> <DEDENT> @override_settings(PASSWORD_MAX_LENGTH=5) <NEW_LINE> @override_settings(PASSWORD_MIN_LENGTH=1) <NEW_LINE> def test_validate_password_max_length(self): <NEW_LINE> <INDENT> self.password = 'test_password' <NEW_LINE> with self.assertRaisesRegexp(ValidationError,"[u'Invalid Length (must be 6 characters or less)']"): <NEW_LINE> <INDENT> validate_password_length(self.password) | test validate password length | 6259904a29b78933be26aaa9 |
class Trigger(object): <NEW_LINE> <INDENT> def __init__(self, trace, template, filters, value, pivot): <NEW_LINE> <INDENT> self.template = template <NEW_LINE> self._filters = filters <NEW_LINE> self._value = value <NEW_LINE> self._pivot = pivot <NEW_LINE> self.trace = trace <NEW_LINE> <DEDENT> def generate(self, pivot_val): <NEW_LINE> <INDENT> trappy_event = getattr(self.trace, self.template.name) <NEW_LINE> data_frame = trappy_event.data_frame <NEW_LINE> data_frame = data_frame[data_frame[self._pivot] == pivot_val] <NEW_LINE> mask = [True for _ in range(len(data_frame))] <NEW_LINE> for key, value in self._filters.items(): <NEW_LINE> <INDENT> if hasattr(value, "__call__"): <NEW_LINE> <INDENT> mask = mask & (data_frame[key].apply(value)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mask = apply_filter_kv(key, value, data_frame, mask) <NEW_LINE> <DEDENT> <DEDENT> data_frame = data_frame[mask] <NEW_LINE> if isinstance(self._value, str): <NEW_LINE> <INDENT> return data_frame[value] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return pd.Series(self._value, index=data_frame.index) | Trigger is an event-value relationship which
accepts a trace object to "generate" qualified data
:param trace: A trappy FTrace object
:type trace: :mod:`trappy.trace.FTrace`
:param template: A trappy Event to act as a trigger
:type template: trappy.Base
:param filters: Key value filter pairs
:type filters: dict
The filter can either have a function:
::
def function_based_filter(elem):
if condition:
return True
else:
return False
or a value/list of values
::
f = {}
f["data_column_a"] = function_based_filter
f["data_column_b"] = value
function_based_filter is anything that behaves like a function,
i.e. a callable.
:param value: Value can be a string or a numeric
:type value: str, int, float
:param pivot: This is the column around which the data will be
pivoted
:type pivot: str | 6259904a45492302aabfd8a1 |
class CourseInfoView(LoginRequiredMixin, View): <NEW_LINE> <INDENT> def get(self, request, course_id): <NEW_LINE> <INDENT> course = Course.objects.get(id=int(course_id)) <NEW_LINE> course.students += 1 <NEW_LINE> course.save() <NEW_LINE> user_cousers = UserCourse.objects.filter(user=request.user, course=course) <NEW_LINE> if not user_cousers: <NEW_LINE> <INDENT> user_couser = UserCourse(user=request.user, course=course) <NEW_LINE> user_couser.save() <NEW_LINE> <DEDENT> user_cousers = UserCourse.objects.filter(course=course) <NEW_LINE> user_ids = [user_couser.user.id for user_couser in user_cousers] <NEW_LINE> all_user_courses = UserCourse.objects.filter(user_id__in=user_ids) <NEW_LINE> course_ids = [user_couser.course.id for user_couser in all_user_courses] <NEW_LINE> relate_courses = Course.objects.filter(id__in=course_ids).order_by("-click_nums")[:3] <NEW_LINE> all_resourses = CourseResource.objects.filter(course=course) <NEW_LINE> return render(request, 'course-video.html', { "course": course, "course_resources": all_resourses, "relate_courses": relate_courses, }) | 章节信息 | 6259904a63b5f9789fe8653b |
class RGBAColor(RGBColor): <NEW_LINE> <INDENT> def __init__(self, colorset): <NEW_LINE> <INDENT> super(RGBAColor, self).__init__(colorset) <NEW_LINE> if not(0 <= float(colorset[3]) <= 1): <NEW_LINE> <INDENT> raise ValueError("an alpha must be in valid range(0 to 1.0), " "passed " + repr(int(colorset[3]))) <NEW_LINE> <DEDENT> self.alpha = float(colorset[3]) <NEW_LINE> <DEDENT> def to_rgb(self): <NEW_LINE> <INDENT> return RGBColor((self.red * self.alpha, self.green * self.alpha, self.blue * self.alpha)) <NEW_LINE> <DEDENT> def to_hex(self): <NEW_LINE> <INDENT> return self.to_rgb().to_hex() | A set of red, green, blue, alpha colorset. | 6259904a30c21e258be99bd4 |
class UMKLKNN(UMKLKNN_): <NEW_LINE> <INDENT> def fit(self, X, y=None): <NEW_LINE> <INDENT> if issubclass(type(X), np.ndarray): <NEW_LINE> <INDENT> X = force_to_column_major_and_double_precision(X) <NEW_LINE> <DEDENT> elif is_sequence(X): <NEW_LINE> <INDENT> X = [ force_to_column_major_and_double_precision(a_kernel) for a_kernel in X ] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> RuntimeError( 'X must be a numpy ndarray or a list of numpy ndarray' ) <NEW_LINE> <DEDENT> UMKLKNN_.fit(self, X) <NEW_LINE> self.kernels_weights = self.beta <NEW_LINE> return self <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> if X is not None: <NEW_LINE> <INDENT> if issubclass(type(X), np.ndarray): <NEW_LINE> <INDENT> X = force_to_column_major_and_double_precision(X) <NEW_LINE> <DEDENT> elif is_sequence(X): <NEW_LINE> <INDENT> X = [ force_to_column_major_and_double_precision(a_kernel) for a_kernel in X ] <NEW_LINE> <DEDENT> <DEDENT> return UMKLKNN_.predict(self, X) | Wrapping UMKLKNN_. | 6259904ab5575c28eb7136b0 |
class Exchange(models.Model, DictMixin): <NEW_LINE> <INDENT> cusotmer = models.ForeignKey(Customer, null=True, on_delete=models.SET_NULL) <NEW_LINE> create_at = models.DateTimeField(auto_now=True) <NEW_LINE> remark = models.CharField(max_length=100, null=True, blank=True) <NEW_LINE> money = models.DecimalField(default=0, max_digits=8, decimal_places=2) <NEW_LINE> state = models.IntegerField(default=0) <NEW_LINE> opt_user = models.CharField(max_length=20) <NEW_LINE> check_user = models.ForeignKey(User, related_name='exch_check_user', null=True, on_delete=models.SET_NULL) <NEW_LINE> check_desc = models.CharField(max_length=100, null=True, blank=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = "退换货" <NEW_LINE> default_permissions = () <NEW_LINE> permissions = ( ('exchange_list', '库存|查看退换货'), ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def search(cls, limit, offset, **condition): <NEW_LINE> <INDENT> q = models.Q() <NEW_LINE> for k, v in condition.items(): <NEW_LINE> <INDENT> q.add(models.Q(**{k: v}), models.Q.AND) <NEW_LINE> <DEDENT> total = cls.objects.filter(q).count() <NEW_LINE> rows = cls.objects.filter(q).order_by("-id")[offset:limit + offset] <NEW_LINE> return { "total": total, "rows": [o.to_dict() for o in rows] } <NEW_LINE> <DEDENT> def create(self, detail): <NEW_LINE> <INDENT> with transaction.atomic(): <NEW_LINE> <INDENT> self.save() <NEW_LINE> if detail is not None: <NEW_LINE> <INDENT> detail.exchange_id = self.id <NEW_LINE> detail.save() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def details(self, exchange_id): <NEW_LINE> <INDENT> return ExchangeDetail.objects.filter(exchange_id=exchange_id) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get(cls, pk): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return cls.objects.get(id=pk) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def edit(self, state): <NEW_LINE> <INDENT> with transaction.atomic(): <NEW_LINE> <INDENT> self.save() <NEW_LINE> if int(self.state) == 1 and int(state) != 1: <NEW_LINE> <INDENT> details = ExchangeDetail.objects.filter(exchange_id=self.id) <NEW_LINE> for detail in details: <NEW_LINE> <INDENT> storage = StorageInfo.objects.get(product_id=detail.product_id) <NEW_LINE> if int(detail.direct) == 0: <NEW_LINE> <INDENT> storage.number += detail.number <NEW_LINE> <DEDENT> if int(detail.direct) == 1: <NEW_LINE> <INDENT> storage.number -= detail.number <NEW_LINE> <DEDENT> storage.save() <NEW_LINE> <DEDENT> <DEDENT> if int(self.state) == 2 and int(state) == 1: <NEW_LINE> <INDENT> details = ExchangeDetail.objects.filter(exchange_id=self.id) <NEW_LINE> for detail in details: <NEW_LINE> <INDENT> storage = StorageInfo.objects.get(product_id=detail.product_id) <NEW_LINE> if int(detail.direct) == 0: <NEW_LINE> <INDENT> storage.number -= detail.number <NEW_LINE> <DEDENT> if int(detail.direct) == 1: <NEW_LINE> <INDENT> storage.number += detail.number <NEW_LINE> <DEDENT> storage.save() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def remove_at(cls, pk): <NEW_LINE> <INDENT> with transaction.atomic(): <NEW_LINE> <INDENT> ExchangeDetail.objects.filter(exchange_id=pk).delete() <NEW_LINE> cls.objects.get(id=pk).delete() | 退换货 | 6259904a6fece00bbacccd86 |
class PaymentRequiredResponse(Response): <NEW_LINE> <INDENT> status_code = status.HTTP_402_PAYMENT_REQUIRED <NEW_LINE> data = 'Payment Required' <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(PaymentRequiredResponse.data, *args, **kwargs) | Payment required response. | 6259904a15baa72349463360 |
class CommandInterface(object): <NEW_LINE> <INDENT> def __init__(self, obj): <NEW_LINE> <INDENT> self._obj = obj <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> raise NotImplementedError | The command interface | 6259904a76d4e153a661dc5e |
class BCNN(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> torch.nn.Module.__init__(self) <NEW_LINE> self.features = torchvision.models.vgg19(pretrained=True).features <NEW_LINE> self.features = torch.nn.Sequential(*list(self.features.children()) [:-1]) <NEW_LINE> self.fc = torch.nn.Linear(512*512, 11) <NEW_LINE> for param in self.features.parameters(): <NEW_LINE> <INDENT> param.requires_grad = False <NEW_LINE> <DEDENT> torch.nn.init.kaiming_normal_(self.fc.weight.data) <NEW_LINE> if self.fc.bias is not None: <NEW_LINE> <INDENT> torch.nn.init.constant_(self.fc.bias.data, val=0) <NEW_LINE> <DEDENT> <DEDENT> def forward(self, X): <NEW_LINE> <INDENT> N = X.size()[0] <NEW_LINE> assert X.size() == (N, 3, 448, 448) <NEW_LINE> X = self.features(X) <NEW_LINE> assert X.size() == (N, 512, 28, 28) <NEW_LINE> X = X.view(N, 512, 28**2) <NEW_LINE> X = torch.bmm(X, torch.transpose(X, 1, 2)) / (28**2) <NEW_LINE> assert X.size() == (N, 512, 512) <NEW_LINE> X = X.view(N, 512**2) <NEW_LINE> X = torch.sign(X)*torch.sqrt(torch.abs(X)+1e-12) <NEW_LINE> X = torch.nn.functional.normalize(X) <NEW_LINE> X = self.fc(X) <NEW_LINE> assert X.size() == (N, 11) <NEW_LINE> return X | B-CNN.
The B-CNN model is illustrated as follows.
conv1^2 (64) -> pool1 -> conv2^2 (128) -> pool2 -> conv3^3 (256) -> pool3
-> conv4^3 (512) -> pool4 -> conv5^3 (512) -> bilinear pooling
-> sqrt-normalize -> L2-normalize -> fc (200).
The network accepts a 3*448*448 input, and the pool5 activation has shape
512*28*28 since we down-sample 5 times.
Attributes:
features, torch.nn.Module: Convolution and pooling layers.
fc, torch.nn.Module: 200. | 6259904a287bf620b6272fb8 |
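The bilinear-pooling step the B-CNN docstring describes (batched outer product of the conv features with themselves, then signed square root and L2 normalisation) can be shown in isolation. A small sketch with random tensors, mirroring the shapes used in `forward`; it illustrates only the pooling step, not the full model:

```python
import torch

N = 2                                    # batch size
X = torch.randn(N, 512, 28, 28)          # pool5-style activations
X = X.view(N, 512, 28 * 28)
X = torch.bmm(X, X.transpose(1, 2)) / (28 * 28)       # bilinear pooling -> (N, 512, 512)
X = X.view(N, 512 * 512)
X = torch.sign(X) * torch.sqrt(torch.abs(X) + 1e-12)  # signed square root
X = torch.nn.functional.normalize(X)                  # L2 normalisation per row
print(X.shape)                                         # torch.Size([2, 262144])
```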
class Settings(models.Model): <NEW_LINE> <INDENT> passcode = models.CharField(max_length=45) <NEW_LINE> theme = models.CharField(max_length=45) <NEW_LINE> user = models.ForeignKey(User) | docstring for Settings | 6259904a711fe17d825e1685 |
class SignupForm(forms.Form): <NEW_LINE> <INDENT> username = forms.CharField( label='아이디', widget=forms.TextInput( attrs={ 'class': 'form-control', } ) ) <NEW_LINE> email = forms.EmailField( label='이메일', widget=forms.TextInput( attrs={ 'class': 'form-control', } ) ) <NEW_LINE> password = forms.CharField( label='비밀번호', widget=forms.PasswordInput( attrs={ 'class': 'form-control', } ), ) <NEW_LINE> password2 = forms.CharField( label='비밀번호 확인', widget=forms.PasswordInput( attrs={ 'class': 'form-control', } ), ) <NEW_LINE> gender = forms.CharField( label='성별', widget=forms.Select( attrs={ 'class': 'form-control', }, choices=User.CHOICE_GENDER, ) ) <NEW_LINE> introduce = forms.CharField( label='소개', widget=forms.Textarea( attrs={ 'class': 'form-control', } ), required=False, ) <NEW_LINE> def clean_username(self): <NEW_LINE> <INDENT> data = self.cleaned_data['username'] <NEW_LINE> if User.objects.filter(username=data).exists(): <NEW_LINE> <INDENT> raise ValidationError('이미 사용중인 아이디입니다') <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> super().clean() <NEW_LINE> password = self.cleaned_data['password'] <NEW_LINE> password2 = self.cleaned_data['password2'] <NEW_LINE> if password != password2: <NEW_LINE> <INDENT> self.add_error('password2', '비밀번호와 비밀번호확인의 값이 일치하지 않습니다') <NEW_LINE> <DEDENT> return self.cleaned_data <NEW_LINE> <DEDENT> def signup(self): <NEW_LINE> <INDENT> fields = [ 'username', 'email', 'password', 'gender', 'introduce', ] <NEW_LINE> create_user_dict = {key: value for key, value in self.cleaned_data.items() if key in fields} <NEW_LINE> user = User.objects.create_user(**create_user_dict) <NEW_LINE> return user | 회원가입을 작성하는 form(회원가입 화면 형태) | 6259904a30c21e258be99bd6 |
class Area(object): <NEW_LINE> <INDENT> __slots__ = 'top', 'right', 'bottom', 'left' <NEW_LINE> def __init__(self, top, right, bottom, left): <NEW_LINE> <INDENT> self.top = top <NEW_LINE> self.right = right <NEW_LINE> self.bottom = bottom <NEW_LINE> self.left = left <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter((self.top, self.right, self.bottom, self.left)) | Area related to diagram item like item margins or padding information. | 6259904a71ff763f4b5e8b75 |
class comparator(object): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> raise AttributeError("No constructor defined") <NEW_LINE> <DEDENT> __repr__ = _swig_repr <NEW_LINE> def make(): <NEW_LINE> <INDENT> return _phyauth_swig.comparator_make() <NEW_LINE> <DEDENT> make = staticmethod(make) <NEW_LINE> __swig_destroy__ = _phyauth_swig.delete_comparator <NEW_LINE> __del__ = lambda self: None | <+description of block+>
Constructor Specific Documentation:
Return a shared_ptr to a new instance of phyauth::comparator.
To avoid accidental use of raw pointers, phyauth::comparator's constructor is in a private implementation class. phyauth::comparator::make is the public interface for creating new instances. | 6259904a16aa5153ce4018bd |
class RNNLockedDropout(Module): <NEW_LINE> <INDENT> def __init__(self, dropout, use_mc_dropout): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> assert 0.0 <= dropout <= 1.0, 'Dropout has to be in range <0.0, 1.0>' <NEW_LINE> self.use_mc_dropout = use_mc_dropout <NEW_LINE> self.dropout = dropout <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> if (self.training or self.use_mc_dropout) and self.dropout != 0: <NEW_LINE> <INDENT> m = x.data.new(x.size(0), 1, x.size(2)).bernoulli_(1.0 - self.dropout) <NEW_LINE> mask = Variable(m, requires_grad=False) <NEW_LINE> if self.dropout != 1.0: <NEW_LINE> <INDENT> mask /= (1.0 - self.dropout) <NEW_LINE> <DEDENT> return mask * x <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return x | We want to use the same dropout mask for all timepoints. Using this layer we will be able to do so. Dropout masks
will be different for different examples within the minibatch but will not change across timesteps. | 6259904a96565a6dacd2d971 |
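The point of the locked dropout above is that one Bernoulli mask is sampled per example and then reused at every timestep. A standalone sketch of that masking step for a (batch, time, features) tensor; it mirrors the class's logic but is not the class itself:

```python
import torch

p = 0.5
x = torch.ones(4, 10, 8)                  # (batch, time, features)
# One mask per example and feature, broadcast over the time dimension, so the
# same units are dropped at every timestep (unlike ordinary dropout).
mask = x.new_empty(x.size(0), 1, x.size(2)).bernoulli_(1 - p) / (1 - p)
y = x * mask
# Every timestep of a given example shares the same mask:
assert torch.equal(y[:, 0, :], y[:, -1, :])
```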
class ChangeHistoryEvent(proto.Message): <NEW_LINE> <INDENT> id = proto.Field( proto.STRING, number=1, ) <NEW_LINE> change_time = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) <NEW_LINE> actor_type = proto.Field( proto.ENUM, number=3, enum="ActorType", ) <NEW_LINE> user_actor_email = proto.Field( proto.STRING, number=4, ) <NEW_LINE> changes_filtered = proto.Field( proto.BOOL, number=5, ) <NEW_LINE> changes = proto.RepeatedField( proto.MESSAGE, number=6, message="ChangeHistoryChange", ) | A set of changes within a Google Analytics account or its
child properties that resulted from the same cause. Common
causes would be updates made in the Google Analytics UI, changes
from customer support, or automatic Google Analytics system
changes.
Attributes:
id (str):
ID of this change history event. This ID is
unique across Google Analytics.
change_time (google.protobuf.timestamp_pb2.Timestamp):
Time when change was made.
actor_type (google.analytics.admin_v1alpha.types.ActorType):
The type of actor that made this change.
user_actor_email (str):
Email address of the Google account that made
the change. This will be a valid email address
if the actor field is set to USER, and empty
otherwise. Google accounts that have been
deleted will cause an error.
changes_filtered (bool):
If true, then the list of changes returned
was filtered, and does not represent all changes
that occurred in this event.
changes (Sequence[google.analytics.admin_v1alpha.types.ChangeHistoryChange]):
A list of changes made in this change history
event that fit the filters specified in
SearchChangeHistoryEventsRequest. | 6259904a8a349b6b4368761d |
class BasicPostTestsMixin(BasicTestsMixin): <NEW_LINE> <INDENT> basic_post_fixtures = [] <NEW_LINE> basic_post_use_admin = False <NEW_LINE> def setup_basic_post_test(self, user, with_local_site, local_site_name, post_valid_data): <NEW_LINE> <INDENT> raise NotImplementedError("%s doesn't implement setup_basic_post_test" % self.__class__.__name__) <NEW_LINE> <DEDENT> def check_post_result(self, user, rsp, *args): <NEW_LINE> <INDENT> raise NotImplementedError("%s doesn't implement check_post_result" % self.__class__.__name__) <NEW_LINE> <DEDENT> @test_template <NEW_LINE> def test_post(self): <NEW_LINE> <INDENT> self.load_fixtures(self.basic_post_fixtures) <NEW_LINE> self._login_user(admin=self.basic_post_use_admin) <NEW_LINE> url, mimetype, post_data, cb_args = self.setup_basic_post_test(self.user, False, None, True) <NEW_LINE> self.assertFalse(url.startswith('/s/' + self.local_site_name)) <NEW_LINE> rsp = self.apiPost(url, post_data, expected_mimetype=mimetype) <NEW_LINE> self._close_file_handles(post_data) <NEW_LINE> self.assertEqual(rsp['stat'], 'ok') <NEW_LINE> self.check_post_result(self.user, rsp, *cb_args) | Mixin to add basic HTTP POST unit tests.
The subclass must implement ``setup_basic_post_test`` and
``check_post_result``.
It may also set ``basic_post_fixtures`` to a list of additional
fixture names to import, and ``basic_post_use_admin`` to ``True``
if it wants to run the test as an administrator user. | 6259904a30dc7b76659a0c04 |
class TestFindIt(unittest.TestCase): <NEW_LINE> <INDENT> def test_find_it(self): <NEW_LINE> <INDENT> self.assertEqual(find_outlier([2, 4, 6, 8, 10, 3]), 3) <NEW_LINE> self.assertEqual(find_outlier([2, 4, 0, 100, 4, 11, 2602, 36]), 11) <NEW_LINE> self.assertEqual(find_outlier([160, 3, 1719, 19, 11, 13, -21]), 160) | Class to test 'find_it' function | 6259904a82261d6c527308ae |
class Page13(ToughSchedulingCasesPage): <NEW_LINE> <INDENT> def __init__(self, page_set): <NEW_LINE> <INDENT> super(Page13, self).__init__( url='file://tough_scheduling_cases/raf.html?medium_handler', page_set=page_set) <NEW_LINE> self.synthetic_delays = { 'cc.RasterRequiredForActivation': {'target_duration': 0.004}, 'cc.BeginMainFrame': {'target_duration': 0.004}, 'gpu.AsyncTexImage': {'target_duration': 0.004} } | Why: Test a moderately heavy requestAnimationFrame handler | 6259904a0fa83653e46f62af |
class ConvertDatatypeDict(dict): <NEW_LINE> <INDENT> decs = [] <NEW_LINE> hexs = [] <NEW_LINE> def convert_datatypes(self): <NEW_LINE> <INDENT> self._convert2int(self.decs, 10) <NEW_LINE> self._convert2int(self.hexs, 16) <NEW_LINE> <DEDENT> def _convert2int(self, keys, base=10, err_val=-1): <NEW_LINE> <INDENT> for k in keys: <NEW_LINE> <INDENT> if self.get(k): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self[k] = int(self[k], base) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self[k] = err_val | A dict that converts values for certain keys.
The keys for which the corresponding value should be converted can be
specified via the attributes self.decs and self.hexs.
The conversion must be done explicitly via convert_datatypes(self)
self.decs: A list of keys for which the values are interpreted as
*decimal* integer literals.
self.hexs: A list of keys for which the values are interpreted as
*hexadecimal* integer literals.
The value -1 represents an unknown/invalid value for every key in self.decs
and self.hexs. | 6259904a287bf620b6272fba |
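A short usage sketch for ConvertDatatypeDict; the subclass name and keys are invented for illustration:

```python
class PciIdsDict(ConvertDatatypeDict):
    decs = ["count"]         # values parsed as base-10 integers
    hexs = ["vendor_id"]     # values parsed as base-16 integers

d = PciIdsDict(count="42", vendor_id="8086", name="eth0")
d.convert_datatypes()
# d == {"count": 42, "vendor_id": 32902, "name": "eth0"}
# A value that cannot be parsed is replaced by the err_val of -1.
```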
class RefCountedPointer(ABC): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(RefCountedPointer, self).__init__() <NEW_LINE> self._ptr = ffi.new(self.cdecl+'*') <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> if self.ptr: <NEW_LINE> <INDENT> self.unref(self.ptr) <NEW_LINE> <DEDENT> <DEDENT> def __copy__(self): <NEW_LINE> <INDENT> p = self.__class__() <NEW_LINE> p.ptr = self.ptr <NEW_LINE> return p <NEW_LINE> <DEDENT> @property <NEW_LINE> @abstractmethod <NEW_LINE> def cdecl(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> def ptr(self): <NEW_LINE> <INDENT> return self._ptr[0] <NEW_LINE> <DEDENT> @ptr.setter <NEW_LINE> def ptr(self, value): <NEW_LINE> <INDENT> if (self.ptr): <NEW_LINE> <INDENT> self.unref(self.ptr) <NEW_LINE> <DEDENT> self.ref(value) <NEW_LINE> self._ptr[0] = value <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def ref(cls, ptr): <NEW_LINE> <INDENT> casted = ffi.cast('signal_type_base *', ptr) <NEW_LINE> lib.signal_type_ref(casted) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def unref(cls, ptr): <NEW_LINE> <INDENT> casted = ffi.cast('signal_type_base *', ptr) <NEW_LINE> lib.signal_type_unref(casted) | Internal wrapper class for signal-protocol-c pointers.
signal-protocol-c uses its own memory management system based on an
internal ref count mechanism. It provides the following macros for that:
* SIGNAL_REF
* SIGNAL UNREF
As we can't call macros directly, we use the underlying functions:
* signal_type_ref()
* signal_type_unref()
This class is meant to inherit from. The following structs use the
ref count system:
* sender_key: sender_message_key, sender_chain_key
* device_consistency: device_consistency_signature,
device_consistency_commitment,
device_consistency_message
* session_pre_key: session_pre_key, session_signed_pre_key,
session_pre_key_bundle
* rachtet: ratchet_chain_key, ratchet_root_key, ratchet_identity_key_pair
symmetric_signal_protocol_parameters,
alice_signal_protocol_parameters, bob_signal_protocol_parameters
* session_record
* sender_key_state
* protocol: signal_message, pre_key_signal_message, sender_key_message,
sender_key_distribution_message
* curve: ec_public_key, ec_private_key, ec_key_pair
* session_state
* hkdf
* sender_key_record
* fingerprint: fingerprint, displayable_fingerprint, scannable_fingerprint
Attributes
----------
cdecl : str
type of of your pointer | 6259904a07f4c71912bb0804 |
class SplashMeshNode(SplashBaseNode): <NEW_LINE> <INDENT> bl_idname = 'SplashMeshNodeType' <NEW_LINE> bl_label = 'Mesh' <NEW_LINE> sp_acceptedLinks = [] <NEW_LINE> def update_mesh_type(self, context): <NEW_LINE> <INDENT> if self.sp_meshTypeProperty == "mesh_shmdata": <NEW_LINE> <INDENT> self.inputs['File'].enabled = True <NEW_LINE> self.inputs['Object'].enabled = False <NEW_LINE> <DEDENT> <DEDENT> sp_meshTypes = [ ("mesh", "OBJ file", "Mesh from OBJ file"), ("mesh_shmdata", "Shared memory", "Mesh from shared memory") ] <NEW_LINE> sp_meshTypeProperty = bpy.props.EnumProperty(name="Type", description="Mesh source type", items=sp_meshTypes, default="mesh", update=update_mesh_type) <NEW_LINE> def draw_buttons(self, context, layout): <NEW_LINE> <INDENT> row = layout.row() <NEW_LINE> layout.prop(self, "name") <NEW_LINE> row = layout.row() <NEW_LINE> row.prop(self, "sp_meshTypeProperty") <NEW_LINE> row = layout.row() <NEW_LINE> operator = row.operator("splash.select_file_path", text="Select file path") <NEW_LINE> operator.node_name = self.name <NEW_LINE> if self.sp_meshTypeProperty == "mesh": <NEW_LINE> <INDENT> row = layout.row() <NEW_LINE> operator = row.operator("splash.select_object", text="Select the active Object") <NEW_LINE> operator.node_name = self.name <NEW_LINE> <DEDENT> <DEDENT> def init(self, context): <NEW_LINE> <INDENT> self.inputs.new('NodeSocketString', 'File').default_value = "" <NEW_LINE> self.inputs.new('NodeSocketString', 'Object').default_value = "" <NEW_LINE> self.inputs['Object'].enabled = False <NEW_LINE> self.outputs.new('SplashLinkSocket', "Output link") <NEW_LINE> <DEDENT> def exportProperties(self, exportPath): <NEW_LINE> <INDENT> values = {} <NEW_LINE> values['type'] = "\"" + self.sp_meshTypeProperty + "\"" <NEW_LINE> if self.inputs['Object'].enabled: <NEW_LINE> <INDENT> import os <NEW_LINE> if bpy.context.edit_object is not None: <NEW_LINE> <INDENT> editedObject = context.edit_object.name <NEW_LINE> bpy.ops.object.editmode_toggle() <NEW_LINE> <DEDENT> objectName = self.inputs['Object'].default_value <NEW_LINE> bpy.ops.object.select_all(action='DESELECT') <NEW_LINE> bpy.data.objects[objectName].select = True <NEW_LINE> path = os.path.dirname(exportPath) + "/splash_" + objectName + ".obj" <NEW_LINE> bpy.ops.export_scene.obj(filepath=path, check_existing=False, use_selection=True, use_mesh_modifiers=True, use_materials=False, use_uvs=True, axis_forward='Y', axis_up='Z') <NEW_LINE> values['file'] = "\"" + path + "\"" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> values['file'] = "\"" + self.inputs['File'].default_value + "\"" <NEW_LINE> <DEDENT> return values <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> pass | Splash Mesh node | 6259904a45492302aabfd8a4 |
class MapBytes16TimestampUuid(_types._Bytes16TimestampKeysMixin, _types._UuidValuesMixin, PersistentMap): <NEW_LINE> <INDENT> def __init__(self, slot=None, compress=None): <NEW_LINE> <INDENT> PersistentMap.__init__(self, slot=slot, compress=compress) | Persistent map with (Bytes20, Timestamp) keys and UUID values. | 6259904ad53ae8145f919831 |
class x_cpmd_section_scf(MSection): <NEW_LINE> <INDENT> m_def = Section(validate=False, a_legacy=LegacyDefinition(name='x_cpmd_section_scf')) <NEW_LINE> x_cpmd_section_scf_iteration = SubSection( sub_section=SectionProxy('x_cpmd_section_scf_iteration'), repeats=True, a_legacy=LegacyDefinition(name='x_cpmd_section_scf_iteration')) | Contains information about self-consistent field calculation | 6259904ad53ae8145f919832 |
class AnotherWindow(QWidget): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> layout = QVBoxLayout() <NEW_LINE> self.label = QLabel("Another Window") <NEW_LINE> layout.addWidget(self.label) <NEW_LINE> self.setLayout(layout) | This "window" is a QWidget. If it has no parent, it
will appear as a free-floating window. | 6259904a0a366e3fb87dddb7 |
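A minimal way to show the AnotherWindow widget on its own, assuming the PyQt5 imports the row's code relies on:

```python
import sys
from PyQt5.QtWidgets import QApplication

app = QApplication(sys.argv)
window = AnotherWindow()
window.show()        # no parent, so it appears as a free-floating window
sys.exit(app.exec_())
```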
class SavedQueryList(APIView): <NEW_LINE> <INDENT> permission_classes = (IsAuthenticated,) <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> saved_query_list = saved_query_api.get_all() <NEW_LINE> user_id = self.request.query_params.get("user_id", None) <NEW_LINE> if user_id is not None: <NEW_LINE> <INDENT> saved_query_list = saved_query_list.filter(user_id=user_id) <NEW_LINE> <DEDENT> template_id = self.request.query_params.get("template_id", None) <NEW_LINE> if template_id is not None: <NEW_LINE> <INDENT> saved_query_list = saved_query_list.filter(template=str(template_id)) <NEW_LINE> <DEDENT> return_value = SavedQuerySerializer(saved_query_list, many=True) <NEW_LINE> return Response(return_value.data) <NEW_LINE> <DEDENT> except Exception as api_exception: <NEW_LINE> <INDENT> content = {"message": str(api_exception)} <NEW_LINE> return Response(content, status=status.HTTP_500_INTERNAL_SERVER_ERROR) | Get all SavedQuery | 6259904a23e79379d538d8d0 |
class DwmVersionResponse(DwmResponse): <NEW_LINE> <INDENT> def __init__(self, message: bytes): <NEW_LINE> <INDENT> super().__init__(message) <NEW_LINE> <DEDENT> def get_firmware_version(self) -> str: <NEW_LINE> <INDENT> return '{}.{}.{}.{}'.format(self.message[5], self.message[6], self.message[7], self.message[8] & 0x0F) <NEW_LINE> <DEDENT> def get_configuration_version(self) -> str: <NEW_LINE> <INDENT> return self.int_to_hex_string(self.int32(11, False)) <NEW_LINE> <DEDENT> def get_hardware_version(self) -> str: <NEW_LINE> <INDENT> return self.int_to_hex_string(self.int32(17, False)) | Returned by a dwm_ver_get request | 6259904a63b5f9789fe8653e |
class PrivateDataset(BaseDataset): <NEW_LINE> <INDENT> group_column = 'Device' <NEW_LINE> def __init__(self, root_dir='.', transform=None, data=None, split=None): <NEW_LINE> <INDENT> super().__init__(root_dir, transform, data, split) <NEW_LINE> self.signals = pd.read_pickle(path.join(datasets_dir(self.root_dir), self.name(), 'private_dataset.pkl')) <NEW_LINE> <DEDENT> def init(self): <NEW_LINE> <INDENT> if self.features_exist(): <NEW_LINE> <INDENT> self.data = pd.read_pickle(path.join(data_dir(self.root_dir), f'{self.name()}Features.pkl')) <NEW_LINE> return <NEW_LINE> <DEDENT> if self.dataset_exists(): <NEW_LINE> <INDENT> self.data = pd.read_pickle(path.join(data_dir(self.root_dir), f'{self.name()}.pkl')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.data = pd.read_pickle( path.join(datasets_dir(self.root_dir), self.name(), 'private_dataset.pkl') ).drop(columns=['Signal']) <NEW_LINE> self.data.columns = ['Device', 'Label', 'Record'] <NEW_LINE> self.data['Fs'] = 200 <NEW_LINE> <DEDENT> <DEDENT> def read_record(self, record): <NEW_LINE> <INDENT> row = self.signals[self.signals['Path'] == record].iloc[0] <NEW_LINE> return np.array(row.Signal[:200 * 60], dtype=np.float32) | The private ECG dataset. | 6259904a0a50d4780f7067a6 |
class ClassRegistry(object): <NEW_LINE> <INDENT> class RegistryError(Exception): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class AlreadyRegistered(RegistryError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class NotRegistered(RegistryError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> self.registry = {} <NEW_LINE> <DEDENT> def get_key_from_class(self, cls): <NEW_LINE> <INDENT> return "%s.%s" % (cls.__module__.split('.')[-2], cls.__name__) <NEW_LINE> <DEDENT> def register(self, cls): <NEW_LINE> <INDENT> key = self.get_key_from_class(cls) <NEW_LINE> if key in self.registry: <NEW_LINE> <INDENT> raise self.AlreadyRegistered(key) <NEW_LINE> <DEDENT> self.registry[key] = cls <NEW_LINE> <DEDENT> def unregister(self, cls): <NEW_LINE> <INDENT> key = self.get_key_from_class(cls) <NEW_LINE> if key not in self.registry: <NEW_LINE> <INDENT> raise self.NotRegistered(key) <NEW_LINE> <DEDENT> self.registry.pop(key) <NEW_LINE> <DEDENT> def as_choices(self): <NEW_LINE> <INDENT> return sorted( ( (k, self.get_item_label(k, v)) for (k, v) in self.registry.items() ), key=lambda t: t[1] ) <NEW_LINE> <DEDENT> def get(self, key): <NEW_LINE> <INDENT> if key not in self.registry: <NEW_LINE> <INDENT> raise self.NotRegistered(key) <NEW_LINE> <DEDENT> return self.registry[key] <NEW_LINE> <DEDENT> def get_item_label(self, key, value): <NEW_LINE> <INDENT> return getattr(value, 'verbose_name', key) <NEW_LINE> <DEDENT> def configure_item(self, item_class, model_instance, config): <NEW_LINE> <INDENT> return item_class(model_instance, config) | Collects classes and allows to list them. | 6259904a8e71fb1e983bce97 |
class ResourceHealthMetadataCollection(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'value': {'required': True}, 'next_link': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': '[ResourceHealthMetadata]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: List["ResourceHealthMetadata"], **kwargs ): <NEW_LINE> <INDENT> super(ResourceHealthMetadataCollection, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = None | Collection of resource health metadata.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar value: Required. Collection of resources.
:vartype value: list[~azure.mgmt.web.v2020_09_01.models.ResourceHealthMetadata]
:ivar next_link: Link to next page of resources.
:vartype next_link: str | 6259904ab5575c28eb7136b2 |
class ViewSet(ApiGenericMixin, viewsets.ViewSet): <NEW_LINE> <INDENT> pass | Customize DRF's default ViewSet class as needed | 6259904a498bea3a75a58ef1 |
class InvalidScopeName(CException): <NEW_LINE> <INDENT> pass | Invalid scope name.
This exception might be thrown when a request was made to set a breakpoint
to an unknown scope. | 6259904a462c4b4f79dbcdd2 |
class SensorMode(AbstractMode): <NEW_LINE> <INDENT> def __init__(self, parent, updateMs, label): <NEW_LINE> <INDENT> super(SensorMode, self).__init__(parent, updateMs) <NEW_LINE> self.label = label <NEW_LINE> self.previousState = None <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> waterDetected = water_detected() <NEW_LINE> if waterDetected or keyboard.is_pressed('p'): <NEW_LINE> <INDENT> startValve = WaterDetected(self, self.updateMs, self.maxOpenSeconds, self.label) <NEW_LINE> startValve.activate() <NEW_LINE> if self.previousState is None or not self.previousState: <NEW_LINE> <INDENT> self.parent.open_valve() <NEW_LINE> self.label.setText("Currently Running") <NEW_LINE> <DEDENT> self.previousState = True <NEW_LINE> <DEDENT> elif not waterDetected: <NEW_LINE> <INDENT> if self.previousState is None or self.previousState: <NEW_LINE> <INDENT> self.parent.close_valve() <NEW_LINE> self.label.setText("No Water Detected") <NEW_LINE> <DEDENT> self.previousState = False | Object representing the sensor mode of the UI.
Uses the water sensor for opening/closing the valve. | 6259904ad10714528d69f077 |
class TestWigner(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.frequency = -2017.96 <NEW_LINE> self.tunneling = Wigner( frequency = (self.frequency,"cm^-1"), ) <NEW_LINE> <DEDENT> def test_frequency(self): <NEW_LINE> <INDENT> self.assertAlmostEqual(self.tunneling.frequency.value_si, self.frequency, 4) <NEW_LINE> <DEDENT> def test_calculateTunnelingFactor(self): <NEW_LINE> <INDENT> Tlist = numpy.array([300,500,1000,1500,2000]) <NEW_LINE> kexplist = numpy.array([4.90263, 2.40495, 1.35124, 1.15611, 1.08781]) <NEW_LINE> for T, kexp in zip(Tlist, kexplist): <NEW_LINE> <INDENT> kact = self.tunneling.calculateTunnelingFactor(T) <NEW_LINE> self.assertAlmostEqual(kexp, kact, 4) <NEW_LINE> <DEDENT> <DEDENT> def test_pickle(self): <NEW_LINE> <INDENT> import cPickle <NEW_LINE> tunneling = cPickle.loads(cPickle.dumps(self.tunneling)) <NEW_LINE> self.assertAlmostEqual(self.tunneling.frequency.value, tunneling.frequency.value, 2) <NEW_LINE> self.assertEqual(self.tunneling.frequency.units, tunneling.frequency.units) <NEW_LINE> <DEDENT> def test_repr(self): <NEW_LINE> <INDENT> tunneling = None <NEW_LINE> exec('tunneling = {0!r}'.format(self.tunneling)) <NEW_LINE> self.assertAlmostEqual(self.tunneling.frequency.value, tunneling.frequency.value, 2) <NEW_LINE> self.assertEqual(self.tunneling.frequency.units, tunneling.frequency.units) | Contains unit tests of the :class:`Wigner` class. | 6259904aa8ecb033258725e4 |
class _Parser(object): <NEW_LINE> <INDENT> def __init__(self, lexer, handler): <NEW_LINE> <INDENT> self.lexer = lexer <NEW_LINE> self.handler = handler <NEW_LINE> <DEDENT> def parse(self): <NEW_LINE> <INDENT> result = None <NEW_LINE> if self.lexer.peek_token(tokens.LParen): <NEW_LINE> <INDENT> result = self.parse_node() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = self.parse_leaf() <NEW_LINE> <DEDENT> remaining = self.lexer.remaining() <NEW_LINE> if remaining != '' and not self.lexer.peek_token(tokens.SemiColon): <NEW_LINE> <INDENT> raise ParserError("Unexpected token following tree: " + self.lexer.remaining()) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def parse_node(self): <NEW_LINE> <INDENT> self.lexer.read_token(tokens.LParen) <NEW_LINE> self.handler.new_tree_begin() <NEW_LINE> self.parse_edge_list() <NEW_LINE> self.handler.new_tree_end() <NEW_LINE> self.lexer.read_token(tokens.RParen) <NEW_LINE> <DEDENT> def parse_leaf(self): <NEW_LINE> <INDENT> if self.lexer.peek_token(tokens.Comma) or self.lexer.peek_token(tokens.RParen): <NEW_LINE> <INDENT> self.handler.new_leaf("") <NEW_LINE> return <NEW_LINE> <DEDENT> if self.lexer.peek_token(tokens.Number): <NEW_LINE> <INDENT> identifier = str(int(self.lexer.read_token(tokens.Number).get_number())) <NEW_LINE> self.handler.new_leaf(identifier) <NEW_LINE> return <NEW_LINE> <DEDENT> identifier = self.lexer.read_token(tokens.ID).get_name() <NEW_LINE> if identifier == '_': <NEW_LINE> <INDENT> self.handler.new_leaf('') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if '_' in identifier and ' ' not in identifier: <NEW_LINE> <INDENT> identifier = identifier.replace('_', ' ') <NEW_LINE> <DEDENT> self.handler.new_leaf(identifier) <NEW_LINE> <DEDENT> <DEDENT> def parse_edge_list(self): <NEW_LINE> <INDENT> while 1: <NEW_LINE> <INDENT> self.parse_edge() <NEW_LINE> if self.lexer.peek_token(tokens.Comma): <NEW_LINE> <INDENT> self.lexer.read_token(tokens.Comma) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def parse_edge(self): <NEW_LINE> <INDENT> if self.lexer.peek_token(tokens.LParen): <NEW_LINE> <INDENT> self.parse_node() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.parse_leaf() <NEW_LINE> <DEDENT> if self.lexer.peek_token(tokens.Number): <NEW_LINE> <INDENT> bootstrap = self.lexer.read_token(tokens.Number).get_number() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bootstrap = None <NEW_LINE> <DEDENT> if self.lexer.peek_token(tokens.Colon): <NEW_LINE> <INDENT> self.lexer.read_token(tokens.Colon) <NEW_LINE> length = self.lexer.read_token(tokens.Number).get_number() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> length = None <NEW_LINE> <DEDENT> self.handler.new_edge(bootstrap,length) | State of the parser during parsing. Should not be used
directly by users of this package. | 6259904a50485f2cf55dc35c |
class Stdout(Report): <NEW_LINE> <INDENT> def send_message(self, report_text): <NEW_LINE> <INDENT> report_text = "Druptest {0}".format(report_text) <NEW_LINE> print(report_text) | Print plugin to screen. | 6259904ad99f1b3c44d06a6e |
class ModeratorProfile(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(User, related_name='moderator_profile', primary_key=True) <NEW_LINE> language = models.CharField(max_length=6, blank=True, help_text="The language which this contact prefers to communicate " "in, as a W3C language tag. If this field is left blank, defaults " "to: " + settings.LANGUAGE_CODE, default=settings.LANGUAGE_CODE,) <NEW_LINE> facility = models.ForeignKey('user_registration.Facility', blank=True, null=True) <NEW_LINE> node = models.ForeignKey('user_registration.HierarchyNode', blank=True, null=True) <NEW_LINE> def get_home_url(self): <NEW_LINE> <INDENT> if self.facility and self.facility.pk != None: <NEW_LINE> <INDENT> return reverse('moderation.views.facility', args=[self.facility.pk]) <NEW_LINE> <DEDENT> elif self.node and self.node.pk != None: <NEW_LINE> <INDENT> return reverse('moderation.views.node', args=[self.node.pk]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None | A profile for a user that uses the moderation interface. | 6259904ab57a9660fecd2e4f |
class SubgraphIndex(object): <NEW_LINE> <INDENT> def __init__(self, graph, parent, induced_nodes, induced_edges): <NEW_LINE> <INDENT> self.graph = graph <NEW_LINE> self.parent = parent <NEW_LINE> self.induced_nodes = induced_nodes <NEW_LINE> self.induced_edges = induced_edges <NEW_LINE> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> raise NotImplementedError( "SubgraphIndex pickling is not supported yet.") <NEW_LINE> <DEDENT> def __setstate__(self, state): <NEW_LINE> <INDENT> raise NotImplementedError( "SubgraphIndex unpickling is not supported yet.") | Internal subgraph data structure.
Parameters
----------
graph : GraphIndex
The graph structure of this subgraph.
parent : GraphIndex
The parent graph index.
induced_nodes : utils.Index
The parent node ids in this subgraph.
induced_edges : utils.Index
The parent edge ids in this subgraph. | 6259904a287bf620b6272fbc |
class StyleError(Error): <NEW_LINE> <INDENT> def __init__(self, line_nums, lines, message): <NEW_LINE> <INDENT> super().__init__(message) <NEW_LINE> self.line_nums = line_nums <NEW_LINE> self.lines = lines <NEW_LINE> self.message = message <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> error_lines = '' <NEW_LINE> for index, line in enumerate(self.lines): <NEW_LINE> <INDENT> error_lines += '{}: {}\n'.format(self.line_nums[index], line) <NEW_LINE> <DEDENT> return MESSAGE_TEMPLATE.format(self.message, error_lines) | Exception raised when a Style rule is broken.
Attributes:
line_nums: the line numbers the rule as broken on
lines: lines where the style rule was broken
message: explanation of why error was thrown | 6259904a23e79379d538d8d1 |
class AssertionBuilder(DynamicMixin, ExceptionMixin, SnapshotMixin, ExtractingMixin, FileMixin, DateMixin, DictMixin, CollectionMixin, StringMixin, NumericMixin, ContainsMixin, HelpersMixin, BaseMixin, object): <NEW_LINE> <INDENT> def __init__(self, val, description='', kind=None, expected=None, logger=None): <NEW_LINE> <INDENT> self.val = val <NEW_LINE> self.description = description <NEW_LINE> self.kind = kind <NEW_LINE> self.expected = expected <NEW_LINE> self.logger = logger if logger else _default_logger <NEW_LINE> <DEDENT> def _builder(self, val, description='', kind=None, expected=None, logger=None): <NEW_LINE> <INDENT> return builder(val, description, kind, expected, logger) <NEW_LINE> <DEDENT> def _err(self, msg): <NEW_LINE> <INDENT> out = '%s%s' % ('[%s] ' % self.description if len(self.description) > 0 else '', msg) <NEW_LINE> if self.kind == 'warn': <NEW_LINE> <INDENT> self.logger.warning(out) <NEW_LINE> return self <NEW_LINE> <DEDENT> elif self.kind == 'soft': <NEW_LINE> <INDENT> global _soft_err <NEW_LINE> _soft_err.append(out) <NEW_LINE> return self <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AssertionError(out) | Assertion builder. | 6259904ab57a9660fecd2e50 |
class NotFoundError(Exception): <NEW_LINE> <INDENT> pass | Raised when Gitiles gives a HTTP 404 error. | 6259904a3617ad0b5ee07511 |
class Insights(pulumi.CustomResource): <NEW_LINE> <INDENT> def __init__(__self__, __name__, __opts__=None, application_type=None, location=None, name=None, resource_group_name=None, tags=None): <NEW_LINE> <INDENT> if not __name__: <NEW_LINE> <INDENT> raise TypeError('Missing resource name argument (for URN creation)') <NEW_LINE> <DEDENT> if not isinstance(__name__, str): <NEW_LINE> <INDENT> raise TypeError('Expected resource name to be a string') <NEW_LINE> <DEDENT> if __opts__ and not isinstance(__opts__, pulumi.ResourceOptions): <NEW_LINE> <INDENT> raise TypeError('Expected resource options to be a ResourceOptions instance') <NEW_LINE> <DEDENT> __props__ = dict() <NEW_LINE> if not application_type: <NEW_LINE> <INDENT> raise TypeError('Missing required property application_type') <NEW_LINE> <DEDENT> __props__['application_type'] = application_type <NEW_LINE> if not location: <NEW_LINE> <INDENT> raise TypeError('Missing required property location') <NEW_LINE> <DEDENT> __props__['location'] = location <NEW_LINE> __props__['name'] = name <NEW_LINE> if not resource_group_name: <NEW_LINE> <INDENT> raise TypeError('Missing required property resource_group_name') <NEW_LINE> <DEDENT> __props__['resource_group_name'] = resource_group_name <NEW_LINE> __props__['tags'] = tags <NEW_LINE> __props__['app_id'] = None <NEW_LINE> __props__['instrumentation_key'] = None <NEW_LINE> super(Insights, __self__).__init__( 'azure:appinsights/insights:Insights', __name__, __props__, __opts__) <NEW_LINE> <DEDENT> def translate_output_property(self, prop): <NEW_LINE> <INDENT> return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop <NEW_LINE> <DEDENT> def translate_input_property(self, prop): <NEW_LINE> <INDENT> return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop | Manage an Application Insights component. | 6259904a8e71fb1e983bce98 |
class NoRootSOA(exception.DNSException): <NEW_LINE> <INDENT> pass | There is no SOA RR at the DNS root name. This should never happen! | 6259904a15baa72349463365 |
class DescribeSecurityGroupResourceResponseSchema(schema.ResponseSchema): <NEW_LINE> <INDENT> fields = { "Infos": fields.List( models.SGResourceInfoSchema(), required=True, load_from="Infos" ), "Message": fields.Str(required=True, load_from="Message"), "TotalCount": fields.Int(required=True, load_from="TotalCount"), } | DescribeSecurityGroupResource - 查询安全组绑定的资源信息 | 6259904a7cff6e4e811b6e0d |
class ValueFunction(nn.Module): <NEW_LINE> <INDENT> def __init__(self, params): <NEW_LINE> <INDENT> super(ValueFunction, self).__init__() <NEW_LINE> self.bn1 = nn.BatchNorm1d(params['l1'][0]) <NEW_LINE> self.fc1 = nn.Linear(params['l1'][0], params['l1'][1]) <NEW_LINE> nn.init.xavier_uniform_(self.fc1.weight) <NEW_LINE> self.fc2 = nn.Linear(params['l2'][0], params['l2'][1]) <NEW_LINE> nn.init.xavier_uniform_(self.fc2.weight) <NEW_LINE> self.fc3 = nn.Linear(params['l3'][0], params['l3'][1]) <NEW_LINE> nn.init.xavier_uniform_(self.fc3.weight) <NEW_LINE> self.fc4 = nn.Linear(params['l4'][0], params['l4'][1]) <NEW_LINE> nn.init.xavier_uniform_(self.fc4.weight) <NEW_LINE> self.V = nn.Linear(params['l5'][0], 1) <NEW_LINE> nn.init.xavier_uniform_(self.V.weight) <NEW_LINE> self.relu = nn.ReLU() <NEW_LINE> <DEDENT> def forward(self, state): <NEW_LINE> <INDENT> v_value = self.relu(self.fc1(state)) <NEW_LINE> v_value = self.relu(self.fc2(v_value)) <NEW_LINE> v_value = self.relu(self.fc3(v_value)) <NEW_LINE> v_value = self.relu(self.fc4(v_value)) <NEW_LINE> if torch.isnan(v_value).any(): <NEW_LINE> <INDENT> print('Nan') <NEW_LINE> print(self.fc1.weight.grad) <NEW_LINE> print(self.fc2.weight.grad) <NEW_LINE> print(self.fc3.weight.grad) <NEW_LINE> print(self.fc4.weight.grad) <NEW_LINE> <DEDENT> v_value = self.V(v_value) <NEW_LINE> return v_value | Value Function (Critic) Model. | 6259904a0a50d4780f7067a7 |
class TagReferenceSchema(YetiSchema): <NEW_LINE> <INDENT> name = fields.String(required=True) <NEW_LINE> expiration = fields.DateTime(required=True) <NEW_LINE> fresh = fields.Boolean() <NEW_LINE> first_seen = fields.DateTime(default=datetime.utcnow) <NEW_LINE> last_seen = fields.DateTime(default=datetime.utcnow) <NEW_LINE> @post_load <NEW_LINE> def load_tag(self, data): <NEW_LINE> <INDENT> return TagReference(**data) | (De)serialization marshmallow.Schema for Tag objects. | 6259904a8a349b6b43687621 |
class PortChainShow(extension.ClientExtensionShow, PortChain): <NEW_LINE> <INDENT> shell_command = 'port-chain-show' | Show information of a given Port Chain. | 6259904a3eb6a72ae038ba2f |
class MinibatchData(MinibatchBase): <NEW_LINE> <INDENT> def __init__(self, array, minibatch, shuffle_index=None): <NEW_LINE> <INDENT> xp = get_array_module(array) <NEW_LINE> assertion.assert_shapes('array', array, 'shuffle_index', shuffle_index, axes=[0]) <NEW_LINE> if shuffle_index is not None: <NEW_LINE> <INDENT> array = array[shuffle_index] <NEW_LINE> self.restore_index = xp.arange(len(array))[shuffle_index] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> array = array <NEW_LINE> self.restore_index = xp.arange(len(array)) <NEW_LINE> <DEDENT> super(MinibatchData, self).__init__(array, minibatch) <NEW_LINE> <DEDENT> @property <NEW_LINE> def array(self): <NEW_LINE> <INDENT> index = self.restore_index.argsort() <NEW_LINE> return self._array[index] <NEW_LINE> <DEDENT> def shuffle(self, shuffle_index): <NEW_LINE> <INDENT> assertion.assert_shapes('array', self._array, 'shuffle_index', shuffle_index, axes=[0]) <NEW_LINE> self._array = self._array[shuffle_index] <NEW_LINE> self.restore_index = self.restore_index[shuffle_index] | Data for minibatching. | 6259904aec188e330fdf9c71 |
class Message_Get(GetWithIDandSlot): <NEW_LINE> <INDENT> no = 18 | The Message_Get packet consists of:
* A UInt32, board to get the message from.
* a list of,
* A SInt32, message slots | 6259904ae64d504609df9dba |
class InputError(Exception): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> self.message = message | Exception raised for errors in the input.
| 6259904a23e79379d538d8d3 |
class MP4(FileType): <NEW_LINE> <INDENT> MP4Tags = MP4Tags <NEW_LINE> _mimes = ["audio/mp4", "audio/x-m4a", "audio/mpeg4", "audio/aac"] <NEW_LINE> def load(self, filename): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> with open(filename, "rb") as fileobj: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> atoms = Atoms(fileobj) <NEW_LINE> <DEDENT> except AtomError as err: <NEW_LINE> <INDENT> reraise(error, err, sys.exc_info()[2]) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.info = MP4Info(atoms, fileobj) <NEW_LINE> <DEDENT> except error: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> reraise(MP4StreamInfoError, err, sys.exc_info()[2]) <NEW_LINE> <DEDENT> if not MP4Tags._can_load(atoms): <NEW_LINE> <INDENT> self.tags = None <NEW_LINE> self._padding = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.tags = self.MP4Tags(atoms, fileobj) <NEW_LINE> <DEDENT> except error: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> reraise(MP4MetadataError, err, sys.exc_info()[2]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._padding = self.tags._padding <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def add_tags(self): <NEW_LINE> <INDENT> if self.tags is None: <NEW_LINE> <INDENT> self.tags = self.MP4Tags() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise error("an MP4 tag already exists") <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def score(filename, fileobj, header_data): <NEW_LINE> <INDENT> return (b"ftyp" in header_data) + (b"mp4" in header_data) | An MPEG-4 audio file, probably containing AAC.
If more than one track is present in the file, the first is used.
Only audio ('soun') tracks will be read.
:ivar info: :class:`MP4Info`
:ivar tags: :class:`MP4Tags` | 6259904a379a373c97d9a400 |
class SingletonDecorator: <NEW_LINE> <INDENT> def __init__(self, klass): <NEW_LINE> <INDENT> self.klass = klass <NEW_LINE> self.instance = None <NEW_LINE> <DEDENT> def __call__(self, *args, **kwds): <NEW_LINE> <INDENT> if self.instance is None: <NEW_LINE> <INDENT> self.instance = self.klass(*args, **kwds) <NEW_LINE> <DEDENT> return self.instance | A Singleton decorator for a class `klass`. | 6259904ad6c5a102081e34f2 |
class PatchElement: <NEW_LINE> <INDENT> names = ('game', 'client') <NEW_LINE> def __init__(self, name, version: PatchVersion): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> assert name in self.names <NEW_LINE> self.version = version <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"<{self.__class__.__qualname__} {self.name} {self.version}>" <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, PatchElement): <NEW_LINE> <INDENT> return self.name == other.name and self.version == other.version <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash((self.name, self.version)) <NEW_LINE> <DEDENT> def download(self, langs=True): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def fspaths(self, langs=True) -> Generator[str, None, None]: <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def relpaths(self, langs=True) -> Generator[str, None, None]: <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def paths(self, langs=True) -> Generator[Tuple[str, str], None, None]: <NEW_LINE> <INDENT> return zip(self.fspaths(langs=langs), self.relpaths(langs=langs)) | Element of a patch (game or client)
This base class must not be instantiated directly.
In methods parameters, `langs` is used to filter language-specific files
and can have the following values:
False -- language-independent
True -- all languages
lang -- single given language
[lang, ...] -- list of given languages | 6259904a15baa72349463367 |
class FileWrapper(object): <NEW_LINE> <INDENT> def __init__(self, file, buffer_size=8192): <NEW_LINE> <INDENT> self.file = file <NEW_LINE> self.buffer_size = buffer_size <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> if hasattr(self.file, 'close'): <NEW_LINE> <INDENT> self.file.close() <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> data = self.file.read(self.buffer_size) <NEW_LINE> if data: <NEW_LINE> <INDENT> return data <NEW_LINE> <DEDENT> self.close() <NEW_LINE> raise StopIteration() | This class can be used to convert a :class:`file`-like object into
an iterable. It yields `buffer_size` blocks until the file is fully
read.
You should not use this class directly but rather use the
:func:`wrap_file` function that uses the WSGI server's file wrapper
support if it's available.
.. versionadded:: 0.5
If you're using this object together with a :class:`BaseResponse` you have
to use the `direct_passthrough` mode.
:param file: a :class:`file`-like object with a :meth:`~file.read` method.
:param buffer_size: number of bytes for one iteration. | 6259904a23e79379d538d8d4 |
class RubyMineIDETests(IdeaIDETests): <NEW_LINE> <INDENT> TIMEOUT_INSTALL_PROGRESS = 120 <NEW_LINE> TIMEOUT_START = 60 <NEW_LINE> TIMEOUT_STOP = 60 <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> self.installed_path = os.path.join(self.install_base_path, "ide", "rubymine") <NEW_LINE> self.desktop_filename = 'jetbrains-rubymine.desktop' <NEW_LINE> self.command_args = '{} ide rubymine'.format(UMAKE) <NEW_LINE> self.name = 'RubyMine' | RubyMine from the IDE collection. | 6259904a1f5feb6acb163fcb |
class RequestLayer(object): <NEW_LINE> <INDENT> def __init__(self, server): <NEW_LINE> <INDENT> self._server = server <NEW_LINE> <DEDENT> def receive_request(self, transaction): <NEW_LINE> <INDENT> method = transaction.request.code <NEW_LINE> if method == defines.Codes.GET.number: <NEW_LINE> <INDENT> transaction = self._handle_get(transaction) <NEW_LINE> <DEDENT> elif method == defines.Codes.POST.number: <NEW_LINE> <INDENT> transaction = self._handle_post(transaction) <NEW_LINE> <DEDENT> elif method == defines.Codes.PUT.number: <NEW_LINE> <INDENT> transaction = self._handle_put(transaction) <NEW_LINE> <DEDENT> elif method == defines.Codes.DELETE.number: <NEW_LINE> <INDENT> transaction = self._handle_delete(transaction) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> transaction.response = None <NEW_LINE> <DEDENT> return transaction <NEW_LINE> <DEDENT> def send_request(self, request): <NEW_LINE> <INDENT> return request <NEW_LINE> <DEDENT> def _handle_get(self, transaction): <NEW_LINE> <INDENT> wkc_resource_is_defined = defines.DISCOVERY_URL in self._server.root <NEW_LINE> path = str("/" + transaction.request.uri_path) <NEW_LINE> transaction.response = Response() <NEW_LINE> transaction.response.destination = transaction.request.source <NEW_LINE> transaction.response.token = transaction.request.token <NEW_LINE> if path == defines.DISCOVERY_URL and not wkc_resource_is_defined: <NEW_LINE> <INDENT> transaction = self._server.resourceLayer.discover(transaction) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> resource = self._server.root[path] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> resource = None <NEW_LINE> <DEDENT> if resource is None or path == '/': <NEW_LINE> <INDENT> transaction.response.code = defines.Codes.NOT_FOUND.number <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> transaction.resource = resource <NEW_LINE> transaction = self._server.resourceLayer.get_resource(transaction) <NEW_LINE> <DEDENT> <DEDENT> return transaction <NEW_LINE> <DEDENT> def _handle_put(self, transaction): <NEW_LINE> <INDENT> path = str("/" + transaction.request.uri_path) <NEW_LINE> transaction.response = Response() <NEW_LINE> transaction.response.destination = transaction.request.source <NEW_LINE> transaction.response.token = transaction.request.token <NEW_LINE> try: <NEW_LINE> <INDENT> resource = self._server.root[path] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> resource = None <NEW_LINE> <DEDENT> if resource is None: <NEW_LINE> <INDENT> transaction.response.code = defines.Codes.NOT_FOUND.number <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> transaction.resource = resource <NEW_LINE> transaction = self._server.resourceLayer.update_resource(transaction) <NEW_LINE> <DEDENT> return transaction <NEW_LINE> <DEDENT> def _handle_post(self, transaction): <NEW_LINE> <INDENT> path = str("/" + transaction.request.uri_path) <NEW_LINE> transaction.response = Response() <NEW_LINE> transaction.response.destination = transaction.request.source <NEW_LINE> transaction.response.token = transaction.request.token <NEW_LINE> transaction = self._server.resourceLayer.create_resource(path, transaction) <NEW_LINE> return transaction <NEW_LINE> <DEDENT> def _handle_delete(self, transaction): <NEW_LINE> <INDENT> path = str("/" + transaction.request.uri_path) <NEW_LINE> transaction.response = Response() <NEW_LINE> transaction.response.destination = transaction.request.source <NEW_LINE> transaction.response.token = transaction.request.token <NEW_LINE> try: <NEW_LINE> <INDENT> resource = self._server.root[path] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> resource = None <NEW_LINE> <DEDENT> if resource is None: <NEW_LINE> <INDENT> transaction.response.code = defines.Codes.NOT_FOUND.number <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> transaction.resource = resource <NEW_LINE> transaction = self._server.resourceLayer.delete_resource(transaction, path) <NEW_LINE> <DEDENT> return transaction | Class to handle the Request/Response layer | 6259904a7cff6e4e811b6e0f
class Cost(namedtuple('Cost', COST_LIST)): <NEW_LINE> <INDENT> def __new__(cls, *args, **kwargs): <NEW_LINE> <INDENT> ntp = super(Cost, cls).__new__(cls, *args, **kwargs) <NEW_LINE> if hasattr(ntp.mac_op, '__len__'): <NEW_LINE> <INDENT> raise TypeError('Cost: mac_op must be a scalar') <NEW_LINE> <DEDENT> if not isinstance(ntp.mem_hier, tuple): <NEW_LINE> <INDENT> raise TypeError('Cost: mem_hier must be a tuple') <NEW_LINE> <DEDENT> if len(ntp.mem_hier) != me.NUM: <NEW_LINE> <INDENT> raise ValueError('Cost: mem_hier must have length {}' .format(me.NUM)) <NEW_LINE> <DEDENT> if hasattr(ntp.noc_hop, '__len__'): <NEW_LINE> <INDENT> raise TypeError('Cost: noc_hop must be a scalar') <NEW_LINE> <DEDENT> if hasattr(ntp.unit_static, '__len__'): <NEW_LINE> <INDENT> raise TypeError('Cost: unit_static must be a scalar') <NEW_LINE> <DEDENT> return ntp <NEW_LINE> <DEDENT> def mem_hier_at(self, mhe): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.mem_hier[mhe] <NEW_LINE> <DEDENT> except (IndexError, TypeError): <NEW_LINE> <INDENT> return None | Cost specification, including MAC operation cost, memory hierarchy cost,
NoC hop cost, and unit-time static cost. | 6259904a8a349b6b43687623 |
class AddSaltNPepper(DataIterator): <NEW_LINE> <INDENT> def __init__(self, iter, prob_dict, ratio_dict=None): <NEW_LINE> <INDENT> DataIterator.__init__(self, iter.data_shapes, iter.length) <NEW_LINE> ratio_keys = set() if ratio_dict is None else set(ratio_dict.keys()) <NEW_LINE> prob_keys = set(prob_dict.keys()) <NEW_LINE> if ratio_dict is not None and ratio_keys != prob_keys: <NEW_LINE> <INDENT> raise IteratorValidationError( "probabilities and ratios must be provided for the " "same data names. But {} != {}".format(prob_keys, ratio_keys)) <NEW_LINE> <DEDENT> for key in prob_keys: <NEW_LINE> <INDENT> if key not in iter.data_shapes: <NEW_LINE> <INDENT> raise IteratorValidationError( "key {} is not present in iterator. Available keys: {" "}".format(key, iter.data_shapes.keys())) <NEW_LINE> <DEDENT> <DEDENT> self.ratio_dict = {} if ratio_dict is None else ratio_dict <NEW_LINE> self.prob_dict = prob_dict <NEW_LINE> self.iter = iter <NEW_LINE> <DEDENT> def __call__(self, handler): <NEW_LINE> <INDENT> for data in self.iter(handler): <NEW_LINE> <INDENT> for key, pr in self.prob_dict.items(): <NEW_LINE> <INDENT> ratio = self.ratio_dict.get(key, 0.5) <NEW_LINE> d = data[key].copy() <NEW_LINE> r = self.rnd.rand(*d.shape) <NEW_LINE> d[r >= 1.0 - pr * ratio] = 1.0 <NEW_LINE> d[r <= pr * (1.0 - ratio)] = 0.0 <NEW_LINE> data[key] = d <NEW_LINE> <DEDENT> yield data | Adds Salt&Pepper noise to data generated by another iterator, which must
provide named data items (such as Online, Minibatches, Undivided). Only
Numpy data is supported,
Supports usage of different amounts and ratios of salt VS pepper for
different named data items. | 6259904a4e696a045264e80b |
@enum.unique <NEW_LINE> class ButtonMode(enum.IntEnum): <NEW_LINE> <INDENT> default = 0x00 <NEW_LINE> inverted = 0x01 | Microbot supports few behaviours for the case when user touches button on the device itself.
This enum represents those states. | 6259904a96565a6dacd2d974 |
class OrganizationConfigSettings(AbstractOrganizationConfigSettings): <NEW_LINE> <INDENT> class Meta(AbstractOrganizationConfigSettings.Meta): <NEW_LINE> <INDENT> abstract = False <NEW_LINE> swappable = swapper.swappable_setting('config', 'OrganizationConfigSettings') | Configuration management settings
specific to each organization | 6259904a498bea3a75a58ef5 |
class IntIndexer(ShapedIntIndexer): <NEW_LINE> <INDENT> def shaped_instance(self): <NEW_LINE> <INDENT> if self._shaped_inst is not None: <NEW_LINE> <INDENT> return self._shaped_inst <NEW_LINE> <DEDENT> if self._src_shape is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if self._idx < 0: <NEW_LINE> <INDENT> self._shaped_inst = ShapedIntIndexer(self._idx + self._src_shape[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._shaped_inst = ShapedIntIndexer(self._idx) <NEW_LINE> <DEDENT> return self._shaped_inst._set_attrs(self) | Int indexing class that may or may not be 'shaped'.
Parameters
----------
idx : int
The index.
flat_src : bool or None
If True, treat source as flat. | 6259904a10dbd63aa1c71fb2 |
class constraint_kinematic_wave(flux_connection): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> residencetime = _swig_property(_cmf_core.constraint_kinematic_wave_residencetime_get, _cmf_core.constraint_kinematic_wave_residencetime_set) <NEW_LINE> beta = _swig_property(_cmf_core.constraint_kinematic_wave_beta_get, _cmf_core.constraint_kinematic_wave_beta_set) <NEW_LINE> residual = _swig_property(_cmf_core.constraint_kinematic_wave_residual_get, _cmf_core.constraint_kinematic_wave_residual_set) <NEW_LINE> V0 = _swig_property(_cmf_core.constraint_kinematic_wave_V0_get, _cmf_core.constraint_kinematic_wave_V0_set) <NEW_LINE> Vrmax = _swig_property(_cmf_core.constraint_kinematic_wave_Vrmax_get, _cmf_core.constraint_kinematic_wave_Vrmax_set) <NEW_LINE> gamma = _swig_property(_cmf_core.constraint_kinematic_wave_gamma_get, _cmf_core.constraint_kinematic_wave_gamma_set) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> _cmf_core.constraint_kinematic_wave_swiginit(self, _cmf_core.new_constraint_kinematic_wave(*args, **kwargs)) <NEW_LINE> <DEDENT> __swig_destroy__ = _cmf_core.delete_constraint_kinematic_wave | Calculates flux out of a storage as a linear function of its volume to
a power, constraint by the volume stored in the target storage.
Deprecated Will be replaced by ConstraintLinearStorageConnection,
without beta and gamma.
.. math::
q = \frac 1 {t_r}
{\left(\frac{V_{l} - V_{residual}}{V_0} \right)^\beta}
\left(\frac{V_{r,max}-V_{r}}{V_{r,max}}\right)^\gamma
where: :math:`V_l` The actual volume stored by the left water storage
:math:`V_{residual} [m^3]` The volume of water not flowing out (default = 0)
:math:`V_0` The reference volume to scale the exponent (default = 1m3/day)
:math:`\beta` A parameter to shape the response curve. In case of
:math:`\beta \neq 1`, :math:`t_r` is not a residence time, but just a
parameter.
:math:`t_r [days]` The residence time of the water in this storage in days
:math:`V_{r,max}` The capacity of the right water storage in m3
:math:`V_{r}` The actual volume of the right water storage
:math:`\gamma` A shape parameter for the target capacity constriction
C++ includes: simple_connections.h | 6259904a6fece00bbacccd8e |
class Wall(GameObject): <NEW_LINE> <INDENT> def __init__(self, *sprite_groups, **kwargs): <NEW_LINE> <INDENT> GameObject.__init__(self, sprite_groups, kwargs) | Class for all impassable objects. | 6259904a462c4b4f79dbcdd6 |
class XLWriter(BookWriter): <NEW_LINE> <INDENT> def __init__(self, file, **keywords): <NEW_LINE> <INDENT> BookWriter.__init__(self, file, **keywords) <NEW_LINE> self.wb = Workbook() <NEW_LINE> <DEDENT> def create_sheet(self, name): <NEW_LINE> <INDENT> return XLSheetWriter(self.wb, None, name) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.wb.save(self.file) | xls, xlsx and xlsm writer | 6259904a73bcbd0ca4bcb664 |
class RouterSessionFactory(FutureMixin, protocol.RouterSessionFactory): <NEW_LINE> <INDENT> session = RouterSession | WAMP router session factory for asyncio-based applications. | 6259904a097d151d1a2c2444 |
class SignError(Exception): <NEW_LINE> <INDENT> pass | Docstring for SignError. | 6259904aa8ecb033258725e8 |
class Manager(object): <NEW_LINE> <INDENT> def get_peers(self, switch): <NEW_LINE> <INDENT> hotes = Hote.objects.filter(valid=True) <NEW_LINE> peers = [] <NEW_LINE> for hote in hotes: <NEW_LINE> <INDENT> peer = Peer() <NEW_LINE> peer.idPeer = hote.idhote <NEW_LINE> peer.Name = hote.nomhote <NEW_LINE> peer.Mac = hote.machote <NEW_LINE> peer.IPv6 = hote.ipv6hote <NEW_LINE> peer.IPv4 = hote.ipv4hote <NEW_LINE> if hote.idport.idswitch == switch: <NEW_LINE> <INDENT> peer.Egress = True <NEW_LINE> peer.outputPort = hote.idport.numport <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> peer.Egress = False <NEW_LINE> peer.nextHop = hote.idport.idswitch_id <NEW_LINE> <DEDENT> peers.append(peer) <NEW_LINE> <DEDENT> return peers <NEW_LINE> <DEDENT> def create_rules(self, switches): <NEW_LINE> <INDENT> for switch in switches: <NEW_LINE> <INDENT> peers = self.get_peers(switch) <NEW_LINE> rules = self.call_managers(switch.idswitch, peers) <NEW_LINE> Regles.objects.filter(idswitch=switch.idswitch).filter(etat="Production").delete() <NEW_LINE> db_rules = [] <NEW_LINE> for rule in rules: <NEW_LINE> <INDENT> db_rules.append(Regles(idswitch=switch, typeregle=rule.get("module"), regle=json.dumps(rule.get("rule")), source_id=rule.get("source"), destination_id=rule.get("destination"))) <NEW_LINE> <DEDENT> Regles.objects.bulk_create(db_rules) <NEW_LINE> groups_switch = settings.RULES_GENERATION_GROUPS_DEFINITION[switch.idswitch] <NEW_LINE> db_groups = [] <NEW_LINE> for group in groups_switch: <NEW_LINE> <INDENT> db_groups.append(Regles(idswitch=switch, typeregle="Group", regle=json.dumps(group))) <NEW_LINE> <DEDENT> Regles.objects.bulk_create(db_groups) <NEW_LINE> <DEDENT> <DEDENT> def call_managers(self, dpid, peers): <NEW_LINE> <INDENT> rules = [] <NEW_LINE> production = Production() <NEW_LINE> statistics = Statistics() <NEW_LINE> rules.extend(production.create_rules_members(dpid, peers)) <NEW_LINE> rules.extend(statistics.create_rules_members(dpid, peers)) <NEW_LINE> return rules | Main class for managing the core of Generate_rues app | 6259904ae64d504609df9dbb |
class URLContext(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.base_path = '/' <NEW_LINE> <DEDENT> def set_current_url(self, current_url): <NEW_LINE> <INDENT> self.base_path = posixpath.dirname(current_url) <NEW_LINE> <DEDENT> def make_relative(self, url): <NEW_LINE> <INDENT> suffix = '/' if (url.endswith('/') and len(url) > 1) else '' <NEW_LINE> if self.base_path == '/': <NEW_LINE> <INDENT> if url == '/': <NEW_LINE> <INDENT> return '.' <NEW_LINE> <DEDENT> return url.lstrip('/') <NEW_LINE> <DEDENT> relative_path = posixpath.relpath(url, start=self.base_path) + suffix <NEW_LINE> return relative_path.rstrip('/') | The URLContext is used to ensure that we can generate the appropriate
relative URLs to other pages from any given page in the site.
We use relative URLs so that static sites can be deployed to any location
without having to specify what the path component on the host will be
if the documentation is not hosted at the root path. | 6259904a45492302aabfd8aa |
class FIFODict: <NEW_LINE> <INDENT> def __init__(self, size, finalizer=None): <NEW_LINE> <INDENT> if not type(size) == type(0): <NEW_LINE> <INDENT> raise TypeError("size must be an int") <NEW_LINE> <DEDENT> if not size > 0: <NEW_LINE> <INDENT> raise ValueError("size must be positive") <NEW_LINE> <DEDENT> if finalizer is not None and not isinstance(finalizer, collections.Callable): <NEW_LINE> <INDENT> raise TypeError("finalizer must be None or a callable") <NEW_LINE> <DEDENT> self.__size = size <NEW_LINE> self.__data = {} <NEW_LINE> self.__order = deque() <NEW_LINE> self.__finalizer = finalizer <NEW_LINE> <DEDENT> def add(self, key, val): <NEW_LINE> <INDENT> if key in self.__data: <NEW_LINE> <INDENT> self.__data[key] = val <NEW_LINE> <DEDENT> elif len(self.__order) < self.__size: <NEW_LINE> <INDENT> self.__order.append(key) <NEW_LINE> self.__data[key] = val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> delKey = self.__order.popleft() <NEW_LINE> if self.__finalizer: <NEW_LINE> <INDENT> self.__finalizer(delKey, self.__data[delKey]) <NEW_LINE> <DEDENT> del self.__data[delKey] <NEW_LINE> self.__order.append(key) <NEW_LINE> self.__data[key] = val <NEW_LINE> <DEDENT> <DEDENT> def get(self, key, default=None): <NEW_LINE> <INDENT> return self.__data.get(key, default) <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.__data = {} <NEW_LINE> self.__order = [] <NEW_LINE> <DEDENT> def __setitem__(self, key, item): <NEW_LINE> <INDENT> self.add(key, item) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self.__data[key] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.__data) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> allitems = [] <NEW_LINE> for key in self.__order: <NEW_LINE> <INDENT> val = self.__data[key] <NEW_LINE> item = "%s: %s" % (str(key), str(val)) <NEW_LINE> allitems.append(item) <NEW_LINE> <DEDENT> return "{%s}" % ", ".join(allitems) <NEW_LINE> <DEDENT> def __contains__(self, item): <NEW_LINE> <INDENT> return (item in self.__data) <NEW_LINE> <DEDENT> def __delitem__(self, item): <NEW_LINE> <INDENT> if item not in self.__data: <NEW_LINE> <INDENT> raise KeyError(item) <NEW_LINE> <DEDENT> del self.__data[item] <NEW_LINE> self.__order.remove(item) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for k in self.__order: <NEW_LINE> <INDENT> yield k | A simple FIFO mapping between keys and values.
When the max. capacity is reached, the key/value pair that has been in
the dict the longest time is removed. | 6259904a0a366e3fb87dddbd |
class Auth(): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def generate_token(user_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> payload = { 'exp': datetime.datetime.utcnow() + datetime.timedelta(days=1), 'iat': datetime.datetime.utcnow(), 'sub': user_id } <NEW_LINE> return jwt.encode( payload, os.getenv('JWT_SECRET_KEY'), 'HS256' ).decode("utf-8") <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return Response( mimetype="application/json", response=json.dumps({'error': 'error generando el token'}), status=400 ) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def decode_token(token): <NEW_LINE> <INDENT> re = {'data': {}, 'error': {}} <NEW_LINE> try: <NEW_LINE> <INDENT> payload = jwt.decode(token, os.getenv('JWT_SECRET_KEY')) <NEW_LINE> re['data'] = {'user_id': payload['sub']} <NEW_LINE> return re <NEW_LINE> <DEDENT> except jwt.ExpiredSignatureError as e1: <NEW_LINE> <INDENT> re['error'] = {'message': 'Su token expiro, por favor vuelva a logearse'} <NEW_LINE> return re <NEW_LINE> <DEDENT> except jwt.InvalidTokenError: <NEW_LINE> <INDENT> re['error'] = {'message': 'Token invalido, por favor intentelo con un nuevo token'} <NEW_LINE> return re <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def auth_required(func): <NEW_LINE> <INDENT> @wraps(func) <NEW_LINE> def decorated_auth(*args, **kwargs): <NEW_LINE> <INDENT> if 'api-token' not in request.headers: <NEW_LINE> <INDENT> return Response( mimetype="application/json", response=json.dumps({'error': 'El token de autenticación no está disponible, inicia sesión para obtener uno'}), status=400 ) <NEW_LINE> <DEDENT> token = request.headers.get('api-token') <NEW_LINE> data = Auth.decode_token(token) <NEW_LINE> if data['error']: <NEW_LINE> <INDENT> return Response( mimetype="application/json", response=json.dumps(data['error']), status=400 ) <NEW_LINE> <DEDENT> user_id = data['data']['user_id'] <NEW_LINE> check_user = UserModel.get_one_user(user_id) <NEW_LINE> if not check_user: <NEW_LINE> <INDENT> return Response( mimetype="application/json", response=json.dumps({'error': 'el usuario no existe, token no válido'}), status=400 ) <NEW_LINE> <DEDENT> g.user = {'id': user_id} <NEW_LINE> return func(*args, **kwargs) <NEW_LINE> <DEDENT> return decorated_auth | Auth Class | 6259904a379a373c97d9a402 |
class UploadSuccess(JSONException): <NEW_LINE> <INDENT> code = 200 | A successful upload response. | 6259904a8a43f66fc4bf356d |
class Node(object): <NEW_LINE> <INDENT> def __init__(self, ip, port, node_id, last_seen=None): <NEW_LINE> <INDENT> self.ip = ip <NEW_LINE> self.port = port <NEW_LINE> self.node_id = node_id <NEW_LINE> self.last_seen = last_seen <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.node_id == other.node_id <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return self.node_id != other.node_id <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.last_seen < other.last_seen <NEW_LINE> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> return self.last_seen > other.last_seen <NEW_LINE> <DEDENT> def __ge__(self, other): <NEW_LINE> <INDENT> return self.last_seen >= other.last_seen <NEW_LINE> <DEDENT> def __le__(self, other): <NEW_LINE> <INDENT> return self.last_seen <= other.last_seen <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> message = 'node<%s>@%s:%d last_seen: %s' <NEW_LINE> return message % (self.node_id, self.ip, self.port, self.last_seen) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self.__dict__) | Class that holds the contact information for a node in the Kademlia
network. Instances of this class are stored in the k-buckets. | 6259904a4e696a045264e80c |
class Ec2SnapshotDeleteError(Ec2Error): <NEW_LINE> <INDENT> pass | Raised when there was an error while deleting an ec2 snapshot | 6259904ab5575c28eb7136b5 |
class PcaModelSelect(Pipeline): <NEW_LINE> <INDENT> def __init__(self, score='bic', tol=1e-3, verbose=False, max_iter=None, alpha=1.0): <NEW_LINE> <INDENT> self.score = score <NEW_LINE> self.tol = tol <NEW_LINE> self.verbose = verbose <NEW_LINE> self.max_iter = max_iter <NEW_LINE> self.alpha = alpha <NEW_LINE> super().__init__(steps=[ ('scl', StandardScaler(with_mean=True, with_std=True, copy=True)), ('pca', PCA( n_components=1, svd_solver='full', whiten=False, copy=True)) ]) <NEW_LINE> <DEDENT> def transform(self, X, y=None): <NEW_LINE> <INDENT> return super().transform(X).astype(np.float16) <NEW_LINE> <DEDENT> def fit(self, X, y): <NEW_LINE> <INDENT> prev = None <NEW_LINE> redu = 1.0 <NEW_LINE> status = 'proceed' <NEW_LINE> if self.max_iter is None: <NEW_LINE> <INDENT> self.max_iter = X.shape[1] <NEW_LINE> <DEDENT> max_iter = min(self.max_iter, X.shape[1]) <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> print("{:3s} | {:8s} | {:8s} | {}".format( 'num', 'score', 'improved', 'status')) <NEW_LINE> <DEDENT> for n_comp in range(1, max_iter): <NEW_LINE> <INDENT> super().set_params(**{'pca__n_components': n_comp}) <NEW_LINE> X_new = super().fit_transform(X) <NEW_LINE> lr = sm.Logit(y, sm.add_constant(X_new)).fit_regularized( method='l1', alpha=self.alpha, disp=0) <NEW_LINE> curr = getattr(lr, self.score) <NEW_LINE> if self.score == 'prsquared': <NEW_LINE> <INDENT> curr = 1 - curr <NEW_LINE> <DEDENT> if prev is None: <NEW_LINE> <INDENT> prev = curr <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> redu = 1 - curr / prev <NEW_LINE> prev = curr <NEW_LINE> if redu > self.tol: <NEW_LINE> <INDENT> self.best_n_components_ = n_comp <NEW_LINE> self.best_statsmodel_ = lr <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> status = 'terminated' <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if self.verbose: <NEW_LINE> <INDENT> print("{:3d} | {:8.4f} | {:8.4f} | {}".format( n_comp, curr, redu, status)) <NEW_LINE> <DEDENT> <DEDENT> if self.verbose: <NEW_LINE> <INDENT> print(self.best_statsmodel_.summary()) <NEW_LINE> <DEDENT> super().set_params(**{'pca__n_components': self.best_n_components_}) <NEW_LINE> <DEDENT> def fit_transform(self, X, y): <NEW_LINE> <INDENT> self.fit(X, y) <NEW_LINE> return self.transform(X) | Principal component analysis (PCA) wheras the number
of components by simple backtracking line search and
model selection criteria
Parameters
----------
score : str (Default: 'bic')
The model selection criteria to minimze
- 'bic' (default)
- 'aic'
- 'prsquared'
tol : float (Default: 1e-3)
Required percentage decrease in each iteration.
max_iter: int (Default: None)
Any number between 1 and the number of features
verbose: bool (Default: False)
Algorithm
---------
1. Increment the number of components by one
2. Run the PCA
3. Fit a Lasso-Logit model with the components
4. Check if model selection criteria improves
5. If not, terminate backtracking; Else got to 1 | 6259904ab57a9660fecd2e55 |
@dataclass <NEW_LINE> class ChatPhoto(Base): <NEW_LINE> <INDENT> small_file_id: str <NEW_LINE> big_file_id: str | This object represents a chat photo. | 6259904a8e71fb1e983bce9e |
class TwoPCPhase1(object): <NEW_LINE> <INDENT> def __init__(self, txn_id, where_list): <NEW_LINE> <INDENT> self.txn_id = txn_id <NEW_LINE> self.where_list = where_list <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "TwoPCPhase1(txn_id={!r},where_list={!r})".format(self.txn_id, self.where_list) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return (self.txn_id == other.txn_id and self.where_list == other.where_list) <NEW_LINE> <DEDENT> def encode(self): <NEW_LINE> <INDENT> return (struct.pack(">H{}sI".format(len(txn_id)), len(txn_id), txn_id, len(where_list)) + b''.join(( struct.pack('>QQQ', stream_id, start_por, end_por) for (stream_id, start_por, end_por) in self.where_list))) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def decode(bs): <NEW_LINE> <INDENT> reader = StringIO(bs) <NEW_LINE> length = struct.unpack(">H", reader.read(2))[0] <NEW_LINE> txn_id = reader.read(length).decode() <NEW_LINE> where_list = [] <NEW_LINE> length = struct.unpack(">I", reader.read(4))[0] <NEW_LINE> for i in range(0, length): <NEW_LINE> <INDENT> stream_id = struct.unpack(">Q", reader.read(8))[0] <NEW_LINE> start_por = struct.unpack(">Q", reader.read(8))[0] <NEW_LINE> end_por = struct.unpack(">Q", reader.read(8))[0] <NEW_LINE> where_list.append((stream_id, start_por, end_por)) <NEW_LINE> <DEDENT> return TwoPCPhase1(txn_id, where_list) | TwoPCPhase1(txn_id: String,
where_list: [(stream_id: U64, start_por: U64, end_por: U64)]) | 6259904a7cff6e4e811b6e13 |
class VenueParser: <NEW_LINE> <INDENT> def parse(self, document): <NEW_LINE> <INDENT> soup = BeautifulSoup(document, features="html.parser") <NEW_LINE> venues = [] <NEW_LINE> conference_name = soup.find("h1").text <NEW_LINE> h2 = soup.find("h2") <NEW_LINE> if h2 is not None: <NEW_LINE> <INDENT> venues.append(self.get_venue(conference_name, h2)) <NEW_LINE> elem = h2.find_next(["h2", "li"]) <NEW_LINE> while elem is not None: <NEW_LINE> <INDENT> if elem.name == "h2": <NEW_LINE> <INDENT> venues.append(self.get_venue(conference_name, elem)) <NEW_LINE> <DEDENT> if elem.name == "li" and elem.get("class") == "entry editor toc".split(): <NEW_LINE> <INDENT> li = elem <NEW_LINE> venues[len(venues) - 1].proceedings.append(li["id"]) <NEW_LINE> <DEDENT> elem = elem.find_next(["h2", "li"]) <NEW_LINE> <DEDENT> <DEDENT> return conference_name, venues <NEW_LINE> <DEDENT> def get_venue(self, conference_name, h2): <NEW_LINE> <INDENT> year = h2['id'] if 'id' in h2 else None <NEW_LINE> dblp_link = None <NEW_LINE> if h2.find('a'): <NEW_LINE> <INDENT> dblp_link = h2.a['href'] <NEW_LINE> <DEDENT> title = h2.text <NEW_LINE> location = h2.text.split(':')[1].strip() if ':' in h2.text else None <NEW_LINE> return Venue(title, location, year, dblp_link, conference_name) <NEW_LINE> <DEDENT> def get_text_if_present(self, elem, tag): <NEW_LINE> <INDENT> val = elem.find(tag) <NEW_LINE> if val is not None: <NEW_LINE> <INDENT> return val.text <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def get_proceeding(self, conference_name, xml): <NEW_LINE> <INDENT> root = ET.fromstring(xml) <NEW_LINE> proceeding_tag = root[0] <NEW_LINE> title = self.get_text_if_present(proceeding_tag, 'title') <NEW_LINE> proceeding = Proceeding(title) <NEW_LINE> proceeding.proceeding_key = proceeding_tag.get('key') <NEW_LINE> proceeding.mdate = proceeding_tag.get('mdate') <NEW_LINE> editors = proceeding_tag.findall('editor') <NEW_LINE> proceeding.editors = [e.text for e in editors] if editors is not None else [] <NEW_LINE> proceeding.booktitle = self.get_text_if_present(proceeding_tag, 'booktitle') <NEW_LINE> proceeding.publisher = self.get_text_if_present(proceeding_tag, 'publisher') <NEW_LINE> proceeding.series = self.get_text_if_present(proceeding_tag, 'series') <NEW_LINE> proceeding.volume = self.get_text_if_present(proceeding_tag, 'volume') <NEW_LINE> proceeding.year = self.get_text_if_present(proceeding_tag, 'year') <NEW_LINE> proceeding.isbn = self.get_text_if_present(proceeding_tag, 'isbn') <NEW_LINE> proceeding.ee = self.get_text_if_present(proceeding_tag, 'ee') <NEW_LINE> proceeding.dblp_url = self.get_text_if_present(proceeding_tag, 'url') <NEW_LINE> proceeding.conference_name = conference_name <NEW_LINE> return proceeding | Naming convention:
venue: It represents a conference instance. ex: NIPS/2017
proceeding: It represents different events in that instance like workshops, conferences. ex: ViGIL, ML4H, etc. | 6259904abaa26c4b54d50684 |
@skipIf(NO_MOCK, NO_MOCK_REASON) <NEW_LINE> class DevMapTestCase(TestCase, LoaderModuleMockMixin): <NEW_LINE> <INDENT> def setup_loader_modules(self): <NEW_LINE> <INDENT> return {devmap: {}} <NEW_LINE> <DEDENT> def test_multipath_list(self): <NEW_LINE> <INDENT> mock = MagicMock(return_value='A') <NEW_LINE> with patch.dict(devmap.__salt__, {'cmd.run': mock}): <NEW_LINE> <INDENT> self.assertEqual(devmap.multipath_list(), ['A']) <NEW_LINE> <DEDENT> <DEDENT> def test_multipath_flush(self): <NEW_LINE> <INDENT> mock = MagicMock(return_value=False) <NEW_LINE> with patch.object(os.path, 'exists', mock): <NEW_LINE> <INDENT> self.assertEqual(devmap.multipath_flush('device'), 'device does not exist') <NEW_LINE> <DEDENT> mock = MagicMock(return_value=True) <NEW_LINE> with patch.object(os.path, 'exists', mock): <NEW_LINE> <INDENT> mock = MagicMock(return_value='A') <NEW_LINE> with patch.dict(devmap.__salt__, {'cmd.run': mock}): <NEW_LINE> <INDENT> self.assertEqual(devmap.multipath_flush('device'), ['A']) | Test cases for salt.modules.devmap | 6259904a8a349b6b43687627 |
class CandidateFormView(View): <NEW_LINE> <INDENT> model = Candidate <NEW_LINE> form_class=CandidateForm <NEW_LINE> template_name='homepage/candidate.html' <NEW_LINE> def get(self,request): <NEW_LINE> <INDENT> form=self.form_class() <NEW_LINE> return render(request,self.template_name,{'form':form}) <NEW_LINE> <DEDENT> def post(self,request): <NEW_LINE> <INDENT> print(self.request) <NEW_LINE> form=self.form_class(request.POST) <NEW_LINE> if(form.is_valid()): <NEW_LINE> <INDENT> form.save() <NEW_LINE> return render(request,'homepage/response.html') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return render(request,self.template_name,{'form':form}) | view for candidate form in homepage of app | 6259904a91af0d3eaad3b1ff |
class GeneratePointView(TemplateView): <NEW_LINE> <INDENT> template_name = 'map/generate_point.html' <NEW_LINE> redirect_field_name = 'map/map_clusters.html' <NEW_LINE> model = Point | Class used to automatically generate points | 6259904abe383301e0254bf4 |
class MobileMessage(models.Model): <NEW_LINE> <INDENT> mobile = models.ForeignKey(Mobile, on_delete=models.CASCADE) <NEW_LINE> reception_time = models.DateTimeField(default=timezone.now) <NEW_LINE> message_text = models.CharField(max_length=50) <NEW_LINE> lat = models.FloatField(default=-1, blank=True, null=True) <NEW_LINE> lng = models.FloatField(default=-1, blank=True, null=True) | Model for a message sent from a mobile device to the server. | 6259904a96565a6dacd2d976
class Recovery: <NEW_LINE> <INDENT> def __init__(self, loss_upper_bound: float, checkpoint_manager: tf.train.CheckpointManager, recovery_begin_steps: int = 0, recovery_max_trials: int = 3): <NEW_LINE> <INDENT> self.recover_counter = 0 <NEW_LINE> self.recovery_begin_steps = recovery_begin_steps <NEW_LINE> self.recovery_max_trials = recovery_max_trials <NEW_LINE> self.loss_upper_bound = loss_upper_bound <NEW_LINE> self.checkpoint_manager = checkpoint_manager <NEW_LINE> <DEDENT> def should_recover(self, loss_value, global_step): <NEW_LINE> <INDENT> if tf.math.is_nan(loss_value): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if (global_step >= self.recovery_begin_steps and loss_value > self.loss_upper_bound): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def maybe_recover(self, loss_value, global_step): <NEW_LINE> <INDENT> if not self.should_recover(loss_value, global_step): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.recover_counter += 1 <NEW_LINE> if self.recover_counter > self.recovery_max_trials: <NEW_LINE> <INDENT> raise RuntimeError( "The loss value is NaN after training loop and it happens %d times." % self.recover_counter) <NEW_LINE> <DEDENT> checkpoint_path = self.checkpoint_manager.restore_or_initialize() <NEW_LINE> logging.warning( "Recovering the model from checkpoint: %s. The loss value becomes " "%f at step %d.", checkpoint_path, loss_value, global_step) | Built-in model blowup recovery module.
Checks the loss value by the given threshold. If applicable, recover the
model by reading the checkpoint on disk. | 6259904aec188e330fdf9c77 |
class GenieAmbientGroup(GenieGameEntityGroup): <NEW_LINE> <INDENT> def contains_unit(self, ambient_id: int) -> bool: <NEW_LINE> <INDENT> return self.contains_entity(ambient_id) <NEW_LINE> <DEDENT> def is_creatable(self, civ_id: int = -1) -> bool: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def is_gatherer(self, civ_id: int = -1) -> bool: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def is_melee(self, civ_id: int = -1) -> bool: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def is_ranged(self, civ_id: int = -1) -> bool: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def is_projectile_shooter(self, civ_id: int = -1) -> bool: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def is_unique(self) -> bool: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"GenieAmbientGroup<{self.get_id()}>" | One Genie unit that is an ambient scenery object.
Mostly for resources, specifically trees. For these objects
every frame in their graphics file is a variant.
Example: Trees, Gold mines, Sign | 6259904ab5575c28eb7136b6 |
@widgets.register <NEW_LINE> class PicSureLogin(widgets.DOMWidget): <NEW_LINE> <INDENT> _view_name = Unicode('LoginView').tag(sync=True) <NEW_LINE> _model_name = Unicode('LoginModel').tag(sync=True) <NEW_LINE> _view_module = Unicode('pic-sure-jupyter-widgets').tag(sync=True) <NEW_LINE> _model_module = Unicode('pic-sure-jupyter-widgets').tag(sync=True) <NEW_LINE> _view_module_version = Unicode('^0.1.0').tag(sync=True) <NEW_LINE> _model_module_version = Unicode('^0.1.0').tag(sync=True) <NEW_LINE> endpoint = Unicode('').tag(sync=True) <NEW_LINE> token = Unicode('').tag(sync=True) <NEW_LINE> resourceUUID = Unicode('').tag(sync=True) | An example widget. | 6259904acb5e8a47e493cb74 |
class ResultsView(generic.DetailView): <NEW_LINE> <INDENT> model = Question <NEW_LINE> template_name = 'polls/results.html' | Display the detail page for a poll question's results. | 6259904a15baa7234946336c
class SetProcessContextMixin(object): <NEW_LINE> <INDENT> name = "cc" <NEW_LINE> interactive = True <NEW_LINE> def __enter__(self): <NEW_LINE> <INDENT> self.process_context = self.session.GetParameter("process_context") <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, unused_type, unused_value, unused_traceback): <NEW_LINE> <INDENT> self.SwitchProcessContext(self.process_context) <NEW_LINE> <DEDENT> def SwitchProcessContext(self, process=None): <NEW_LINE> <INDENT> if process == None: <NEW_LINE> <INDENT> message = "Switching to Kernel context" <NEW_LINE> self.session.SetCache("default_address_space", self.session.kernel_address_space, volatile=False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> message = ("Switching to process context: {0} " "(Pid {1}@{2:#x})").format( process.name, process.pid, process) <NEW_LINE> self.session.SetCache( "default_address_space", process.get_process_address_space() or None, volatile=False) <NEW_LINE> <DEDENT> self.session.SetCache("process_context", process, volatile=False) <NEW_LINE> self.session.logging.debug(message) <NEW_LINE> return message <NEW_LINE> <DEDENT> def SwitchContext(self): <NEW_LINE> <INDENT> if not self.filtering_requested: <NEW_LINE> <INDENT> return self.SwitchProcessContext(process=None) <NEW_LINE> <DEDENT> for process in self.filter_processes(): <NEW_LINE> <INDENT> return self.SwitchProcessContext(process=process) <NEW_LINE> <DEDENT> return "Process not found!\n" <NEW_LINE> <DEDENT> def render(self, renderer): <NEW_LINE> <INDENT> message = self.SwitchContext() <NEW_LINE> renderer.format(message + "\n") | Set the current process context.
The basic functionality of all platforms' cc plugin. | 6259904a009cb60464d02910 |