body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
454092ee1b28c156b5d51654c926c944a14e324b34eeef403ef0c07877948cad
@property def is_suppressed(self): '\n Gets the is_suppressed of this LogAnalyticsWarning.\n A flag indicating if the warning is currently suppressed\n\n\n :return: The is_suppressed of this LogAnalyticsWarning.\n :rtype: bool\n ' return self._is_suppressed
Gets the is_suppressed of this LogAnalyticsWarning. A flag indicating if the warning is currently suppressed :return: The is_suppressed of this LogAnalyticsWarning. :rtype: bool
src/oci/log_analytics/models/log_analytics_warning.py
is_suppressed
ezequielramos/oci-python-sdk
249
python
@property def is_suppressed(self): '\n Gets the is_suppressed of this LogAnalyticsWarning.\n A flag indicating if the warning is currently suppressed\n\n\n :return: The is_suppressed of this LogAnalyticsWarning.\n :rtype: bool\n ' return self._is_suppressed
@property def is_suppressed(self): '\n Gets the is_suppressed of this LogAnalyticsWarning.\n A flag indicating if the warning is currently suppressed\n\n\n :return: The is_suppressed of this LogAnalyticsWarning.\n :rtype: bool\n ' return self._is_suppressed<|docstring|>Gets the is_suppressed of this LogAnalyticsWarning. A flag indicating if the warning is currently suppressed :return: The is_suppressed of this LogAnalyticsWarning. :rtype: bool<|endoftext|>
5c862c189179ef3758431fa9f4658a83bd14c02569768459de6107a9f43145de
@is_suppressed.setter def is_suppressed(self, is_suppressed): '\n Sets the is_suppressed of this LogAnalyticsWarning.\n A flag indicating if the warning is currently suppressed\n\n\n :param is_suppressed: The is_suppressed of this LogAnalyticsWarning.\n :type: bool\n ' self._is_suppressed = is_suppressed
Sets the is_suppressed of this LogAnalyticsWarning. A flag indicating if the warning is currently suppressed :param is_suppressed: The is_suppressed of this LogAnalyticsWarning. :type: bool
src/oci/log_analytics/models/log_analytics_warning.py
is_suppressed
ezequielramos/oci-python-sdk
249
python
@is_suppressed.setter def is_suppressed(self, is_suppressed): '\n Sets the is_suppressed of this LogAnalyticsWarning.\n A flag indicating if the warning is currently suppressed\n\n\n :param is_suppressed: The is_suppressed of this LogAnalyticsWarning.\n :type: bool\n ' self._is_suppressed = is_suppressed
@is_suppressed.setter def is_suppressed(self, is_suppressed): '\n Sets the is_suppressed of this LogAnalyticsWarning.\n A flag indicating if the warning is currently suppressed\n\n\n :param is_suppressed: The is_suppressed of this LogAnalyticsWarning.\n :type: bool\n ' self._is_suppressed = is_suppressed<|docstring|>Sets the is_suppressed of this LogAnalyticsWarning. A flag indicating if the warning is currently suppressed :param is_suppressed: The is_suppressed of this LogAnalyticsWarning. :type: bool<|endoftext|>
43ab154f3f19b412d86d95ddeafb7700069dd31b0f9d42092c56394c0d33d7cc
@property def time_of_latest_warning(self): '\n Gets the time_of_latest_warning of this LogAnalyticsWarning.\n The most recent date on which the warning was triggered\n\n\n :return: The time_of_latest_warning of this LogAnalyticsWarning.\n :rtype: datetime\n ' return self._time_of_latest_warning
Gets the time_of_latest_warning of this LogAnalyticsWarning. The most recent date on which the warning was triggered :return: The time_of_latest_warning of this LogAnalyticsWarning. :rtype: datetime
src/oci/log_analytics/models/log_analytics_warning.py
time_of_latest_warning
ezequielramos/oci-python-sdk
249
python
@property def time_of_latest_warning(self): '\n Gets the time_of_latest_warning of this LogAnalyticsWarning.\n The most recent date on which the warning was triggered\n\n\n :return: The time_of_latest_warning of this LogAnalyticsWarning.\n :rtype: datetime\n ' return self._time_of_latest_warning
@property def time_of_latest_warning(self): '\n Gets the time_of_latest_warning of this LogAnalyticsWarning.\n The most recent date on which the warning was triggered\n\n\n :return: The time_of_latest_warning of this LogAnalyticsWarning.\n :rtype: datetime\n ' return self._time_of_latest_warning<|docstring|>Gets the time_of_latest_warning of this LogAnalyticsWarning. The most recent date on which the warning was triggered :return: The time_of_latest_warning of this LogAnalyticsWarning. :rtype: datetime<|endoftext|>
64a63cea8ac49a31c0a188c8ac6a827941b43a94d241cc9828499a41b7cdef96
@time_of_latest_warning.setter def time_of_latest_warning(self, time_of_latest_warning): '\n Sets the time_of_latest_warning of this LogAnalyticsWarning.\n The most recent date on which the warning was triggered\n\n\n :param time_of_latest_warning: The time_of_latest_warning of this LogAnalyticsWarning.\n :type: datetime\n ' self._time_of_latest_warning = time_of_latest_warning
Sets the time_of_latest_warning of this LogAnalyticsWarning. The most recent date on which the warning was triggered :param time_of_latest_warning: The time_of_latest_warning of this LogAnalyticsWarning. :type: datetime
src/oci/log_analytics/models/log_analytics_warning.py
time_of_latest_warning
ezequielramos/oci-python-sdk
249
python
@time_of_latest_warning.setter def time_of_latest_warning(self, time_of_latest_warning): '\n Sets the time_of_latest_warning of this LogAnalyticsWarning.\n The most recent date on which the warning was triggered\n\n\n :param time_of_latest_warning: The time_of_latest_warning of this LogAnalyticsWarning.\n :type: datetime\n ' self._time_of_latest_warning = time_of_latest_warning
@time_of_latest_warning.setter def time_of_latest_warning(self, time_of_latest_warning): '\n Sets the time_of_latest_warning of this LogAnalyticsWarning.\n The most recent date on which the warning was triggered\n\n\n :param time_of_latest_warning: The time_of_latest_warning of this LogAnalyticsWarning.\n :type: datetime\n ' self._time_of_latest_warning = time_of_latest_warning<|docstring|>Sets the time_of_latest_warning of this LogAnalyticsWarning. The most recent date on which the warning was triggered :param time_of_latest_warning: The time_of_latest_warning of this LogAnalyticsWarning. :type: datetime<|endoftext|>
224a726b0fc3856e8b3f9a35e41ca5c9f4302e36d970aa5b97ef66938884c904
@property def warning_level(self): '\n Gets the warning_level of this LogAnalyticsWarning.\n The warning level - either pattern, rule, or source.\n\n\n :return: The warning_level of this LogAnalyticsWarning.\n :rtype: str\n ' return self._warning_level
Gets the warning_level of this LogAnalyticsWarning. The warning level - either pattern, rule, or source. :return: The warning_level of this LogAnalyticsWarning. :rtype: str
src/oci/log_analytics/models/log_analytics_warning.py
warning_level
ezequielramos/oci-python-sdk
249
python
@property def warning_level(self): '\n Gets the warning_level of this LogAnalyticsWarning.\n The warning level - either pattern, rule, or source.\n\n\n :return: The warning_level of this LogAnalyticsWarning.\n :rtype: str\n ' return self._warning_level
@property def warning_level(self): '\n Gets the warning_level of this LogAnalyticsWarning.\n The warning level - either pattern, rule, or source.\n\n\n :return: The warning_level of this LogAnalyticsWarning.\n :rtype: str\n ' return self._warning_level<|docstring|>Gets the warning_level of this LogAnalyticsWarning. The warning level - either pattern, rule, or source. :return: The warning_level of this LogAnalyticsWarning. :rtype: str<|endoftext|>
5ce29d7748afd48aa2bd776a2271defa034bcdeef45bec81e98cdeb45bc06e79
@warning_level.setter def warning_level(self, warning_level): '\n Sets the warning_level of this LogAnalyticsWarning.\n The warning level - either pattern, rule, or source.\n\n\n :param warning_level: The warning_level of this LogAnalyticsWarning.\n :type: str\n ' self._warning_level = warning_level
Sets the warning_level of this LogAnalyticsWarning. The warning level - either pattern, rule, or source. :param warning_level: The warning_level of this LogAnalyticsWarning. :type: str
src/oci/log_analytics/models/log_analytics_warning.py
warning_level
ezequielramos/oci-python-sdk
249
python
@warning_level.setter def warning_level(self, warning_level): '\n Sets the warning_level of this LogAnalyticsWarning.\n The warning level - either pattern, rule, or source.\n\n\n :param warning_level: The warning_level of this LogAnalyticsWarning.\n :type: str\n ' self._warning_level = warning_level
@warning_level.setter def warning_level(self, warning_level): '\n Sets the warning_level of this LogAnalyticsWarning.\n The warning level - either pattern, rule, or source.\n\n\n :param warning_level: The warning_level of this LogAnalyticsWarning.\n :type: str\n ' self._warning_level = warning_level<|docstring|>Sets the warning_level of this LogAnalyticsWarning. The warning level - either pattern, rule, or source. :param warning_level: The warning_level of this LogAnalyticsWarning. :type: str<|endoftext|>
47f33b0db92c22e537bbdc6ee5eeea3c0e084398c55edfb5e8ec679fbbab847c
@property def warning_message(self): '\n Gets the warning_message of this LogAnalyticsWarning.\n A description of the warning intended for the consumer of the warning. It will\n usually detail the cause of the warning, may suggest a remedy, and can contain any\n other relevant information the consumer might find useful\n\n\n :return: The warning_message of this LogAnalyticsWarning.\n :rtype: str\n ' return self._warning_message
Gets the warning_message of this LogAnalyticsWarning. A description of the warning intended for the consumer of the warning. It will usually detail the cause of the warning, may suggest a remedy, and can contain any other relevant information the consumer might find useful :return: The warning_message of this LogAnalyticsWarning. :rtype: str
src/oci/log_analytics/models/log_analytics_warning.py
warning_message
ezequielramos/oci-python-sdk
249
python
@property def warning_message(self): '\n Gets the warning_message of this LogAnalyticsWarning.\n A description of the warning intended for the consumer of the warning. It will\n usually detail the cause of the warning, may suggest a remedy, and can contain any\n other relevant information the consumer might find useful\n\n\n :return: The warning_message of this LogAnalyticsWarning.\n :rtype: str\n ' return self._warning_message
@property def warning_message(self): '\n Gets the warning_message of this LogAnalyticsWarning.\n A description of the warning intended for the consumer of the warning. It will\n usually detail the cause of the warning, may suggest a remedy, and can contain any\n other relevant information the consumer might find useful\n\n\n :return: The warning_message of this LogAnalyticsWarning.\n :rtype: str\n ' return self._warning_message<|docstring|>Gets the warning_message of this LogAnalyticsWarning. A description of the warning intended for the consumer of the warning. It will usually detail the cause of the warning, may suggest a remedy, and can contain any other relevant information the consumer might find useful :return: The warning_message of this LogAnalyticsWarning. :rtype: str<|endoftext|>
29289e13447bc1437147777752e59807cb70bd34f658f5fc0c5d0a98a3abeff0
@warning_message.setter def warning_message(self, warning_message): '\n Sets the warning_message of this LogAnalyticsWarning.\n A description of the warning intended for the consumer of the warning. It will\n usually detail the cause of the warning, may suggest a remedy, and can contain any\n other relevant information the consumer might find useful\n\n\n :param warning_message: The warning_message of this LogAnalyticsWarning.\n :type: str\n ' self._warning_message = warning_message
Sets the warning_message of this LogAnalyticsWarning. A description of the warning intended for the consumer of the warning. It will usually detail the cause of the warning, may suggest a remedy, and can contain any other relevant information the consumer might find useful :param warning_message: The warning_message of this LogAnalyticsWarning. :type: str
src/oci/log_analytics/models/log_analytics_warning.py
warning_message
ezequielramos/oci-python-sdk
249
python
@warning_message.setter def warning_message(self, warning_message): '\n Sets the warning_message of this LogAnalyticsWarning.\n A description of the warning intended for the consumer of the warning. It will\n usually detail the cause of the warning, may suggest a remedy, and can contain any\n other relevant information the consumer might find useful\n\n\n :param warning_message: The warning_message of this LogAnalyticsWarning.\n :type: str\n ' self._warning_message = warning_message
@warning_message.setter def warning_message(self, warning_message): '\n Sets the warning_message of this LogAnalyticsWarning.\n A description of the warning intended for the consumer of the warning. It will\n usually detail the cause of the warning, may suggest a remedy, and can contain any\n other relevant information the consumer might find useful\n\n\n :param warning_message: The warning_message of this LogAnalyticsWarning.\n :type: str\n ' self._warning_message = warning_message<|docstring|>Sets the warning_message of this LogAnalyticsWarning. A description of the warning intended for the consumer of the warning. It will usually detail the cause of the warning, may suggest a remedy, and can contain any other relevant information the consumer might find useful :param warning_message: The warning_message of this LogAnalyticsWarning. :type: str<|endoftext|>
cfcfa9e2812719ef14e822f757e1d16278195ed43ea481391529a0fd4b12af09
@property def pattern_id(self): '\n Gets the pattern_id of this LogAnalyticsWarning.\n The unique identifier of the warning pattern\n\n\n :return: The pattern_id of this LogAnalyticsWarning.\n :rtype: str\n ' return self._pattern_id
Gets the pattern_id of this LogAnalyticsWarning. The unique identifier of the warning pattern :return: The pattern_id of this LogAnalyticsWarning. :rtype: str
src/oci/log_analytics/models/log_analytics_warning.py
pattern_id
ezequielramos/oci-python-sdk
249
python
@property def pattern_id(self): '\n Gets the pattern_id of this LogAnalyticsWarning.\n The unique identifier of the warning pattern\n\n\n :return: The pattern_id of this LogAnalyticsWarning.\n :rtype: str\n ' return self._pattern_id
@property def pattern_id(self): '\n Gets the pattern_id of this LogAnalyticsWarning.\n The unique identifier of the warning pattern\n\n\n :return: The pattern_id of this LogAnalyticsWarning.\n :rtype: str\n ' return self._pattern_id<|docstring|>Gets the pattern_id of this LogAnalyticsWarning. The unique identifier of the warning pattern :return: The pattern_id of this LogAnalyticsWarning. :rtype: str<|endoftext|>
a12953f5b94307ff89d424cc9d45a7881cfc155b094e203ef109b6d054cd38ba
@pattern_id.setter def pattern_id(self, pattern_id): '\n Sets the pattern_id of this LogAnalyticsWarning.\n The unique identifier of the warning pattern\n\n\n :param pattern_id: The pattern_id of this LogAnalyticsWarning.\n :type: str\n ' self._pattern_id = pattern_id
Sets the pattern_id of this LogAnalyticsWarning. The unique identifier of the warning pattern :param pattern_id: The pattern_id of this LogAnalyticsWarning. :type: str
src/oci/log_analytics/models/log_analytics_warning.py
pattern_id
ezequielramos/oci-python-sdk
249
python
@pattern_id.setter def pattern_id(self, pattern_id): '\n Sets the pattern_id of this LogAnalyticsWarning.\n The unique identifier of the warning pattern\n\n\n :param pattern_id: The pattern_id of this LogAnalyticsWarning.\n :type: str\n ' self._pattern_id = pattern_id
@pattern_id.setter def pattern_id(self, pattern_id): '\n Sets the pattern_id of this LogAnalyticsWarning.\n The unique identifier of the warning pattern\n\n\n :param pattern_id: The pattern_id of this LogAnalyticsWarning.\n :type: str\n ' self._pattern_id = pattern_id<|docstring|>Sets the pattern_id of this LogAnalyticsWarning. The unique identifier of the warning pattern :param pattern_id: The pattern_id of this LogAnalyticsWarning. :type: str<|endoftext|>
991b9798b456f9521880e6cfe9b3239048d3d69a98fbb1c07dab104d5c058642
@property def pattern_text(self): '\n Gets the pattern_text of this LogAnalyticsWarning.\n The text of the pattern used by the warning\n\n\n :return: The pattern_text of this LogAnalyticsWarning.\n :rtype: str\n ' return self._pattern_text
Gets the pattern_text of this LogAnalyticsWarning. The text of the pattern used by the warning :return: The pattern_text of this LogAnalyticsWarning. :rtype: str
src/oci/log_analytics/models/log_analytics_warning.py
pattern_text
ezequielramos/oci-python-sdk
249
python
@property def pattern_text(self): '\n Gets the pattern_text of this LogAnalyticsWarning.\n The text of the pattern used by the warning\n\n\n :return: The pattern_text of this LogAnalyticsWarning.\n :rtype: str\n ' return self._pattern_text
@property def pattern_text(self): '\n Gets the pattern_text of this LogAnalyticsWarning.\n The text of the pattern used by the warning\n\n\n :return: The pattern_text of this LogAnalyticsWarning.\n :rtype: str\n ' return self._pattern_text<|docstring|>Gets the pattern_text of this LogAnalyticsWarning. The text of the pattern used by the warning :return: The pattern_text of this LogAnalyticsWarning. :rtype: str<|endoftext|>
b3aa173042f5443f2214658bbd9586097b57bde5b165e092cfdac71441e8c6f4
@pattern_text.setter def pattern_text(self, pattern_text): '\n Sets the pattern_text of this LogAnalyticsWarning.\n The text of the pattern used by the warning\n\n\n :param pattern_text: The pattern_text of this LogAnalyticsWarning.\n :type: str\n ' self._pattern_text = pattern_text
Sets the pattern_text of this LogAnalyticsWarning. The text of the pattern used by the warning :param pattern_text: The pattern_text of this LogAnalyticsWarning. :type: str
src/oci/log_analytics/models/log_analytics_warning.py
pattern_text
ezequielramos/oci-python-sdk
249
python
@pattern_text.setter def pattern_text(self, pattern_text): '\n Sets the pattern_text of this LogAnalyticsWarning.\n The text of the pattern used by the warning\n\n\n :param pattern_text: The pattern_text of this LogAnalyticsWarning.\n :type: str\n ' self._pattern_text = pattern_text
@pattern_text.setter def pattern_text(self, pattern_text): '\n Sets the pattern_text of this LogAnalyticsWarning.\n The text of the pattern used by the warning\n\n\n :param pattern_text: The pattern_text of this LogAnalyticsWarning.\n :type: str\n ' self._pattern_text = pattern_text<|docstring|>Sets the pattern_text of this LogAnalyticsWarning. The text of the pattern used by the warning :param pattern_text: The pattern_text of this LogAnalyticsWarning. :type: str<|endoftext|>
822af900dab0db3a20eceaa3d45805770ad910208db8f0c901c3cc5953877f6c
@property def rule_id(self): '\n Gets the rule_id of this LogAnalyticsWarning.\n The unique identifier of the rule associated with the warning\n\n\n :return: The rule_id of this LogAnalyticsWarning.\n :rtype: str\n ' return self._rule_id
Gets the rule_id of this LogAnalyticsWarning. The unique identifier of the rule associated with the warning :return: The rule_id of this LogAnalyticsWarning. :rtype: str
src/oci/log_analytics/models/log_analytics_warning.py
rule_id
ezequielramos/oci-python-sdk
249
python
@property def rule_id(self): '\n Gets the rule_id of this LogAnalyticsWarning.\n The unique identifier of the rule associated with the warning\n\n\n :return: The rule_id of this LogAnalyticsWarning.\n :rtype: str\n ' return self._rule_id
@property def rule_id(self): '\n Gets the rule_id of this LogAnalyticsWarning.\n The unique identifier of the rule associated with the warning\n\n\n :return: The rule_id of this LogAnalyticsWarning.\n :rtype: str\n ' return self._rule_id<|docstring|>Gets the rule_id of this LogAnalyticsWarning. The unique identifier of the rule associated with the warning :return: The rule_id of this LogAnalyticsWarning. :rtype: str<|endoftext|>
65f158af3abdd9e5429a82d693adff04475c41a37121d1cd02f48ff86278c8e3
@rule_id.setter def rule_id(self, rule_id): '\n Sets the rule_id of this LogAnalyticsWarning.\n The unique identifier of the rule associated with the warning\n\n\n :param rule_id: The rule_id of this LogAnalyticsWarning.\n :type: str\n ' self._rule_id = rule_id
Sets the rule_id of this LogAnalyticsWarning. The unique identifier of the rule associated with the warning :param rule_id: The rule_id of this LogAnalyticsWarning. :type: str
src/oci/log_analytics/models/log_analytics_warning.py
rule_id
ezequielramos/oci-python-sdk
249
python
@rule_id.setter def rule_id(self, rule_id): '\n Sets the rule_id of this LogAnalyticsWarning.\n The unique identifier of the rule associated with the warning\n\n\n :param rule_id: The rule_id of this LogAnalyticsWarning.\n :type: str\n ' self._rule_id = rule_id
@rule_id.setter def rule_id(self, rule_id): '\n Sets the rule_id of this LogAnalyticsWarning.\n The unique identifier of the rule associated with the warning\n\n\n :param rule_id: The rule_id of this LogAnalyticsWarning.\n :type: str\n ' self._rule_id = rule_id<|docstring|>Sets the rule_id of this LogAnalyticsWarning. The unique identifier of the rule associated with the warning :param rule_id: The rule_id of this LogAnalyticsWarning. :type: str<|endoftext|>
092071f62ff2191f81caf8efe27adcefbd44f36dd839bec99ff326b331e88fbd
@property def source_id(self): '\n Gets the source_id of this LogAnalyticsWarning.\n The unique identifier of the source associated with the warning\n\n\n :return: The source_id of this LogAnalyticsWarning.\n :rtype: str\n ' return self._source_id
Gets the source_id of this LogAnalyticsWarning. The unique identifier of the source associated with the warning :return: The source_id of this LogAnalyticsWarning. :rtype: str
src/oci/log_analytics/models/log_analytics_warning.py
source_id
ezequielramos/oci-python-sdk
249
python
@property def source_id(self): '\n Gets the source_id of this LogAnalyticsWarning.\n The unique identifier of the source associated with the warning\n\n\n :return: The source_id of this LogAnalyticsWarning.\n :rtype: str\n ' return self._source_id
@property def source_id(self): '\n Gets the source_id of this LogAnalyticsWarning.\n The unique identifier of the source associated with the warning\n\n\n :return: The source_id of this LogAnalyticsWarning.\n :rtype: str\n ' return self._source_id<|docstring|>Gets the source_id of this LogAnalyticsWarning. The unique identifier of the source associated with the warning :return: The source_id of this LogAnalyticsWarning. :rtype: str<|endoftext|>
f30a0e3edba0159cc36840fa17db3ddce1150b4b2726358bc0c22b9708961b89
@source_id.setter def source_id(self, source_id): '\n Sets the source_id of this LogAnalyticsWarning.\n The unique identifier of the source associated with the warning\n\n\n :param source_id: The source_id of this LogAnalyticsWarning.\n :type: str\n ' self._source_id = source_id
Sets the source_id of this LogAnalyticsWarning. The unique identifier of the source associated with the warning :param source_id: The source_id of this LogAnalyticsWarning. :type: str
src/oci/log_analytics/models/log_analytics_warning.py
source_id
ezequielramos/oci-python-sdk
249
python
@source_id.setter def source_id(self, source_id): '\n Sets the source_id of this LogAnalyticsWarning.\n The unique identifier of the source associated with the warning\n\n\n :param source_id: The source_id of this LogAnalyticsWarning.\n :type: str\n ' self._source_id = source_id
@source_id.setter def source_id(self, source_id): '\n Sets the source_id of this LogAnalyticsWarning.\n The unique identifier of the source associated with the warning\n\n\n :param source_id: The source_id of this LogAnalyticsWarning.\n :type: str\n ' self._source_id = source_id<|docstring|>Sets the source_id of this LogAnalyticsWarning. The unique identifier of the source associated with the warning :param source_id: The source_id of this LogAnalyticsWarning. :type: str<|endoftext|>
9ca14982bbeb1375fea051f6fdc063e17b2038d1d0911e2e64cb1f59e4874d68
@property def suppressed_by(self): '\n Gets the suppressed_by of this LogAnalyticsWarning.\n The user who suppressed the warning, or empty if the warning is not suppressed\n\n\n :return: The suppressed_by of this LogAnalyticsWarning.\n :rtype: str\n ' return self._suppressed_by
Gets the suppressed_by of this LogAnalyticsWarning. The user who suppressed the warning, or empty if the warning is not suppressed :return: The suppressed_by of this LogAnalyticsWarning. :rtype: str
src/oci/log_analytics/models/log_analytics_warning.py
suppressed_by
ezequielramos/oci-python-sdk
249
python
@property def suppressed_by(self): '\n Gets the suppressed_by of this LogAnalyticsWarning.\n The user who suppressed the warning, or empty if the warning is not suppressed\n\n\n :return: The suppressed_by of this LogAnalyticsWarning.\n :rtype: str\n ' return self._suppressed_by
@property def suppressed_by(self): '\n Gets the suppressed_by of this LogAnalyticsWarning.\n The user who suppressed the warning, or empty if the warning is not suppressed\n\n\n :return: The suppressed_by of this LogAnalyticsWarning.\n :rtype: str\n ' return self._suppressed_by<|docstring|>Gets the suppressed_by of this LogAnalyticsWarning. The user who suppressed the warning, or empty if the warning is not suppressed :return: The suppressed_by of this LogAnalyticsWarning. :rtype: str<|endoftext|>
da9bd5002cf0198522e9922d73647462e5f536dd3c21330e183958994dda73f6
@suppressed_by.setter def suppressed_by(self, suppressed_by): '\n Sets the suppressed_by of this LogAnalyticsWarning.\n The user who suppressed the warning, or empty if the warning is not suppressed\n\n\n :param suppressed_by: The suppressed_by of this LogAnalyticsWarning.\n :type: str\n ' self._suppressed_by = suppressed_by
Sets the suppressed_by of this LogAnalyticsWarning. The user who suppressed the warning, or empty if the warning is not suppressed :param suppressed_by: The suppressed_by of this LogAnalyticsWarning. :type: str
src/oci/log_analytics/models/log_analytics_warning.py
suppressed_by
ezequielramos/oci-python-sdk
249
python
@suppressed_by.setter def suppressed_by(self, suppressed_by): '\n Sets the suppressed_by of this LogAnalyticsWarning.\n The user who suppressed the warning, or empty if the warning is not suppressed\n\n\n :param suppressed_by: The suppressed_by of this LogAnalyticsWarning.\n :type: str\n ' self._suppressed_by = suppressed_by
@suppressed_by.setter def suppressed_by(self, suppressed_by): '\n Sets the suppressed_by of this LogAnalyticsWarning.\n The user who suppressed the warning, or empty if the warning is not suppressed\n\n\n :param suppressed_by: The suppressed_by of this LogAnalyticsWarning.\n :type: str\n ' self._suppressed_by = suppressed_by<|docstring|>Sets the suppressed_by of this LogAnalyticsWarning. The user who suppressed the warning, or empty if the warning is not suppressed :param suppressed_by: The suppressed_by of this LogAnalyticsWarning. :type: str<|endoftext|>
ad8fe9ade70ecd0aa536de9f2a1d362344a796485960e8d6a1289c4376b49d16
@property def entity_id(self): '\n Gets the entity_id of this LogAnalyticsWarning.\n The unique identifier of the entity associated with the warning\n\n\n :return: The entity_id of this LogAnalyticsWarning.\n :rtype: str\n ' return self._entity_id
Gets the entity_id of this LogAnalyticsWarning. The unique identifier of the entity associated with the warning :return: The entity_id of this LogAnalyticsWarning. :rtype: str
src/oci/log_analytics/models/log_analytics_warning.py
entity_id
ezequielramos/oci-python-sdk
249
python
@property def entity_id(self): '\n Gets the entity_id of this LogAnalyticsWarning.\n The unique identifier of the entity associated with the warning\n\n\n :return: The entity_id of this LogAnalyticsWarning.\n :rtype: str\n ' return self._entity_id
@property def entity_id(self): '\n Gets the entity_id of this LogAnalyticsWarning.\n The unique identifier of the entity associated with the warning\n\n\n :return: The entity_id of this LogAnalyticsWarning.\n :rtype: str\n ' return self._entity_id<|docstring|>Gets the entity_id of this LogAnalyticsWarning. The unique identifier of the entity associated with the warning :return: The entity_id of this LogAnalyticsWarning. :rtype: str<|endoftext|>
abc77b454dfb8a71d8f1fa212e55f6bad57df3a61704d9f46a0897f92abb0961
@entity_id.setter def entity_id(self, entity_id): '\n Sets the entity_id of this LogAnalyticsWarning.\n The unique identifier of the entity associated with the warning\n\n\n :param entity_id: The entity_id of this LogAnalyticsWarning.\n :type: str\n ' self._entity_id = entity_id
Sets the entity_id of this LogAnalyticsWarning. The unique identifier of the entity associated with the warning :param entity_id: The entity_id of this LogAnalyticsWarning. :type: str
src/oci/log_analytics/models/log_analytics_warning.py
entity_id
ezequielramos/oci-python-sdk
249
python
@entity_id.setter def entity_id(self, entity_id): '\n Sets the entity_id of this LogAnalyticsWarning.\n The unique identifier of the entity associated with the warning\n\n\n :param entity_id: The entity_id of this LogAnalyticsWarning.\n :type: str\n ' self._entity_id = entity_id
@entity_id.setter def entity_id(self, entity_id): '\n Sets the entity_id of this LogAnalyticsWarning.\n The unique identifier of the entity associated with the warning\n\n\n :param entity_id: The entity_id of this LogAnalyticsWarning.\n :type: str\n ' self._entity_id = entity_id<|docstring|>Sets the entity_id of this LogAnalyticsWarning. The unique identifier of the entity associated with the warning :param entity_id: The entity_id of this LogAnalyticsWarning. :type: str<|endoftext|>
aa9059880c4c63053dd4368956554f1d0af0238fc5e2a540c230597a9bac4db6
@property def entity_type(self): '\n Gets the entity_type of this LogAnalyticsWarning.\n The type of the entity associated with the warning\n\n\n :return: The entity_type of this LogAnalyticsWarning.\n :rtype: str\n ' return self._entity_type
Gets the entity_type of this LogAnalyticsWarning. The type of the entity associated with the warning :return: The entity_type of this LogAnalyticsWarning. :rtype: str
src/oci/log_analytics/models/log_analytics_warning.py
entity_type
ezequielramos/oci-python-sdk
249
python
@property def entity_type(self): '\n Gets the entity_type of this LogAnalyticsWarning.\n The type of the entity associated with the warning\n\n\n :return: The entity_type of this LogAnalyticsWarning.\n :rtype: str\n ' return self._entity_type
@property def entity_type(self): '\n Gets the entity_type of this LogAnalyticsWarning.\n The type of the entity associated with the warning\n\n\n :return: The entity_type of this LogAnalyticsWarning.\n :rtype: str\n ' return self._entity_type<|docstring|>Gets the entity_type of this LogAnalyticsWarning. The type of the entity associated with the warning :return: The entity_type of this LogAnalyticsWarning. :rtype: str<|endoftext|>
a430b9210b676bfebbe7378336554541a7ab45163de33edfcb5dbc3c705a09af
@entity_type.setter def entity_type(self, entity_type): '\n Sets the entity_type of this LogAnalyticsWarning.\n The type of the entity associated with the warning\n\n\n :param entity_type: The entity_type of this LogAnalyticsWarning.\n :type: str\n ' self._entity_type = entity_type
Sets the entity_type of this LogAnalyticsWarning. The type of the entity associated with the warning :param entity_type: The entity_type of this LogAnalyticsWarning. :type: str
src/oci/log_analytics/models/log_analytics_warning.py
entity_type
ezequielramos/oci-python-sdk
249
python
@entity_type.setter def entity_type(self, entity_type): '\n Sets the entity_type of this LogAnalyticsWarning.\n The type of the entity associated with the warning\n\n\n :param entity_type: The entity_type of this LogAnalyticsWarning.\n :type: str\n ' self._entity_type = entity_type
@entity_type.setter def entity_type(self, entity_type): '\n Sets the entity_type of this LogAnalyticsWarning.\n The type of the entity associated with the warning\n\n\n :param entity_type: The entity_type of this LogAnalyticsWarning.\n :type: str\n ' self._entity_type = entity_type<|docstring|>Sets the entity_type of this LogAnalyticsWarning. The type of the entity associated with the warning :param entity_type: The entity_type of this LogAnalyticsWarning. :type: str<|endoftext|>
df9ea207b5d7d5954eaa9ce6eefe6a58f7fc9d7fd56d04e4566e944b6c32333d
@property def entity_type_display_name(self): '\n Gets the entity_type_display_name of this LogAnalyticsWarning.\n The display name of the entity type associated with the warning\n\n\n :return: The entity_type_display_name of this LogAnalyticsWarning.\n :rtype: str\n ' return self._entity_type_display_name
Gets the entity_type_display_name of this LogAnalyticsWarning. The display name of the entity type associated with the warning :return: The entity_type_display_name of this LogAnalyticsWarning. :rtype: str
src/oci/log_analytics/models/log_analytics_warning.py
entity_type_display_name
ezequielramos/oci-python-sdk
249
python
@property def entity_type_display_name(self): '\n Gets the entity_type_display_name of this LogAnalyticsWarning.\n The display name of the entity type associated with the warning\n\n\n :return: The entity_type_display_name of this LogAnalyticsWarning.\n :rtype: str\n ' return self._entity_type_display_name
@property def entity_type_display_name(self): '\n Gets the entity_type_display_name of this LogAnalyticsWarning.\n The display name of the entity type associated with the warning\n\n\n :return: The entity_type_display_name of this LogAnalyticsWarning.\n :rtype: str\n ' return self._entity_type_display_name<|docstring|>Gets the entity_type_display_name of this LogAnalyticsWarning. The display name of the entity type associated with the warning :return: The entity_type_display_name of this LogAnalyticsWarning. :rtype: str<|endoftext|>
f65a6f5cab238b8e0dc5acbe10743f2a45179edbb80ace8998f6c8f3fa62979d
@entity_type_display_name.setter def entity_type_display_name(self, entity_type_display_name): '\n Sets the entity_type_display_name of this LogAnalyticsWarning.\n The display name of the entity type associated with the warning\n\n\n :param entity_type_display_name: The entity_type_display_name of this LogAnalyticsWarning.\n :type: str\n ' self._entity_type_display_name = entity_type_display_name
Sets the entity_type_display_name of this LogAnalyticsWarning. The display name of the entity type associated with the warning :param entity_type_display_name: The entity_type_display_name of this LogAnalyticsWarning. :type: str
src/oci/log_analytics/models/log_analytics_warning.py
entity_type_display_name
ezequielramos/oci-python-sdk
249
python
@entity_type_display_name.setter def entity_type_display_name(self, entity_type_display_name): '\n Sets the entity_type_display_name of this LogAnalyticsWarning.\n The display name of the entity type associated with the warning\n\n\n :param entity_type_display_name: The entity_type_display_name of this LogAnalyticsWarning.\n :type: str\n ' self._entity_type_display_name = entity_type_display_name
@entity_type_display_name.setter def entity_type_display_name(self, entity_type_display_name): '\n Sets the entity_type_display_name of this LogAnalyticsWarning.\n The display name of the entity type associated with the warning\n\n\n :param entity_type_display_name: The entity_type_display_name of this LogAnalyticsWarning.\n :type: str\n ' self._entity_type_display_name = entity_type_display_name<|docstring|>Sets the entity_type_display_name of this LogAnalyticsWarning. The display name of the entity type associated with the warning :param entity_type_display_name: The entity_type_display_name of this LogAnalyticsWarning. :type: str<|endoftext|>
bd7d70e35bd71203da5bfb6a6d55e727542020def17ef04e8bbfc56d63eecc90
@property def type_display_name(self): '\n Gets the type_display_name of this LogAnalyticsWarning.\n The display name of the warning type\n\n\n :return: The type_display_name of this LogAnalyticsWarning.\n :rtype: str\n ' return self._type_display_name
Gets the type_display_name of this LogAnalyticsWarning. The display name of the warning type :return: The type_display_name of this LogAnalyticsWarning. :rtype: str
src/oci/log_analytics/models/log_analytics_warning.py
type_display_name
ezequielramos/oci-python-sdk
249
python
@property def type_display_name(self): '\n Gets the type_display_name of this LogAnalyticsWarning.\n The display name of the warning type\n\n\n :return: The type_display_name of this LogAnalyticsWarning.\n :rtype: str\n ' return self._type_display_name
@property def type_display_name(self): '\n Gets the type_display_name of this LogAnalyticsWarning.\n The display name of the warning type\n\n\n :return: The type_display_name of this LogAnalyticsWarning.\n :rtype: str\n ' return self._type_display_name<|docstring|>Gets the type_display_name of this LogAnalyticsWarning. The display name of the warning type :return: The type_display_name of this LogAnalyticsWarning. :rtype: str<|endoftext|>
5ad023d6b94e487077568630b845bee982c18126b3fc0b0b864c7a0efc70d672
@type_display_name.setter def type_display_name(self, type_display_name): '\n Sets the type_display_name of this LogAnalyticsWarning.\n The display name of the warning type\n\n\n :param type_display_name: The type_display_name of this LogAnalyticsWarning.\n :type: str\n ' self._type_display_name = type_display_name
Sets the type_display_name of this LogAnalyticsWarning. The display name of the warning type :param type_display_name: The type_display_name of this LogAnalyticsWarning. :type: str
src/oci/log_analytics/models/log_analytics_warning.py
type_display_name
ezequielramos/oci-python-sdk
249
python
@type_display_name.setter def type_display_name(self, type_display_name): '\n Sets the type_display_name of this LogAnalyticsWarning.\n The display name of the warning type\n\n\n :param type_display_name: The type_display_name of this LogAnalyticsWarning.\n :type: str\n ' self._type_display_name = type_display_name
@type_display_name.setter def type_display_name(self, type_display_name): '\n Sets the type_display_name of this LogAnalyticsWarning.\n The display name of the warning type\n\n\n :param type_display_name: The type_display_name of this LogAnalyticsWarning.\n :type: str\n ' self._type_display_name = type_display_name<|docstring|>Sets the type_display_name of this LogAnalyticsWarning. The display name of the warning type :param type_display_name: The type_display_name of this LogAnalyticsWarning. :type: str<|endoftext|>
611c6279595a0e5575cfe1ba210b6893f3168bcf6d00c3ccfab9530c8c9f16fa
@property def type_name(self): '\n Gets the type_name of this LogAnalyticsWarning.\n The internal name of the warning\n\n\n :return: The type_name of this LogAnalyticsWarning.\n :rtype: str\n ' return self._type_name
Gets the type_name of this LogAnalyticsWarning. The internal name of the warning :return: The type_name of this LogAnalyticsWarning. :rtype: str
src/oci/log_analytics/models/log_analytics_warning.py
type_name
ezequielramos/oci-python-sdk
249
python
@property def type_name(self): '\n Gets the type_name of this LogAnalyticsWarning.\n The internal name of the warning\n\n\n :return: The type_name of this LogAnalyticsWarning.\n :rtype: str\n ' return self._type_name
@property def type_name(self): '\n Gets the type_name of this LogAnalyticsWarning.\n The internal name of the warning\n\n\n :return: The type_name of this LogAnalyticsWarning.\n :rtype: str\n ' return self._type_name<|docstring|>Gets the type_name of this LogAnalyticsWarning. The internal name of the warning :return: The type_name of this LogAnalyticsWarning. :rtype: str<|endoftext|>
4ba6a03f7d935a77f738006f20db75a3ee873d8810a13a71c0791b62d38e83e9
@type_name.setter def type_name(self, type_name): '\n Sets the type_name of this LogAnalyticsWarning.\n The internal name of the warning\n\n\n :param type_name: The type_name of this LogAnalyticsWarning.\n :type: str\n ' self._type_name = type_name
Sets the type_name of this LogAnalyticsWarning. The internal name of the warning :param type_name: The type_name of this LogAnalyticsWarning. :type: str
src/oci/log_analytics/models/log_analytics_warning.py
type_name
ezequielramos/oci-python-sdk
249
python
@type_name.setter def type_name(self, type_name): '\n Sets the type_name of this LogAnalyticsWarning.\n The internal name of the warning\n\n\n :param type_name: The type_name of this LogAnalyticsWarning.\n :type: str\n ' self._type_name = type_name
@type_name.setter def type_name(self, type_name): '\n Sets the type_name of this LogAnalyticsWarning.\n The internal name of the warning\n\n\n :param type_name: The type_name of this LogAnalyticsWarning.\n :type: str\n ' self._type_name = type_name<|docstring|>Sets the type_name of this LogAnalyticsWarning. The internal name of the warning :param type_name: The type_name of this LogAnalyticsWarning. :type: str<|endoftext|>
fef39eadaac9e3a11a6b7687e17968f8b6ca5e5a6b92ab57c01d0375ec233599
@property def severity(self): '\n Gets the severity of this LogAnalyticsWarning.\n The warning severity\n\n\n :return: The severity of this LogAnalyticsWarning.\n :rtype: int\n ' return self._severity
Gets the severity of this LogAnalyticsWarning. The warning severity :return: The severity of this LogAnalyticsWarning. :rtype: int
src/oci/log_analytics/models/log_analytics_warning.py
severity
ezequielramos/oci-python-sdk
249
python
@property def severity(self): '\n Gets the severity of this LogAnalyticsWarning.\n The warning severity\n\n\n :return: The severity of this LogAnalyticsWarning.\n :rtype: int\n ' return self._severity
@property def severity(self): '\n Gets the severity of this LogAnalyticsWarning.\n The warning severity\n\n\n :return: The severity of this LogAnalyticsWarning.\n :rtype: int\n ' return self._severity<|docstring|>Gets the severity of this LogAnalyticsWarning. The warning severity :return: The severity of this LogAnalyticsWarning. :rtype: int<|endoftext|>
8683e1629d76b2994dcfebf4f11864824b8c1aac7fc363316e0f2e8d1eeda6e0
@severity.setter def severity(self, severity): '\n Sets the severity of this LogAnalyticsWarning.\n The warning severity\n\n\n :param severity: The severity of this LogAnalyticsWarning.\n :type: int\n ' self._severity = severity
Sets the severity of this LogAnalyticsWarning. The warning severity :param severity: The severity of this LogAnalyticsWarning. :type: int
src/oci/log_analytics/models/log_analytics_warning.py
severity
ezequielramos/oci-python-sdk
249
python
@severity.setter def severity(self, severity): '\n Sets the severity of this LogAnalyticsWarning.\n The warning severity\n\n\n :param severity: The severity of this LogAnalyticsWarning.\n :type: int\n ' self._severity = severity
@severity.setter def severity(self, severity): '\n Sets the severity of this LogAnalyticsWarning.\n The warning severity\n\n\n :param severity: The severity of this LogAnalyticsWarning.\n :type: int\n ' self._severity = severity<|docstring|>Sets the severity of this LogAnalyticsWarning. The warning severity :param severity: The severity of this LogAnalyticsWarning. :type: int<|endoftext|>
0dc0b17d49132dd699444e782d106279b2c7bd2bffb0834bfe8ee5cd6814858c
def register(linter: PyLinter) -> None: 'Register the checker.' linter.register_checker(HassConstructorFormatChecker(linter))
Register the checker.
pylint/plugins/hass_constructor.py
register
orcema/core
30,023
python
def register(linter: PyLinter) -> None: linter.register_checker(HassConstructorFormatChecker(linter))
def register(linter: PyLinter) -> None: linter.register_checker(HassConstructorFormatChecker(linter))<|docstring|>Register the checker.<|endoftext|>
1938c4662a2a8d355c0e65df4c4709eeb56a997c4587222f724f6722d9a2f6bd
def visit_functiondef(self, node: nodes.FunctionDef) -> None: 'Called when a FunctionDef node is visited.' if ((not node.is_method()) or (node.name != '__init__')): return args = node.args annotations = ((args.posonlyargs_annotations + args.annotations) + args.kwonlyargs_annotations)[1:] if (args.vararg is not None): annotations.append(args.varargannotation) if (args.kwarg is not None): annotations.append(args.kwargannotation) if ((not annotations) or (None in annotations)): return if ((not isinstance(node.returns, nodes.Const)) or (node.returns.value is not None)): self.add_message('hass-constructor-return', node=node)
Called when a FunctionDef node is visited.
pylint/plugins/hass_constructor.py
visit_functiondef
orcema/core
30,023
python
def visit_functiondef(self, node: nodes.FunctionDef) -> None: if ((not node.is_method()) or (node.name != '__init__')): return args = node.args annotations = ((args.posonlyargs_annotations + args.annotations) + args.kwonlyargs_annotations)[1:] if (args.vararg is not None): annotations.append(args.varargannotation) if (args.kwarg is not None): annotations.append(args.kwargannotation) if ((not annotations) or (None in annotations)): return if ((not isinstance(node.returns, nodes.Const)) or (node.returns.value is not None)): self.add_message('hass-constructor-return', node=node)
def visit_functiondef(self, node: nodes.FunctionDef) -> None: if ((not node.is_method()) or (node.name != '__init__')): return args = node.args annotations = ((args.posonlyargs_annotations + args.annotations) + args.kwonlyargs_annotations)[1:] if (args.vararg is not None): annotations.append(args.varargannotation) if (args.kwarg is not None): annotations.append(args.kwargannotation) if ((not annotations) or (None in annotations)): return if ((not isinstance(node.returns, nodes.Const)) or (node.returns.value is not None)): self.add_message('hass-constructor-return', node=node)<|docstring|>Called when a FunctionDef node is visited.<|endoftext|>
53b07ad8eabb960a34ddebfed0787ab87f826b67d5d96d32fa2284270f35ca18
def test_insert(server: Eve): 'Test to ensure records can be inserted into DynamoDB\n\n :param Eve server: Eve server\n :raises: AssertionError\n ' with server.app_context(): id_field = server.config['DOMAIN']['actor']['id_field'] assert server.data.insert('actor', [{id_field: '1', 'fname': 'Oprah'}])
Test to ensure records can be inserted into DynamoDB :param Eve server: Eve server :raises: AssertionError
test/test_insert.py
test_insert
jlane9/eve-dynamodb
0
python
def test_insert(server: Eve): 'Test to ensure records can be inserted into DynamoDB\n\n :param Eve server: Eve server\n :raises: AssertionError\n ' with server.app_context(): id_field = server.config['DOMAIN']['actor']['id_field'] assert server.data.insert('actor', [{id_field: '1', 'fname': 'Oprah'}])
def test_insert(server: Eve): 'Test to ensure records can be inserted into DynamoDB\n\n :param Eve server: Eve server\n :raises: AssertionError\n ' with server.app_context(): id_field = server.config['DOMAIN']['actor']['id_field'] assert server.data.insert('actor', [{id_field: '1', 'fname': 'Oprah'}])<|docstring|>Test to ensure records can be inserted into DynamoDB :param Eve server: Eve server :raises: AssertionError<|endoftext|>
c5370a725fbd78808dd2fe5e76944f771214c8284bf91f7312e13e8bad9b618a
def test_duplicate_insert(server: Eve): 'Test to ensure id must remain unique\n\n :param Eve server: Eve server\n :raises: AssertionError\n ' with server.app_context(): id_field = server.config['DOMAIN']['actor']['id_field'] server.data.insert('actor', [{id_field: '1', 'fname': 'Oprah'}]) server.data.insert('actor', [{id_field: '1', 'fname': 'Kanye'}])
Test to ensure id must remain unique :param Eve server: Eve server :raises: AssertionError
test/test_insert.py
test_duplicate_insert
jlane9/eve-dynamodb
0
python
def test_duplicate_insert(server: Eve): 'Test to ensure id must remain unique\n\n :param Eve server: Eve server\n :raises: AssertionError\n ' with server.app_context(): id_field = server.config['DOMAIN']['actor']['id_field'] server.data.insert('actor', [{id_field: '1', 'fname': 'Oprah'}]) server.data.insert('actor', [{id_field: '1', 'fname': 'Kanye'}])
def test_duplicate_insert(server: Eve): 'Test to ensure id must remain unique\n\n :param Eve server: Eve server\n :raises: AssertionError\n ' with server.app_context(): id_field = server.config['DOMAIN']['actor']['id_field'] server.data.insert('actor', [{id_field: '1', 'fname': 'Oprah'}]) server.data.insert('actor', [{id_field: '1', 'fname': 'Kanye'}])<|docstring|>Test to ensure id must remain unique :param Eve server: Eve server :raises: AssertionError<|endoftext|>
e1ff9581088552106023f21575d0661d949059d43f29b7b40dfd2b29e7e190ad
def get_performance_test_results(self, page_url: str=None, metric_names: str=None, start_date: str=None, end_date: str=None): '\n https://docs.saucelabs.com/dev/api/performance/#get-performance-test-results\n\n Retrieves the results of performance tests run by the requesting account and returns the\n metric values for those tests.\n :param page_url: Filter results to return only tests run on a specific URL\n :param metric_names: Provide a list of specific metric values to return\n :param start_date: Filter results based on tests run on or after this date\n :param end_date: Filter results based on tests run on or before this date\n :return:\n ' params = {key: value for (key, value) in locals().items() if ((value is not None) and (key != 'self'))} return self._valid(self._session.request('get', f'{self.__sub_host}/', params=params), Performance, 'items')
https://docs.saucelabs.com/dev/api/performance/#get-performance-test-results Retrieves the results of performance tests run by the requesting account and returns the metric values for those tests. :param page_url: Filter results to return only tests run on a specific URL :param metric_names: Provide a list of specific metric values to return :param start_date: Filter results based on tests run on or after this date :param end_date: Filter results based on tests run on or before this date :return:
saucelab_api_client/base_classes/performance_api.py
get_performance_test_results
Slamnlc/saucelab-api-client
0
python
def get_performance_test_results(self, page_url: str=None, metric_names: str=None, start_date: str=None, end_date: str=None): '\n https://docs.saucelabs.com/dev/api/performance/#get-performance-test-results\n\n Retrieves the results of performance tests run by the requesting account and returns the\n metric values for those tests.\n :param page_url: Filter results to return only tests run on a specific URL\n :param metric_names: Provide a list of specific metric values to return\n :param start_date: Filter results based on tests run on or after this date\n :param end_date: Filter results based on tests run on or before this date\n :return:\n ' params = {key: value for (key, value) in locals().items() if ((value is not None) and (key != 'self'))} return self._valid(self._session.request('get', f'{self.__sub_host}/', params=params), Performance, 'items')
def get_performance_test_results(self, page_url: str=None, metric_names: str=None, start_date: str=None, end_date: str=None): '\n https://docs.saucelabs.com/dev/api/performance/#get-performance-test-results\n\n Retrieves the results of performance tests run by the requesting account and returns the\n metric values for those tests.\n :param page_url: Filter results to return only tests run on a specific URL\n :param metric_names: Provide a list of specific metric values to return\n :param start_date: Filter results based on tests run on or after this date\n :param end_date: Filter results based on tests run on or before this date\n :return:\n ' params = {key: value for (key, value) in locals().items() if ((value is not None) and (key != 'self'))} return self._valid(self._session.request('get', f'{self.__sub_host}/', params=params), Performance, 'items')<|docstring|>https://docs.saucelabs.com/dev/api/performance/#get-performance-test-results Retrieves the results of performance tests run by the requesting account and returns the metric values for those tests. :param page_url: Filter results to return only tests run on a specific URL :param metric_names: Provide a list of specific metric values to return :param start_date: Filter results based on tests run on or after this date :param end_date: Filter results based on tests run on or before this date :return:<|endoftext|>
81f36f98172ff5c05d9b22a18c153665997e45466cc2a00351edde07e0a59011
def get_performance_test_results_for_test(self, job_id: str, full: bool=True): '\n https://docs.saucelabs.com/performance/one-page/#get-performance-results-for-a-specific-test\n\n Retrieves the results of a specific performance test run by the requesting account\n :param job_id: The unique identifier of the requested test results\n :param full: Set to false to return only basic job data, excluding metric values. Defaults to true\n :return:\n ' params = {'full': full} return self._valid(self._session.request('get', f'{self.__sub_host}/{job_id}', params=params), PerformanceJob)
https://docs.saucelabs.com/performance/one-page/#get-performance-results-for-a-specific-test Retrieves the results of a specific performance test run by the requesting account :param job_id: The unique identifier of the requested test results :param full: Set to false to return only basic job data, excluding metric values. Defaults to true :return:
saucelab_api_client/base_classes/performance_api.py
get_performance_test_results_for_test
Slamnlc/saucelab-api-client
0
python
def get_performance_test_results_for_test(self, job_id: str, full: bool=True): '\n https://docs.saucelabs.com/performance/one-page/#get-performance-results-for-a-specific-test\n\n Retrieves the results of a specific performance test run by the requesting account\n :param job_id: The unique identifier of the requested test results\n :param full: Set to false to return only basic job data, excluding metric values. Defaults to true\n :return:\n ' params = {'full': full} return self._valid(self._session.request('get', f'{self.__sub_host}/{job_id}', params=params), PerformanceJob)
def get_performance_test_results_for_test(self, job_id: str, full: bool=True): '\n https://docs.saucelabs.com/performance/one-page/#get-performance-results-for-a-specific-test\n\n Retrieves the results of a specific performance test run by the requesting account\n :param job_id: The unique identifier of the requested test results\n :param full: Set to false to return only basic job data, excluding metric values. Defaults to true\n :return:\n ' params = {'full': full} return self._valid(self._session.request('get', f'{self.__sub_host}/{job_id}', params=params), PerformanceJob)<|docstring|>https://docs.saucelabs.com/performance/one-page/#get-performance-results-for-a-specific-test Retrieves the results of a specific performance test run by the requesting account :param job_id: The unique identifier of the requested test results :param full: Set to false to return only basic job data, excluding metric values. Defaults to true :return:<|endoftext|>
8d28d7602be19d3f5d722265ec6bc474bea7c63c9be56b5ff4489908f1067e31
def main(): ' MAIN! ' SW1 = NetworkDevice('172.16.63.100', 'cisco', 'cisco') R1 = NetworkDevice('172.16.63.101', 'cisco', 'cisco') R2 = NetworkDevice('172.16.63.102', 'cisco', 'cisco') network_devices = [SW1, R1, R2] for device in network_devices: print() device.get_device_details() for (key, value) in device.device_details.items(): print(('%15s : %-30s' % (key, value))) pickle.dump(network_devices, open('foo.pickle', 'wb')) print('\n') print('Reading objects from pickle files:') with open('foo.pickle', 'rb') as f: netdev_objects = pickle.load(f) for netdev_obj in netdev_objects: print((50 * '-')) for (key, value) in netdev_obj.device_details.items(): print(('%15s : %-30s' % (key, value))) print((50 * '-'))
MAIN!
week10/NetworkDevice.py
main
gerards/pynet_learning_python
0
python
def main(): ' ' SW1 = NetworkDevice('172.16.63.100', 'cisco', 'cisco') R1 = NetworkDevice('172.16.63.101', 'cisco', 'cisco') R2 = NetworkDevice('172.16.63.102', 'cisco', 'cisco') network_devices = [SW1, R1, R2] for device in network_devices: print() device.get_device_details() for (key, value) in device.device_details.items(): print(('%15s : %-30s' % (key, value))) pickle.dump(network_devices, open('foo.pickle', 'wb')) print('\n') print('Reading objects from pickle files:') with open('foo.pickle', 'rb') as f: netdev_objects = pickle.load(f) for netdev_obj in netdev_objects: print((50 * '-')) for (key, value) in netdev_obj.device_details.items(): print(('%15s : %-30s' % (key, value))) print((50 * '-'))
def main(): ' ' SW1 = NetworkDevice('172.16.63.100', 'cisco', 'cisco') R1 = NetworkDevice('172.16.63.101', 'cisco', 'cisco') R2 = NetworkDevice('172.16.63.102', 'cisco', 'cisco') network_devices = [SW1, R1, R2] for device in network_devices: print() device.get_device_details() for (key, value) in device.device_details.items(): print(('%15s : %-30s' % (key, value))) pickle.dump(network_devices, open('foo.pickle', 'wb')) print('\n') print('Reading objects from pickle files:') with open('foo.pickle', 'rb') as f: netdev_objects = pickle.load(f) for netdev_obj in netdev_objects: print((50 * '-')) for (key, value) in netdev_obj.device_details.items(): print(('%15s : %-30s' % (key, value))) print((50 * '-'))<|docstring|>MAIN!<|endoftext|>
2e8ba65e952fc708c4cd6a96dddb8d0528add04ed0804c896b7a1d5b2e71d10b
def _connect(self, disable_paging=True): ' Setup connect to network device ' remote_conn_pre = paramiko.SSHClient() remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy()) remote_conn_pre.connect(self.ipaddr, username=self.username, password=self.password, look_for_keys=False, allow_agent=False) remote_conn = remote_conn_pre.invoke_shell() if disable_paging: remote_conn.send('terminal length 0\n') time.sleep(1) remote_conn.recv(1000) return remote_conn
Setup connect to network device
week10/NetworkDevice.py
_connect
gerards/pynet_learning_python
0
python
def _connect(self, disable_paging=True): ' ' remote_conn_pre = paramiko.SSHClient() remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy()) remote_conn_pre.connect(self.ipaddr, username=self.username, password=self.password, look_for_keys=False, allow_agent=False) remote_conn = remote_conn_pre.invoke_shell() if disable_paging: remote_conn.send('terminal length 0\n') time.sleep(1) remote_conn.recv(1000) return remote_conn
def _connect(self, disable_paging=True): ' ' remote_conn_pre = paramiko.SSHClient() remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy()) remote_conn_pre.connect(self.ipaddr, username=self.username, password=self.password, look_for_keys=False, allow_agent=False) remote_conn = remote_conn_pre.invoke_shell() if disable_paging: remote_conn.send('terminal length 0\n') time.sleep(1) remote_conn.recv(1000) return remote_conn<|docstring|>Setup connect to network device<|endoftext|>
1f1eab6d0532b9a6557acfeb0738e143611e4bfdce68d1f43a6e4970992001e1
def send_command(self, command): ' Send command to network device and return output ' remote_conn = self._connect() remote_conn.send((('\n' + command) + '\n')) time.sleep(2) output = remote_conn.recv(65535) output = output.decode() remote_conn.close() return output
Send command to network device and return output
week10/NetworkDevice.py
send_command
gerards/pynet_learning_python
0
python
def send_command(self, command): ' ' remote_conn = self._connect() remote_conn.send((('\n' + command) + '\n')) time.sleep(2) output = remote_conn.recv(65535) output = output.decode() remote_conn.close() return output
def send_command(self, command): ' ' remote_conn = self._connect() remote_conn.send((('\n' + command) + '\n')) time.sleep(2) output = remote_conn.recv(65535) output = output.decode() remote_conn.close() return output<|docstring|>Send command to network device and return output<|endoftext|>
cb5d72a2ef8893356949a88d182d24e3f2124447315d47ceb08038b0bc5ee9f3
def get_device_details(self): ' Return device details pulled from show version ' show_version = self.send_command('show version') show_version_lines = show_version.split('\n') for line in show_version_lines: if ('Cisco IOS Software' in line): vendor_line = line.split(', ') self.device_details['vendor'] = vendor_line[0].split()[0] self.device_details['model'] = vendor_line[1].split()[0] self.device_details['os_version'] = vendor_line[2].split()[1] if (' uptime is ' in line): (self.device_details['hostname'], uptime_line) = line.split(' uptime is ') uptime_line = uptime_line.split(', ') seconds_per_minute = 60 seconds_per_hour = (60 * seconds_per_minute) seconds_per_day = (24 * seconds_per_hour) seconds_per_week = (7 * seconds_per_day) seconds_per_year = (52 * seconds_per_week) for time_period in uptime_line: if ('year' in time_period): self.device_details['uptime'] = (seconds_per_year * int(time_period.split()[0])) if ('week' in time_period): self.device_details['uptime'] += (seconds_per_week * int(time_period.split()[0])) if ('day' in time_period): self.device_details['uptime'] += (seconds_per_day * int(time_period.split()[0])) if ('hour' in time_period): self.device_details['uptime'] += (seconds_per_hour * int(time_period.split()[0])) if ('minute' in time_period): self.device_details['uptime'] += (seconds_per_minute * int(time_period.split()[0])) if ('Processor board ID ' in line): serial_line = line.split('Processor board ID ') self.device_details['serial_number'] = serial_line[1].strip() return self.device_details
Return device details pulled from show version
week10/NetworkDevice.py
get_device_details
gerards/pynet_learning_python
0
python
def get_device_details(self): ' ' show_version = self.send_command('show version') show_version_lines = show_version.split('\n') for line in show_version_lines: if ('Cisco IOS Software' in line): vendor_line = line.split(', ') self.device_details['vendor'] = vendor_line[0].split()[0] self.device_details['model'] = vendor_line[1].split()[0] self.device_details['os_version'] = vendor_line[2].split()[1] if (' uptime is ' in line): (self.device_details['hostname'], uptime_line) = line.split(' uptime is ') uptime_line = uptime_line.split(', ') seconds_per_minute = 60 seconds_per_hour = (60 * seconds_per_minute) seconds_per_day = (24 * seconds_per_hour) seconds_per_week = (7 * seconds_per_day) seconds_per_year = (52 * seconds_per_week) for time_period in uptime_line: if ('year' in time_period): self.device_details['uptime'] = (seconds_per_year * int(time_period.split()[0])) if ('week' in time_period): self.device_details['uptime'] += (seconds_per_week * int(time_period.split()[0])) if ('day' in time_period): self.device_details['uptime'] += (seconds_per_day * int(time_period.split()[0])) if ('hour' in time_period): self.device_details['uptime'] += (seconds_per_hour * int(time_period.split()[0])) if ('minute' in time_period): self.device_details['uptime'] += (seconds_per_minute * int(time_period.split()[0])) if ('Processor board ID ' in line): serial_line = line.split('Processor board ID ') self.device_details['serial_number'] = serial_line[1].strip() return self.device_details
def get_device_details(self): ' ' show_version = self.send_command('show version') show_version_lines = show_version.split('\n') for line in show_version_lines: if ('Cisco IOS Software' in line): vendor_line = line.split(', ') self.device_details['vendor'] = vendor_line[0].split()[0] self.device_details['model'] = vendor_line[1].split()[0] self.device_details['os_version'] = vendor_line[2].split()[1] if (' uptime is ' in line): (self.device_details['hostname'], uptime_line) = line.split(' uptime is ') uptime_line = uptime_line.split(', ') seconds_per_minute = 60 seconds_per_hour = (60 * seconds_per_minute) seconds_per_day = (24 * seconds_per_hour) seconds_per_week = (7 * seconds_per_day) seconds_per_year = (52 * seconds_per_week) for time_period in uptime_line: if ('year' in time_period): self.device_details['uptime'] = (seconds_per_year * int(time_period.split()[0])) if ('week' in time_period): self.device_details['uptime'] += (seconds_per_week * int(time_period.split()[0])) if ('day' in time_period): self.device_details['uptime'] += (seconds_per_day * int(time_period.split()[0])) if ('hour' in time_period): self.device_details['uptime'] += (seconds_per_hour * int(time_period.split()[0])) if ('minute' in time_period): self.device_details['uptime'] += (seconds_per_minute * int(time_period.split()[0])) if ('Processor board ID ' in line): serial_line = line.split('Processor board ID ') self.device_details['serial_number'] = serial_line[1].strip() return self.device_details<|docstring|>Return device details pulled from show version<|endoftext|>
92f2aaab1106c6f2ede4d7b6e3fd5dd15939afe98150a46b4884ba72012372ba
def __init__(self, phantom_model: str, phantom_dim: PhantomDimensions, human_mesh: Optional[str]=None): 'Create the phantom of choice.\n\n Parameters\n ----------\n phantom_model : str\n Type of phantom to create. Valid selections are \'plane\',\n \'cylinder\', \'human\', "table" an "pad".\n phantom_dim : PhantomDimensions\n instance of class PhantomDimensions containing dimensions for\n all phantoms models except human phantoms: Length, width, radius,\n thickness etc.\n human_mesh : str, optional\n Choose which human mesh phantom to use. Valid selection are names\n of the *.stl-files in the phantom_data folder (The default is none.\n\n Raises\n ------\n ValueError\n Raises value error if unsupported phantom type are selected,\n or if phantom_model=\'human\' selected, without specifying\n human_mesh\n\n ' self.phantom_model = phantom_model.lower() if (self.phantom_model not in VALID_PHANTOM_MODELS): raise ValueError(f"Unknown phantom model selected. Valid type:{'.'.join(VALID_PHANTOM_MODELS)}") self.r_ref: np.array self.table_length = phantom_dim.table_length if (phantom_model == 'plane'): if (phantom_dim.plane_resolution.lower() == 'dense'): res_length = res_width = 2.0 elif (phantom_dim.plane_resolution.lower() == 'sparse'): res_length = res_width = 1.0 x = np.linspace(((- phantom_dim.plane_width) / 2), ((+ phantom_dim.plane_width) / 2), ((res_width * phantom_dim.plane_width) + 1)) y = np.linspace(0, phantom_dim.plane_length, (res_length * phantom_dim.plane_length)) (x_plane, y_plane) = np.meshgrid(x, y) t = phantom_dim.plane_width i2: List[int] = [] i1 = j1 = k1 = i2 for i in range((len(x) - 1)): for j in range((len(y) - 1)): i1 = (i1 + [((j * len(x)) + i)]) j1 = (j1 + [(((j * len(x)) + i) + 1)]) k1 = (k1 + [(((j * len(x)) + i) + len(x))]) i2 = (i2 + [((((j * len(x)) + i) + len(x)) + 1)]) self.r = np.column_stack((x_plane.ravel(), y_plane.ravel(), np.zeros(len(x_plane.ravel())))) self.ijk = np.column_stack(((i1 + i2), (j1 + k1), (k1 + j1))) self.dose = 
np.zeros(len(self.r)) elif (phantom_model == 'cylinder'): if (phantom_dim.cylinder_resolution.lower() == 'dense'): res_length = 4 res_width = 0.05 elif (phantom_dim.cylinder_resolution.lower() == 'sparse'): res_length = 1.0 res_width = 0.1 t = np.arange((0 * np.pi), (2 * np.pi), res_width) x = (phantom_dim.cylinder_radii_a * np.cos(t)).tolist() z = (phantom_dim.cylinder_radii_b * np.sin(t)).tolist() nx = (np.cos(t) / np.sqrt(np.square((np.cos(t) + (4 * np.square(np.sin(t))))))) ny = np.zeros(len(t)) nz = ((2 * np.sin(t)) / np.sqrt(np.square((np.cos(t) + (4 * np.square(np.sin(t))))))) nx = nx.tolist() ny = ny.tolist() nz = nz.tolist() n = [[nx[ind], ny[ind], nz[ind]] for ind in range(len(t))] output: Dict = dict(n=[], x=[], y=[], z=[]) for index in range(0, (int(res_length) * (phantom_dim.cylinder_length + 2)), 1): output['x'] = (output['x'] + x) output['y'] = (output['y'] + ([((1 / res_length) * index)] * len(x))) output['z'] = (output['z'] + z) output['n'] = (output['n'] + n) i1 = list(range(0, (len(output['x']) - len(t)))) j1 = list(range(1, ((len(output['x']) - len(t)) + 1))) k1 = list(range(len(t), len(output['x']))) i2 = list(range(0, (len(output['x']) - len(t)))) k2 = list(range((len(t) - 1), (len(output['x']) - 1))) j2 = list(range(len(t), len(output['x']))) self.r = np.column_stack((output['x'], output['y'], output['z'])) self.ijk = np.column_stack(((i1 + i2), (j1 + j2), (k1 + k2))) self.dose = np.zeros(len(self.r)) self.n = np.asarray(output['n']) elif (phantom_model == 'human'): if (human_mesh is None): raise ValueError('Human model needs to be specified forphantom_model = "human"') phantom_path = os.path.join(os.path.dirname(__file__), 'phantom_data', f'{human_mesh}.stl') phantom_mesh = mesh.Mesh.from_file(phantom_path) r = phantom_mesh.vectors n = phantom_mesh.normals self.r = np.asarray([el for el_list in r for el in el_list]) self.n = np.asarray([x for pair in zip(n, n, n) for x in pair]) self.ijk = np.column_stack((np.arange(0, (len(self.r) - 3), 3), 
np.arange(1, (len(self.r) - 2), 3), np.arange(2, (len(self.r) - 1), 3))) self.dose = np.zeros(len(self.r)) elif (phantom_model == 'table'): x_tab = [(index * phantom_dim.table_width) for index in [0.5, 0.25, 0.25, (- 0.25), (- 0.25), (- 0.5), (- 0.5), 0.5, 0.5, 0.25, 0.25, (- 0.25), (- 0.25), (- 0.5), (- 0.5), 0.5]] y_tab = [(index * phantom_dim.table_length) for index in [1.0, 1.0, 1, 1, 1.0, 1.0, 0, 0, 1.0, 1.0, 1, 1, 1.0, 1.0, 0, 0]] z_tab = [(index * phantom_dim.table_thickness) for index in [0, 0, 0, 0, 0, 0, 0, 0, (- 1), (- 1), (- 1), (- 1), (- 1), (- 1), (- 1), (- 1)]] i_tab = [0, 0, 1, 1, 8, 8, 9, 9, 0, 7, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7] j_tab = [5, 6, 2, 3, 13, 14, 10, 11, 7, 15, 1, 9, 2, 10, 3, 11, 4, 12, 5, 13, 6, 14, 7, 15] k_tab = [6, 7, 3, 4, 14, 15, 11, 12, 8, 8, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14] self.r = np.column_stack((x_tab, y_tab, z_tab)) self.ijk = np.column_stack((i_tab, j_tab, k_tab)) elif (phantom_model == 'pad'): x_pad = [(index * phantom_dim.pad_width) for index in [0.5, 0.25, 0.25, (- 0.25), (- 0.25), (- 0.5), (- 0.5), 0.5, 0.5, 0.25, 0.25, (- 0.25), (- 0.25), (- 0.5), (- 0.5), 0.5]] y_pad = [(index * phantom_dim.pad_length) for index in [1.0, 1.0, 1, 1, 1.0, 1.0, 0, 0, 1.0, 1.0, 1, 1, 1.0, 1.0, 0, 0]] z_pad = [(index * phantom_dim.pad_thickness) for index in [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1]] i_pad = [0, 0, 1, 1, 8, 8, 9, 9, 0, 7, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7] j_pad = [5, 6, 2, 3, 13, 14, 10, 11, 7, 15, 1, 9, 2, 10, 3, 11, 4, 12, 5, 13, 6, 14, 7, 15] k_pad = [6, 7, 3, 4, 14, 15, 11, 12, 8, 8, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14] self.r = np.column_stack((x_pad, y_pad, z_pad)) self.ijk = np.column_stack((i_pad, j_pad, k_pad))
Create the phantom of choice. Parameters ---------- phantom_model : str Type of phantom to create. Valid selections are 'plane', 'cylinder', 'human', "table" an "pad". phantom_dim : PhantomDimensions instance of class PhantomDimensions containing dimensions for all phantoms models except human phantoms: Length, width, radius, thickness etc. human_mesh : str, optional Choose which human mesh phantom to use. Valid selection are names of the *.stl-files in the phantom_data folder (The default is none. Raises ------ ValueError Raises value error if unsupported phantom type are selected, or if phantom_model='human' selected, without specifying human_mesh
src/pyskindose/phantom_class.py
__init__
notZaki/PySkinDose
0
python
def __init__(self, phantom_model: str, phantom_dim: PhantomDimensions, human_mesh: Optional[str]=None): 'Create the phantom of choice.\n\n Parameters\n ----------\n phantom_model : str\n Type of phantom to create. Valid selections are \'plane\',\n \'cylinder\', \'human\', "table" an "pad".\n phantom_dim : PhantomDimensions\n instance of class PhantomDimensions containing dimensions for\n all phantoms models except human phantoms: Length, width, radius,\n thickness etc.\n human_mesh : str, optional\n Choose which human mesh phantom to use. Valid selection are names\n of the *.stl-files in the phantom_data folder (The default is none.\n\n Raises\n ------\n ValueError\n Raises value error if unsupported phantom type are selected,\n or if phantom_model=\'human\' selected, without specifying\n human_mesh\n\n ' self.phantom_model = phantom_model.lower() if (self.phantom_model not in VALID_PHANTOM_MODELS): raise ValueError(f"Unknown phantom model selected. Valid type:{'.'.join(VALID_PHANTOM_MODELS)}") self.r_ref: np.array self.table_length = phantom_dim.table_length if (phantom_model == 'plane'): if (phantom_dim.plane_resolution.lower() == 'dense'): res_length = res_width = 2.0 elif (phantom_dim.plane_resolution.lower() == 'sparse'): res_length = res_width = 1.0 x = np.linspace(((- phantom_dim.plane_width) / 2), ((+ phantom_dim.plane_width) / 2), ((res_width * phantom_dim.plane_width) + 1)) y = np.linspace(0, phantom_dim.plane_length, (res_length * phantom_dim.plane_length)) (x_plane, y_plane) = np.meshgrid(x, y) t = phantom_dim.plane_width i2: List[int] = [] i1 = j1 = k1 = i2 for i in range((len(x) - 1)): for j in range((len(y) - 1)): i1 = (i1 + [((j * len(x)) + i)]) j1 = (j1 + [(((j * len(x)) + i) + 1)]) k1 = (k1 + [(((j * len(x)) + i) + len(x))]) i2 = (i2 + [((((j * len(x)) + i) + len(x)) + 1)]) self.r = np.column_stack((x_plane.ravel(), y_plane.ravel(), np.zeros(len(x_plane.ravel())))) self.ijk = np.column_stack(((i1 + i2), (j1 + k1), (k1 + j1))) self.dose = 
np.zeros(len(self.r)) elif (phantom_model == 'cylinder'): if (phantom_dim.cylinder_resolution.lower() == 'dense'): res_length = 4 res_width = 0.05 elif (phantom_dim.cylinder_resolution.lower() == 'sparse'): res_length = 1.0 res_width = 0.1 t = np.arange((0 * np.pi), (2 * np.pi), res_width) x = (phantom_dim.cylinder_radii_a * np.cos(t)).tolist() z = (phantom_dim.cylinder_radii_b * np.sin(t)).tolist() nx = (np.cos(t) / np.sqrt(np.square((np.cos(t) + (4 * np.square(np.sin(t))))))) ny = np.zeros(len(t)) nz = ((2 * np.sin(t)) / np.sqrt(np.square((np.cos(t) + (4 * np.square(np.sin(t))))))) nx = nx.tolist() ny = ny.tolist() nz = nz.tolist() n = [[nx[ind], ny[ind], nz[ind]] for ind in range(len(t))] output: Dict = dict(n=[], x=[], y=[], z=[]) for index in range(0, (int(res_length) * (phantom_dim.cylinder_length + 2)), 1): output['x'] = (output['x'] + x) output['y'] = (output['y'] + ([((1 / res_length) * index)] * len(x))) output['z'] = (output['z'] + z) output['n'] = (output['n'] + n) i1 = list(range(0, (len(output['x']) - len(t)))) j1 = list(range(1, ((len(output['x']) - len(t)) + 1))) k1 = list(range(len(t), len(output['x']))) i2 = list(range(0, (len(output['x']) - len(t)))) k2 = list(range((len(t) - 1), (len(output['x']) - 1))) j2 = list(range(len(t), len(output['x']))) self.r = np.column_stack((output['x'], output['y'], output['z'])) self.ijk = np.column_stack(((i1 + i2), (j1 + j2), (k1 + k2))) self.dose = np.zeros(len(self.r)) self.n = np.asarray(output['n']) elif (phantom_model == 'human'): if (human_mesh is None): raise ValueError('Human model needs to be specified forphantom_model = "human"') phantom_path = os.path.join(os.path.dirname(__file__), 'phantom_data', f'{human_mesh}.stl') phantom_mesh = mesh.Mesh.from_file(phantom_path) r = phantom_mesh.vectors n = phantom_mesh.normals self.r = np.asarray([el for el_list in r for el in el_list]) self.n = np.asarray([x for pair in zip(n, n, n) for x in pair]) self.ijk = np.column_stack((np.arange(0, (len(self.r) - 3), 3), 
np.arange(1, (len(self.r) - 2), 3), np.arange(2, (len(self.r) - 1), 3))) self.dose = np.zeros(len(self.r)) elif (phantom_model == 'table'): x_tab = [(index * phantom_dim.table_width) for index in [0.5, 0.25, 0.25, (- 0.25), (- 0.25), (- 0.5), (- 0.5), 0.5, 0.5, 0.25, 0.25, (- 0.25), (- 0.25), (- 0.5), (- 0.5), 0.5]] y_tab = [(index * phantom_dim.table_length) for index in [1.0, 1.0, 1, 1, 1.0, 1.0, 0, 0, 1.0, 1.0, 1, 1, 1.0, 1.0, 0, 0]] z_tab = [(index * phantom_dim.table_thickness) for index in [0, 0, 0, 0, 0, 0, 0, 0, (- 1), (- 1), (- 1), (- 1), (- 1), (- 1), (- 1), (- 1)]] i_tab = [0, 0, 1, 1, 8, 8, 9, 9, 0, 7, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7] j_tab = [5, 6, 2, 3, 13, 14, 10, 11, 7, 15, 1, 9, 2, 10, 3, 11, 4, 12, 5, 13, 6, 14, 7, 15] k_tab = [6, 7, 3, 4, 14, 15, 11, 12, 8, 8, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14] self.r = np.column_stack((x_tab, y_tab, z_tab)) self.ijk = np.column_stack((i_tab, j_tab, k_tab)) elif (phantom_model == 'pad'): x_pad = [(index * phantom_dim.pad_width) for index in [0.5, 0.25, 0.25, (- 0.25), (- 0.25), (- 0.5), (- 0.5), 0.5, 0.5, 0.25, 0.25, (- 0.25), (- 0.25), (- 0.5), (- 0.5), 0.5]] y_pad = [(index * phantom_dim.pad_length) for index in [1.0, 1.0, 1, 1, 1.0, 1.0, 0, 0, 1.0, 1.0, 1, 1, 1.0, 1.0, 0, 0]] z_pad = [(index * phantom_dim.pad_thickness) for index in [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1]] i_pad = [0, 0, 1, 1, 8, 8, 9, 9, 0, 7, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7] j_pad = [5, 6, 2, 3, 13, 14, 10, 11, 7, 15, 1, 9, 2, 10, 3, 11, 4, 12, 5, 13, 6, 14, 7, 15] k_pad = [6, 7, 3, 4, 14, 15, 11, 12, 8, 8, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14] self.r = np.column_stack((x_pad, y_pad, z_pad)) self.ijk = np.column_stack((i_pad, j_pad, k_pad))
def __init__(self, phantom_model: str, phantom_dim: PhantomDimensions, human_mesh: Optional[str]=None): 'Create the phantom of choice.\n\n Parameters\n ----------\n phantom_model : str\n Type of phantom to create. Valid selections are \'plane\',\n \'cylinder\', \'human\', "table" an "pad".\n phantom_dim : PhantomDimensions\n instance of class PhantomDimensions containing dimensions for\n all phantoms models except human phantoms: Length, width, radius,\n thickness etc.\n human_mesh : str, optional\n Choose which human mesh phantom to use. Valid selection are names\n of the *.stl-files in the phantom_data folder (The default is none.\n\n Raises\n ------\n ValueError\n Raises value error if unsupported phantom type are selected,\n or if phantom_model=\'human\' selected, without specifying\n human_mesh\n\n ' self.phantom_model = phantom_model.lower() if (self.phantom_model not in VALID_PHANTOM_MODELS): raise ValueError(f"Unknown phantom model selected. Valid type:{'.'.join(VALID_PHANTOM_MODELS)}") self.r_ref: np.array self.table_length = phantom_dim.table_length if (phantom_model == 'plane'): if (phantom_dim.plane_resolution.lower() == 'dense'): res_length = res_width = 2.0 elif (phantom_dim.plane_resolution.lower() == 'sparse'): res_length = res_width = 1.0 x = np.linspace(((- phantom_dim.plane_width) / 2), ((+ phantom_dim.plane_width) / 2), ((res_width * phantom_dim.plane_width) + 1)) y = np.linspace(0, phantom_dim.plane_length, (res_length * phantom_dim.plane_length)) (x_plane, y_plane) = np.meshgrid(x, y) t = phantom_dim.plane_width i2: List[int] = [] i1 = j1 = k1 = i2 for i in range((len(x) - 1)): for j in range((len(y) - 1)): i1 = (i1 + [((j * len(x)) + i)]) j1 = (j1 + [(((j * len(x)) + i) + 1)]) k1 = (k1 + [(((j * len(x)) + i) + len(x))]) i2 = (i2 + [((((j * len(x)) + i) + len(x)) + 1)]) self.r = np.column_stack((x_plane.ravel(), y_plane.ravel(), np.zeros(len(x_plane.ravel())))) self.ijk = np.column_stack(((i1 + i2), (j1 + k1), (k1 + j1))) self.dose = 
np.zeros(len(self.r)) elif (phantom_model == 'cylinder'): if (phantom_dim.cylinder_resolution.lower() == 'dense'): res_length = 4 res_width = 0.05 elif (phantom_dim.cylinder_resolution.lower() == 'sparse'): res_length = 1.0 res_width = 0.1 t = np.arange((0 * np.pi), (2 * np.pi), res_width) x = (phantom_dim.cylinder_radii_a * np.cos(t)).tolist() z = (phantom_dim.cylinder_radii_b * np.sin(t)).tolist() nx = (np.cos(t) / np.sqrt(np.square((np.cos(t) + (4 * np.square(np.sin(t))))))) ny = np.zeros(len(t)) nz = ((2 * np.sin(t)) / np.sqrt(np.square((np.cos(t) + (4 * np.square(np.sin(t))))))) nx = nx.tolist() ny = ny.tolist() nz = nz.tolist() n = [[nx[ind], ny[ind], nz[ind]] for ind in range(len(t))] output: Dict = dict(n=[], x=[], y=[], z=[]) for index in range(0, (int(res_length) * (phantom_dim.cylinder_length + 2)), 1): output['x'] = (output['x'] + x) output['y'] = (output['y'] + ([((1 / res_length) * index)] * len(x))) output['z'] = (output['z'] + z) output['n'] = (output['n'] + n) i1 = list(range(0, (len(output['x']) - len(t)))) j1 = list(range(1, ((len(output['x']) - len(t)) + 1))) k1 = list(range(len(t), len(output['x']))) i2 = list(range(0, (len(output['x']) - len(t)))) k2 = list(range((len(t) - 1), (len(output['x']) - 1))) j2 = list(range(len(t), len(output['x']))) self.r = np.column_stack((output['x'], output['y'], output['z'])) self.ijk = np.column_stack(((i1 + i2), (j1 + j2), (k1 + k2))) self.dose = np.zeros(len(self.r)) self.n = np.asarray(output['n']) elif (phantom_model == 'human'): if (human_mesh is None): raise ValueError('Human model needs to be specified forphantom_model = "human"') phantom_path = os.path.join(os.path.dirname(__file__), 'phantom_data', f'{human_mesh}.stl') phantom_mesh = mesh.Mesh.from_file(phantom_path) r = phantom_mesh.vectors n = phantom_mesh.normals self.r = np.asarray([el for el_list in r for el in el_list]) self.n = np.asarray([x for pair in zip(n, n, n) for x in pair]) self.ijk = np.column_stack((np.arange(0, (len(self.r) - 3), 3), 
np.arange(1, (len(self.r) - 2), 3), np.arange(2, (len(self.r) - 1), 3))) self.dose = np.zeros(len(self.r)) elif (phantom_model == 'table'): x_tab = [(index * phantom_dim.table_width) for index in [0.5, 0.25, 0.25, (- 0.25), (- 0.25), (- 0.5), (- 0.5), 0.5, 0.5, 0.25, 0.25, (- 0.25), (- 0.25), (- 0.5), (- 0.5), 0.5]] y_tab = [(index * phantom_dim.table_length) for index in [1.0, 1.0, 1, 1, 1.0, 1.0, 0, 0, 1.0, 1.0, 1, 1, 1.0, 1.0, 0, 0]] z_tab = [(index * phantom_dim.table_thickness) for index in [0, 0, 0, 0, 0, 0, 0, 0, (- 1), (- 1), (- 1), (- 1), (- 1), (- 1), (- 1), (- 1)]] i_tab = [0, 0, 1, 1, 8, 8, 9, 9, 0, 7, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7] j_tab = [5, 6, 2, 3, 13, 14, 10, 11, 7, 15, 1, 9, 2, 10, 3, 11, 4, 12, 5, 13, 6, 14, 7, 15] k_tab = [6, 7, 3, 4, 14, 15, 11, 12, 8, 8, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14] self.r = np.column_stack((x_tab, y_tab, z_tab)) self.ijk = np.column_stack((i_tab, j_tab, k_tab)) elif (phantom_model == 'pad'): x_pad = [(index * phantom_dim.pad_width) for index in [0.5, 0.25, 0.25, (- 0.25), (- 0.25), (- 0.5), (- 0.5), 0.5, 0.5, 0.25, 0.25, (- 0.25), (- 0.25), (- 0.5), (- 0.5), 0.5]] y_pad = [(index * phantom_dim.pad_length) for index in [1.0, 1.0, 1, 1, 1.0, 1.0, 0, 0, 1.0, 1.0, 1, 1, 1.0, 1.0, 0, 0]] z_pad = [(index * phantom_dim.pad_thickness) for index in [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1]] i_pad = [0, 0, 1, 1, 8, 8, 9, 9, 0, 7, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7] j_pad = [5, 6, 2, 3, 13, 14, 10, 11, 7, 15, 1, 9, 2, 10, 3, 11, 4, 12, 5, 13, 6, 14, 7, 15] k_pad = [6, 7, 3, 4, 14, 15, 11, 12, 8, 8, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14] self.r = np.column_stack((x_pad, y_pad, z_pad)) self.ijk = np.column_stack((i_pad, j_pad, k_pad))<|docstring|>Create the phantom of choice. Parameters ---------- phantom_model : str Type of phantom to create. Valid selections are 'plane', 'cylinder', 'human', "table" an "pad". 
phantom_dim : PhantomDimensions instance of class PhantomDimensions containing dimensions for all phantoms models except human phantoms: Length, width, radius, thickness etc. human_mesh : str, optional Choose which human mesh phantom to use. Valid selection are names of the *.stl-files in the phantom_data folder (The default is none. Raises ------ ValueError Raises value error if unsupported phantom type are selected, or if phantom_model='human' selected, without specifying human_mesh<|endoftext|>
dd7814745a7bafa28c94ea7048e64b266191675db3065cf0ab1bee0f612c24ea
def rotate(self, angles: List[int]) -> None: 'Rotate the phantom about the angles specified in rotation.\n\n Parameters\n ----------\n angles: List[int]\n list of angles in degrees the phantom should be rotated about,\n given as [x_rot: <int>, y_rot: <int>, z_rot: <int>]. E.g.\n rotation = [0, 90, 0] will rotate the phantom 90 degrees about the\n y-axis.\n\n ' angles = np.deg2rad(angles) x_rot = angles[0] y_rot = angles[1] z_rot = angles[2] Rx = np.array([[(+ 1), (+ 0), (+ 0)], [(+ 0), (+ np.cos(x_rot)), (- np.sin(x_rot))], [(+ 0), (+ np.sin(x_rot)), (+ np.cos(x_rot))]]) Ry = np.array([[(+ np.cos(y_rot)), (+ 0), (+ np.sin(y_rot))], [(+ 0), (+ 1), (+ 0)], [(- np.sin(y_rot)), (+ 0), (+ np.cos(y_rot))]]) Rz = np.array([[(+ np.cos(z_rot)), (- np.sin(z_rot)), (+ 0)], [(+ np.sin(z_rot)), (+ np.cos(z_rot)), (+ 0)], [(+ 0), (+ 0), (+ 1)]]) self.r = np.matmul(Rx, np.matmul(Ry, np.matmul(Rz, self.r.T))).T if (self.phantom_model in ['cylinder', 'human']): self.n = np.matmul(Rx, np.matmul(Ry, np.matmul(Rz, self.n.T))).T
Rotate the phantom about the angles specified in rotation. Parameters ---------- angles: List[int] list of angles in degrees the phantom should be rotated about, given as [x_rot: <int>, y_rot: <int>, z_rot: <int>]. E.g. rotation = [0, 90, 0] will rotate the phantom 90 degrees about the y-axis.
src/pyskindose/phantom_class.py
rotate
notZaki/PySkinDose
0
python
def rotate(self, angles: List[int]) -> None: 'Rotate the phantom about the angles specified in rotation.\n\n Parameters\n ----------\n angles: List[int]\n list of angles in degrees the phantom should be rotated about,\n given as [x_rot: <int>, y_rot: <int>, z_rot: <int>]. E.g.\n rotation = [0, 90, 0] will rotate the phantom 90 degrees about the\n y-axis.\n\n ' angles = np.deg2rad(angles) x_rot = angles[0] y_rot = angles[1] z_rot = angles[2] Rx = np.array([[(+ 1), (+ 0), (+ 0)], [(+ 0), (+ np.cos(x_rot)), (- np.sin(x_rot))], [(+ 0), (+ np.sin(x_rot)), (+ np.cos(x_rot))]]) Ry = np.array([[(+ np.cos(y_rot)), (+ 0), (+ np.sin(y_rot))], [(+ 0), (+ 1), (+ 0)], [(- np.sin(y_rot)), (+ 0), (+ np.cos(y_rot))]]) Rz = np.array([[(+ np.cos(z_rot)), (- np.sin(z_rot)), (+ 0)], [(+ np.sin(z_rot)), (+ np.cos(z_rot)), (+ 0)], [(+ 0), (+ 0), (+ 1)]]) self.r = np.matmul(Rx, np.matmul(Ry, np.matmul(Rz, self.r.T))).T if (self.phantom_model in ['cylinder', 'human']): self.n = np.matmul(Rx, np.matmul(Ry, np.matmul(Rz, self.n.T))).T
def rotate(self, angles: List[int]) -> None: 'Rotate the phantom about the angles specified in rotation.\n\n Parameters\n ----------\n angles: List[int]\n list of angles in degrees the phantom should be rotated about,\n given as [x_rot: <int>, y_rot: <int>, z_rot: <int>]. E.g.\n rotation = [0, 90, 0] will rotate the phantom 90 degrees about the\n y-axis.\n\n ' angles = np.deg2rad(angles) x_rot = angles[0] y_rot = angles[1] z_rot = angles[2] Rx = np.array([[(+ 1), (+ 0), (+ 0)], [(+ 0), (+ np.cos(x_rot)), (- np.sin(x_rot))], [(+ 0), (+ np.sin(x_rot)), (+ np.cos(x_rot))]]) Ry = np.array([[(+ np.cos(y_rot)), (+ 0), (+ np.sin(y_rot))], [(+ 0), (+ 1), (+ 0)], [(- np.sin(y_rot)), (+ 0), (+ np.cos(y_rot))]]) Rz = np.array([[(+ np.cos(z_rot)), (- np.sin(z_rot)), (+ 0)], [(+ np.sin(z_rot)), (+ np.cos(z_rot)), (+ 0)], [(+ 0), (+ 0), (+ 1)]]) self.r = np.matmul(Rx, np.matmul(Ry, np.matmul(Rz, self.r.T))).T if (self.phantom_model in ['cylinder', 'human']): self.n = np.matmul(Rx, np.matmul(Ry, np.matmul(Rz, self.n.T))).T<|docstring|>Rotate the phantom about the angles specified in rotation. Parameters ---------- angles: List[int] list of angles in degrees the phantom should be rotated about, given as [x_rot: <int>, y_rot: <int>, z_rot: <int>]. E.g. rotation = [0, 90, 0] will rotate the phantom 90 degrees about the y-axis.<|endoftext|>
17470644cfc134ba6ae9c8d369650e36db5d53194d85da167066a647ac503e6c
def translate(self, dr: List[int]) -> None: 'Translate the phantom in the x, y or z direction.\n\n Parameters\n ----------\n dr : List[int]\n list of distances the phantom should be translated, given in cm.\n Specified as dr = [dx: <int>, dy: <int>, dz: <int>]. E.g.\n dr = [0, 0, 10] will translate the phantom 10 cm in the z direction\n\n ' self.r[(:, 0)] += dr[0] self.r[(:, 1)] += dr[1] self.r[(:, 2)] += dr[2]
Translate the phantom in the x, y or z direction. Parameters ---------- dr : List[int] list of distances the phantom should be translated, given in cm. Specified as dr = [dx: <int>, dy: <int>, dz: <int>]. E.g. dr = [0, 0, 10] will translate the phantom 10 cm in the z direction
src/pyskindose/phantom_class.py
translate
notZaki/PySkinDose
0
python
def translate(self, dr: List[int]) -> None: 'Translate the phantom in the x, y or z direction.\n\n Parameters\n ----------\n dr : List[int]\n list of distances the phantom should be translated, given in cm.\n Specified as dr = [dx: <int>, dy: <int>, dz: <int>]. E.g.\n dr = [0, 0, 10] will translate the phantom 10 cm in the z direction\n\n ' self.r[(:, 0)] += dr[0] self.r[(:, 1)] += dr[1] self.r[(:, 2)] += dr[2]
def translate(self, dr: List[int]) -> None: 'Translate the phantom in the x, y or z direction.\n\n Parameters\n ----------\n dr : List[int]\n list of distances the phantom should be translated, given in cm.\n Specified as dr = [dx: <int>, dy: <int>, dz: <int>]. E.g.\n dr = [0, 0, 10] will translate the phantom 10 cm in the z direction\n\n ' self.r[(:, 0)] += dr[0] self.r[(:, 1)] += dr[1] self.r[(:, 2)] += dr[2]<|docstring|>Translate the phantom in the x, y or z direction. Parameters ---------- dr : List[int] list of distances the phantom should be translated, given in cm. Specified as dr = [dx: <int>, dy: <int>, dz: <int>]. E.g. dr = [0, 0, 10] will translate the phantom 10 cm in the z direction<|endoftext|>
44fdc1119a5ecc365bf8ed89a3f76611d487207565f2884b13b575511819a952
def save_position(self) -> None: 'Store a reference position of the phantom.\n\n This function is supposed to be used to store the patient fixation\n conducted in the function position_geometry\n\n ' r_ref = copy.copy(self.r) self.r_ref = r_ref
Store a reference position of the phantom. This function is supposed to be used to store the patient fixation conducted in the function position_geometry
src/pyskindose/phantom_class.py
save_position
notZaki/PySkinDose
0
python
def save_position(self) -> None: 'Store a reference position of the phantom.\n\n This function is supposed to be used to store the patient fixation\n conducted in the function position_geometry\n\n ' r_ref = copy.copy(self.r) self.r_ref = r_ref
def save_position(self) -> None: 'Store a reference position of the phantom.\n\n This function is supposed to be used to store the patient fixation\n conducted in the function position_geometry\n\n ' r_ref = copy.copy(self.r) self.r_ref = r_ref<|docstring|>Store a reference position of the phantom. This function is supposed to be used to store the patient fixation conducted in the function position_geometry<|endoftext|>
3be814430079b375017c320c69b6e60a1e60ec0cebe8e2146cbc79fb8d17e31c
def position(self, data_norm: pd.DataFrame, event: int) -> None: 'Position the phantom for a event by adding RDSR table displacement.\n\n Positions the phantom from reference position to actual position\n according to the table displacement info in data_norm.\n\n Parameters\n ----------\n data_norm : pd.DataFrame\n Table containing dicom RDSR information from each irradiation event\n See rdsr_normalizer.py for more information.\n event : int\n Irradiation event index\n\n ' self.r = copy.copy(self.r_ref) self.r[(:, 2)] += (self.table_length / 2) rot = np.deg2rad(data_norm['At1'][event]) tilt = np.deg2rad(data_norm['At2'][event]) cradle = np.deg2rad(data_norm['At3'][event]) R1 = np.array([[(+ np.cos(rot)), 0, (+ np.sin(rot))], [0, 1, 0], [(- np.sin(rot)), 0, (+ np.cos(rot))]]) R2 = np.array([[(+ 1), (+ 0), (+ 0)], [(+ 0), (+ np.cos(tilt)), (- np.sin(tilt))], [(+ 0), (+ np.sin(tilt)), (+ np.cos(tilt))]]) R3 = np.array([[(+ np.cos(cradle)), (- np.sin(cradle)), 0], [(+ np.sin(cradle)), (+ np.cos(cradle)), (+ 0)], [(+ 0), (+ 0), (+ 1)]]) self.r = np.matmul(np.matmul(R3, np.matmul(R2, R1)), self.r.T).T self.r[(:, 2)] -= (self.table_length / 2) t = np.array([data_norm.Tx[event], data_norm.Ty[event], data_norm.Tz[event]]) self.r = (self.r + t)
Position the phantom for a event by adding RDSR table displacement. Positions the phantom from reference position to actual position according to the table displacement info in data_norm. Parameters ---------- data_norm : pd.DataFrame Table containing dicom RDSR information from each irradiation event See rdsr_normalizer.py for more information. event : int Irradiation event index
src/pyskindose/phantom_class.py
position
notZaki/PySkinDose
0
python
def position(self, data_norm: pd.DataFrame, event: int) -> None: 'Position the phantom for a event by adding RDSR table displacement.\n\n Positions the phantom from reference position to actual position\n according to the table displacement info in data_norm.\n\n Parameters\n ----------\n data_norm : pd.DataFrame\n Table containing dicom RDSR information from each irradiation event\n See rdsr_normalizer.py for more information.\n event : int\n Irradiation event index\n\n ' self.r = copy.copy(self.r_ref) self.r[(:, 2)] += (self.table_length / 2) rot = np.deg2rad(data_norm['At1'][event]) tilt = np.deg2rad(data_norm['At2'][event]) cradle = np.deg2rad(data_norm['At3'][event]) R1 = np.array([[(+ np.cos(rot)), 0, (+ np.sin(rot))], [0, 1, 0], [(- np.sin(rot)), 0, (+ np.cos(rot))]]) R2 = np.array([[(+ 1), (+ 0), (+ 0)], [(+ 0), (+ np.cos(tilt)), (- np.sin(tilt))], [(+ 0), (+ np.sin(tilt)), (+ np.cos(tilt))]]) R3 = np.array([[(+ np.cos(cradle)), (- np.sin(cradle)), 0], [(+ np.sin(cradle)), (+ np.cos(cradle)), (+ 0)], [(+ 0), (+ 0), (+ 1)]]) self.r = np.matmul(np.matmul(R3, np.matmul(R2, R1)), self.r.T).T self.r[(:, 2)] -= (self.table_length / 2) t = np.array([data_norm.Tx[event], data_norm.Ty[event], data_norm.Tz[event]]) self.r = (self.r + t)
def position(self, data_norm: pd.DataFrame, event: int) -> None: 'Position the phantom for a event by adding RDSR table displacement.\n\n Positions the phantom from reference position to actual position\n according to the table displacement info in data_norm.\n\n Parameters\n ----------\n data_norm : pd.DataFrame\n Table containing dicom RDSR information from each irradiation event\n See rdsr_normalizer.py for more information.\n event : int\n Irradiation event index\n\n ' self.r = copy.copy(self.r_ref) self.r[(:, 2)] += (self.table_length / 2) rot = np.deg2rad(data_norm['At1'][event]) tilt = np.deg2rad(data_norm['At2'][event]) cradle = np.deg2rad(data_norm['At3'][event]) R1 = np.array([[(+ np.cos(rot)), 0, (+ np.sin(rot))], [0, 1, 0], [(- np.sin(rot)), 0, (+ np.cos(rot))]]) R2 = np.array([[(+ 1), (+ 0), (+ 0)], [(+ 0), (+ np.cos(tilt)), (- np.sin(tilt))], [(+ 0), (+ np.sin(tilt)), (+ np.cos(tilt))]]) R3 = np.array([[(+ np.cos(cradle)), (- np.sin(cradle)), 0], [(+ np.sin(cradle)), (+ np.cos(cradle)), (+ 0)], [(+ 0), (+ 0), (+ 1)]]) self.r = np.matmul(np.matmul(R3, np.matmul(R2, R1)), self.r.T).T self.r[(:, 2)] -= (self.table_length / 2) t = np.array([data_norm.Tx[event], data_norm.Ty[event], data_norm.Tz[event]]) self.r = (self.r + t)<|docstring|>Position the phantom for a event by adding RDSR table displacement. Positions the phantom from reference position to actual position according to the table displacement info in data_norm. Parameters ---------- data_norm : pd.DataFrame Table containing dicom RDSR information from each irradiation event See rdsr_normalizer.py for more information. event : int Irradiation event index<|endoftext|>
0ad9557eaa7d38fa4fb4e9eafd837f6bb1ce10e6c4420ad0df3ba1ce556e8ed7
def plot_dosemap(self, dark_mode: bool=True, notebook_mode: bool=False): 'Plot a map of the absorbed skindose upon the patient phantom.\n\n This function creates and plots an offline plotly graph of the\n skin dose distribution on the phantom. The colorscale is mapped to the\n absorbed skin dose value. Only available for phantom type: "plane",\n "cylinder" or "human"\n\n Parameters\n ----------\n dark_mode : bool\n set dark for for plot\n notebook_mode : bool, default is true\n optimize plot size and margin for notebooks.\n\n ' (COLOR_CANVAS, COLOR_PLOT_TEXT, COLOR_GRID, COLOR_ZERO_LINE) = fetch_plot_colors(dark_mode=dark_mode) (PLOT_HEIGHT, PLOT_WIDTH) = fetch_plot_size(notebook_mode=notebook_mode) PLOT_MARGINS = fetch_plot_margin(notebook_mode=notebook_mode) lat_text = [f'<b>lat:</b> {np.around(self.r[(ind, 2)], 2)} cm<br>' for ind in range(len(self.r))] lon_text = [f'<b>lon:</b> {np.around(self.r[(ind, 0)], 2)} cm<br>' for ind in range(len(self.r))] ver_text = [f'<b>ver:</b> {np.around(self.r[(ind, 1)], 2)} cm<br>' for ind in range(len(self.r))] dose_text = [f'<b>skin dose:</b> {round(self.dose[ind], 2)} mGy' for ind in range(len(self.r))] hover_text = [(((lat_text[cell] + lon_text[cell]) + ver_text[cell]) + dose_text[cell]) for cell in range(len(self.r))] phantom_mesh = [go.Mesh3d(x=self.r[(:, 0)], y=self.r[(:, 1)], z=self.r[(:, 2)], i=self.ijk[(:, 0)], j=self.ijk[(:, 1)], k=self.ijk[(:, 2)], intensity=self.dose, colorscale=DOSEMAP_COLORSCALE, showscale=True, hoverinfo='text', text=hover_text, name='Human', colorbar=dict(tickfont=dict(color=COLOR_PLOT_TEXT), title='Skin dose [mGy]', titlefont=dict(family=PLOT_FONT_FAMILY, color=COLOR_PLOT_TEXT)))] layout = go.Layout(height=PLOT_HEIGHT, width=PLOT_WIDTH, margin=PLOT_MARGINS, font=dict(family=PLOT_FONT_FAMILY, color=COLOR_PLOT_TEXT, size=PLOT_FONT_SIZE), hoverlabel=dict(font=dict(family=PLOT_HOVERLABEL_FONT_FAMILY, size=PLOT_HOVERLABEL_FONT_SIZE)), title='<b>P</b>y<b>S</b>kin<b>D</b>ose [mode: dosemap]', 
titlefont=dict(family=PLOT_FONT_FAMILY, size=PLOT_FONT_SIZE, color=COLOR_PLOT_TEXT), paper_bgcolor=COLOR_CANVAS, scene=dict(aspectmode=PLOT_ASPECTMODE_PLOT_DOSEMAP, xaxis=dict(title='', backgroundcolor=COLOR_CANVAS, showgrid=False, zeroline=False, showticklabels=False), yaxis=dict(title='', backgroundcolor=COLOR_CANVAS, showgrid=False, zeroline=False, showticklabels=False), zaxis=dict(title='', backgroundcolor=COLOR_CANVAS, showgrid=False, zeroline=False, showticklabels=False))) fig = go.Figure(data=phantom_mesh, layout=layout) fig.show()
Plot a map of the absorbed skindose upon the patient phantom. This function creates and plots an offline plotly graph of the skin dose distribution on the phantom. The colorscale is mapped to the absorbed skin dose value. Only available for phantom type: "plane", "cylinder" or "human" Parameters ---------- dark_mode : bool set dark for for plot notebook_mode : bool, default is true optimize plot size and margin for notebooks.
src/pyskindose/phantom_class.py
plot_dosemap
notZaki/PySkinDose
0
python
def plot_dosemap(self, dark_mode: bool=True, notebook_mode: bool=False): 'Plot a map of the absorbed skindose upon the patient phantom.\n\n This function creates and plots an offline plotly graph of the\n skin dose distribution on the phantom. The colorscale is mapped to the\n absorbed skin dose value. Only available for phantom type: "plane",\n "cylinder" or "human"\n\n Parameters\n ----------\n dark_mode : bool\n set dark for for plot\n notebook_mode : bool, default is true\n optimize plot size and margin for notebooks.\n\n ' (COLOR_CANVAS, COLOR_PLOT_TEXT, COLOR_GRID, COLOR_ZERO_LINE) = fetch_plot_colors(dark_mode=dark_mode) (PLOT_HEIGHT, PLOT_WIDTH) = fetch_plot_size(notebook_mode=notebook_mode) PLOT_MARGINS = fetch_plot_margin(notebook_mode=notebook_mode) lat_text = [f'<b>lat:</b> {np.around(self.r[(ind, 2)], 2)} cm<br>' for ind in range(len(self.r))] lon_text = [f'<b>lon:</b> {np.around(self.r[(ind, 0)], 2)} cm<br>' for ind in range(len(self.r))] ver_text = [f'<b>ver:</b> {np.around(self.r[(ind, 1)], 2)} cm<br>' for ind in range(len(self.r))] dose_text = [f'<b>skin dose:</b> {round(self.dose[ind], 2)} mGy' for ind in range(len(self.r))] hover_text = [(((lat_text[cell] + lon_text[cell]) + ver_text[cell]) + dose_text[cell]) for cell in range(len(self.r))] phantom_mesh = [go.Mesh3d(x=self.r[(:, 0)], y=self.r[(:, 1)], z=self.r[(:, 2)], i=self.ijk[(:, 0)], j=self.ijk[(:, 1)], k=self.ijk[(:, 2)], intensity=self.dose, colorscale=DOSEMAP_COLORSCALE, showscale=True, hoverinfo='text', text=hover_text, name='Human', colorbar=dict(tickfont=dict(color=COLOR_PLOT_TEXT), title='Skin dose [mGy]', titlefont=dict(family=PLOT_FONT_FAMILY, color=COLOR_PLOT_TEXT)))] layout = go.Layout(height=PLOT_HEIGHT, width=PLOT_WIDTH, margin=PLOT_MARGINS, font=dict(family=PLOT_FONT_FAMILY, color=COLOR_PLOT_TEXT, size=PLOT_FONT_SIZE), hoverlabel=dict(font=dict(family=PLOT_HOVERLABEL_FONT_FAMILY, size=PLOT_HOVERLABEL_FONT_SIZE)), title='<b>P</b>y<b>S</b>kin<b>D</b>ose [mode: dosemap]', 
titlefont=dict(family=PLOT_FONT_FAMILY, size=PLOT_FONT_SIZE, color=COLOR_PLOT_TEXT), paper_bgcolor=COLOR_CANVAS, scene=dict(aspectmode=PLOT_ASPECTMODE_PLOT_DOSEMAP, xaxis=dict(title=, backgroundcolor=COLOR_CANVAS, showgrid=False, zeroline=False, showticklabels=False), yaxis=dict(title=, backgroundcolor=COLOR_CANVAS, showgrid=False, zeroline=False, showticklabels=False), zaxis=dict(title=, backgroundcolor=COLOR_CANVAS, showgrid=False, zeroline=False, showticklabels=False))) fig = go.Figure(data=phantom_mesh, layout=layout) fig.show()
def plot_dosemap(self, dark_mode: bool=True, notebook_mode: bool=False): 'Plot a map of the absorbed skindose upon the patient phantom.\n\n This function creates and plots an offline plotly graph of the\n skin dose distribution on the phantom. The colorscale is mapped to the\n absorbed skin dose value. Only available for phantom type: "plane",\n "cylinder" or "human"\n\n Parameters\n ----------\n dark_mode : bool\n set dark for for plot\n notebook_mode : bool, default is true\n optimize plot size and margin for notebooks.\n\n ' (COLOR_CANVAS, COLOR_PLOT_TEXT, COLOR_GRID, COLOR_ZERO_LINE) = fetch_plot_colors(dark_mode=dark_mode) (PLOT_HEIGHT, PLOT_WIDTH) = fetch_plot_size(notebook_mode=notebook_mode) PLOT_MARGINS = fetch_plot_margin(notebook_mode=notebook_mode) lat_text = [f'<b>lat:</b> {np.around(self.r[(ind, 2)], 2)} cm<br>' for ind in range(len(self.r))] lon_text = [f'<b>lon:</b> {np.around(self.r[(ind, 0)], 2)} cm<br>' for ind in range(len(self.r))] ver_text = [f'<b>ver:</b> {np.around(self.r[(ind, 1)], 2)} cm<br>' for ind in range(len(self.r))] dose_text = [f'<b>skin dose:</b> {round(self.dose[ind], 2)} mGy' for ind in range(len(self.r))] hover_text = [(((lat_text[cell] + lon_text[cell]) + ver_text[cell]) + dose_text[cell]) for cell in range(len(self.r))] phantom_mesh = [go.Mesh3d(x=self.r[(:, 0)], y=self.r[(:, 1)], z=self.r[(:, 2)], i=self.ijk[(:, 0)], j=self.ijk[(:, 1)], k=self.ijk[(:, 2)], intensity=self.dose, colorscale=DOSEMAP_COLORSCALE, showscale=True, hoverinfo='text', text=hover_text, name='Human', colorbar=dict(tickfont=dict(color=COLOR_PLOT_TEXT), title='Skin dose [mGy]', titlefont=dict(family=PLOT_FONT_FAMILY, color=COLOR_PLOT_TEXT)))] layout = go.Layout(height=PLOT_HEIGHT, width=PLOT_WIDTH, margin=PLOT_MARGINS, font=dict(family=PLOT_FONT_FAMILY, color=COLOR_PLOT_TEXT, size=PLOT_FONT_SIZE), hoverlabel=dict(font=dict(family=PLOT_HOVERLABEL_FONT_FAMILY, size=PLOT_HOVERLABEL_FONT_SIZE)), title='<b>P</b>y<b>S</b>kin<b>D</b>ose [mode: dosemap]', 
titlefont=dict(family=PLOT_FONT_FAMILY, size=PLOT_FONT_SIZE, color=COLOR_PLOT_TEXT), paper_bgcolor=COLOR_CANVAS, scene=dict(aspectmode=PLOT_ASPECTMODE_PLOT_DOSEMAP, xaxis=dict(title=, backgroundcolor=COLOR_CANVAS, showgrid=False, zeroline=False, showticklabels=False), yaxis=dict(title=, backgroundcolor=COLOR_CANVAS, showgrid=False, zeroline=False, showticklabels=False), zaxis=dict(title=, backgroundcolor=COLOR_CANVAS, showgrid=False, zeroline=False, showticklabels=False))) fig = go.Figure(data=phantom_mesh, layout=layout) fig.show()<|docstring|>Plot a map of the absorbed skindose upon the patient phantom. This function creates and plots an offline plotly graph of the skin dose distribution on the phantom. The colorscale is mapped to the absorbed skin dose value. Only available for phantom type: "plane", "cylinder" or "human" Parameters ---------- dark_mode : bool set dark for for plot notebook_mode : bool, default is true optimize plot size and margin for notebooks.<|endoftext|>
ac8835bde3f1b68dbe6d3b49fa29159c7ee706aa2da498ab9f0d022ab13e8ae0
def plot_graph(vertices, edges, ax, xlim=None, highlight=None, highlight_color='magenta', node_color='b', edge_color='#999999', label=None): 'Takes a graph given as vertices and edges and visualizes its structure' start = vertices.ravel().min() stop = vertices.ravel().max() ax.grid(b=True, which='major', linestyle='--', linewidth=0.2, color='#222222') ax.yaxis.grid(False) patchlist = [] exon_num = np.zeros(((stop - start),)) exon_loc = np.zeros((1, (stop - start))) exon_level = np.zeros((vertices.shape[1],)) for i in range(vertices.shape[1]): cur_vertex = (vertices[(:, i)] - start) exon_num[cur_vertex[0]:cur_vertex[1]] += 1 if np.all((exon_num < 2)): exon_loc[(0, :)] = exon_num level = 0 elif (exon_num.max() > exon_loc.shape[0]): exon_loc = np.r_[(exon_loc, np.zeros((1, (stop - start))))] exon_loc[((- 1), cur_vertex[0]:cur_vertex[1])] = 1 level = (exon_loc.shape[0] - 1) elif (exon_num.max() <= exon_loc.shape[0]): idx = np.where(np.all((exon_loc[(:, cur_vertex[0]:cur_vertex[1])] == 0), 1))[0].min() exon_loc[(idx, cur_vertex[0]:cur_vertex[1])] = 1 level = idx exon_level[i] = level patchlist.append(mpatches.Rectangle([(cur_vertex[0] + start), (20 + (level * 20))], (cur_vertex[1] - cur_vertex[0]), 10, facecolor=node_color, edgecolor='none', alpha=0.7)) linelist = [] intron_loc = np.zeros((1, (stop - start))) if (edges.shape[0] > 1): (ii, jj) = np.where((np.triu(edges) > 0)) for (i, j) in zip(ii, jj): if (vertices[(0, i)] < vertices[(0, j)]): istart = vertices[(1, i)] istop = vertices[(0, j)] level1 = exon_level[i] level2 = exon_level[j] else: istart = vertices[(1, j)] istop = vertices[(0, i)] level1 = exon_level[j] level2 = exon_level[i] cur_intron = [(istart - start), (istop - start)] intron_loc[cur_intron[0]:cur_intron[1]] += 1 leveli = [((istart + istop) * 0.5), ((level1 + level2) * 0.5)] linelist.append(mlines.Line2D([istart, leveli[0], istop], [(25 + (level1 * 20)), (32 + (leveli[1] * 20)), (25 + (level2 * 20))], color=edge_color, linewidth=0.5)) for node in 
patchlist: ax.add_patch(node) for line in linelist: ax.add_line(line) if label: ax.text((start + ((stop - start) / 2)), 12, label, verticalalignment='center', horizontalalignment='center') if (xlim is not None): ax.set_xlim(xlim) else: ax.set_xlim([max((start - 20), 0), (stop + 20)]) ax.set_ylim([0, (40 + (exon_loc.shape[0] * 20))]) ax.set_yticks([]) if (highlight is not None): rect = patches.Rectangle((highlight[0], 0), (highlight[1] - highlight[0]), ax.get_ylim()[1], facecolor=highlight_color, edgecolor='none', alpha=0.5) ax.add_patch(rect)
Takes a graph given as vertices and edges and visualizes its structure
spladder/viz/graph.py
plot_graph
ratschlab/spladder
96
python
def plot_graph(vertices, edges, ax, xlim=None, highlight=None, highlight_color='magenta', node_color='b', edge_color='#999999', label=None): start = vertices.ravel().min() stop = vertices.ravel().max() ax.grid(b=True, which='major', linestyle='--', linewidth=0.2, color='#222222') ax.yaxis.grid(False) patchlist = [] exon_num = np.zeros(((stop - start),)) exon_loc = np.zeros((1, (stop - start))) exon_level = np.zeros((vertices.shape[1],)) for i in range(vertices.shape[1]): cur_vertex = (vertices[(:, i)] - start) exon_num[cur_vertex[0]:cur_vertex[1]] += 1 if np.all((exon_num < 2)): exon_loc[(0, :)] = exon_num level = 0 elif (exon_num.max() > exon_loc.shape[0]): exon_loc = np.r_[(exon_loc, np.zeros((1, (stop - start))))] exon_loc[((- 1), cur_vertex[0]:cur_vertex[1])] = 1 level = (exon_loc.shape[0] - 1) elif (exon_num.max() <= exon_loc.shape[0]): idx = np.where(np.all((exon_loc[(:, cur_vertex[0]:cur_vertex[1])] == 0), 1))[0].min() exon_loc[(idx, cur_vertex[0]:cur_vertex[1])] = 1 level = idx exon_level[i] = level patchlist.append(mpatches.Rectangle([(cur_vertex[0] + start), (20 + (level * 20))], (cur_vertex[1] - cur_vertex[0]), 10, facecolor=node_color, edgecolor='none', alpha=0.7)) linelist = [] intron_loc = np.zeros((1, (stop - start))) if (edges.shape[0] > 1): (ii, jj) = np.where((np.triu(edges) > 0)) for (i, j) in zip(ii, jj): if (vertices[(0, i)] < vertices[(0, j)]): istart = vertices[(1, i)] istop = vertices[(0, j)] level1 = exon_level[i] level2 = exon_level[j] else: istart = vertices[(1, j)] istop = vertices[(0, i)] level1 = exon_level[j] level2 = exon_level[i] cur_intron = [(istart - start), (istop - start)] intron_loc[cur_intron[0]:cur_intron[1]] += 1 leveli = [((istart + istop) * 0.5), ((level1 + level2) * 0.5)] linelist.append(mlines.Line2D([istart, leveli[0], istop], [(25 + (level1 * 20)), (32 + (leveli[1] * 20)), (25 + (level2 * 20))], color=edge_color, linewidth=0.5)) for node in patchlist: ax.add_patch(node) for line in linelist: ax.add_line(line) if 
label: ax.text((start + ((stop - start) / 2)), 12, label, verticalalignment='center', horizontalalignment='center') if (xlim is not None): ax.set_xlim(xlim) else: ax.set_xlim([max((start - 20), 0), (stop + 20)]) ax.set_ylim([0, (40 + (exon_loc.shape[0] * 20))]) ax.set_yticks([]) if (highlight is not None): rect = patches.Rectangle((highlight[0], 0), (highlight[1] - highlight[0]), ax.get_ylim()[1], facecolor=highlight_color, edgecolor='none', alpha=0.5) ax.add_patch(rect)
def plot_graph(vertices, edges, ax, xlim=None, highlight=None, highlight_color='magenta', node_color='b', edge_color='#999999', label=None): start = vertices.ravel().min() stop = vertices.ravel().max() ax.grid(b=True, which='major', linestyle='--', linewidth=0.2, color='#222222') ax.yaxis.grid(False) patchlist = [] exon_num = np.zeros(((stop - start),)) exon_loc = np.zeros((1, (stop - start))) exon_level = np.zeros((vertices.shape[1],)) for i in range(vertices.shape[1]): cur_vertex = (vertices[(:, i)] - start) exon_num[cur_vertex[0]:cur_vertex[1]] += 1 if np.all((exon_num < 2)): exon_loc[(0, :)] = exon_num level = 0 elif (exon_num.max() > exon_loc.shape[0]): exon_loc = np.r_[(exon_loc, np.zeros((1, (stop - start))))] exon_loc[((- 1), cur_vertex[0]:cur_vertex[1])] = 1 level = (exon_loc.shape[0] - 1) elif (exon_num.max() <= exon_loc.shape[0]): idx = np.where(np.all((exon_loc[(:, cur_vertex[0]:cur_vertex[1])] == 0), 1))[0].min() exon_loc[(idx, cur_vertex[0]:cur_vertex[1])] = 1 level = idx exon_level[i] = level patchlist.append(mpatches.Rectangle([(cur_vertex[0] + start), (20 + (level * 20))], (cur_vertex[1] - cur_vertex[0]), 10, facecolor=node_color, edgecolor='none', alpha=0.7)) linelist = [] intron_loc = np.zeros((1, (stop - start))) if (edges.shape[0] > 1): (ii, jj) = np.where((np.triu(edges) > 0)) for (i, j) in zip(ii, jj): if (vertices[(0, i)] < vertices[(0, j)]): istart = vertices[(1, i)] istop = vertices[(0, j)] level1 = exon_level[i] level2 = exon_level[j] else: istart = vertices[(1, j)] istop = vertices[(0, i)] level1 = exon_level[j] level2 = exon_level[i] cur_intron = [(istart - start), (istop - start)] intron_loc[cur_intron[0]:cur_intron[1]] += 1 leveli = [((istart + istop) * 0.5), ((level1 + level2) * 0.5)] linelist.append(mlines.Line2D([istart, leveli[0], istop], [(25 + (level1 * 20)), (32 + (leveli[1] * 20)), (25 + (level2 * 20))], color=edge_color, linewidth=0.5)) for node in patchlist: ax.add_patch(node) for line in linelist: ax.add_line(line) if 
label: ax.text((start + ((stop - start) / 2)), 12, label, verticalalignment='center', horizontalalignment='center') if (xlim is not None): ax.set_xlim(xlim) else: ax.set_xlim([max((start - 20), 0), (stop + 20)]) ax.set_ylim([0, (40 + (exon_loc.shape[0] * 20))]) ax.set_yticks([]) if (highlight is not None): rect = patches.Rectangle((highlight[0], 0), (highlight[1] - highlight[0]), ax.get_ylim()[1], facecolor=highlight_color, edgecolor='none', alpha=0.5) ax.add_patch(rect)<|docstring|>Takes a graph given as vertices and edges and visualizes its structure<|endoftext|>
06c765a786afb757be1866036a0e7080ea8f24eab589dd8e1b4fd11b86bff1a4
def __init__(self, classes_root_dir, this_root_dir, yolo_config, augment=False): '\n :param root_dir:\n :param yolo_config: dictionary that contain the require data for yolo (C, B, K)\n ' self.augment = augment self.root_dir = this_root_dir self.C = yolo_config['C'] self.B = yolo_config['B'] self.K = yolo_config['K'] (classes, class_to_idx) = find_classes(classes_root_dir) self.class_to_idx = class_to_idx self.classes = classes spects = [] count = 0 dir = os.path.expanduser(this_root_dir) for target in sorted(os.listdir(dir)): d = os.path.join(dir, target) if (not os.path.isdir(d)): continue if (target not in classes): continue for (root, _, fnames) in sorted(os.walk(d)): for fname in sorted(fnames): count += 1 if is_audio_file(fname): path = os.path.join(root, fname) x = os.path.getsize(path) if (x < 1000): print(path) continue label = os.path.join(root, fname.replace('.wav', '.wrd')) tclass = self.class_to_idx[target] item = (path, label, tclass) spects.append(item) self.data = spects
:param root_dir: :param yolo_config: dictionary that contain the require data for yolo (C, B, K)
Multitask/ImageAudioClassifier/Helper.py
__init__
nileshpd1211/SRIP-UCSD---Efficient-Deep-Networks
0
python
def __init__(self, classes_root_dir, this_root_dir, yolo_config, augment=False): '\n :param root_dir:\n :param yolo_config: dictionary that contain the require data for yolo (C, B, K)\n ' self.augment = augment self.root_dir = this_root_dir self.C = yolo_config['C'] self.B = yolo_config['B'] self.K = yolo_config['K'] (classes, class_to_idx) = find_classes(classes_root_dir) self.class_to_idx = class_to_idx self.classes = classes spects = [] count = 0 dir = os.path.expanduser(this_root_dir) for target in sorted(os.listdir(dir)): d = os.path.join(dir, target) if (not os.path.isdir(d)): continue if (target not in classes): continue for (root, _, fnames) in sorted(os.walk(d)): for fname in sorted(fnames): count += 1 if is_audio_file(fname): path = os.path.join(root, fname) x = os.path.getsize(path) if (x < 1000): print(path) continue label = os.path.join(root, fname.replace('.wav', '.wrd')) tclass = self.class_to_idx[target] item = (path, label, tclass) spects.append(item) self.data = spects
def __init__(self, classes_root_dir, this_root_dir, yolo_config, augment=False): '\n :param root_dir:\n :param yolo_config: dictionary that contain the require data for yolo (C, B, K)\n ' self.augment = augment self.root_dir = this_root_dir self.C = yolo_config['C'] self.B = yolo_config['B'] self.K = yolo_config['K'] (classes, class_to_idx) = find_classes(classes_root_dir) self.class_to_idx = class_to_idx self.classes = classes spects = [] count = 0 dir = os.path.expanduser(this_root_dir) for target in sorted(os.listdir(dir)): d = os.path.join(dir, target) if (not os.path.isdir(d)): continue if (target not in classes): continue for (root, _, fnames) in sorted(os.walk(d)): for fname in sorted(fnames): count += 1 if is_audio_file(fname): path = os.path.join(root, fname) x = os.path.getsize(path) if (x < 1000): print(path) continue label = os.path.join(root, fname.replace('.wav', '.wrd')) tclass = self.class_to_idx[target] item = (path, label, tclass) spects.append(item) self.data = spects<|docstring|>:param root_dir: :param yolo_config: dictionary that contain the require data for yolo (C, B, K)<|endoftext|>
2251cf9211e0219bbd7950e779b5de897420674c4ab917c64b86c61b8e3cc913
def __getitem__(self, idx): '\n :param idx:\n :return:\n ' features_path = self.data[idx][0] add_augment = False if self.augment: add_augment = utils.random_onoff() (features, dot_len, real_features_len, sr) = utils.spect_loader(features_path, max_len=101, augment=add_augment) target_path = self.data[idx][1] target = open(target_path, 'r').readlines() (_, num_features, features_wav_len) = features.shape '\n # the labels file, is file with few lines,\n # each line represents one item in the wav file and contains : start (in sr), end (in sr), class.\n # yolo needs more details\n # x - represent the center of the box relative to the bounds of the grid cell.\n # w - predicted relative to the whole wav.\n # iou - the confidence prediction represents the IOU between the predicted box and any ground truth box\n\n ' divide = (sr / features_wav_len) width_cell = ((1.0 * features_wav_len) / self.C) line_yolo_data = [] for line_str in target: line = line_str.replace('\t', '_').split(' ') feature_start = math.floor((float(line[0]) / divide)) feature_end = math.floor((float(line[1]) / divide)) object_width = (feature_end - feature_start) center_x = (feature_start + (object_width / 2.0)) cell_index = int((center_x / width_cell)) object_norm_x = ((float(center_x) / width_cell) - int((center_x / width_cell))) object_norm_w = (object_width / features_wav_len) class_label = line[2] object_class = self.class_to_idx[class_label] line_yolo_data.append([cell_index, object_norm_x, object_norm_w, object_class]) kwspotting_target = (torch.ones([self.K]) * (- 1)) target = torch.zeros([self.C, (((self.B * 3) + self.K) + 1)], dtype=torch.float32) for yolo_item in line_yolo_data: index = yolo_item[0] x = yolo_item[1] w = math.sqrt(yolo_item[2]) obj_class = yolo_item[3] target[(index, ((self.B * 3) + obj_class))] = 1 target[(index, (- 1))] = 1 for box in range(0, self.B): target[(index, ((box * 3) + 2))] = 1 target[(index, (box * 3))] = x target[(index, ((box * 3) + 1))] = w 
kwspotting_target[obj_class] = 1 return (features, target, features_path, kwspotting_target)
:param idx: :return:
Multitask/ImageAudioClassifier/Helper.py
__getitem__
nileshpd1211/SRIP-UCSD---Efficient-Deep-Networks
0
python
def __getitem__(self, idx): '\n :param idx:\n :return:\n ' features_path = self.data[idx][0] add_augment = False if self.augment: add_augment = utils.random_onoff() (features, dot_len, real_features_len, sr) = utils.spect_loader(features_path, max_len=101, augment=add_augment) target_path = self.data[idx][1] target = open(target_path, 'r').readlines() (_, num_features, features_wav_len) = features.shape '\n # the labels file, is file with few lines,\n # each line represents one item in the wav file and contains : start (in sr), end (in sr), class.\n # yolo needs more details\n # x - represent the center of the box relative to the bounds of the grid cell.\n # w - predicted relative to the whole wav.\n # iou - the confidence prediction represents the IOU between the predicted box and any ground truth box\n\n ' divide = (sr / features_wav_len) width_cell = ((1.0 * features_wav_len) / self.C) line_yolo_data = [] for line_str in target: line = line_str.replace('\t', '_').split(' ') feature_start = math.floor((float(line[0]) / divide)) feature_end = math.floor((float(line[1]) / divide)) object_width = (feature_end - feature_start) center_x = (feature_start + (object_width / 2.0)) cell_index = int((center_x / width_cell)) object_norm_x = ((float(center_x) / width_cell) - int((center_x / width_cell))) object_norm_w = (object_width / features_wav_len) class_label = line[2] object_class = self.class_to_idx[class_label] line_yolo_data.append([cell_index, object_norm_x, object_norm_w, object_class]) kwspotting_target = (torch.ones([self.K]) * (- 1)) target = torch.zeros([self.C, (((self.B * 3) + self.K) + 1)], dtype=torch.float32) for yolo_item in line_yolo_data: index = yolo_item[0] x = yolo_item[1] w = math.sqrt(yolo_item[2]) obj_class = yolo_item[3] target[(index, ((self.B * 3) + obj_class))] = 1 target[(index, (- 1))] = 1 for box in range(0, self.B): target[(index, ((box * 3) + 2))] = 1 target[(index, (box * 3))] = x target[(index, ((box * 3) + 1))] = w 
kwspotting_target[obj_class] = 1 return (features, target, features_path, kwspotting_target)
def __getitem__(self, idx): '\n :param idx:\n :return:\n ' features_path = self.data[idx][0] add_augment = False if self.augment: add_augment = utils.random_onoff() (features, dot_len, real_features_len, sr) = utils.spect_loader(features_path, max_len=101, augment=add_augment) target_path = self.data[idx][1] target = open(target_path, 'r').readlines() (_, num_features, features_wav_len) = features.shape '\n # the labels file, is file with few lines,\n # each line represents one item in the wav file and contains : start (in sr), end (in sr), class.\n # yolo needs more details\n # x - represent the center of the box relative to the bounds of the grid cell.\n # w - predicted relative to the whole wav.\n # iou - the confidence prediction represents the IOU between the predicted box and any ground truth box\n\n ' divide = (sr / features_wav_len) width_cell = ((1.0 * features_wav_len) / self.C) line_yolo_data = [] for line_str in target: line = line_str.replace('\t', '_').split(' ') feature_start = math.floor((float(line[0]) / divide)) feature_end = math.floor((float(line[1]) / divide)) object_width = (feature_end - feature_start) center_x = (feature_start + (object_width / 2.0)) cell_index = int((center_x / width_cell)) object_norm_x = ((float(center_x) / width_cell) - int((center_x / width_cell))) object_norm_w = (object_width / features_wav_len) class_label = line[2] object_class = self.class_to_idx[class_label] line_yolo_data.append([cell_index, object_norm_x, object_norm_w, object_class]) kwspotting_target = (torch.ones([self.K]) * (- 1)) target = torch.zeros([self.C, (((self.B * 3) + self.K) + 1)], dtype=torch.float32) for yolo_item in line_yolo_data: index = yolo_item[0] x = yolo_item[1] w = math.sqrt(yolo_item[2]) obj_class = yolo_item[3] target[(index, ((self.B * 3) + obj_class))] = 1 target[(index, (- 1))] = 1 for box in range(0, self.B): target[(index, ((box * 3) + 2))] = 1 target[(index, (box * 3))] = x target[(index, ((box * 3) + 1))] = w 
kwspotting_target[obj_class] = 1 return (features, target, features_path, kwspotting_target)<|docstring|>:param idx: :return:<|endoftext|>
b71335bbad624951a0b6757bf15e0ecd2d1cbf4ebd02e5de403f832d60394d49
def hIndex(self, citations): '\n :type citations: List[int]\n :rtype: int\n ' n = len(citations) (left, right) = (0, (n - 1)) while (left <= right): mid = (left + ((right - left) / 2)) if (citations[mid] >= (n - mid)): right = (mid - 1) else: left = (mid + 1) return (n - left)
:type citations: List[int] :rtype: int
tools/leetcode.275.H-Index II/leetcode.275.H-Index II.submission1.py
hIndex
tedye/leetcode
4
python
def hIndex(self, citations): '\n :type citations: List[int]\n :rtype: int\n ' n = len(citations) (left, right) = (0, (n - 1)) while (left <= right): mid = (left + ((right - left) / 2)) if (citations[mid] >= (n - mid)): right = (mid - 1) else: left = (mid + 1) return (n - left)
def hIndex(self, citations): '\n :type citations: List[int]\n :rtype: int\n ' n = len(citations) (left, right) = (0, (n - 1)) while (left <= right): mid = (left + ((right - left) / 2)) if (citations[mid] >= (n - mid)): right = (mid - 1) else: left = (mid + 1) return (n - left)<|docstring|>:type citations: List[int] :rtype: int<|endoftext|>
d240279e07d79914326ff5bc4417f2c7fa32f54eb5f39c1740e7f0eba3320d3a
def is_alien_sorted(self, a, x): '\n Determines whether input strings follow specified lexographic order.\n\n :param list[str] a: input array of ordered strings\n :param str x: string representing target lexographic order\n :return: True if input array is sorted by target lexographic order\n :rtype: bool\n ' if ((not a) or (not x)): return False d = {c: i for (i, c) in enumerate(x, 1)} d[''] = 0 n = (len(a) - 1) m = max((len(s) for s in a)) j = 0 while (j < m): for i in range(n, 0, (- 1)): if (j >= len(a[i])): u = '' else: u = a[i][j] if (j >= len(a[(i - 1)])): v = '' else: v = a[(i - 1)][j] if (d[u] > d[v]): a[i] = a[(i - 1)] elif (u == v): continue else: return False j += 1 return True
Determines whether input strings follow specified lexographic order. :param list[str] a: input array of ordered strings :param str x: string representing target lexographic order :return: True if input array is sorted by target lexographic order :rtype: bool
0953_verifying_alien_dictionary/python_source.py
is_alien_sorted
arthurdysart/LeetCode
0
python
def is_alien_sorted(self, a, x): '\n Determines whether input strings follow specified lexographic order.\n\n :param list[str] a: input array of ordered strings\n :param str x: string representing target lexographic order\n :return: True if input array is sorted by target lexographic order\n :rtype: bool\n ' if ((not a) or (not x)): return False d = {c: i for (i, c) in enumerate(x, 1)} d[] = 0 n = (len(a) - 1) m = max((len(s) for s in a)) j = 0 while (j < m): for i in range(n, 0, (- 1)): if (j >= len(a[i])): u = else: u = a[i][j] if (j >= len(a[(i - 1)])): v = else: v = a[(i - 1)][j] if (d[u] > d[v]): a[i] = a[(i - 1)] elif (u == v): continue else: return False j += 1 return True
def is_alien_sorted(self, a, x): '\n Determines whether input strings follow specified lexographic order.\n\n :param list[str] a: input array of ordered strings\n :param str x: string representing target lexographic order\n :return: True if input array is sorted by target lexographic order\n :rtype: bool\n ' if ((not a) or (not x)): return False d = {c: i for (i, c) in enumerate(x, 1)} d[] = 0 n = (len(a) - 1) m = max((len(s) for s in a)) j = 0 while (j < m): for i in range(n, 0, (- 1)): if (j >= len(a[i])): u = else: u = a[i][j] if (j >= len(a[(i - 1)])): v = else: v = a[(i - 1)][j] if (d[u] > d[v]): a[i] = a[(i - 1)] elif (u == v): continue else: return False j += 1 return True<|docstring|>Determines whether input strings follow specified lexographic order. :param list[str] a: input array of ordered strings :param str x: string representing target lexographic order :return: True if input array is sorted by target lexographic order :rtype: bool<|endoftext|>
8a4918fa56de7fc7952f0af094a56d509d17c7994152e56659752c54a5a213bd
def stdin(self, sys_stdin): '\n Imports standard input.\n\n :param _io.TextIOWrapper sys_stdin: standard input\n :return: input array of ordered strings and target string\n :rtype: tup[list[str], str]\n ' inputs = [x.strip('[]"\n') for x in sys_stdin] if (inputs[0] == ''): a = list() else: a = [str(x).strip('"') for x in inputs[0].split('","')] x = inputs[1] return (a, x)
Imports standard input. :param _io.TextIOWrapper sys_stdin: standard input :return: input array of ordered strings and target string :rtype: tup[list[str], str]
0953_verifying_alien_dictionary/python_source.py
stdin
arthurdysart/LeetCode
0
python
def stdin(self, sys_stdin): '\n Imports standard input.\n\n :param _io.TextIOWrapper sys_stdin: standard input\n :return: input array of ordered strings and target string\n :rtype: tup[list[str], str]\n ' inputs = [x.strip('[]"\n') for x in sys_stdin] if (inputs[0] == ): a = list() else: a = [str(x).strip('"') for x in inputs[0].split('","')] x = inputs[1] return (a, x)
def stdin(self, sys_stdin): '\n Imports standard input.\n\n :param _io.TextIOWrapper sys_stdin: standard input\n :return: input array of ordered strings and target string\n :rtype: tup[list[str], str]\n ' inputs = [x.strip('[]"\n') for x in sys_stdin] if (inputs[0] == ): a = list() else: a = [str(x).strip('"') for x in inputs[0].split('","')] x = inputs[1] return (a, x)<|docstring|>Imports standard input. :param _io.TextIOWrapper sys_stdin: standard input :return: input array of ordered strings and target string :rtype: tup[list[str], str]<|endoftext|>
77d0f790ef6024adf98538666e66cf5336cfea9f80fe28bfc3b50a08c1e3a9fe
def test_empty_dag(self): 'Empty DAG has empty counts.' circuit = QuantumCircuit() dag = circuit_to_dag(circuit) pass_ = CountOpsLongestPath() _ = pass_.run(dag) self.assertDictEqual(pass_.property_set['count_ops_longest_path'], {})
Empty DAG has empty counts.
test/python/transpiler/test_count_ops_longest_path_pass.py
test_empty_dag
ikkoham/qiskit-core
1,456
python
def test_empty_dag(self): circuit = QuantumCircuit() dag = circuit_to_dag(circuit) pass_ = CountOpsLongestPath() _ = pass_.run(dag) self.assertDictEqual(pass_.property_set['count_ops_longest_path'], {})
def test_empty_dag(self): circuit = QuantumCircuit() dag = circuit_to_dag(circuit) pass_ = CountOpsLongestPath() _ = pass_.run(dag) self.assertDictEqual(pass_.property_set['count_ops_longest_path'], {})<|docstring|>Empty DAG has empty counts.<|endoftext|>
22ab0475905474e90ced6c8c5b7291ffb9ec334cf3604f96d37cf1819b9392d1
def test_just_qubits(self): 'A dag with 9 operations (3 CXs, 2Xs, 2Ys and 2 Hs) on the longest\n path\n ' qr = QuantumRegister(2) circuit = QuantumCircuit(qr) circuit.cx(qr[0], qr[1]) circuit.x(qr[0]) circuit.y(qr[0]) circuit.h(qr[0]) circuit.cx(qr[0], qr[1]) circuit.x(qr[1]) circuit.y(qr[1]) circuit.h(qr[1]) circuit.cx(qr[0], qr[1]) dag = circuit_to_dag(circuit) pass_ = CountOpsLongestPath() _ = pass_.run(dag) count_ops = pass_.property_set['count_ops_longest_path'] self.assertDictEqual(count_ops, {'cx': 3, 'x': 2, 'y': 2, 'h': 2})
A dag with 9 operations (3 CXs, 2Xs, 2Ys and 2 Hs) on the longest path
test/python/transpiler/test_count_ops_longest_path_pass.py
test_just_qubits
ikkoham/qiskit-core
1,456
python
def test_just_qubits(self): 'A dag with 9 operations (3 CXs, 2Xs, 2Ys and 2 Hs) on the longest\n path\n ' qr = QuantumRegister(2) circuit = QuantumCircuit(qr) circuit.cx(qr[0], qr[1]) circuit.x(qr[0]) circuit.y(qr[0]) circuit.h(qr[0]) circuit.cx(qr[0], qr[1]) circuit.x(qr[1]) circuit.y(qr[1]) circuit.h(qr[1]) circuit.cx(qr[0], qr[1]) dag = circuit_to_dag(circuit) pass_ = CountOpsLongestPath() _ = pass_.run(dag) count_ops = pass_.property_set['count_ops_longest_path'] self.assertDictEqual(count_ops, {'cx': 3, 'x': 2, 'y': 2, 'h': 2})
def test_just_qubits(self): 'A dag with 9 operations (3 CXs, 2Xs, 2Ys and 2 Hs) on the longest\n path\n ' qr = QuantumRegister(2) circuit = QuantumCircuit(qr) circuit.cx(qr[0], qr[1]) circuit.x(qr[0]) circuit.y(qr[0]) circuit.h(qr[0]) circuit.cx(qr[0], qr[1]) circuit.x(qr[1]) circuit.y(qr[1]) circuit.h(qr[1]) circuit.cx(qr[0], qr[1]) dag = circuit_to_dag(circuit) pass_ = CountOpsLongestPath() _ = pass_.run(dag) count_ops = pass_.property_set['count_ops_longest_path'] self.assertDictEqual(count_ops, {'cx': 3, 'x': 2, 'y': 2, 'h': 2})<|docstring|>A dag with 9 operations (3 CXs, 2Xs, 2Ys and 2 Hs) on the longest path<|endoftext|>
82a1f146eaf101a08df80a614ffdd02e2a64a51ae7616d3478a392ea8c08ad80
@staticmethod def add_args(arg_parser: argparse.ArgumentParser) -> None: "\n Override this to add arguments to the CLI argument parser. These args\n will show up when the user invokes ``libcst.tool codemod`` with\n ``--help``. They will also be presented to your class's ``__init__``\n method. So, if you define a command with an argument 'foo', you should also\n have a corresponding 'foo' positional or keyword argument in your\n class's ``__init__`` method.\n " pass
Override this to add arguments to the CLI argument parser. These args will show up when the user invokes ``libcst.tool codemod`` with ``--help``. They will also be presented to your class's ``__init__`` method. So, if you define a command with an argument 'foo', you should also have a corresponding 'foo' positional or keyword argument in your class's ``__init__`` method.
libcst/codemod/_command.py
add_args
Instagram/LibCST
880
python
@staticmethod def add_args(arg_parser: argparse.ArgumentParser) -> None: "\n Override this to add arguments to the CLI argument parser. These args\n will show up when the user invokes ``libcst.tool codemod`` with\n ``--help``. They will also be presented to your class's ``__init__``\n method. So, if you define a command with an argument 'foo', you should also\n have a corresponding 'foo' positional or keyword argument in your\n class's ``__init__`` method.\n " pass
@staticmethod def add_args(arg_parser: argparse.ArgumentParser) -> None: "\n Override this to add arguments to the CLI argument parser. These args\n will show up when the user invokes ``libcst.tool codemod`` with\n ``--help``. They will also be presented to your class's ``__init__``\n method. So, if you define a command with an argument 'foo', you should also\n have a corresponding 'foo' positional or keyword argument in your\n class's ``__init__`` method.\n " pass<|docstring|>Override this to add arguments to the CLI argument parser. These args will show up when the user invokes ``libcst.tool codemod`` with ``--help``. They will also be presented to your class's ``__init__`` method. So, if you define a command with an argument 'foo', you should also have a corresponding 'foo' positional or keyword argument in your class's ``__init__`` method.<|endoftext|>
180f5fe5fb058f75111ecb2593a0ba537116919fc788ded2d7561772987014b8
@abstractmethod def transform_module_impl(self, tree: Module) -> Module: '\n Override this with your transform. You should take in the tree, optionally\n mutate it and then return the mutated version. The module reference and all\n calculated metadata are available for the lifetime of this function.\n ' ...
Override this with your transform. You should take in the tree, optionally mutate it and then return the mutated version. The module reference and all calculated metadata are available for the lifetime of this function.
libcst/codemod/_command.py
transform_module_impl
Instagram/LibCST
880
python
@abstractmethod def transform_module_impl(self, tree: Module) -> Module: '\n Override this with your transform. You should take in the tree, optionally\n mutate it and then return the mutated version. The module reference and all\n calculated metadata are available for the lifetime of this function.\n ' ...
@abstractmethod def transform_module_impl(self, tree: Module) -> Module: '\n Override this with your transform. You should take in the tree, optionally\n mutate it and then return the mutated version. The module reference and all\n calculated metadata are available for the lifetime of this function.\n ' ...<|docstring|>Override this with your transform. You should take in the tree, optionally mutate it and then return the mutated version. The module reference and all calculated metadata are available for the lifetime of this function.<|endoftext|>
085b981ec8b43303f55ea9740aa6d6d6dabdcd498361d21424514019017cfe05
@abstractmethod def get_transforms(self) -> Generator[(Type[Codemod], None, None)]: '\n A generator which yields one or more subclasses of\n :class:`~libcst.codemod.Codemod`. In the general case, you will usually\n yield a series of classes, but it is possible to programmatically decide\n which classes to yield depending on the contents of the context\n :attr:`~libcst.codemod.CodemodContext.scratch`.\n\n Note that you should yield classes, not instances of classes, as the\n point of :class:`~libcst.codemod.MagicArgsCodemodCommand` is to\n instantiate them for you with the contents of\n :attr:`~libcst.codemod.CodemodContext.scratch`.\n ' ...
A generator which yields one or more subclasses of :class:`~libcst.codemod.Codemod`. In the general case, you will usually yield a series of classes, but it is possible to programmatically decide which classes to yield depending on the contents of the context :attr:`~libcst.codemod.CodemodContext.scratch`. Note that you should yield classes, not instances of classes, as the point of :class:`~libcst.codemod.MagicArgsCodemodCommand` is to instantiate them for you with the contents of :attr:`~libcst.codemod.CodemodContext.scratch`.
libcst/codemod/_command.py
get_transforms
Instagram/LibCST
880
python
@abstractmethod def get_transforms(self) -> Generator[(Type[Codemod], None, None)]: '\n A generator which yields one or more subclasses of\n :class:`~libcst.codemod.Codemod`. In the general case, you will usually\n yield a series of classes, but it is possible to programmatically decide\n which classes to yield depending on the contents of the context\n :attr:`~libcst.codemod.CodemodContext.scratch`.\n\n Note that you should yield classes, not instances of classes, as the\n point of :class:`~libcst.codemod.MagicArgsCodemodCommand` is to\n instantiate them for you with the contents of\n :attr:`~libcst.codemod.CodemodContext.scratch`.\n ' ...
@abstractmethod def get_transforms(self) -> Generator[(Type[Codemod], None, None)]: '\n A generator which yields one or more subclasses of\n :class:`~libcst.codemod.Codemod`. In the general case, you will usually\n yield a series of classes, but it is possible to programmatically decide\n which classes to yield depending on the contents of the context\n :attr:`~libcst.codemod.CodemodContext.scratch`.\n\n Note that you should yield classes, not instances of classes, as the\n point of :class:`~libcst.codemod.MagicArgsCodemodCommand` is to\n instantiate them for you with the contents of\n :attr:`~libcst.codemod.CodemodContext.scratch`.\n ' ...<|docstring|>A generator which yields one or more subclasses of :class:`~libcst.codemod.Codemod`. In the general case, you will usually yield a series of classes, but it is possible to programmatically decide which classes to yield depending on the contents of the context :attr:`~libcst.codemod.CodemodContext.scratch`. Note that you should yield classes, not instances of classes, as the point of :class:`~libcst.codemod.MagicArgsCodemodCommand` is to instantiate them for you with the contents of :attr:`~libcst.codemod.CodemodContext.scratch`.<|endoftext|>
c4395fbe5ef6cc471595ca298803691eea6ed88bec1ab6db552f028ffe9e9705
@app.http_get('/api/architectures') @app.authenticated async def get_architectures(request): '\n Gets a list of all architectures from\n the database.\n\n ---\n description: Returns a list of architectures\n tags:\n - Architectures\n produces:\n - text/json\n responses:\n "200":\n description: successful\n "500":\n description: internal server error\n ' query = request.cirrina.db_session.query(Architecture) data = {'total_result_count': query.count(), 'results': []} for architecture in query.filter((Architecture.name != 'all')).all(): data['results'].append({'id': architecture.id, 'name': architecture.name}) return web.json_response(data)
Gets a list of all architectures from the database. --- description: Returns a list of architectures tags: - Architectures produces: - text/json responses: "200": description: successful "500": description: internal server error
molior/api/architecture.py
get_architectures
randombenj/molior
0
python
@app.http_get('/api/architectures') @app.authenticated async def get_architectures(request): '\n Gets a list of all architectures from\n the database.\n\n ---\n description: Returns a list of architectures\n tags:\n - Architectures\n produces:\n - text/json\n responses:\n "200":\n description: successful\n "500":\n description: internal server error\n ' query = request.cirrina.db_session.query(Architecture) data = {'total_result_count': query.count(), 'results': []} for architecture in query.filter((Architecture.name != 'all')).all(): data['results'].append({'id': architecture.id, 'name': architecture.name}) return web.json_response(data)
@app.http_get('/api/architectures') @app.authenticated async def get_architectures(request): '\n Gets a list of all architectures from\n the database.\n\n ---\n description: Returns a list of architectures\n tags:\n - Architectures\n produces:\n - text/json\n responses:\n "200":\n description: successful\n "500":\n description: internal server error\n ' query = request.cirrina.db_session.query(Architecture) data = {'total_result_count': query.count(), 'results': []} for architecture in query.filter((Architecture.name != 'all')).all(): data['results'].append({'id': architecture.id, 'name': architecture.name}) return web.json_response(data)<|docstring|>Gets a list of all architectures from the database. --- description: Returns a list of architectures tags: - Architectures produces: - text/json responses: "200": description: successful "500": description: internal server error<|endoftext|>
dcf375c490e7c5191f9b9da8cafa661abe6c9112b4a2b6d71f6a911ed708d6ad
def _showvid(video_name, from_frame=0): '\n HELPER FUNCTION\n Navigate through video frame by frame from a certain frame.\n\n :param video_name: videopath\n :param from_frame: show vid from this frame\n :return: nothing\n ' cap = cv2.VideoCapture(str(video_name)) cap.set(1, from_frame) for i in range(from_frame, (from_frame + 999999)): (ret, frame) = cap.read() if (not ret): print('Grab frame unsuccessful. ABORT MISSION!') break cv2.imshow(('frame: ' + str(i)), frame) key = cv2.waitKey() if (key == ord('q')): break cv2.destroyAllWindows() cv2.destroyAllWindows() cv2.destroyAllWindows()
HELPER FUNCTION Navigate through video frame by frame from a certain frame. :param video_name: videopath :param from_frame: show vid from this frame :return: nothing
PredictFeatures/OrientationLabeler.py
_showvid
Iglohut/autoscore_3d
0
python
def _showvid(video_name, from_frame=0): '\n HELPER FUNCTION\n Navigate through video frame by frame from a certain frame.\n\n :param video_name: videopath\n :param from_frame: show vid from this frame\n :return: nothing\n ' cap = cv2.VideoCapture(str(video_name)) cap.set(1, from_frame) for i in range(from_frame, (from_frame + 999999)): (ret, frame) = cap.read() if (not ret): print('Grab frame unsuccessful. ABORT MISSION!') break cv2.imshow(('frame: ' + str(i)), frame) key = cv2.waitKey() if (key == ord('q')): break cv2.destroyAllWindows() cv2.destroyAllWindows() cv2.destroyAllWindows()
def _showvid(video_name, from_frame=0): '\n HELPER FUNCTION\n Navigate through video frame by frame from a certain frame.\n\n :param video_name: videopath\n :param from_frame: show vid from this frame\n :return: nothing\n ' cap = cv2.VideoCapture(str(video_name)) cap.set(1, from_frame) for i in range(from_frame, (from_frame + 999999)): (ret, frame) = cap.read() if (not ret): print('Grab frame unsuccessful. ABORT MISSION!') break cv2.imshow(('frame: ' + str(i)), frame) key = cv2.waitKey() if (key == ord('q')): break cv2.destroyAllWindows() cv2.destroyAllWindows() cv2.destroyAllWindows()<|docstring|>HELPER FUNCTION Navigate through video frame by frame from a certain frame. :param video_name: videopath :param from_frame: show vid from this frame :return: nothing<|endoftext|>
9e369e65be931b91d23a29a3edeff0c7c0a6096f6582921674fbd8d462592ad5
def _vidlength(video_name): '\n\n :param video_name: path to video\n :return: number of frames in video\n ' cap = cv2.VideoCapture(str(video_name)) property_id = int(cv2.CAP_PROP_FRAME_COUNT) length = int(cv2.VideoCapture.get(cap, property_id)) return length
:param video_name: path to video :return: number of frames in video
PredictFeatures/OrientationLabeler.py
_vidlength
Iglohut/autoscore_3d
0
python
def _vidlength(video_name): '\n\n :param video_name: path to video\n :return: number of frames in video\n ' cap = cv2.VideoCapture(str(video_name)) property_id = int(cv2.CAP_PROP_FRAME_COUNT) length = int(cv2.VideoCapture.get(cap, property_id)) return length
def _vidlength(video_name): '\n\n :param video_name: path to video\n :return: number of frames in video\n ' cap = cv2.VideoCapture(str(video_name)) property_id = int(cv2.CAP_PROP_FRAME_COUNT) length = int(cv2.VideoCapture.get(cap, property_id)) return length<|docstring|>:param video_name: path to video :return: number of frames in video<|endoftext|>
dec98e363ba80c4fc077be997195ccf59efa8720611dcebb02e270464ee71a2c
def myKalman(measurements): '\n This Kalman smoothes a timeseries of (x, y) coordinates for a single limb.\n :param measurements: coordinate tuple list [(x1, y1), ..., (xn, yx)]\n :return: kalman smoothed coordinates list: [[x]], [y]]\n ' initial_state_mean = [measurements[(0, 0)], 0, measurements[(0, 1)], 0] transition_matrix = [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]] observation_matrix = [[1, 0, 0, 0], [0, 0, 1, 0]] kf1 = KalmanFilter(transition_matrices=transition_matrix, observation_matrices=observation_matrix, initial_state_mean=initial_state_mean) kf1 = kf1.em(measurements, n_iter=5) (smoothed_state_means, smoothed_state_covariances) = kf1.smooth(measurements) return [smoothed_state_means[(:, 0)], smoothed_state_means[(:, 2)]]
This Kalman smoothes a timeseries of (x, y) coordinates for a single limb. :param measurements: coordinate tuple list [(x1, y1), ..., (xn, yx)] :return: kalman smoothed coordinates list: [[x]], [y]]
PredictFeatures/OrientationLabeler.py
myKalman
Iglohut/autoscore_3d
0
python
def myKalman(measurements): '\n This Kalman smoothes a timeseries of (x, y) coordinates for a single limb.\n :param measurements: coordinate tuple list [(x1, y1), ..., (xn, yx)]\n :return: kalman smoothed coordinates list: [[x]], [y]]\n ' initial_state_mean = [measurements[(0, 0)], 0, measurements[(0, 1)], 0] transition_matrix = [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]] observation_matrix = [[1, 0, 0, 0], [0, 0, 1, 0]] kf1 = KalmanFilter(transition_matrices=transition_matrix, observation_matrices=observation_matrix, initial_state_mean=initial_state_mean) kf1 = kf1.em(measurements, n_iter=5) (smoothed_state_means, smoothed_state_covariances) = kf1.smooth(measurements) return [smoothed_state_means[(:, 0)], smoothed_state_means[(:, 2)]]
def myKalman(measurements): '\n This Kalman smoothes a timeseries of (x, y) coordinates for a single limb.\n :param measurements: coordinate tuple list [(x1, y1), ..., (xn, yx)]\n :return: kalman smoothed coordinates list: [[x]], [y]]\n ' initial_state_mean = [measurements[(0, 0)], 0, measurements[(0, 1)], 0] transition_matrix = [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]] observation_matrix = [[1, 0, 0, 0], [0, 0, 1, 0]] kf1 = KalmanFilter(transition_matrices=transition_matrix, observation_matrices=observation_matrix, initial_state_mean=initial_state_mean) kf1 = kf1.em(measurements, n_iter=5) (smoothed_state_means, smoothed_state_covariances) = kf1.smooth(measurements) return [smoothed_state_means[(:, 0)], smoothed_state_means[(:, 2)]]<|docstring|>This Kalman smoothes a timeseries of (x, y) coordinates for a single limb. :param measurements: coordinate tuple list [(x1, y1), ..., (xn, yx)] :return: kalman smoothed coordinates list: [[x]], [y]]<|endoftext|>
2227b6f4d1fb6fc20d0ac56e3aec3cbd5e16b044cf89728da70b1d6b2d31048b
def kalman_df(df): '\n Doe skalman smoothing on all bodyparts of df\n :param df: DataFrame with only bodyparts (no HD yet!)\n :return: smoothed df\n ' cols = df.columns.values.tolist()[1:] cols = [col for col in cols if ('likelihood' not in col)] for idx in range(0, len(cols), 2): bodylimb_x = cols[idx] bodylimb_y = cols[(idx + 1)] measurements = np.asarray(list(zip(list(df[bodylimb_x]), list(df[bodylimb_y])))) estimated_measurements = myKalman(measurements) estimated_x = estimated_measurements[0] estimated_y = estimated_measurements[1] df[bodylimb_x] = estimated_x df[bodylimb_y] = estimated_y return df
Doe skalman smoothing on all bodyparts of df :param df: DataFrame with only bodyparts (no HD yet!) :return: smoothed df
PredictFeatures/OrientationLabeler.py
kalman_df
Iglohut/autoscore_3d
0
python
def kalman_df(df): '\n Doe skalman smoothing on all bodyparts of df\n :param df: DataFrame with only bodyparts (no HD yet!)\n :return: smoothed df\n ' cols = df.columns.values.tolist()[1:] cols = [col for col in cols if ('likelihood' not in col)] for idx in range(0, len(cols), 2): bodylimb_x = cols[idx] bodylimb_y = cols[(idx + 1)] measurements = np.asarray(list(zip(list(df[bodylimb_x]), list(df[bodylimb_y])))) estimated_measurements = myKalman(measurements) estimated_x = estimated_measurements[0] estimated_y = estimated_measurements[1] df[bodylimb_x] = estimated_x df[bodylimb_y] = estimated_y return df
def kalman_df(df): '\n Doe skalman smoothing on all bodyparts of df\n :param df: DataFrame with only bodyparts (no HD yet!)\n :return: smoothed df\n ' cols = df.columns.values.tolist()[1:] cols = [col for col in cols if ('likelihood' not in col)] for idx in range(0, len(cols), 2): bodylimb_x = cols[idx] bodylimb_y = cols[(idx + 1)] measurements = np.asarray(list(zip(list(df[bodylimb_x]), list(df[bodylimb_y])))) estimated_measurements = myKalman(measurements) estimated_x = estimated_measurements[0] estimated_y = estimated_measurements[1] df[bodylimb_x] = estimated_x df[bodylimb_y] = estimated_y return df<|docstring|>Doe skalman smoothing on all bodyparts of df :param df: DataFrame with only bodyparts (no HD yet!) :return: smoothed df<|endoftext|>
6203267fdea6a1c652fd9f304bc6a6b5893415fe605a68363695239b6c6db77b
@MAX_API.doc('predict') @MAX_API.expect(input_parser) def post(self): 'Generate audio embedding from input data' result = {'status': 'error'} true_start = time.time() args = input_parser.parse_args() if ((args['audio'] is None) and (args['url'] is None)): e = BadRequest() e.data = {'status': 'error', 'message': 'Need to provide either an audio or url argument'} raise e audio_data = {} uuid_map = {} if (args['url'] is not None): url_splt = args['url'].split(',') for url in url_splt: audio_data[url] = urllib.request.urlopen(url).read() else: audio_data[args['audio'].filename] = args['audio'].read() print(f'audio_data: {audio_data.keys()}') for filestring in audio_data.keys(): uuid_map[filestring] = uuid.uuid1() if ('mp3' in filestring): print(f'Creating file: /{uuid_map[filestring]}.mp3') file = open(f'/{uuid_map[filestring]}.mp3', 'wb+') file.write(audio_data[filestring]) file.close() elif ('wav' in filestring): print(f'Creating file: /{uuid_map[filestring]}.wav') file = open(f'/{uuid_map[filestring]}.wav', 'wb+') file.write(audio_data[filestring]) file.close() else: e = BadRequest() e.data = {'status': 'error', 'message': 'Invalid file type/extension'} raise e start = time.time() commands = [(f'ffmpeg -i /{uuid_map[x]}.mp3 /{uuid_map[x]}.wav' if ('mp3' in x) else '') for x in uuid_map.keys()] threads = [] for command in commands: if (command != ''): print(f' Running command: {command}') threads.append(threading.Thread(target=run_sys, args=(command,))) for thread in threads: thread.start() for thread in threads: thread.join() print(f'Converted mp3 files in {(time.time() - start)}s') start = time.time() for filestring in uuid_map.keys(): audio_data[filestring] = open(f'/{uuid_map[filestring]}.wav', 'rb').read() os.remove(f'/{uuid_map[filestring]}.wav') if ('mp3' in filestring): os.remove(f'/{uuid_map[filestring]}.mp3') print(f'Deleted files in {(time.time() - start)}s') res = {} threads = [] for filestring in audio_data.keys(): 
threads.append(threading.Thread(target=run_model, args=(self.model_wrapper.predict, filestring, audio_data[filestring], res))) for thread in threads: thread.start() for thread in threads: thread.join() result['embedding'] = res result['status'] = 'ok' print(f'Completed processing in {(time.time() - true_start)}s') return result
Generate audio embedding from input data
api/predict.py
post
dad9489/MAX-Audio-Embedding-Generator
0
python
@MAX_API.doc('predict') @MAX_API.expect(input_parser) def post(self): result = {'status': 'error'} true_start = time.time() args = input_parser.parse_args() if ((args['audio'] is None) and (args['url'] is None)): e = BadRequest() e.data = {'status': 'error', 'message': 'Need to provide either an audio or url argument'} raise e audio_data = {} uuid_map = {} if (args['url'] is not None): url_splt = args['url'].split(',') for url in url_splt: audio_data[url] = urllib.request.urlopen(url).read() else: audio_data[args['audio'].filename] = args['audio'].read() print(f'audio_data: {audio_data.keys()}') for filestring in audio_data.keys(): uuid_map[filestring] = uuid.uuid1() if ('mp3' in filestring): print(f'Creating file: /{uuid_map[filestring]}.mp3') file = open(f'/{uuid_map[filestring]}.mp3', 'wb+') file.write(audio_data[filestring]) file.close() elif ('wav' in filestring): print(f'Creating file: /{uuid_map[filestring]}.wav') file = open(f'/{uuid_map[filestring]}.wav', 'wb+') file.write(audio_data[filestring]) file.close() else: e = BadRequest() e.data = {'status': 'error', 'message': 'Invalid file type/extension'} raise e start = time.time() commands = [(f'ffmpeg -i /{uuid_map[x]}.mp3 /{uuid_map[x]}.wav' if ('mp3' in x) else ) for x in uuid_map.keys()] threads = [] for command in commands: if (command != ): print(f' Running command: {command}') threads.append(threading.Thread(target=run_sys, args=(command,))) for thread in threads: thread.start() for thread in threads: thread.join() print(f'Converted mp3 files in {(time.time() - start)}s') start = time.time() for filestring in uuid_map.keys(): audio_data[filestring] = open(f'/{uuid_map[filestring]}.wav', 'rb').read() os.remove(f'/{uuid_map[filestring]}.wav') if ('mp3' in filestring): os.remove(f'/{uuid_map[filestring]}.mp3') print(f'Deleted files in {(time.time() - start)}s') res = {} threads = [] for filestring in audio_data.keys(): threads.append(threading.Thread(target=run_model, args=(self.model_wrapper.predict, 
filestring, audio_data[filestring], res))) for thread in threads: thread.start() for thread in threads: thread.join() result['embedding'] = res result['status'] = 'ok' print(f'Completed processing in {(time.time() - true_start)}s') return result
@MAX_API.doc('predict') @MAX_API.expect(input_parser) def post(self): result = {'status': 'error'} true_start = time.time() args = input_parser.parse_args() if ((args['audio'] is None) and (args['url'] is None)): e = BadRequest() e.data = {'status': 'error', 'message': 'Need to provide either an audio or url argument'} raise e audio_data = {} uuid_map = {} if (args['url'] is not None): url_splt = args['url'].split(',') for url in url_splt: audio_data[url] = urllib.request.urlopen(url).read() else: audio_data[args['audio'].filename] = args['audio'].read() print(f'audio_data: {audio_data.keys()}') for filestring in audio_data.keys(): uuid_map[filestring] = uuid.uuid1() if ('mp3' in filestring): print(f'Creating file: /{uuid_map[filestring]}.mp3') file = open(f'/{uuid_map[filestring]}.mp3', 'wb+') file.write(audio_data[filestring]) file.close() elif ('wav' in filestring): print(f'Creating file: /{uuid_map[filestring]}.wav') file = open(f'/{uuid_map[filestring]}.wav', 'wb+') file.write(audio_data[filestring]) file.close() else: e = BadRequest() e.data = {'status': 'error', 'message': 'Invalid file type/extension'} raise e start = time.time() commands = [(f'ffmpeg -i /{uuid_map[x]}.mp3 /{uuid_map[x]}.wav' if ('mp3' in x) else ) for x in uuid_map.keys()] threads = [] for command in commands: if (command != ): print(f' Running command: {command}') threads.append(threading.Thread(target=run_sys, args=(command,))) for thread in threads: thread.start() for thread in threads: thread.join() print(f'Converted mp3 files in {(time.time() - start)}s') start = time.time() for filestring in uuid_map.keys(): audio_data[filestring] = open(f'/{uuid_map[filestring]}.wav', 'rb').read() os.remove(f'/{uuid_map[filestring]}.wav') if ('mp3' in filestring): os.remove(f'/{uuid_map[filestring]}.mp3') print(f'Deleted files in {(time.time() - start)}s') res = {} threads = [] for filestring in audio_data.keys(): threads.append(threading.Thread(target=run_model, args=(self.model_wrapper.predict, 
filestring, audio_data[filestring], res))) for thread in threads: thread.start() for thread in threads: thread.join() result['embedding'] = res result['status'] = 'ok' print(f'Completed processing in {(time.time() - true_start)}s') return result<|docstring|>Generate audio embedding from input data<|endoftext|>
566995742683f5de5548e42f1f4836c041fad16e6bf20e877bc12523313fe440
def __init__(self, laser_int): "\n\t\tInitialize class. \n\t\t\n\t\tParameters\n\t\t----------\n\t\tlaser_int: str\n\t\t\tlaser intensity (in filename). e.g. `_0.5mW' \n\t\t\t\n\t\t" self.dirs_to_plot = None self.data = None self.genotypes = None self.laser_int = laser_int
Initialize class. Parameters ---------- laser_int: str laser intensity (in filename). e.g. `_0.5mW'
analysis/plot_fwd_pcts.py
__init__
faymanns/Qbio_2018
0
python
def __init__(self, laser_int): "\n\t\tInitialize class. \n\t\t\n\t\tParameters\n\t\t----------\n\t\tlaser_int: str\n\t\t\tlaser intensity (in filename). e.g. `_0.5mW' \n\t\t\t\n\t\t" self.dirs_to_plot = None self.data = None self.genotypes = None self.laser_int = laser_int
def __init__(self, laser_int): "\n\t\tInitialize class. \n\t\t\n\t\tParameters\n\t\t----------\n\t\tlaser_int: str\n\t\t\tlaser intensity (in filename). e.g. `_0.5mW' \n\t\t\t\n\t\t" self.dirs_to_plot = None self.data = None self.genotypes = None self.laser_int = laser_int<|docstring|>Initialize class. Parameters ---------- laser_int: str laser intensity (in filename). e.g. `_0.5mW'<|endoftext|>
7a1c493ccce05b259b9f49525928ef640133598a2b000d6b1cc3436ccfcff9e0
def get_all_dirs(self, in_dir): '\n\t\tGet all directories in the analysis output directory corresponding \n\t\tto the desired genotype. All directories containing genotype will\n\t\tbe appended to dirs_to_plot.\n\t\t\n\t\tParameters\n\t\t----------\n\t\tin_dir: str\n\t\t\tanalysis directory.\n\t\t\n\t\t' if (not os.path.isdir(in_dir)): print(('%s does not exist!' % in_dir)) quit() full_dir = os.path.join(in_dir, '_centroid') all_dirs = next(os.walk(full_dir))[1] self.dirs_to_plot = [] self.genotypes = [] for dir in all_dirs: if (self.laser_int in dir): self.dirs_to_plot.append(os.path.join(full_dir, dir)) self.genotypes.append(('%s' % dir.replace(self.laser_int, ''))) self.genotypes = sp.array(self.genotypes, dtype='object')
Get all directories in the analysis output directory corresponding to the desired genotype. All directories containing genotype will be appended to dirs_to_plot. Parameters ---------- in_dir: str analysis directory.
analysis/plot_fwd_pcts.py
get_all_dirs
faymanns/Qbio_2018
0
python
def get_all_dirs(self, in_dir): '\n\t\tGet all directories in the analysis output directory corresponding \n\t\tto the desired genotype. All directories containing genotype will\n\t\tbe appended to dirs_to_plot.\n\t\t\n\t\tParameters\n\t\t----------\n\t\tin_dir: str\n\t\t\tanalysis directory.\n\t\t\n\t\t' if (not os.path.isdir(in_dir)): print(('%s does not exist!' % in_dir)) quit() full_dir = os.path.join(in_dir, '_centroid') all_dirs = next(os.walk(full_dir))[1] self.dirs_to_plot = [] self.genotypes = [] for dir in all_dirs: if (self.laser_int in dir): self.dirs_to_plot.append(os.path.join(full_dir, dir)) self.genotypes.append(('%s' % dir.replace(self.laser_int, ))) self.genotypes = sp.array(self.genotypes, dtype='object')
def get_all_dirs(self, in_dir): '\n\t\tGet all directories in the analysis output directory corresponding \n\t\tto the desired genotype. All directories containing genotype will\n\t\tbe appended to dirs_to_plot.\n\t\t\n\t\tParameters\n\t\t----------\n\t\tin_dir: str\n\t\t\tanalysis directory.\n\t\t\n\t\t' if (not os.path.isdir(in_dir)): print(('%s does not exist!' % in_dir)) quit() full_dir = os.path.join(in_dir, '_centroid') all_dirs = next(os.walk(full_dir))[1] self.dirs_to_plot = [] self.genotypes = [] for dir in all_dirs: if (self.laser_int in dir): self.dirs_to_plot.append(os.path.join(full_dir, dir)) self.genotypes.append(('%s' % dir.replace(self.laser_int, ))) self.genotypes = sp.array(self.genotypes, dtype='object')<|docstring|>Get all directories in the analysis output directory corresponding to the desired genotype. All directories containing genotype will be appended to dirs_to_plot. Parameters ---------- in_dir: str analysis directory.<|endoftext|>
86de5e1aa6bdf8bc5a235a335d96e9d0d7f2894efa12f474c37040f070eb11a3
def plot_pct_fwds(self): '\n\t\tPlot the pct of forward motion for each genotype.\n\t\t' Nn = len(self.dirs_to_plot) for (iD, dir) in enumerate(self.dirs_to_plot): filename = os.path.join(dir, 'pct_fwd.txt') tmp_data = sp.loadtxt(filename) if (self.data is None): self.data = sp.zeros((Nn, len(tmp_data))) self.data[(iD, :)] = tmp_data self.avgs = (sp.average(self.data, axis=1) * 100) self.stds = (sp.std(self.data, axis=1) * 100) sort_idxs = sp.argsort(self.avgs)[::(- 1)] if (sort_idxs[0] != 0): zero_idx = sp.argwhere((sort_idxs == 0))[0] change_idx = sort_idxs[zero_idx] sort_idxs[zero_idx] = sort_idxs[0] sort_idxs[0] = 0 sort_labels = self.genotypes[sort_idxs] sort_avgs = self.avgs[sort_idxs] sort_stds = self.stds[sort_idxs] fig = plt.figure() fig.set_size_inches(3, 4) plt.errorbar(range(Nn), sort_avgs, sort_stds, lw=0, elinewidth=1.5, capsize=5, color='k') plt.scatter(range(Nn), sort_avgs, c=sp.arange(Nn), cmap=plt.cm.winter, zorder=100, s=30) plt.ylim(0, 105) plt.xticks(rotation=90) plt.xticks(range(Nn), sort_labels)
Plot the pct of forward motion for each genotype.
analysis/plot_fwd_pcts.py
plot_pct_fwds
faymanns/Qbio_2018
0
python
def plot_pct_fwds(self): '\n\t\t\n\t\t' Nn = len(self.dirs_to_plot) for (iD, dir) in enumerate(self.dirs_to_plot): filename = os.path.join(dir, 'pct_fwd.txt') tmp_data = sp.loadtxt(filename) if (self.data is None): self.data = sp.zeros((Nn, len(tmp_data))) self.data[(iD, :)] = tmp_data self.avgs = (sp.average(self.data, axis=1) * 100) self.stds = (sp.std(self.data, axis=1) * 100) sort_idxs = sp.argsort(self.avgs)[::(- 1)] if (sort_idxs[0] != 0): zero_idx = sp.argwhere((sort_idxs == 0))[0] change_idx = sort_idxs[zero_idx] sort_idxs[zero_idx] = sort_idxs[0] sort_idxs[0] = 0 sort_labels = self.genotypes[sort_idxs] sort_avgs = self.avgs[sort_idxs] sort_stds = self.stds[sort_idxs] fig = plt.figure() fig.set_size_inches(3, 4) plt.errorbar(range(Nn), sort_avgs, sort_stds, lw=0, elinewidth=1.5, capsize=5, color='k') plt.scatter(range(Nn), sort_avgs, c=sp.arange(Nn), cmap=plt.cm.winter, zorder=100, s=30) plt.ylim(0, 105) plt.xticks(rotation=90) plt.xticks(range(Nn), sort_labels)
def plot_pct_fwds(self): '\n\t\t\n\t\t' Nn = len(self.dirs_to_plot) for (iD, dir) in enumerate(self.dirs_to_plot): filename = os.path.join(dir, 'pct_fwd.txt') tmp_data = sp.loadtxt(filename) if (self.data is None): self.data = sp.zeros((Nn, len(tmp_data))) self.data[(iD, :)] = tmp_data self.avgs = (sp.average(self.data, axis=1) * 100) self.stds = (sp.std(self.data, axis=1) * 100) sort_idxs = sp.argsort(self.avgs)[::(- 1)] if (sort_idxs[0] != 0): zero_idx = sp.argwhere((sort_idxs == 0))[0] change_idx = sort_idxs[zero_idx] sort_idxs[zero_idx] = sort_idxs[0] sort_idxs[0] = 0 sort_labels = self.genotypes[sort_idxs] sort_avgs = self.avgs[sort_idxs] sort_stds = self.stds[sort_idxs] fig = plt.figure() fig.set_size_inches(3, 4) plt.errorbar(range(Nn), sort_avgs, sort_stds, lw=0, elinewidth=1.5, capsize=5, color='k') plt.scatter(range(Nn), sort_avgs, c=sp.arange(Nn), cmap=plt.cm.winter, zorder=100, s=30) plt.ylim(0, 105) plt.xticks(rotation=90) plt.xticks(range(Nn), sort_labels)<|docstring|>Plot the pct of forward motion for each genotype.<|endoftext|>
4a17b5feb45fdb8db3fecf5fd897c9f9e1b09e25004cf605a9f005a39897df04
def save_data(self, in_dir): '\n\t\tOutput fwd and bkwd percent averages and stds. \n\t\t\n\t\tParameters\n\t\t----------\n\t\tin_dir : str\n\t\t\tDirectory of where to save data / same as input directory.\n\t\t\n\t\t' out_file = os.path.join(in_dir, '_centroid', ('pct_fwds%s.png' % self.laser_int)) plt.tight_layout() plt.savefig(out_file) out_file = os.path.join(in_dir, '_centroid', ('pct_fwds%s.svg' % self.laser_int)) plt.tight_layout() plt.savefig(out_file)
Output fwd and bkwd percent averages and stds. Parameters ---------- in_dir : str Directory of where to save data / same as input directory.
analysis/plot_fwd_pcts.py
save_data
faymanns/Qbio_2018
0
python
def save_data(self, in_dir): '\n\t\tOutput fwd and bkwd percent averages and stds. \n\t\t\n\t\tParameters\n\t\t----------\n\t\tin_dir : str\n\t\t\tDirectory of where to save data / same as input directory.\n\t\t\n\t\t' out_file = os.path.join(in_dir, '_centroid', ('pct_fwds%s.png' % self.laser_int)) plt.tight_layout() plt.savefig(out_file) out_file = os.path.join(in_dir, '_centroid', ('pct_fwds%s.svg' % self.laser_int)) plt.tight_layout() plt.savefig(out_file)
def save_data(self, in_dir): '\n\t\tOutput fwd and bkwd percent averages and stds. \n\t\t\n\t\tParameters\n\t\t----------\n\t\tin_dir : str\n\t\t\tDirectory of where to save data / same as input directory.\n\t\t\n\t\t' out_file = os.path.join(in_dir, '_centroid', ('pct_fwds%s.png' % self.laser_int)) plt.tight_layout() plt.savefig(out_file) out_file = os.path.join(in_dir, '_centroid', ('pct_fwds%s.svg' % self.laser_int)) plt.tight_layout() plt.savefig(out_file)<|docstring|>Output fwd and bkwd percent averages and stds. Parameters ---------- in_dir : str Directory of where to save data / same as input directory.<|endoftext|>
a3e4201dbb722285f9540dfa52030508336618feb630d5967a2b12fde0e50d91
def load_test(tstfile): 'Load a test from file.\n\n This reads a test from a csv file.\n\n Parameters\n ----------\n tstfile : :class:`str`\n Path to the file\n ' try: with zipfile.ZipFile(tstfile, 'r') as zfile: info = TxtIO(zfile.open('info.csv')) data = csv.reader(info) row = _nextr(data) if (row[0] != 'Testtype'): raise Exception if (row[1] == 'PumpingTest'): routine = _load_pumping_test else: raise Exception except Exception: raise Exception(('loadTest: loading the test ' + 'was not possible')) return routine(tstfile)
Load a test from file. This reads a test from a csv file. Parameters ---------- tstfile : :class:`str` Path to the file
welltestpy/data/testslib.py
load_test
kinverarity1/welltestpy
2
python
def load_test(tstfile): 'Load a test from file.\n\n This reads a test from a csv file.\n\n Parameters\n ----------\n tstfile : :class:`str`\n Path to the file\n ' try: with zipfile.ZipFile(tstfile, 'r') as zfile: info = TxtIO(zfile.open('info.csv')) data = csv.reader(info) row = _nextr(data) if (row[0] != 'Testtype'): raise Exception if (row[1] == 'PumpingTest'): routine = _load_pumping_test else: raise Exception except Exception: raise Exception(('loadTest: loading the test ' + 'was not possible')) return routine(tstfile)
def load_test(tstfile): 'Load a test from file.\n\n This reads a test from a csv file.\n\n Parameters\n ----------\n tstfile : :class:`str`\n Path to the file\n ' try: with zipfile.ZipFile(tstfile, 'r') as zfile: info = TxtIO(zfile.open('info.csv')) data = csv.reader(info) row = _nextr(data) if (row[0] != 'Testtype'): raise Exception if (row[1] == 'PumpingTest'): routine = _load_pumping_test else: raise Exception except Exception: raise Exception(('loadTest: loading the test ' + 'was not possible')) return routine(tstfile)<|docstring|>Load a test from file. This reads a test from a csv file. Parameters ---------- tstfile : :class:`str` Path to the file<|endoftext|>
c224e5dc5a0224e5f02c3af6d53d508f8705a013e292a8472f4e92475114325f
def _load_pumping_test(tstfile): 'Load a pumping test from file.\n\n This reads a pumping test from a csv file.\n\n Parameters\n ----------\n tstfile : :class:`str`\n Path to the file\n ' try: with zipfile.ZipFile(tstfile, 'r') as zfile: info = TxtIO(zfile.open('info.csv')) data = csv.reader(info) if (next(data)[1] != 'PumpingTest'): raise Exception name = next(data)[1] description = next(data)[1] timeframe = next(data)[1] pumpingwell = next(data)[1] pumpingrate = load_var(TxtIO(zfile.open(next(data)[1]))) aquiferdepth = load_var(TxtIO(zfile.open(next(data)[1]))) aquiferradius = load_var(TxtIO(zfile.open(next(data)[1]))) obscnt = np.int(next(data)[1]) observations = {} for __ in range(obscnt): row = _nextr(data) observations[row[0]] = load_obs(BytIO(zfile.read(row[1]))) pumpingtest = PumpingTest(name, pumpingwell, pumpingrate, observations, aquiferdepth, aquiferradius, description, timeframe) except Exception: raise Exception(('loadPumpingTest: loading the pumpingtest ' + 'was not possible')) return pumpingtest
Load a pumping test from file. This reads a pumping test from a csv file. Parameters ---------- tstfile : :class:`str` Path to the file
welltestpy/data/testslib.py
_load_pumping_test
kinverarity1/welltestpy
2
python
def _load_pumping_test(tstfile): 'Load a pumping test from file.\n\n This reads a pumping test from a csv file.\n\n Parameters\n ----------\n tstfile : :class:`str`\n Path to the file\n ' try: with zipfile.ZipFile(tstfile, 'r') as zfile: info = TxtIO(zfile.open('info.csv')) data = csv.reader(info) if (next(data)[1] != 'PumpingTest'): raise Exception name = next(data)[1] description = next(data)[1] timeframe = next(data)[1] pumpingwell = next(data)[1] pumpingrate = load_var(TxtIO(zfile.open(next(data)[1]))) aquiferdepth = load_var(TxtIO(zfile.open(next(data)[1]))) aquiferradius = load_var(TxtIO(zfile.open(next(data)[1]))) obscnt = np.int(next(data)[1]) observations = {} for __ in range(obscnt): row = _nextr(data) observations[row[0]] = load_obs(BytIO(zfile.read(row[1]))) pumpingtest = PumpingTest(name, pumpingwell, pumpingrate, observations, aquiferdepth, aquiferradius, description, timeframe) except Exception: raise Exception(('loadPumpingTest: loading the pumpingtest ' + 'was not possible')) return pumpingtest
def _load_pumping_test(tstfile): 'Load a pumping test from file.\n\n This reads a pumping test from a csv file.\n\n Parameters\n ----------\n tstfile : :class:`str`\n Path to the file\n ' try: with zipfile.ZipFile(tstfile, 'r') as zfile: info = TxtIO(zfile.open('info.csv')) data = csv.reader(info) if (next(data)[1] != 'PumpingTest'): raise Exception name = next(data)[1] description = next(data)[1] timeframe = next(data)[1] pumpingwell = next(data)[1] pumpingrate = load_var(TxtIO(zfile.open(next(data)[1]))) aquiferdepth = load_var(TxtIO(zfile.open(next(data)[1]))) aquiferradius = load_var(TxtIO(zfile.open(next(data)[1]))) obscnt = np.int(next(data)[1]) observations = {} for __ in range(obscnt): row = _nextr(data) observations[row[0]] = load_obs(BytIO(zfile.read(row[1]))) pumpingtest = PumpingTest(name, pumpingwell, pumpingrate, observations, aquiferdepth, aquiferradius, description, timeframe) except Exception: raise Exception(('loadPumpingTest: loading the pumpingtest ' + 'was not possible')) return pumpingtest<|docstring|>Load a pumping test from file. This reads a pumping test from a csv file. Parameters ---------- tstfile : :class:`str` Path to the file<|endoftext|>
fab1a3c2c8cb3b3cd725df62c63bae968f3032f896b9750b7935ba59a91de521
@property def testtype(self): ':class:`str`: String containing the test type' return self._testtype
:class:`str`: String containing the test type
welltestpy/data/testslib.py
testtype
kinverarity1/welltestpy
2
python
@property def testtype(self): return self._testtype
@property def testtype(self): return self._testtype<|docstring|>:class:`str`: String containing the test type<|endoftext|>
ca09f0967451fde4c69efe2576303327263db9621891c5a5a51cb85a52623886
@property def wells(self): ':class:`tuple` of :class:`str`: all well names' tmp = list(self.__observations.keys()) tmp.append(self.pumpingwell) return tuple(set(tmp))
:class:`tuple` of :class:`str`: all well names
welltestpy/data/testslib.py
wells
kinverarity1/welltestpy
2
python
@property def wells(self): tmp = list(self.__observations.keys()) tmp.append(self.pumpingwell) return tuple(set(tmp))
@property def wells(self): tmp = list(self.__observations.keys()) tmp.append(self.pumpingwell) return tuple(set(tmp))<|docstring|>:class:`tuple` of :class:`str`: all well names<|endoftext|>
a8bdf55ac8df6b50afdbdc9b43d7d5cd409b0d3bbaf06312f14152081ad38add
@property def pumpingrate(self): ':class:`float`: pumping rate at the pumping well' return self._pumpingrate.value
:class:`float`: pumping rate at the pumping well
welltestpy/data/testslib.py
pumpingrate
kinverarity1/welltestpy
2
python
@property def pumpingrate(self): return self._pumpingrate.value
@property def pumpingrate(self): return self._pumpingrate.value<|docstring|>:class:`float`: pumping rate at the pumping well<|endoftext|>
fb7212229268e3a199810db49e0bf9143a6f55b60576c9d144d334cf0e723d41
@property def aquiferdepth(self): ':class:`float`: aquifer depth at the field site' return self._aquiferdepth.value
:class:`float`: aquifer depth at the field site
welltestpy/data/testslib.py
aquiferdepth
kinverarity1/welltestpy
2
python
@property def aquiferdepth(self): return self._aquiferdepth.value
@property def aquiferdepth(self): return self._aquiferdepth.value<|docstring|>:class:`float`: aquifer depth at the field site<|endoftext|>
590e85b2ed67637e8c6832bde79e60cbc6d27f5f01cdce17dce447ecd4010bca
@property def aquiferradius(self): ':class:`float`: aquifer radius at the field site' return self._aquiferradius.value
:class:`float`: aquifer radius at the field site
welltestpy/data/testslib.py
aquiferradius
kinverarity1/welltestpy
2
python
@property def aquiferradius(self): return self._aquiferradius.value
@property def aquiferradius(self): return self._aquiferradius.value<|docstring|>:class:`float`: aquifer radius at the field site<|endoftext|>
682b0e58f04a2e5ae333f66ea5755226b70f4b6399ace49c36a7b1cf574d4aef
@property def observations(self): ':class:`dict`: observations made at the field site' return self.__observations
:class:`dict`: observations made at the field site
welltestpy/data/testslib.py
observations
kinverarity1/welltestpy
2
python
@property def observations(self): return self.__observations
@property def observations(self): return self.__observations<|docstring|>:class:`dict`: observations made at the field site<|endoftext|>
f2c67e29e9f682b617f1ae9a2200a9cc844e640e5c030365758795579d6bbbcb
def add_steady_obs(self, well, observation, description='Steady State Drawdown observation'): '\n Add steady drawdown observations.\n\n Parameters\n ----------\n well : :class:`str`\n well where the observation is made.\n observation : :class:`Variable`\n Observation.\n description : :class:`str`, optional\n Description of the Variable. Default: ``"Steady observation"``\n ' obs = StdyHeadObs(well, observation, description) self.addobservations(obs)
Add steady drawdown observations. Parameters ---------- well : :class:`str` well where the observation is made. observation : :class:`Variable` Observation. description : :class:`str`, optional Description of the Variable. Default: ``"Steady observation"``
welltestpy/data/testslib.py
add_steady_obs
kinverarity1/welltestpy
2
python
def add_steady_obs(self, well, observation, description='Steady State Drawdown observation'): '\n Add steady drawdown observations.\n\n Parameters\n ----------\n well : :class:`str`\n well where the observation is made.\n observation : :class:`Variable`\n Observation.\n description : :class:`str`, optional\n Description of the Variable. Default: ``"Steady observation"``\n ' obs = StdyHeadObs(well, observation, description) self.addobservations(obs)
def add_steady_obs(self, well, observation, description='Steady State Drawdown observation'): '\n Add steady drawdown observations.\n\n Parameters\n ----------\n well : :class:`str`\n well where the observation is made.\n observation : :class:`Variable`\n Observation.\n description : :class:`str`, optional\n Description of the Variable. Default: ``"Steady observation"``\n ' obs = StdyHeadObs(well, observation, description) self.addobservations(obs)<|docstring|>Add steady drawdown observations. Parameters ---------- well : :class:`str` well where the observation is made. observation : :class:`Variable` Observation. description : :class:`str`, optional Description of the Variable. Default: ``"Steady observation"``<|endoftext|>
4d5a57c2170e6666867fa19c14cdca83e4fd48717fde30c08659b7e6d9bb1f4f
def add_transient_obs(self, well, time, observation, description='Transient Drawdown observation'): '\n Add transient drawdown observations.\n\n Parameters\n ----------\n well : :class:`str`\n well where the observation is made.\n time : :class:`Variable`\n Time points of observation.\n observation : :class:`Variable`\n Observation.\n description : :class:`str`, optional\n Description of the Variable. Default: ``"Drawdown observation"``\n ' obs = DrawdownObs(well, time, observation, description) self.addobservations(obs)
Add transient drawdown observations. Parameters ---------- well : :class:`str` well where the observation is made. time : :class:`Variable` Time points of observation. observation : :class:`Variable` Observation. description : :class:`str`, optional Description of the Variable. Default: ``"Drawdown observation"``
welltestpy/data/testslib.py
add_transient_obs
kinverarity1/welltestpy
2
python
def add_transient_obs(self, well, time, observation, description='Transient Drawdown observation'): '\n Add transient drawdown observations.\n\n Parameters\n ----------\n well : :class:`str`\n well where the observation is made.\n time : :class:`Variable`\n Time points of observation.\n observation : :class:`Variable`\n Observation.\n description : :class:`str`, optional\n Description of the Variable. Default: ``"Drawdown observation"``\n ' obs = DrawdownObs(well, time, observation, description) self.addobservations(obs)
def add_transient_obs(self, well, time, observation, description='Transient Drawdown observation'): '\n Add transient drawdown observations.\n\n Parameters\n ----------\n well : :class:`str`\n well where the observation is made.\n time : :class:`Variable`\n Time points of observation.\n observation : :class:`Variable`\n Observation.\n description : :class:`str`, optional\n Description of the Variable. Default: ``"Drawdown observation"``\n ' obs = DrawdownObs(well, time, observation, description) self.addobservations(obs)<|docstring|>Add transient drawdown observations. Parameters ---------- well : :class:`str` well where the observation is made. time : :class:`Variable` Time points of observation. observation : :class:`Variable` Observation. description : :class:`str`, optional Description of the Variable. Default: ``"Drawdown observation"``<|endoftext|>
3d18a563517c283907a0a2809b5e7cb7019f91fde132f0a1aad08816b30ad12e
def addobservations(self, obs): 'Add some specified observations.\n\n This will add observations to the pumping test.\n\n Parameters\n ----------\n obs : :class:`dict`\n Observations to be added.\n ' if isinstance(obs, dict): for k in obs: if (not isinstance(obs[k], Observation)): raise ValueError((('PumpingTest_addobservations: some ' + "'observations' are not ") + 'of type Observation')) if (k in self.observations): raise ValueError(('PumpingTest_addobservations: some ' + "'observations' are already present")) for k in obs: self.__observations[k] = dcopy(obs[k]) elif isinstance(obs, Observation): if (obs in self.observations): raise ValueError(('PumpingTest_addobservations: ' + "'observation' are already present")) self.__observations[obs.name] = dcopy(obs) else: raise ValueError(("PumpingTest_addobservations: 'observations' " + 'should be given as dictonary with well as key'))
Add some specified observations. This will add observations to the pumping test. Parameters ---------- obs : :class:`dict` Observations to be added.
welltestpy/data/testslib.py
addobservations
kinverarity1/welltestpy
2
python
def addobservations(self, obs): 'Add some specified observations.\n\n This will add observations to the pumping test.\n\n Parameters\n ----------\n obs : :class:`dict`\n Observations to be added.\n ' if isinstance(obs, dict): for k in obs: if (not isinstance(obs[k], Observation)): raise ValueError((('PumpingTest_addobservations: some ' + "'observations' are not ") + 'of type Observation')) if (k in self.observations): raise ValueError(('PumpingTest_addobservations: some ' + "'observations' are already present")) for k in obs: self.__observations[k] = dcopy(obs[k]) elif isinstance(obs, Observation): if (obs in self.observations): raise ValueError(('PumpingTest_addobservations: ' + "'observation' are already present")) self.__observations[obs.name] = dcopy(obs) else: raise ValueError(("PumpingTest_addobservations: 'observations' " + 'should be given as dictonary with well as key'))
def addobservations(self, obs): 'Add some specified observations.\n\n This will add observations to the pumping test.\n\n Parameters\n ----------\n obs : :class:`dict`\n Observations to be added.\n ' if isinstance(obs, dict): for k in obs: if (not isinstance(obs[k], Observation)): raise ValueError((('PumpingTest_addobservations: some ' + "'observations' are not ") + 'of type Observation')) if (k in self.observations): raise ValueError(('PumpingTest_addobservations: some ' + "'observations' are already present")) for k in obs: self.__observations[k] = dcopy(obs[k]) elif isinstance(obs, Observation): if (obs in self.observations): raise ValueError(('PumpingTest_addobservations: ' + "'observation' are already present")) self.__observations[obs.name] = dcopy(obs) else: raise ValueError(("PumpingTest_addobservations: 'observations' " + 'should be given as dictonary with well as key'))<|docstring|>Add some specified observations. This will add observations to the pumping test. Parameters ---------- obs : :class:`dict` Observations to be added.<|endoftext|>
f818f8f7b7d65db37976e8e70aabc6f381176edd1b2778881b3a8092cbe5184d
def delobservations(self, obs): 'Delete some specified observations.\n\n This will delete observations from the pumping test. You can give a\n list of observations or a single observation by name.\n\n Parameters\n ----------\n obs : :class:`list` of :class:`str` or :class:`str`\n Observations to be deleted.\n ' if isinstance(obs, (list, tuple)): for k in obs: if (k in self.observations): del self.__observations[k] elif (obs in self.observations): del self.__observations[obs]
Delete some specified observations. This will delete observations from the pumping test. You can give a list of observations or a single observation by name. Parameters ---------- obs : :class:`list` of :class:`str` or :class:`str` Observations to be deleted.
welltestpy/data/testslib.py
delobservations
kinverarity1/welltestpy
2
python
def delobservations(self, obs): 'Delete some specified observations.\n\n This will delete observations from the pumping test. You can give a\n list of observations or a single observation by name.\n\n Parameters\n ----------\n obs : :class:`list` of :class:`str` or :class:`str`\n Observations to be deleted.\n ' if isinstance(obs, (list, tuple)): for k in obs: if (k in self.observations): del self.__observations[k] elif (obs in self.observations): del self.__observations[obs]
def delobservations(self, obs): 'Delete some specified observations.\n\n This will delete observations from the pumping test. You can give a\n list of observations or a single observation by name.\n\n Parameters\n ----------\n obs : :class:`list` of :class:`str` or :class:`str`\n Observations to be deleted.\n ' if isinstance(obs, (list, tuple)): for k in obs: if (k in self.observations): del self.__observations[k] elif (obs in self.observations): del self.__observations[obs]<|docstring|>Delete some specified observations. This will delete observations from the pumping test. You can give a list of observations or a single observation by name. Parameters ---------- obs : :class:`list` of :class:`str` or :class:`str` Observations to be deleted.<|endoftext|>
ccc554dfb8b981269ec483fa5406778027d6b630ac5d007a26aa080388b7dcb9
def _addplot(self, plt_ax, wells, exclude=None): 'Generate a plot of the pumping test.\n\n This will plot the pumping test on the given figure axes.\n\n Parameters\n ----------\n ax : :class:`Axes`\n Axes where the plot should be done.\n wells : :class:`dict`\n Dictonary containing the well classes sorted by name.\n exclude: :class:`list`, optional\n List of wells that should be excluded from the plot.\n Default: ``None``\n\n Notes\n -----\n This will be used by the Campaign class.\n ' if (exclude is None): exclude = [] for k in self.observations: if (k in exclude): continue if (k != self.pumpingwell): dist = (wells[k] - wells[self.pumpingwell]) else: dist = wells[self.pumpingwell].radius if (self.pumpingrate > 0): displace = np.maximum(self.observations[k].value[1], 0.0) else: displace = np.minimum(self.observations[k].value[1], 0.0) plt_ax.plot(self.observations[k].value[0], displace, linewidth=2, label=(self.observations[k].name + ' r={:1.2f}'.format(dist))) plt_ax.set_xlabel(self.observations[k].labels[0]) plt_ax.set_ylabel(self.observations[k].labels[1]) plt_ax.set_title(repr(self)) plt_ax.legend(loc='center right', fancybox=True, framealpha=0.75)
Generate a plot of the pumping test. This will plot the pumping test on the given figure axes. Parameters ---------- ax : :class:`Axes` Axes where the plot should be done. wells : :class:`dict` Dictonary containing the well classes sorted by name. exclude: :class:`list`, optional List of wells that should be excluded from the plot. Default: ``None`` Notes ----- This will be used by the Campaign class.
welltestpy/data/testslib.py
_addplot
kinverarity1/welltestpy
2
python
def _addplot(self, plt_ax, wells, exclude=None): 'Generate a plot of the pumping test.\n\n This will plot the pumping test on the given figure axes.\n\n Parameters\n ----------\n ax : :class:`Axes`\n Axes where the plot should be done.\n wells : :class:`dict`\n Dictonary containing the well classes sorted by name.\n exclude: :class:`list`, optional\n List of wells that should be excluded from the plot.\n Default: ``None``\n\n Notes\n -----\n This will be used by the Campaign class.\n ' if (exclude is None): exclude = [] for k in self.observations: if (k in exclude): continue if (k != self.pumpingwell): dist = (wells[k] - wells[self.pumpingwell]) else: dist = wells[self.pumpingwell].radius if (self.pumpingrate > 0): displace = np.maximum(self.observations[k].value[1], 0.0) else: displace = np.minimum(self.observations[k].value[1], 0.0) plt_ax.plot(self.observations[k].value[0], displace, linewidth=2, label=(self.observations[k].name + ' r={:1.2f}'.format(dist))) plt_ax.set_xlabel(self.observations[k].labels[0]) plt_ax.set_ylabel(self.observations[k].labels[1]) plt_ax.set_title(repr(self)) plt_ax.legend(loc='center right', fancybox=True, framealpha=0.75)
def _addplot(self, plt_ax, wells, exclude=None): 'Generate a plot of the pumping test.\n\n This will plot the pumping test on the given figure axes.\n\n Parameters\n ----------\n ax : :class:`Axes`\n Axes where the plot should be done.\n wells : :class:`dict`\n Dictonary containing the well classes sorted by name.\n exclude: :class:`list`, optional\n List of wells that should be excluded from the plot.\n Default: ``None``\n\n Notes\n -----\n This will be used by the Campaign class.\n ' if (exclude is None): exclude = [] for k in self.observations: if (k in exclude): continue if (k != self.pumpingwell): dist = (wells[k] - wells[self.pumpingwell]) else: dist = wells[self.pumpingwell].radius if (self.pumpingrate > 0): displace = np.maximum(self.observations[k].value[1], 0.0) else: displace = np.minimum(self.observations[k].value[1], 0.0) plt_ax.plot(self.observations[k].value[0], displace, linewidth=2, label=(self.observations[k].name + ' r={:1.2f}'.format(dist))) plt_ax.set_xlabel(self.observations[k].labels[0]) plt_ax.set_ylabel(self.observations[k].labels[1]) plt_ax.set_title(repr(self)) plt_ax.legend(loc='center right', fancybox=True, framealpha=0.75)<|docstring|>Generate a plot of the pumping test. This will plot the pumping test on the given figure axes. Parameters ---------- ax : :class:`Axes` Axes where the plot should be done. wells : :class:`dict` Dictonary containing the well classes sorted by name. exclude: :class:`list`, optional List of wells that should be excluded from the plot. Default: ``None`` Notes ----- This will be used by the Campaign class.<|endoftext|>
1f2c27d7bab3679f065cd99372781dce9862c66454b0984ee1538b0f45fb204e
def save(self, path='', name=None): 'Save a pumping test to file.\n\n This writes the variable to a csv file.\n\n Parameters\n ----------\n path : :class:`str`, optional\n Path where the variable should be saved. Default: ``""``\n name : :class:`str`, optional\n Name of the file. If ``None``, the name will be generated by\n ``"Test_"+name``. Default: ``None``\n\n Notes\n -----\n The file will get the suffix ``".tst"``.\n ' path = os.path.normpath(path) if (not os.path.exists(path)): os.makedirs(path) if (name is None): name = ('Test_' + self.name) if (name[(- 4):] != '.tst'): name += '.tst' name = _formname(name) patht = os.path.join(path, '.tmptest') if os.path.exists(patht): shutil.rmtree(patht, ignore_errors=True) os.makedirs(patht) with open(os.path.join(patht, 'info.csv'), 'w') as csvf: writer = csv.writer(csvf, quoting=csv.QUOTE_NONNUMERIC) writer.writerow(['Testtype', 'PumpingTest']) writer.writerow(['name', self.name]) writer.writerow(['description', self.description]) writer.writerow(['timeframe', self.timeframe]) writer.writerow(['pumpingwell', self.pumpingwell]) pumprname = (name[:(- 4)] + '_PprVar.var') aquidname = (name[:(- 4)] + '_AqdVar.var') aquirname = (name[:(- 4)] + '_AqrVar.var') writer.writerow(['pumpingrate', pumprname]) self._pumpingrate.save(patht, pumprname) writer.writerow(['aquiferdepth', aquidname]) self._aquiferdepth.save(patht, aquidname) writer.writerow(['aquiferradius', aquirname]) self._aquiferradius.save(patht, aquirname) okeys = tuple(self.observations.keys()) writer.writerow(['Observations', len(okeys)]) obsname = {} for k in okeys: obsname[k] = (((name[:(- 4)] + '_') + k) + '_Obs.obs') writer.writerow([k, obsname[k]]) self.observations[k].save(patht, obsname[k]) with zipfile.ZipFile(os.path.join(path, name), 'w') as zfile: zfile.write(os.path.join(patht, 'info.csv'), 'info.csv') zfile.write(os.path.join(patht, pumprname), pumprname) zfile.write(os.path.join(patht, aquidname), aquidname) zfile.write(os.path.join(patht, 
aquirname), aquirname) for k in okeys: zfile.write(os.path.join(patht, obsname[k]), obsname[k]) shutil.rmtree(patht, ignore_errors=True)
Save a pumping test to file. This writes the variable to a csv file. Parameters ---------- path : :class:`str`, optional Path where the variable should be saved. Default: ``""`` name : :class:`str`, optional Name of the file. If ``None``, the name will be generated by ``"Test_"+name``. Default: ``None`` Notes ----- The file will get the suffix ``".tst"``.
welltestpy/data/testslib.py
save
kinverarity1/welltestpy
2
python
def save(self, path=, name=None): 'Save a pumping test to file.\n\n This writes the variable to a csv file.\n\n Parameters\n ----------\n path : :class:`str`, optional\n Path where the variable should be saved. Default: ````\n name : :class:`str`, optional\n Name of the file. If ``None``, the name will be generated by\n ``"Test_"+name``. Default: ``None``\n\n Notes\n -----\n The file will get the suffix ``".tst"``.\n ' path = os.path.normpath(path) if (not os.path.exists(path)): os.makedirs(path) if (name is None): name = ('Test_' + self.name) if (name[(- 4):] != '.tst'): name += '.tst' name = _formname(name) patht = os.path.join(path, '.tmptest') if os.path.exists(patht): shutil.rmtree(patht, ignore_errors=True) os.makedirs(patht) with open(os.path.join(patht, 'info.csv'), 'w') as csvf: writer = csv.writer(csvf, quoting=csv.QUOTE_NONNUMERIC) writer.writerow(['Testtype', 'PumpingTest']) writer.writerow(['name', self.name]) writer.writerow(['description', self.description]) writer.writerow(['timeframe', self.timeframe]) writer.writerow(['pumpingwell', self.pumpingwell]) pumprname = (name[:(- 4)] + '_PprVar.var') aquidname = (name[:(- 4)] + '_AqdVar.var') aquirname = (name[:(- 4)] + '_AqrVar.var') writer.writerow(['pumpingrate', pumprname]) self._pumpingrate.save(patht, pumprname) writer.writerow(['aquiferdepth', aquidname]) self._aquiferdepth.save(patht, aquidname) writer.writerow(['aquiferradius', aquirname]) self._aquiferradius.save(patht, aquirname) okeys = tuple(self.observations.keys()) writer.writerow(['Observations', len(okeys)]) obsname = {} for k in okeys: obsname[k] = (((name[:(- 4)] + '_') + k) + '_Obs.obs') writer.writerow([k, obsname[k]]) self.observations[k].save(patht, obsname[k]) with zipfile.ZipFile(os.path.join(path, name), 'w') as zfile: zfile.write(os.path.join(patht, 'info.csv'), 'info.csv') zfile.write(os.path.join(patht, pumprname), pumprname) zfile.write(os.path.join(patht, aquidname), aquidname) zfile.write(os.path.join(patht, aquirname), 
aquirname) for k in okeys: zfile.write(os.path.join(patht, obsname[k]), obsname[k]) shutil.rmtree(patht, ignore_errors=True)
def save(self, path=, name=None): 'Save a pumping test to file.\n\n This writes the variable to a csv file.\n\n Parameters\n ----------\n path : :class:`str`, optional\n Path where the variable should be saved. Default: ````\n name : :class:`str`, optional\n Name of the file. If ``None``, the name will be generated by\n ``"Test_"+name``. Default: ``None``\n\n Notes\n -----\n The file will get the suffix ``".tst"``.\n ' path = os.path.normpath(path) if (not os.path.exists(path)): os.makedirs(path) if (name is None): name = ('Test_' + self.name) if (name[(- 4):] != '.tst'): name += '.tst' name = _formname(name) patht = os.path.join(path, '.tmptest') if os.path.exists(patht): shutil.rmtree(patht, ignore_errors=True) os.makedirs(patht) with open(os.path.join(patht, 'info.csv'), 'w') as csvf: writer = csv.writer(csvf, quoting=csv.QUOTE_NONNUMERIC) writer.writerow(['Testtype', 'PumpingTest']) writer.writerow(['name', self.name]) writer.writerow(['description', self.description]) writer.writerow(['timeframe', self.timeframe]) writer.writerow(['pumpingwell', self.pumpingwell]) pumprname = (name[:(- 4)] + '_PprVar.var') aquidname = (name[:(- 4)] + '_AqdVar.var') aquirname = (name[:(- 4)] + '_AqrVar.var') writer.writerow(['pumpingrate', pumprname]) self._pumpingrate.save(patht, pumprname) writer.writerow(['aquiferdepth', aquidname]) self._aquiferdepth.save(patht, aquidname) writer.writerow(['aquiferradius', aquirname]) self._aquiferradius.save(patht, aquirname) okeys = tuple(self.observations.keys()) writer.writerow(['Observations', len(okeys)]) obsname = {} for k in okeys: obsname[k] = (((name[:(- 4)] + '_') + k) + '_Obs.obs') writer.writerow([k, obsname[k]]) self.observations[k].save(patht, obsname[k]) with zipfile.ZipFile(os.path.join(path, name), 'w') as zfile: zfile.write(os.path.join(patht, 'info.csv'), 'info.csv') zfile.write(os.path.join(patht, pumprname), pumprname) zfile.write(os.path.join(patht, aquidname), aquidname) zfile.write(os.path.join(patht, aquirname), 
aquirname) for k in okeys: zfile.write(os.path.join(patht, obsname[k]), obsname[k]) shutil.rmtree(patht, ignore_errors=True)<|docstring|>Save a pumping test to file. This writes the variable to a csv file. Parameters ---------- path : :class:`str`, optional Path where the variable should be saved. Default: ``""`` name : :class:`str`, optional Name of the file. If ``None``, the name will be generated by ``"Test_"+name``. Default: ``None`` Notes ----- The file will get the suffix ``".tst"``.<|endoftext|>
96c7322beb5d8ab3fbcfff7907ea73f68352abd03abf88acd321bcb8ec011125
def __init__(self): ' Initialization ' self._headers = {}
Initialization
wtf/app/response.py
__init__
ndparker/wtf
1
python
def __init__(self): ' ' self._headers = {}
def __init__(self): ' ' self._headers = {}<|docstring|>Initialization<|endoftext|>
9d0e3b504d397a82ccf39671a8b54bc550ed0933b2548ad66a369f6e7c93ee34
def __contains__(self, name): '\n Check if header is already set\n\n :Parameters:\n - `name`: Header name\n\n :Types:\n - `name`: ``str``\n\n :return: Does the header already exist?\n :rtype: ``bool``\n ' return (name.lower() in self._headers)
Check if header is already set :Parameters: - `name`: Header name :Types: - `name`: ``str`` :return: Does the header already exist? :rtype: ``bool``
wtf/app/response.py
__contains__
ndparker/wtf
1
python
def __contains__(self, name): '\n Check if header is already set\n\n :Parameters:\n - `name`: Header name\n\n :Types:\n - `name`: ``str``\n\n :return: Does the header already exist?\n :rtype: ``bool``\n ' return (name.lower() in self._headers)
def __contains__(self, name): '\n Check if header is already set\n\n :Parameters:\n - `name`: Header name\n\n :Types:\n - `name`: ``str``\n\n :return: Does the header already exist?\n :rtype: ``bool``\n ' return (name.lower() in self._headers)<|docstring|>Check if header is already set :Parameters: - `name`: Header name :Types: - `name`: ``str`` :return: Does the header already exist? :rtype: ``bool``<|endoftext|>
7995a790dce5ff563348b1458f481dca9267036836b51117aaec0e6eefaab52e
def __iter__(self): ' Header tuple iterator ' for (name, values) in self._headers.iteritems(): for value in values: (yield (name, value))
Header tuple iterator
wtf/app/response.py
__iter__
ndparker/wtf
1
python
def __iter__(self): ' ' for (name, values) in self._headers.iteritems(): for value in values: (yield (name, value))
def __iter__(self): ' ' for (name, values) in self._headers.iteritems(): for value in values: (yield (name, value))<|docstring|>Header tuple iterator<|endoftext|>
2de996f8326151a6c59ebc99a0dd35b53d2a7705e3a428841c666b7f26fda7cb
def get(self, name): '\n Determine the value list of a header\n\n :Parameters:\n - `name`: The header name\n\n :Types:\n - `name`: ``str``\n\n :return: The value list or ``None``\n :rtype: ``list``\n ' return self._headers.get(name.lower())
Determine the value list of a header :Parameters: - `name`: The header name :Types: - `name`: ``str`` :return: The value list or ``None`` :rtype: ``list``
wtf/app/response.py
get
ndparker/wtf
1
python
def get(self, name): '\n Determine the value list of a header\n\n :Parameters:\n - `name`: The header name\n\n :Types:\n - `name`: ``str``\n\n :return: The value list or ``None``\n :rtype: ``list``\n ' return self._headers.get(name.lower())
def get(self, name): '\n Determine the value list of a header\n\n :Parameters:\n - `name`: The header name\n\n :Types:\n - `name`: ``str``\n\n :return: The value list or ``None``\n :rtype: ``list``\n ' return self._headers.get(name.lower())<|docstring|>Determine the value list of a header :Parameters: - `name`: The header name :Types: - `name`: ``str`` :return: The value list or ``None`` :rtype: ``list``<|endoftext|>
6f8da64a055ed8e25447a16c80ceed25091bb06c9fc5166f06a12833688c750f
def set(self, name, *values): "\n Set a header, replacing any same-named header previously set\n\n :Parameters:\n - `name`: The header name\n - `values`: List of values (``('value', ...)``)\n\n :Types:\n - `name`: ``str``\n - `values`: ``tuple``\n " self._headers[name.lower()] = list(values)
Set a header, replacing any same-named header previously set :Parameters: - `name`: The header name - `values`: List of values (``('value', ...)``) :Types: - `name`: ``str`` - `values`: ``tuple``
wtf/app/response.py
set
ndparker/wtf
1
python
def set(self, name, *values): "\n Set a header, replacing any same-named header previously set\n\n :Parameters:\n - `name`: The header name\n - `values`: List of values (``('value', ...)``)\n\n :Types:\n - `name`: ``str``\n - `values`: ``tuple``\n " self._headers[name.lower()] = list(values)
def set(self, name, *values): "\n Set a header, replacing any same-named header previously set\n\n :Parameters:\n - `name`: The header name\n - `values`: List of values (``('value', ...)``)\n\n :Types:\n - `name`: ``str``\n - `values`: ``tuple``\n " self._headers[name.lower()] = list(values)<|docstring|>Set a header, replacing any same-named header previously set :Parameters: - `name`: The header name - `values`: List of values (``('value', ...)``) :Types: - `name`: ``str`` - `values`: ``tuple``<|endoftext|>
b60b124e21e7b50ce2550d357190308dc55b73ac403a9468b9f8778a6e1f58cf
def add(self, name, *values): "\n Add a value list to a header\n\n The old values are preserved\n\n :Parameters:\n - `name`: Header name\n - `values`: Values to add (``('value', ...)``)\n\n :Types:\n - `name`: ``str``\n - `values`: ``tuple``\n " self._headers.setdefault(name.lower(), []).extend(list(values))
Add a value list to a header The old values are preserved :Parameters: - `name`: Header name - `values`: Values to add (``('value', ...)``) :Types: - `name`: ``str`` - `values`: ``tuple``
wtf/app/response.py
add
ndparker/wtf
1
python
def add(self, name, *values): "\n Add a value list to a header\n\n The old values are preserved\n\n :Parameters:\n - `name`: Header name\n - `values`: Values to add (``('value', ...)``)\n\n :Types:\n - `name`: ``str``\n - `values`: ``tuple``\n " self._headers.setdefault(name.lower(), []).extend(list(values))
def add(self, name, *values): "\n Add a value list to a header\n\n The old values are preserved\n\n :Parameters:\n - `name`: Header name\n - `values`: Values to add (``('value', ...)``)\n\n :Types:\n - `name`: ``str``\n - `values`: ``tuple``\n " self._headers.setdefault(name.lower(), []).extend(list(values))<|docstring|>Add a value list to a header The old values are preserved :Parameters: - `name`: Header name - `values`: Values to add (``('value', ...)``) :Types: - `name`: ``str`` - `values`: ``tuple``<|endoftext|>
a98c5f43d93908c32a1c86344580910ee8d1c0df547b5f692f6733a3b6956ed9
def remove(self, name, value=None): '\n Remove a header by name (plus optionally by value)\n\n If the header does not exist alrady, it is not an error.\n\n :Parameters:\n - `name`: Header name\n - `value`: Particular value to remove\n\n :Types:\n - `name`: ``str``\n - `value`: ``str``\n ' name = name.lower() if (name in self._headers): if (value is None): del self._headers[name] else: try: while True: self._headers[name].remove(value) except ValueError: pass
Remove a header by name (plus optionally by value) If the header does not exist alrady, it is not an error. :Parameters: - `name`: Header name - `value`: Particular value to remove :Types: - `name`: ``str`` - `value`: ``str``
wtf/app/response.py
remove
ndparker/wtf
1
python
def remove(self, name, value=None): '\n Remove a header by name (plus optionally by value)\n\n If the header does not exist alrady, it is not an error.\n\n :Parameters:\n - `name`: Header name\n - `value`: Particular value to remove\n\n :Types:\n - `name`: ``str``\n - `value`: ``str``\n ' name = name.lower() if (name in self._headers): if (value is None): del self._headers[name] else: try: while True: self._headers[name].remove(value) except ValueError: pass
def remove(self, name, value=None): '\n Remove a header by name (plus optionally by value)\n\n If the header does not exist alrady, it is not an error.\n\n :Parameters:\n - `name`: Header name\n - `value`: Particular value to remove\n\n :Types:\n - `name`: ``str``\n - `value`: ``str``\n ' name = name.lower() if (name in self._headers): if (value is None): del self._headers[name] else: try: while True: self._headers[name].remove(value) except ValueError: pass<|docstring|>Remove a header by name (plus optionally by value) If the header does not exist alrady, it is not an error. :Parameters: - `name`: Header name - `value`: Particular value to remove :Types: - `name`: ``str`` - `value`: ``str``<|endoftext|>
6550ebffe6eab4d2d299ea9a2477bafaf3e67d5a101aeb2074488e0e5af9adf0
def __init__(self, request, start_response): '\n Initialization\n\n :Parameters:\n - `request`: Request object\n - `start_response`: WSGI start_response callable\n\n :Types:\n - `request`: `wtf.app.request.Request`\n - `start_response`: ``callable``\n ' self.request = _weakref.proxy(request) self.http = http self.status(200) self.headers = HeaderCollection() self.content_type('text/html') def first_write(towrite): ' First write flushes all ' if (self.write == first_write): resp_code = ('%03d %s' % self._status) headers = list(self.headers) self.write = start_response(resp_code, headers) return self.write(towrite) self.write = first_write
Initialization :Parameters: - `request`: Request object - `start_response`: WSGI start_response callable :Types: - `request`: `wtf.app.request.Request` - `start_response`: ``callable``
wtf/app/response.py
__init__
ndparker/wtf
1
python
def __init__(self, request, start_response): '\n Initialization\n\n :Parameters:\n - `request`: Request object\n - `start_response`: WSGI start_response callable\n\n :Types:\n - `request`: `wtf.app.request.Request`\n - `start_response`: ``callable``\n ' self.request = _weakref.proxy(request) self.http = http self.status(200) self.headers = HeaderCollection() self.content_type('text/html') def first_write(towrite): ' First write flushes all ' if (self.write == first_write): resp_code = ('%03d %s' % self._status) headers = list(self.headers) self.write = start_response(resp_code, headers) return self.write(towrite) self.write = first_write
def __init__(self, request, start_response): '\n Initialization\n\n :Parameters:\n - `request`: Request object\n - `start_response`: WSGI start_response callable\n\n :Types:\n - `request`: `wtf.app.request.Request`\n - `start_response`: ``callable``\n ' self.request = _weakref.proxy(request) self.http = http self.status(200) self.headers = HeaderCollection() self.content_type('text/html') def first_write(towrite): ' First write flushes all ' if (self.write == first_write): resp_code = ('%03d %s' % self._status) headers = list(self.headers) self.write = start_response(resp_code, headers) return self.write(towrite) self.write = first_write<|docstring|>Initialization :Parameters: - `request`: Request object - `start_response`: WSGI start_response callable :Types: - `request`: `wtf.app.request.Request` - `start_response`: ``callable``<|endoftext|>
41bfb06f6c58f66debd5e95f3f4deedbad1d4dfbb48646a2ad3995ffa7122ac9
def __getattr__(self, name): "\n Resolve unknown attributes\n\n We're looking for special env variables inserted by the middleware\n stack: ``wtf.response.<name>``. These are expected to be factories,\n which are lazily initialized with the response object and return\n the actual attribute, which is cached in the response object for\n further use.\n\n :Parameters:\n - `name`: The name to look up\n\n :Types:\n - `name`: ``str``\n\n :return: The attribute in question\n :rtype: any\n\n :Exceptions:\n - `AttributeError`: Attribute could not be resolved\n " try: factory = self.request.env[('wtf.response.%s' % name)] except KeyError: pass else: setattr(self, name, factory(_weakref.proxy(self))) return super(Response, self).__getattribute__(name)
Resolve unknown attributes We're looking for special env variables inserted by the middleware stack: ``wtf.response.<name>``. These are expected to be factories, which are lazily initialized with the response object and return the actual attribute, which is cached in the response object for further use. :Parameters: - `name`: The name to look up :Types: - `name`: ``str`` :return: The attribute in question :rtype: any :Exceptions: - `AttributeError`: Attribute could not be resolved
wtf/app/response.py
__getattr__
ndparker/wtf
1
python
def __getattr__(self, name): "\n Resolve unknown attributes\n\n We're looking for special env variables inserted by the middleware\n stack: ``wtf.response.<name>``. These are expected to be factories,\n which are lazily initialized with the response object and return\n the actual attribute, which is cached in the response object for\n further use.\n\n :Parameters:\n - `name`: The name to look up\n\n :Types:\n - `name`: ``str``\n\n :return: The attribute in question\n :rtype: any\n\n :Exceptions:\n - `AttributeError`: Attribute could not be resolved\n " try: factory = self.request.env[('wtf.response.%s' % name)] except KeyError: pass else: setattr(self, name, factory(_weakref.proxy(self))) return super(Response, self).__getattribute__(name)
def __getattr__(self, name): "\n Resolve unknown attributes\n\n We're looking for special env variables inserted by the middleware\n stack: ``wtf.response.<name>``. These are expected to be factories,\n which are lazily initialized with the response object and return\n the actual attribute, which is cached in the response object for\n further use.\n\n :Parameters:\n - `name`: The name to look up\n\n :Types:\n - `name`: ``str``\n\n :return: The attribute in question\n :rtype: any\n\n :Exceptions:\n - `AttributeError`: Attribute could not be resolved\n " try: factory = self.request.env[('wtf.response.%s' % name)] except KeyError: pass else: setattr(self, name, factory(_weakref.proxy(self))) return super(Response, self).__getattribute__(name)<|docstring|>Resolve unknown attributes We're looking for special env variables inserted by the middleware stack: ``wtf.response.<name>``. These are expected to be factories, which are lazily initialized with the response object and return the actual attribute, which is cached in the response object for further use. :Parameters: - `name`: The name to look up :Types: - `name`: ``str`` :return: The attribute in question :rtype: any :Exceptions: - `AttributeError`: Attribute could not be resolved<|endoftext|>
60c2a5838db4b4ff470fdb067f90d9ae9b041bc2937c92862ffe48167a993e6a
def status(self, status=None, reason=None): "\n Set/get response status\n\n :Parameters:\n - `status`: Response status code\n - `reason`: Reason phrase\n\n :Types:\n - `status`: ``int``\n - `reason`: ``str``\n\n :return: Tuple of previous status and reason phrase\n (``(int, 'reason')``)\n :rtype: ``tuple``\n " oldstatus = self._status if (status is not None): if (reason is None): reason = (http.reasons.get(status) or ('Status %d' % status)) status = (int(status), reason) elif (reason is not None): status = (oldstatus[0], reason) self._status = status return oldstatus
Set/get response status :Parameters: - `status`: Response status code - `reason`: Reason phrase :Types: - `status`: ``int`` - `reason`: ``str`` :return: Tuple of previous status and reason phrase (``(int, 'reason')``) :rtype: ``tuple``
wtf/app/response.py
status
ndparker/wtf
1
python
def status(self, status=None, reason=None): "\n Set/get response status\n\n :Parameters:\n - `status`: Response status code\n - `reason`: Reason phrase\n\n :Types:\n - `status`: ``int``\n - `reason`: ``str``\n\n :return: Tuple of previous status and reason phrase\n (``(int, 'reason')``)\n :rtype: ``tuple``\n " oldstatus = self._status if (status is not None): if (reason is None): reason = (http.reasons.get(status) or ('Status %d' % status)) status = (int(status), reason) elif (reason is not None): status = (oldstatus[0], reason) self._status = status return oldstatus
def status(self, status=None, reason=None): "\n Set/get response status\n\n :Parameters:\n - `status`: Response status code\n - `reason`: Reason phrase\n\n :Types:\n - `status`: ``int``\n - `reason`: ``str``\n\n :return: Tuple of previous status and reason phrase\n (``(int, 'reason')``)\n :rtype: ``tuple``\n " oldstatus = self._status if (status is not None): if (reason is None): reason = (http.reasons.get(status) or ('Status %d' % status)) status = (int(status), reason) elif (reason is not None): status = (oldstatus[0], reason) self._status = status return oldstatus<|docstring|>Set/get response status :Parameters: - `status`: Response status code - `reason`: Reason phrase :Types: - `status`: ``int`` - `reason`: ``str`` :return: Tuple of previous status and reason phrase (``(int, 'reason')``) :rtype: ``tuple``<|endoftext|>
74acc9f0902a3d93638e018df84884a0acd7e32070fa5a7398bb64b02879386f
def cookie(self, name, value, path='/', expires=None, max_age=None, domain=None, secure=None, comment=None, version=None, codec=None): '\n Set response cookie\n\n :Parameters:\n - `name`: Cookie name\n - `value`: Cookie value (if a codec is given, the type should be\n applicable for the codec encoder).\n - `path`: Valid URL base path for the cookie. It should always be set\n to a reasonable path (at least ``/``), otherwise the cookie will\n only be valid for the current URL and below.\n - `expires`: Expire time of the cookie. If unset or ``None`` the\n cookie is dropped when the browser is closed. See also the\n `max_age` parameter.\n - `max_age`: Max age of the cookie in seconds. If set, make sure it\n matches the expiry time. The difference is that expires will be\n transformed to a HTTP date, while max-age will stay an integer.\n The expires parameter is the older one and better understood by\n the clients out there. For that reason if you set max_age only,\n expires will be set automatically to ``now + max_age``. If unset\n or ``None`` the cookie will be dropped when the browser is closed.\n - `domain`: Valid domain\n - `secure`: Whether this is an SSL-only cookie or not\n - `comment`: Cookie comment\n - `version`: Cookie spec version. See `RFC 2965`_\n - `codec`: Cookie codec to apply. If unset or ``None``, the codec\n specified in the application configuration is applied.\n\n .. _RFC 2965: http://www.ietf.org/rfc/rfc2965.txt\n\n :Types:\n - `name`: ``str``\n - `value`: ``str``\n - `path`: ``str``\n - `expires`: ``datetime.datetime``\n - `max_age`: ``int``\n - `domain`: ``str``\n - `secure`: ``bool``\n - `comment`: ``str``\n - `version`: ``int``\n - `codec`: `CookieCodecInterface`\n ' if (codec is None): codec = self.request.env.get('wtf.codec.cookie') cstring = _httputil.make_cookie(name, value, codec, path=path, expires=expires, max_age=max_age, domain=domain, secure=secure, comment=comment, version=version) self.headers.add('Set-Cookie', cstring)
Set response cookie :Parameters: - `name`: Cookie name - `value`: Cookie value (if a codec is given, the type should be applicable for the codec encoder). - `path`: Valid URL base path for the cookie. It should always be set to a reasonable path (at least ``/``), otherwise the cookie will only be valid for the current URL and below. - `expires`: Expire time of the cookie. If unset or ``None`` the cookie is dropped when the browser is closed. See also the `max_age` parameter. - `max_age`: Max age of the cookie in seconds. If set, make sure it matches the expiry time. The difference is that expires will be transformed to a HTTP date, while max-age will stay an integer. The expires parameter is the older one and better understood by the clients out there. For that reason if you set max_age only, expires will be set automatically to ``now + max_age``. If unset or ``None`` the cookie will be dropped when the browser is closed. - `domain`: Valid domain - `secure`: Whether this is an SSL-only cookie or not - `comment`: Cookie comment - `version`: Cookie spec version. See `RFC 2965`_ - `codec`: Cookie codec to apply. If unset or ``None``, the codec specified in the application configuration is applied. .. _RFC 2965: http://www.ietf.org/rfc/rfc2965.txt :Types: - `name`: ``str`` - `value`: ``str`` - `path`: ``str`` - `expires`: ``datetime.datetime`` - `max_age`: ``int`` - `domain`: ``str`` - `secure`: ``bool`` - `comment`: ``str`` - `version`: ``int`` - `codec`: `CookieCodecInterface`
wtf/app/response.py
cookie
ndparker/wtf
1
python
def cookie(self, name, value, path='/', expires=None, max_age=None, domain=None, secure=None, comment=None, version=None, codec=None): '\n Set response cookie\n\n :Parameters:\n - `name`: Cookie name\n - `value`: Cookie value (if a codec is given, the type should be\n applicable for the codec encoder).\n - `path`: Valid URL base path for the cookie. It should always be set\n to a reasonable path (at least ``/``), otherwise the cookie will\n only be valid for the current URL and below.\n - `expires`: Expire time of the cookie. If unset or ``None`` the\n cookie is dropped when the browser is closed. See also the\n `max_age` parameter.\n - `max_age`: Max age of the cookie in seconds. If set, make sure it\n matches the expiry time. The difference is that expires will be\n transformed to a HTTP date, while max-age will stay an integer.\n The expires parameter is the older one and better understood by\n the clients out there. For that reason if you set max_age only,\n expires will be set automatically to ``now + max_age``. If unset\n or ``None`` the cookie will be dropped when the browser is closed.\n - `domain`: Valid domain\n - `secure`: Whether this is an SSL-only cookie or not\n - `comment`: Cookie comment\n - `version`: Cookie spec version. See `RFC 2965`_\n - `codec`: Cookie codec to apply. If unset or ``None``, the codec\n specified in the application configuration is applied.\n\n .. _RFC 2965: http://www.ietf.org/rfc/rfc2965.txt\n\n :Types:\n - `name`: ``str``\n - `value`: ``str``\n - `path`: ``str``\n - `expires`: ``datetime.datetime``\n - `max_age`: ``int``\n - `domain`: ``str``\n - `secure`: ``bool``\n - `comment`: ``str``\n - `version`: ``int``\n - `codec`: `CookieCodecInterface`\n ' if (codec is None): codec = self.request.env.get('wtf.codec.cookie') cstring = _httputil.make_cookie(name, value, codec, path=path, expires=expires, max_age=max_age, domain=domain, secure=secure, comment=comment, version=version) self.headers.add('Set-Cookie', cstring)
def cookie(self, name, value, path='/', expires=None, max_age=None, domain=None, secure=None, comment=None, version=None, codec=None): '\n Set response cookie\n\n :Parameters:\n - `name`: Cookie name\n - `value`: Cookie value (if a codec is given, the type should be\n applicable for the codec encoder).\n - `path`: Valid URL base path for the cookie. It should always be set\n to a reasonable path (at least ``/``), otherwise the cookie will\n only be valid for the current URL and below.\n - `expires`: Expire time of the cookie. If unset or ``None`` the\n cookie is dropped when the browser is closed. See also the\n `max_age` parameter.\n - `max_age`: Max age of the cookie in seconds. If set, make sure it\n matches the expiry time. The difference is that expires will be\n transformed to a HTTP date, while max-age will stay an integer.\n The expires parameter is the older one and better understood by\n the clients out there. For that reason if you set max_age only,\n expires will be set automatically to ``now + max_age``. If unset\n or ``None`` the cookie will be dropped when the browser is closed.\n - `domain`: Valid domain\n - `secure`: Whether this is an SSL-only cookie or not\n - `comment`: Cookie comment\n - `version`: Cookie spec version. See `RFC 2965`_\n - `codec`: Cookie codec to apply. If unset or ``None``, the codec\n specified in the application configuration is applied.\n\n .. 
_RFC 2965: http://www.ietf.org/rfc/rfc2965.txt\n\n :Types:\n - `name`: ``str``\n - `value`: ``str``\n - `path`: ``str``\n - `expires`: ``datetime.datetime``\n - `max_age`: ``int``\n - `domain`: ``str``\n - `secure`: ``bool``\n - `comment`: ``str``\n - `version`: ``int``\n - `codec`: `CookieCodecInterface`\n ' if (codec is None): codec = self.request.env.get('wtf.codec.cookie') cstring = _httputil.make_cookie(name, value, codec, path=path, expires=expires, max_age=max_age, domain=domain, secure=secure, comment=comment, version=version) self.headers.add('Set-Cookie', cstring)<|docstring|>Set response cookie :Parameters: - `name`: Cookie name - `value`: Cookie value (if a codec is given, the type should be applicable for the codec encoder). - `path`: Valid URL base path for the cookie. It should always be set to a reasonable path (at least ``/``), otherwise the cookie will only be valid for the current URL and below. - `expires`: Expire time of the cookie. If unset or ``None`` the cookie is dropped when the browser is closed. See also the `max_age` parameter. - `max_age`: Max age of the cookie in seconds. If set, make sure it matches the expiry time. The difference is that expires will be transformed to a HTTP date, while max-age will stay an integer. The expires parameter is the older one and better understood by the clients out there. For that reason if you set max_age only, expires will be set automatically to ``now + max_age``. If unset or ``None`` the cookie will be dropped when the browser is closed. - `domain`: Valid domain - `secure`: Whether this is an SSL-only cookie or not - `comment`: Cookie comment - `version`: Cookie spec version. See `RFC 2965`_ - `codec`: Cookie codec to apply. If unset or ``None``, the codec specified in the application configuration is applied. .. 
_RFC 2965: http://www.ietf.org/rfc/rfc2965.txt :Types: - `name`: ``str`` - `value`: ``str`` - `path`: ``str`` - `expires`: ``datetime.datetime`` - `max_age`: ``int`` - `domain`: ``str`` - `secure`: ``bool`` - `comment`: ``str`` - `version`: ``int`` - `codec`: `CookieCodecInterface`<|endoftext|>
def content_type(self, ctype=None, charset=None):
    """
    Set and/or retrieve the response content type

    If `charset` is given, it is quoted and appended as a ``charset``
    parameter. Should `ctype` be omitted in that case, the base type is
    taken from the current header (falling back to ``text/plain``) with
    any existing parameters stripped off.

    :Parameters:
      `ctype` : ``str``
        New content type (without parameters) or ``None``

      `charset` : ``str``
        New character set or ``None``

    :Return: The previous full content type header (maybe ``None``)
    :Rtype: ``str``
    """
    previous = (self.headers.get('content-type') or [None])[0]
    if charset is not None:
        quoted = '"%s"' % charset.replace('"', '\\"')
        if ctype is None:
            # Reuse the current base type, parameters removed.
            base = previous or 'text/plain'
            semi = base.find(';')
            if semi > 0:
                base = base[:semi]
            ctype = base.strip()
        ctype = '%s; charset=%s' % (ctype, quoted)
    if ctype is not None:
        self.headers.set('content-type', ctype)
    return previous
def content_length(self, length):
    """
    Announce the size of the response body

    :Parameters:
      `length` : ``int``
        Expected body size in octets
    """
    value = str(length)
    self.headers.set('Content-Length', value)
def last_modified(self, last_modified):
    """
    Announce the modification time of the resource

    :Parameters:
      `last_modified` : ``datetime.datetime``
        Time of last modification, expressed in UTC
    """
    stamp = _httputil.make_date(last_modified)
    self.headers.set('Last-Modified', stamp)
def cache(self, expiry, audience=None):
    """
    Emit caching headers

    Sets ``Expires`` and ``Cache-Control`` headers, plus a
    ``Pragma: no-cache`` for an effective expiry of zero.

    :Parameters:
      `expiry` : ``int``
        Lifetime in seconds from now; negative values are clamped to 0

      `audience` : ``str``
        Optional caching audience; only ``private`` and ``public``
        are recognized, anything else is ignored
    """
    seconds = max(0, expiry)
    expires_at = _datetime.datetime.utcnow() + _datetime.timedelta(
        seconds=seconds
    )
    self.headers.set('Expires', _httputil.make_date(expires_at))
    control = 'max-age=%s' % seconds
    if audience in ('private', 'public'):
        control = '%s, %s' % (control, audience)
    self.headers.set('Cache-Control', control)
    if not seconds:
        # HTTP/1.0 compatibility for "do not cache at all"
        self.headers.set('Pragma', 'no-cache')
def raise_error(self, status, **param):
    """
    Abort the request with an HTTP error response

    :Parameters:
      `status` : ``int``
        HTTP status code to emit

      `param` : ``dict``
        Extra keyword arguments handed to the response class in
        `http_response` (the request object is supplied automatically)

    :Exceptions:
     - `KeyError` : No response class registered for `status`
     - `HTTPResponse` : Always raised otherwise
    """
    raise http.classes[status](self.request, **param)
def raise_redirect(self, location, status=302):
    """
    Abort the request with an HTTP redirect

    :Parameters:
      `location` : ``str``
        Target URL of the redirect

      `status` : ``int``
        Redirect status code (default: ``302``)

    :Exceptions:
     - `http.HTTPRedirectResponse` : Always raised
    """
    response_class = http.classes[status]
    # Guard against callers passing a non-redirect status code.
    assert issubclass(response_class, http.HTTPRedirectResponse)
    raise response_class(self.request, location=location)
5f6aecdf910b2674ddb6bfaea5669c8df48ea89ee9c59feb2f0fcb74c58aadb7
def raise_basic_auth(self, realm, message=None): '\n Raise a 401 error for HTTP Basic authentication\n\n :Parameters:\n - `realm`: The realm to authenticate\n - `message`: Optional default overriding message\n\n :Types:\n - `realm`: ``str``\n - `message`: ``str``\n \n :Exceptions:\n - `http.AuthorizationRequired`: The 401 exception\n ' self.raise_error(401, message=message, auth_type='Basic', realm=realm)
Raise a 401 error for HTTP Basic authentication :Parameters: - `realm`: The realm to authenticate - `message`: Optional default overriding message :Types: - `realm`: ``str`` - `message`: ``str`` :Exceptions: - `http.AuthorizationRequired`: The 401 exception
wtf/app/response.py
raise_basic_auth
ndparker/wtf
1
python
def raise_basic_auth(self, realm, message=None): '\n Raise a 401 error for HTTP Basic authentication\n\n :Parameters:\n - `realm`: The realm to authenticate\n - `message`: Optional default overriding message\n\n :Types:\n - `realm`: ``str``\n - `message`: ``str``\n \n :Exceptions:\n - `http.AuthorizationRequired`: The 401 exception\n ' self.raise_error(401, message=message, auth_type='Basic', realm=realm)
def raise_basic_auth(self, realm, message=None): '\n Raise a 401 error for HTTP Basic authentication\n\n :Parameters:\n - `realm`: The realm to authenticate\n - `message`: Optional default overriding message\n\n :Types:\n - `realm`: ``str``\n - `message`: ``str``\n \n :Exceptions:\n - `http.AuthorizationRequired`: The 401 exception\n ' self.raise_error(401, message=message, auth_type='Basic', realm=realm)<|docstring|>Raise a 401 error for HTTP Basic authentication :Parameters: - `realm`: The realm to authenticate - `message`: Optional default overriding message :Types: - `realm`: ``str`` - `message`: ``str`` :Exceptions: - `http.AuthorizationRequired`: The 401 exception<|endoftext|>
4399788d10ed577e07442c3f70ee0a7f55d1f227bcf5fdc0e677969a3cdfd8e2
def first_write(towrite): ' First write flushes all ' if (self.write == first_write): resp_code = ('%03d %s' % self._status) headers = list(self.headers) self.write = start_response(resp_code, headers) return self.write(towrite)
First write flushes all
wtf/app/response.py
first_write
ndparker/wtf
1
python
def first_write(towrite): ' ' if (self.write == first_write): resp_code = ('%03d %s' % self._status) headers = list(self.headers) self.write = start_response(resp_code, headers) return self.write(towrite)
def first_write(towrite): ' ' if (self.write == first_write): resp_code = ('%03d %s' % self._status) headers = list(self.headers) self.write = start_response(resp_code, headers) return self.write(towrite)<|docstring|>First write flushes all<|endoftext|>