identifier (string, lengths 1-155) | parameters (string, lengths 2-6.09k) | docstring (string, lengths 11-63.4k) | docstring_summary (string, lengths 0-63.4k) | function (string, lengths 29-99.8k) | function_tokens (sequence) | start_point (sequence) | end_point (sequence) | language (string, 1 class) | docstring_language (string, lengths 2-7) | docstring_language_predictions (string, lengths 18-23) | is_langid_reliable (string, 2 classes)
---|---|---|---|---|---|---|---|---|---|---|---
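For working with rows like the ones below outside the viewer, here is a minimal loading sketch using the Hugging Face `datasets` library. The Hub identifier `user/code-docstring-corpus` is a placeholder assumption; this page does not show the dataset's actual name.

from datasets import load_dataset

# Hypothetical Hub identifier -- substitute the dataset's real name.
ds = load_dataset("user/code-docstring-corpus", split="train", streaming=True)

for row in ds.take(3):
    # Each row pairs a function body with its docstring, its token sequence,
    # and its source span (start_point / end_point as [line, column] pairs).
    print(row["identifier"], "->", row["docstring_summary"])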
DysonFilterLifeSensor.state | (self) | Return filter life in hours. | Return filter life in hours. | def state(self):
"""Return filter life in hours."""
if self._device.state:
return int(self._device.state.filter_life)
return None | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"self",
".",
"_device",
".",
"state",
":",
"return",
"int",
"(",
"self",
".",
"_device",
".",
"state",
".",
"filter_life",
")",
"return",
"None"
] | [
136,
4
] | [
140,
19
] | python | en | ['en', 'en', 'en'] | True |
DysonCarbonFilterLifeSensor.__init__ | (self, device) | Create a new Dyson Carbon Filter Life sensor. | Create a new Dyson Carbon Filter Life sensor. | def __init__(self, device):
"""Create a new Dyson Carbon Filter Life sensor."""
super().__init__(device, "carbon_filter_state")
self._name = f"{self._device.name} Carbon Filter Remaining Life" | [
"def",
"__init__",
"(",
"self",
",",
"device",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"device",
",",
"\"carbon_filter_state\"",
")",
"self",
".",
"_name",
"=",
"f\"{self._device.name} Carbon Filter Remaining Life\""
] | [
146,
4
] | [
149,
72
] | python | en | ['en', 'gl', 'en'] | True |
DysonCarbonFilterLifeSensor.state | (self) | Return filter life remaining in percent. | Return filter life remaining in percent. | def state(self):
"""Return filter life remaining in percent."""
if self._device.state:
return int(self._device.state.carbon_filter_state)
return None | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"self",
".",
"_device",
".",
"state",
":",
"return",
"int",
"(",
"self",
".",
"_device",
".",
"state",
".",
"carbon_filter_state",
")",
"return",
"None"
] | [
152,
4
] | [
156,
19
] | python | en | ['it', 'en', 'en'] | True |
DysonHepaFilterLifeSensor.__init__ | (self, device, filter_type="HEPA") | Create a new Dyson Filter Life sensor. | Create a new Dyson Filter Life sensor. | def __init__(self, device, filter_type="HEPA"):
"""Create a new Dyson Filter Life sensor."""
super().__init__(device, "hepa_filter_state")
self._name = f"{self._device.name} {filter_type} Filter Remaining Life" | [
"def",
"__init__",
"(",
"self",
",",
"device",
",",
"filter_type",
"=",
"\"HEPA\"",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"device",
",",
"\"hepa_filter_state\"",
")",
"self",
".",
"_name",
"=",
"f\"{self._device.name} {filter_type} Filter Remaining Life\""
] | [
162,
4
] | [
165,
79
] | python | en | ['en', 'gl', 'en'] | True |
DysonHepaFilterLifeSensor.state | (self) | Return filter life remaining in percent. | Return filter life remaining in percent. | def state(self):
"""Return filter life remaining in percent."""
if self._device.state:
return int(self._device.state.hepa_filter_state)
return None | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"self",
".",
"_device",
".",
"state",
":",
"return",
"int",
"(",
"self",
".",
"_device",
".",
"state",
".",
"hepa_filter_state",
")",
"return",
"None"
] | [
168,
4
] | [
172,
19
] | python | en | ['it', 'en', 'en'] | True |
DysonDustSensor.__init__ | (self, device) | Create a new Dyson Dust sensor. | Create a new Dyson Dust sensor. | def __init__(self, device):
"""Create a new Dyson Dust sensor."""
super().__init__(device, "dust")
self._name = f"{self._device.name} Dust" | [
"def",
"__init__",
"(",
"self",
",",
"device",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"device",
",",
"\"dust\"",
")",
"self",
".",
"_name",
"=",
"f\"{self._device.name} Dust\""
] | [
178,
4
] | [
181,
48
] | python | en | ['en', 'ga', 'en'] | True |
DysonDustSensor.state | (self) | Return Dust value. | Return Dust value. | def state(self):
"""Return Dust value."""
if self._device.environmental_state:
return self._device.environmental_state.dust
return None | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"self",
".",
"_device",
".",
"environmental_state",
":",
"return",
"self",
".",
"_device",
".",
"environmental_state",
".",
"dust",
"return",
"None"
] | [
184,
4
] | [
188,
19
] | python | en | ['en', 'no', 'en'] | True |
DysonHumiditySensor.__init__ | (self, device) | Create a new Dyson Humidity sensor. | Create a new Dyson Humidity sensor. | def __init__(self, device):
"""Create a new Dyson Humidity sensor."""
super().__init__(device, "humidity")
self._name = f"{self._device.name} Humidity" | [
"def",
"__init__",
"(",
"self",
",",
"device",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"device",
",",
"\"humidity\"",
")",
"self",
".",
"_name",
"=",
"f\"{self._device.name} Humidity\""
] | [
194,
4
] | [
197,
52
] | python | en | ['en', 'mg', 'en'] | True |
DysonHumiditySensor.state | (self) | Return Humidity value. | Return Humidity value. | def state(self):
"""Return Humidity value."""
if self._device.environmental_state:
if self._device.environmental_state.humidity == 0:
return STATE_OFF
return self._device.environmental_state.humidity
return None | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"self",
".",
"_device",
".",
"environmental_state",
":",
"if",
"self",
".",
"_device",
".",
"environmental_state",
".",
"humidity",
"==",
"0",
":",
"return",
"STATE_OFF",
"return",
"self",
".",
"_device",
".",
"environmental_state",
".",
"humidity",
"return",
"None"
] | [
200,
4
] | [
206,
19
] | python | en | ['en', 'et', 'en'] | True |
DysonTemperatureSensor.__init__ | (self, device, unit) | Create a new Dyson Temperature sensor. | Create a new Dyson Temperature sensor. | def __init__(self, device, unit):
"""Create a new Dyson Temperature sensor."""
super().__init__(device, "temperature")
self._name = f"{self._device.name} Temperature"
self._unit = unit | [
"def",
"__init__",
"(",
"self",
",",
"device",
",",
"unit",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"device",
",",
"\"temperature\"",
")",
"self",
".",
"_name",
"=",
"f\"{self._device.name} Temperature\"",
"self",
".",
"_unit",
"=",
"unit"
] | [
212,
4
] | [
216,
25
] | python | en | ['en', 'it', 'en'] | True |
DysonTemperatureSensor.state | (self) | Return Temperature value. | Return Temperature value. | def state(self):
"""Return Temperature value."""
if self._device.environmental_state:
temperature_kelvin = self._device.environmental_state.temperature
if temperature_kelvin == 0:
return STATE_OFF
if self._unit == TEMP_CELSIUS:
return float(f"{(temperature_kelvin - 273.15):.1f}")
return float(f"{(temperature_kelvin * 9 / 5 - 459.67):.1f}")
return None | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"self",
".",
"_device",
".",
"environmental_state",
":",
"temperature_kelvin",
"=",
"self",
".",
"_device",
".",
"environmental_state",
".",
"temperature",
"if",
"temperature_kelvin",
"==",
"0",
":",
"return",
"STATE_OFF",
"if",
"self",
".",
"_unit",
"==",
"TEMP_CELSIUS",
":",
"return",
"float",
"(",
"f\"{(temperature_kelvin - 273.15):.1f}\"",
")",
"return",
"float",
"(",
"f\"{(temperature_kelvin * 9 / 5 - 459.67):.1f}\"",
")",
"return",
"None"
] | [
219,
4
] | [
228,
19
] | python | en | ['en', 'la', 'en'] | True |
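The Kelvin-to-display conversion in DysonTemperatureSensor.state above is easy to verify by hand. A standalone sketch of the same rounding logic, assuming the Home Assistant constant TEMP_CELSIUS equals "°C":

def kelvin_to_display(temperature_kelvin, unit):
    """Mirror the sensor's formatting: convert, then round to one decimal."""
    if unit == "°C":  # TEMP_CELSIUS in Home Assistant
        return float(f"{(temperature_kelvin - 273.15):.1f}")
    return float(f"{(temperature_kelvin * 9 / 5 - 459.67):.1f}")

# 293.15 K is 20.0 degrees Celsius, which is 68.0 degrees Fahrenheit.
assert kelvin_to_display(293.15, "°C") == 20.0
assert kelvin_to_display(293.15, "°F") == 68.0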
DysonTemperatureSensor.unit_of_measurement | (self) | Return the unit the value is expressed in. | Return the unit the value is expressed in. | def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit | [
"def",
"unit_of_measurement",
"(",
"self",
")",
":",
"return",
"self",
".",
"_unit"
] | [
231,
4
] | [
233,
25
] | python | en | ['en', 'en', 'en'] | True |
DysonAirQualitySensor.__init__ | (self, device) | Create a new Dyson Air Quality sensor. | Create a new Dyson Air Quality sensor. | def __init__(self, device):
"""Create a new Dyson Air Quality sensor."""
super().__init__(device, "air_quality")
self._name = f"{self._device.name} AQI" | [
"def",
"__init__",
"(",
"self",
",",
"device",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"device",
",",
"\"air_quality\"",
")",
"self",
".",
"_name",
"=",
"f\"{self._device.name} AQI\""
] | [
239,
4
] | [
242,
47
] | python | en | ['en', 'ga', 'en'] | True |
DysonAirQualitySensor.state | (self) | Return Air Quality value. | Return Air Quality value. | def state(self):
"""Return Air Quality value."""
if self._device.environmental_state:
return int(self._device.environmental_state.volatil_organic_compounds)
return None | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"self",
".",
"_device",
".",
"environmental_state",
":",
"return",
"int",
"(",
"self",
".",
"_device",
".",
"environmental_state",
".",
"volatil_organic_compounds",
")",
"return",
"None"
] | [
245,
4
] | [
249,
19
] | python | en | ['en', 'gd', 'en'] | True |
async_describe_on_off_states | (
hass: HomeAssistantType, registry: GroupIntegrationRegistry
) | Describe group on off states. | Describe group on off states. | def async_describe_on_off_states(
hass: HomeAssistantType, registry: GroupIntegrationRegistry
) -> None:
"""Describe group on off states."""
registry.on_off_states({STATE_ON}, STATE_OFF) | [
"def",
"async_describe_on_off_states",
"(",
"hass",
":",
"HomeAssistantType",
",",
"registry",
":",
"GroupIntegrationRegistry",
")",
"->",
"None",
":",
"registry",
".",
"on_off_states",
"(",
"{",
"STATE_ON",
"}",
",",
"STATE_OFF",
")"
] | [
10,
0
] | [
14,
49
] | python | en | ['en', 'en', 'en'] | True |
conv2d | (x, W) | conv2d returns a 2d convolution layer with full stride. | conv2d returns a 2d convolution layer with full stride. | def conv2d(x, W):
"""conv2d returns a 2d convolution layer with full stride."""
return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME') | [
"def",
"conv2d",
"(",
"x",
",",
"W",
")",
":",
"return",
"tf",
".",
"nn",
".",
"conv2d",
"(",
"x",
",",
"W",
",",
"strides",
"=",
"[",
"1",
",",
"1",
",",
"1",
",",
"1",
"]",
",",
"padding",
"=",
"'SAME'",
")"
] | [
97,
0
] | [
99,
67
] | python | en | ['en', 'en', 'en'] | True |
max_pool | (x, pool_size) | max_pool downsamples a feature map by 2X. | max_pool downsamples a feature map by 2X. | def max_pool(x, pool_size):
"""max_pool downsamples a feature map by 2X."""
return tf.nn.max_pool(x, ksize=[1, pool_size, pool_size, 1], strides=[1,
pool_size, pool_size, 1], padding='SAME') | [
"def",
"max_pool",
"(",
"x",
",",
"pool_size",
")",
":",
"return",
"tf",
".",
"nn",
".",
"max_pool",
"(",
"x",
",",
"ksize",
"=",
"[",
"1",
",",
"pool_size",
",",
"pool_size",
",",
"1",
"]",
",",
"strides",
"=",
"[",
"1",
",",
"pool_size",
",",
"pool_size",
",",
"1",
"]",
",",
"padding",
"=",
"'SAME'",
")"
] | [
102,
0
] | [
105,
49
] | python | en | ['en', 'en', 'en'] | True |
weight_variable | (shape) | weight_variable generates a weight variable of a given shape. | weight_variable generates a weight variable of a given shape. | def weight_variable(shape):
"""weight_variable generates a weight variable of a given shape."""
initial = tf.truncated_normal(shape, stddev=0.1)
return tf.Variable(initial) | [
"def",
"weight_variable",
"(",
"shape",
")",
":",
"initial",
"=",
"tf",
".",
"truncated_normal",
"(",
"shape",
",",
"stddev",
"=",
"0.1",
")",
"return",
"tf",
".",
"Variable",
"(",
"initial",
")"
] | [
113,
0
] | [
116,
31
] | python | en | ['en', 'en', 'en'] | True |
bias_variable | (shape) | bias_variable generates a bias variable of a given shape. | bias_variable generates a bias variable of a given shape. | def bias_variable(shape):
"""bias_variable generates a bias variable of a given shape."""
initial = tf.constant(0.1, shape=shape)
return tf.Variable(initial) | [
"def",
"bias_variable",
"(",
"shape",
")",
":",
"initial",
"=",
"tf",
".",
"constant",
"(",
"0.1",
",",
"shape",
"=",
"shape",
")",
"return",
"tf",
".",
"Variable",
"(",
"initial",
")"
] | [
119,
0
] | [
122,
31
] | python | en | ['en', 'en', 'en'] | True |
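The four helpers above (conv2d, max_pool, weight_variable, bias_variable) compose naturally into a single convolution block. A minimal sketch against the TensorFlow 1.x API the helpers are written for; the 5x5 filter size is an assumption chosen for illustration:

import tensorflow as tf  # TensorFlow 1.x API, matching the helpers above

def conv_block(x, in_channels, out_channels, pool_size=2):
    """One 5x5 convolution with ReLU, then pool_size-times downsampling."""
    w = weight_variable([5, 5, in_channels, out_channels])
    b = bias_variable([out_channels])
    h = tf.nn.relu(conv2d(x, w) + b)
    return max_pool(h, pool_size)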
mock_weather | () | Mock weather data. | Mock weather data. | def mock_weather():
"""Mock weather data."""
with patch("metno.MetWeatherData") as mock_data:
mock_data = mock_data.return_value
mock_data.fetching_data = AsyncMock(return_value=True)
mock_data.get_current_weather.return_value = {
"condition": "cloudy",
"temperature": 15,
"pressure": 100,
"humidity": 50,
"wind_speed": 10,
"wind_bearing": "NE",
}
mock_data.get_forecast.return_value = {}
yield mock_data | [
"def",
"mock_weather",
"(",
")",
":",
"with",
"patch",
"(",
"\"metno.MetWeatherData\"",
")",
"as",
"mock_data",
":",
"mock_data",
"=",
"mock_data",
".",
"return_value",
"mock_data",
".",
"fetching_data",
"=",
"AsyncMock",
"(",
"return_value",
"=",
"True",
")",
"mock_data",
".",
"get_current_weather",
".",
"return_value",
"=",
"{",
"\"condition\"",
":",
"\"cloudy\"",
",",
"\"temperature\"",
":",
"15",
",",
"\"pressure\"",
":",
"100",
",",
"\"humidity\"",
":",
"50",
",",
"\"wind_speed\"",
":",
"10",
",",
"\"wind_bearing\"",
":",
"\"NE\"",
",",
"}",
"mock_data",
".",
"get_forecast",
".",
"return_value",
"=",
"{",
"}",
"yield",
"mock_data"
] | [
7,
0
] | [
21,
23
] | python | en | ['en', 'xh', 'en'] | True |
test_sending_location | (hass, create_registrations, webhook_client) | Test sending a location via a webhook. | Test sending a location via a webhook. | async def test_sending_location(hass, create_registrations, webhook_client):
"""Test sending a location via a webhook."""
resp = await webhook_client.post(
"/api/webhook/{}".format(create_registrations[1]["webhook_id"]),
json={
"type": "update_location",
"data": {
"gps": [10, 20],
"gps_accuracy": 30,
"battery": 40,
"altitude": 50,
"course": 60,
"speed": 70,
"vertical_accuracy": 80,
"location_name": "bar",
},
},
)
assert resp.status == 200
await hass.async_block_till_done()
state = hass.states.get("device_tracker.test_1_2")
assert state is not None
assert state.name == "Test 1"
assert state.state == "bar"
assert state.attributes["source_type"] == "gps"
assert state.attributes["latitude"] == 10
assert state.attributes["longitude"] == 20
assert state.attributes["gps_accuracy"] == 30
assert state.attributes["battery_level"] == 40
assert state.attributes["altitude"] == 50
assert state.attributes["course"] == 60
assert state.attributes["speed"] == 70
assert state.attributes["vertical_accuracy"] == 80
resp = await webhook_client.post(
"/api/webhook/{}".format(create_registrations[1]["webhook_id"]),
json={
"type": "update_location",
"data": {
"gps": [1, 2],
"gps_accuracy": 3,
"battery": 4,
"altitude": 5,
"course": 6,
"speed": 7,
"vertical_accuracy": 8,
},
},
)
assert resp.status == 200
await hass.async_block_till_done()
state = hass.states.get("device_tracker.test_1_2")
assert state is not None
assert state.state == "not_home"
assert state.attributes["source_type"] == "gps"
assert state.attributes["latitude"] == 1
assert state.attributes["longitude"] == 2
assert state.attributes["gps_accuracy"] == 3
assert state.attributes["battery_level"] == 4
assert state.attributes["altitude"] == 5
assert state.attributes["course"] == 6
assert state.attributes["speed"] == 7
assert state.attributes["vertical_accuracy"] == 8 | [
"async",
"def",
"test_sending_location",
"(",
"hass",
",",
"create_registrations",
",",
"webhook_client",
")",
":",
"resp",
"=",
"await",
"webhook_client",
".",
"post",
"(",
"\"/api/webhook/{}\"",
".",
"format",
"(",
"create_registrations",
"[",
"1",
"]",
"[",
"\"webhook_id\"",
"]",
")",
",",
"json",
"=",
"{",
"\"type\"",
":",
"\"update_location\"",
",",
"\"data\"",
":",
"{",
"\"gps\"",
":",
"[",
"10",
",",
"20",
"]",
",",
"\"gps_accuracy\"",
":",
"30",
",",
"\"battery\"",
":",
"40",
",",
"\"altitude\"",
":",
"50",
",",
"\"course\"",
":",
"60",
",",
"\"speed\"",
":",
"70",
",",
"\"vertical_accuracy\"",
":",
"80",
",",
"\"location_name\"",
":",
"\"bar\"",
",",
"}",
",",
"}",
",",
")",
"assert",
"resp",
".",
"status",
"==",
"200",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"device_tracker.test_1_2\"",
")",
"assert",
"state",
"is",
"not",
"None",
"assert",
"state",
".",
"name",
"==",
"\"Test 1\"",
"assert",
"state",
".",
"state",
"==",
"\"bar\"",
"assert",
"state",
".",
"attributes",
"[",
"\"source_type\"",
"]",
"==",
"\"gps\"",
"assert",
"state",
".",
"attributes",
"[",
"\"latitude\"",
"]",
"==",
"10",
"assert",
"state",
".",
"attributes",
"[",
"\"longitude\"",
"]",
"==",
"20",
"assert",
"state",
".",
"attributes",
"[",
"\"gps_accuracy\"",
"]",
"==",
"30",
"assert",
"state",
".",
"attributes",
"[",
"\"battery_level\"",
"]",
"==",
"40",
"assert",
"state",
".",
"attributes",
"[",
"\"altitude\"",
"]",
"==",
"50",
"assert",
"state",
".",
"attributes",
"[",
"\"course\"",
"]",
"==",
"60",
"assert",
"state",
".",
"attributes",
"[",
"\"speed\"",
"]",
"==",
"70",
"assert",
"state",
".",
"attributes",
"[",
"\"vertical_accuracy\"",
"]",
"==",
"80",
"resp",
"=",
"await",
"webhook_client",
".",
"post",
"(",
"\"/api/webhook/{}\"",
".",
"format",
"(",
"create_registrations",
"[",
"1",
"]",
"[",
"\"webhook_id\"",
"]",
")",
",",
"json",
"=",
"{",
"\"type\"",
":",
"\"update_location\"",
",",
"\"data\"",
":",
"{",
"\"gps\"",
":",
"[",
"1",
",",
"2",
"]",
",",
"\"gps_accuracy\"",
":",
"3",
",",
"\"battery\"",
":",
"4",
",",
"\"altitude\"",
":",
"5",
",",
"\"course\"",
":",
"6",
",",
"\"speed\"",
":",
"7",
",",
"\"vertical_accuracy\"",
":",
"8",
",",
"}",
",",
"}",
",",
")",
"assert",
"resp",
".",
"status",
"==",
"200",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"device_tracker.test_1_2\"",
")",
"assert",
"state",
"is",
"not",
"None",
"assert",
"state",
".",
"state",
"==",
"\"not_home\"",
"assert",
"state",
".",
"attributes",
"[",
"\"source_type\"",
"]",
"==",
"\"gps\"",
"assert",
"state",
".",
"attributes",
"[",
"\"latitude\"",
"]",
"==",
"1",
"assert",
"state",
".",
"attributes",
"[",
"\"longitude\"",
"]",
"==",
"2",
"assert",
"state",
".",
"attributes",
"[",
"\"gps_accuracy\"",
"]",
"==",
"3",
"assert",
"state",
".",
"attributes",
"[",
"\"battery_level\"",
"]",
"==",
"4",
"assert",
"state",
".",
"attributes",
"[",
"\"altitude\"",
"]",
"==",
"5",
"assert",
"state",
".",
"attributes",
"[",
"\"course\"",
"]",
"==",
"6",
"assert",
"state",
".",
"attributes",
"[",
"\"speed\"",
"]",
"==",
"7",
"assert",
"state",
".",
"attributes",
"[",
"\"vertical_accuracy\"",
"]",
"==",
"8"
] | [
3,
0
] | [
67,
53
] | python | en | ['en', 'lb', 'en'] | True |
test_restoring_location | (hass, create_registrations, webhook_client) | Test sending a location via a webhook. | Test sending a location via a webhook. | async def test_restoring_location(hass, create_registrations, webhook_client):
"""Test sending a location via a webhook."""
resp = await webhook_client.post(
"/api/webhook/{}".format(create_registrations[1]["webhook_id"]),
json={
"type": "update_location",
"data": {
"gps": [10, 20],
"gps_accuracy": 30,
"battery": 40,
"altitude": 50,
"course": 60,
"speed": 70,
"vertical_accuracy": 80,
"location_name": "bar",
},
},
)
assert resp.status == 200
await hass.async_block_till_done()
state_1 = hass.states.get("device_tracker.test_1_2")
assert state_1 is not None
config_entry = hass.config_entries.async_entries("mobile_app")[1]
# mobile app doesn't support unloading, so we just reload device tracker
await hass.config_entries.async_forward_entry_unload(config_entry, "device_tracker")
await hass.config_entries.async_forward_entry_setup(config_entry, "device_tracker")
await hass.async_block_till_done()
state_2 = hass.states.get("device_tracker.test_1_2")
assert state_2 is not None
assert state_1 is not state_2
assert state_2.name == "Test 1"
assert state_2.attributes["source_type"] == "gps"
assert state_2.attributes["latitude"] == 10
assert state_2.attributes["longitude"] == 20
assert state_2.attributes["gps_accuracy"] == 30
assert state_2.attributes["battery_level"] == 40
assert state_2.attributes["altitude"] == 50
assert state_2.attributes["course"] == 60
assert state_2.attributes["speed"] == 70
assert state_2.attributes["vertical_accuracy"] == 80 | [
"async",
"def",
"test_restoring_location",
"(",
"hass",
",",
"create_registrations",
",",
"webhook_client",
")",
":",
"resp",
"=",
"await",
"webhook_client",
".",
"post",
"(",
"\"/api/webhook/{}\"",
".",
"format",
"(",
"create_registrations",
"[",
"1",
"]",
"[",
"\"webhook_id\"",
"]",
")",
",",
"json",
"=",
"{",
"\"type\"",
":",
"\"update_location\"",
",",
"\"data\"",
":",
"{",
"\"gps\"",
":",
"[",
"10",
",",
"20",
"]",
",",
"\"gps_accuracy\"",
":",
"30",
",",
"\"battery\"",
":",
"40",
",",
"\"altitude\"",
":",
"50",
",",
"\"course\"",
":",
"60",
",",
"\"speed\"",
":",
"70",
",",
"\"vertical_accuracy\"",
":",
"80",
",",
"\"location_name\"",
":",
"\"bar\"",
",",
"}",
",",
"}",
",",
")",
"assert",
"resp",
".",
"status",
"==",
"200",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state_1",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"device_tracker.test_1_2\"",
")",
"assert",
"state_1",
"is",
"not",
"None",
"config_entry",
"=",
"hass",
".",
"config_entries",
".",
"async_entries",
"(",
"\"mobile_app\"",
")",
"[",
"1",
"]",
"# mobile app doesn't support unloading, so we just reload device tracker",
"await",
"hass",
".",
"config_entries",
".",
"async_forward_entry_unload",
"(",
"config_entry",
",",
"\"device_tracker\"",
")",
"await",
"hass",
".",
"config_entries",
".",
"async_forward_entry_setup",
"(",
"config_entry",
",",
"\"device_tracker\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state_2",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"device_tracker.test_1_2\"",
")",
"assert",
"state_2",
"is",
"not",
"None",
"assert",
"state_1",
"is",
"not",
"state_2",
"assert",
"state_2",
".",
"name",
"==",
"\"Test 1\"",
"assert",
"state_2",
".",
"attributes",
"[",
"\"source_type\"",
"]",
"==",
"\"gps\"",
"assert",
"state_2",
".",
"attributes",
"[",
"\"latitude\"",
"]",
"==",
"10",
"assert",
"state_2",
".",
"attributes",
"[",
"\"longitude\"",
"]",
"==",
"20",
"assert",
"state_2",
".",
"attributes",
"[",
"\"gps_accuracy\"",
"]",
"==",
"30",
"assert",
"state_2",
".",
"attributes",
"[",
"\"battery_level\"",
"]",
"==",
"40",
"assert",
"state_2",
".",
"attributes",
"[",
"\"altitude\"",
"]",
"==",
"50",
"assert",
"state_2",
".",
"attributes",
"[",
"\"course\"",
"]",
"==",
"60",
"assert",
"state_2",
".",
"attributes",
"[",
"\"speed\"",
"]",
"==",
"70",
"assert",
"state_2",
".",
"attributes",
"[",
"\"vertical_accuracy\"",
"]",
"==",
"80"
] | [
70,
0
] | [
114,
56
] | python | en | ['en', 'lb', 'en'] | True |
async_setup | (hass: HomeAssistantType, config: ConfigType) | Set up the Minecraft Server component. | Set up the Minecraft Server component. | async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
"""Set up the Minecraft Server component."""
return True | [
"async",
"def",
"async_setup",
"(",
"hass",
":",
"HomeAssistantType",
",",
"config",
":",
"ConfigType",
")",
"->",
"bool",
":",
"return",
"True"
] | [
28,
0
] | [
30,
15
] | python | en | ['en', 'en', 'en'] | True |
async_setup_entry | (hass: HomeAssistantType, config_entry: ConfigEntry) | Set up Minecraft Server from a config entry. | Set up Minecraft Server from a config entry. | async def async_setup_entry(hass: HomeAssistantType, config_entry: ConfigEntry) -> bool:
"""Set up Minecraft Server from a config entry."""
domain_data = hass.data.setdefault(DOMAIN, {})
# Create and store server instance.
unique_id = config_entry.unique_id
_LOGGER.debug(
"Creating server instance for '%s' (%s)",
config_entry.data[CONF_NAME],
config_entry.data[CONF_HOST],
)
server = MinecraftServer(hass, unique_id, config_entry.data)
domain_data[unique_id] = server
await server.async_update()
server.start_periodic_update()
# Set up platforms.
for platform in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, platform)
)
return True | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
":",
"HomeAssistantType",
",",
"config_entry",
":",
"ConfigEntry",
")",
"->",
"bool",
":",
"domain_data",
"=",
"hass",
".",
"data",
".",
"setdefault",
"(",
"DOMAIN",
",",
"{",
"}",
")",
"# Create and store server instance.",
"unique_id",
"=",
"config_entry",
".",
"unique_id",
"_LOGGER",
".",
"debug",
"(",
"\"Creating server instance for '%s' (%s)\"",
",",
"config_entry",
".",
"data",
"[",
"CONF_NAME",
"]",
",",
"config_entry",
".",
"data",
"[",
"CONF_HOST",
"]",
",",
")",
"server",
"=",
"MinecraftServer",
"(",
"hass",
",",
"unique_id",
",",
"config_entry",
".",
"data",
")",
"domain_data",
"[",
"unique_id",
"]",
"=",
"server",
"await",
"server",
".",
"async_update",
"(",
")",
"server",
".",
"start_periodic_update",
"(",
")",
"# Set up platforms.",
"for",
"platform",
"in",
"PLATFORMS",
":",
"hass",
".",
"async_create_task",
"(",
"hass",
".",
"config_entries",
".",
"async_forward_entry_setup",
"(",
"config_entry",
",",
"platform",
")",
")",
"return",
"True"
] | [
33,
0
] | [
55,
15
] | python | en | ['en', 'en', 'en'] | True |
async_unload_entry | (
hass: HomeAssistantType, config_entry: ConfigEntry
) | Unload Minecraft Server config entry. | Unload Minecraft Server config entry. | async def async_unload_entry(
hass: HomeAssistantType, config_entry: ConfigEntry
) -> bool:
"""Unload Minecraft Server config entry."""
unique_id = config_entry.unique_id
server = hass.data[DOMAIN][unique_id]
# Unload platforms.
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, platform)
for platform in PLATFORMS
]
)
# Clean up.
server.stop_periodic_update()
hass.data[DOMAIN].pop(unique_id)
return True | [
"async",
"def",
"async_unload_entry",
"(",
"hass",
":",
"HomeAssistantType",
",",
"config_entry",
":",
"ConfigEntry",
")",
"->",
"bool",
":",
"unique_id",
"=",
"config_entry",
".",
"unique_id",
"server",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"unique_id",
"]",
"# Unload platforms.",
"await",
"asyncio",
".",
"gather",
"(",
"*",
"[",
"hass",
".",
"config_entries",
".",
"async_forward_entry_unload",
"(",
"config_entry",
",",
"platform",
")",
"for",
"platform",
"in",
"PLATFORMS",
"]",
")",
"# Clean up.",
"server",
".",
"stop_periodic_update",
"(",
")",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
".",
"pop",
"(",
"unique_id",
")",
"return",
"True"
] | [
58,
0
] | [
77,
15
] | python | da | ['da', 'es', 'en'] | False |
MinecraftServer.__init__ | (
self, hass: HomeAssistantType, unique_id: str, config_data: ConfigType
) | Initialize server instance. | Initialize server instance. | def __init__(
self, hass: HomeAssistantType, unique_id: str, config_data: ConfigType
) -> None:
"""Initialize server instance."""
self._hass = hass
# Server data
self.unique_id = unique_id
self.name = config_data[CONF_NAME]
self.host = config_data[CONF_HOST]
self.port = config_data[CONF_PORT]
self.online = False
self._last_status_request_failed = False
self.srv_record_checked = False
# 3rd party library instance
self._mc_status = MCStatus(self.host, self.port)
# Data provided by 3rd party library
self.version = None
self.protocol_version = None
self.latency_time = None
self.players_online = None
self.players_max = None
self.players_list = None
# Dispatcher signal name
self.signal_name = f"{SIGNAL_NAME_PREFIX}_{self.unique_id}"
# Callback for stopping periodic update.
self._stop_periodic_update = None | [
"def",
"__init__",
"(",
"self",
",",
"hass",
":",
"HomeAssistantType",
",",
"unique_id",
":",
"str",
",",
"config_data",
":",
"ConfigType",
")",
"->",
"None",
":",
"self",
".",
"_hass",
"=",
"hass",
"# Server data",
"self",
".",
"unique_id",
"=",
"unique_id",
"self",
".",
"name",
"=",
"config_data",
"[",
"CONF_NAME",
"]",
"self",
".",
"host",
"=",
"config_data",
"[",
"CONF_HOST",
"]",
"self",
".",
"port",
"=",
"config_data",
"[",
"CONF_PORT",
"]",
"self",
".",
"online",
"=",
"False",
"self",
".",
"_last_status_request_failed",
"=",
"False",
"self",
".",
"srv_record_checked",
"=",
"False",
"# 3rd party library instance",
"self",
".",
"_mc_status",
"=",
"MCStatus",
"(",
"self",
".",
"host",
",",
"self",
".",
"port",
")",
"# Data provided by 3rd party library",
"self",
".",
"version",
"=",
"None",
"self",
".",
"protocol_version",
"=",
"None",
"self",
".",
"latency_time",
"=",
"None",
"self",
".",
"players_online",
"=",
"None",
"self",
".",
"players_max",
"=",
"None",
"self",
".",
"players_list",
"=",
"None",
"# Dispatcher signal name",
"self",
".",
"signal_name",
"=",
"f\"{SIGNAL_NAME_PREFIX}_{self.unique_id}\"",
"# Callback for stopping periodic update.",
"self",
".",
"_stop_periodic_update",
"=",
"None"
] | [
86,
4
] | [
116,
41
] | python | en | ['en', 'en', 'en'] | True |
MinecraftServer.start_periodic_update | (self) | Start periodic execution of update method. | Start periodic execution of update method. | def start_periodic_update(self) -> None:
"""Start periodic execution of update method."""
self._stop_periodic_update = async_track_time_interval(
self._hass, self.async_update, timedelta(seconds=SCAN_INTERVAL)
) | [
"def",
"start_periodic_update",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_stop_periodic_update",
"=",
"async_track_time_interval",
"(",
"self",
".",
"_hass",
",",
"self",
".",
"async_update",
",",
"timedelta",
"(",
"seconds",
"=",
"SCAN_INTERVAL",
")",
")"
] | [
118,
4
] | [
122,
9
] | python | en | ['en', 'en', 'en'] | True |
MinecraftServer.stop_periodic_update | (self) | Stop periodic execution of update method. | Stop periodic execution of update method. | def stop_periodic_update(self) -> None:
"""Stop periodic execution of update method."""
self._stop_periodic_update() | [
"def",
"stop_periodic_update",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_stop_periodic_update",
"(",
")"
] | [
124,
4
] | [
126,
36
] | python | en | ['en', 'en', 'en'] | True |
MinecraftServer.async_check_connection | (self) | Check server connection using a 'status' request and store connection status. | Check server connection using a 'status' request and store connection status. | async def async_check_connection(self) -> None:
"""Check server connection using a 'status' request and store connection status."""
# Check if host is a valid SRV record, if not already done.
if not self.srv_record_checked:
self.srv_record_checked = True
srv_record = await helpers.async_check_srv_record(self._hass, self.host)
if srv_record is not None:
_LOGGER.debug(
"'%s' is a valid Minecraft SRV record ('%s:%s')",
self.host,
srv_record[CONF_HOST],
srv_record[CONF_PORT],
)
# Overwrite host, port and 3rd party library instance
# with data extracted out of SRV record.
self.host = srv_record[CONF_HOST]
self.port = srv_record[CONF_PORT]
self._mc_status = MCStatus(self.host, self.port)
# Ping the server with a status request.
try:
await self._hass.async_add_executor_job(
self._mc_status.status, self._MAX_RETRIES_STATUS
)
self.online = True
except OSError as error:
_LOGGER.debug(
"Error occurred while trying to check the connection to '%s:%s' - OSError: %s",
self.host,
self.port,
error,
)
self.online = False | [
"async",
"def",
"async_check_connection",
"(",
"self",
")",
"->",
"None",
":",
"# Check if host is a valid SRV record, if not already done.",
"if",
"not",
"self",
".",
"srv_record_checked",
":",
"self",
".",
"srv_record_checked",
"=",
"True",
"srv_record",
"=",
"await",
"helpers",
".",
"async_check_srv_record",
"(",
"self",
".",
"_hass",
",",
"self",
".",
"host",
")",
"if",
"srv_record",
"is",
"not",
"None",
":",
"_LOGGER",
".",
"debug",
"(",
"\"'%s' is a valid Minecraft SRV record ('%s:%s')\"",
",",
"self",
".",
"host",
",",
"srv_record",
"[",
"CONF_HOST",
"]",
",",
"srv_record",
"[",
"CONF_PORT",
"]",
",",
")",
"# Overwrite host, port and 3rd party library instance",
"# with data extracted out of SRV record.",
"self",
".",
"host",
"=",
"srv_record",
"[",
"CONF_HOST",
"]",
"self",
".",
"port",
"=",
"srv_record",
"[",
"CONF_PORT",
"]",
"self",
".",
"_mc_status",
"=",
"MCStatus",
"(",
"self",
".",
"host",
",",
"self",
".",
"port",
")",
"# Ping the server with a status request.",
"try",
":",
"await",
"self",
".",
"_hass",
".",
"async_add_executor_job",
"(",
"self",
".",
"_mc_status",
".",
"status",
",",
"self",
".",
"_MAX_RETRIES_STATUS",
")",
"self",
".",
"online",
"=",
"True",
"except",
"OSError",
"as",
"error",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Error occurred while trying to check the connection to '%s:%s' - OSError: %s\"",
",",
"self",
".",
"host",
",",
"self",
".",
"port",
",",
"error",
",",
")",
"self",
".",
"online",
"=",
"False"
] | [
128,
4
] | [
160,
31
] | python | en | ['en', 'en', 'en'] | True |
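The helpers.async_check_srv_record call above is not shown in this excerpt. For the gist of what it does, here is a plausible synchronous equivalent using the third-party dnspython package; this is an illustration only, not the integration's actual helper:

import dns.resolver  # third-party package: dnspython

def check_srv_record(host):
    """Return (target_host, port) from a Minecraft SRV record, or None."""
    try:
        answers = dns.resolver.resolve(f"_minecraft._tcp.{host}", "SRV")
    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer, dns.resolver.NoNameservers):
        return None
    record = answers[0]  # take the first answer; real code might sort by priority
    return str(record.target).rstrip("."), record.port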
MinecraftServer.async_update | (self, now: datetime = None) | Get server data from 3rd party library and update properties. | Get server data from 3rd party library and update properties. | async def async_update(self, now: datetime = None) -> None:
"""Get server data from 3rd party library and update properties."""
# Check connection status.
server_online_old = self.online
await self.async_check_connection()
server_online = self.online
# Inform user once about connection state changes if necessary.
if server_online_old and not server_online:
_LOGGER.warning("Connection to '%s:%s' lost", self.host, self.port)
elif not server_online_old and server_online:
_LOGGER.info("Connection to '%s:%s' (re-)established", self.host, self.port)
# Update the server properties if server is online.
if server_online:
await self._async_status_request()
# Notify sensors about new data.
async_dispatcher_send(self._hass, self.signal_name) | [
"async",
"def",
"async_update",
"(",
"self",
",",
"now",
":",
"datetime",
"=",
"None",
")",
"->",
"None",
":",
"# Check connection status.",
"server_online_old",
"=",
"self",
".",
"online",
"await",
"self",
".",
"async_check_connection",
"(",
")",
"server_online",
"=",
"self",
".",
"online",
"# Inform user once about connection state changes if necessary.",
"if",
"server_online_old",
"and",
"not",
"server_online",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Connection to '%s:%s' lost\"",
",",
"self",
".",
"host",
",",
"self",
".",
"port",
")",
"elif",
"not",
"server_online_old",
"and",
"server_online",
":",
"_LOGGER",
".",
"info",
"(",
"\"Connection to '%s:%s' (re-)established\"",
",",
"self",
".",
"host",
",",
"self",
".",
"port",
")",
"# Update the server properties if server is online.",
"if",
"server_online",
":",
"await",
"self",
".",
"_async_status_request",
"(",
")",
"# Notify sensors about new data.",
"async_dispatcher_send",
"(",
"self",
".",
"_hass",
",",
"self",
".",
"signal_name",
")"
] | [
162,
4
] | [
180,
59
] | python | en | ['en', 'en', 'en'] | True |
MinecraftServer._async_status_request | (self) | Request server status and update properties. | Request server status and update properties. | async def _async_status_request(self) -> None:
"""Request server status and update properties."""
try:
status_response = await self._hass.async_add_executor_job(
self._mc_status.status, self._MAX_RETRIES_STATUS
)
# Got answer to request, update properties.
self.version = status_response.version.name
self.protocol_version = status_response.version.protocol
self.players_online = status_response.players.online
self.players_max = status_response.players.max
self.latency_time = status_response.latency
self.players_list = []
if status_response.players.sample is not None:
for player in status_response.players.sample:
self.players_list.append(player.name)
self.players_list.sort()
# Inform user once about successful update if necessary.
if self._last_status_request_failed:
_LOGGER.info(
"Updating the properties of '%s:%s' succeeded again",
self.host,
self.port,
)
self._last_status_request_failed = False
except OSError as error:
# No answer to request, set all properties to unknown.
self.version = None
self.protocol_version = None
self.players_online = None
self.players_max = None
self.latency_time = None
self.players_list = None
# Inform user once about failed update if necessary.
if not self._last_status_request_failed:
_LOGGER.warning(
"Updating the properties of '%s:%s' failed - OSError: %s",
self.host,
self.port,
error,
)
self._last_status_request_failed = True | [
"async",
"def",
"_async_status_request",
"(",
"self",
")",
"->",
"None",
":",
"try",
":",
"status_response",
"=",
"await",
"self",
".",
"_hass",
".",
"async_add_executor_job",
"(",
"self",
".",
"_mc_status",
".",
"status",
",",
"self",
".",
"_MAX_RETRIES_STATUS",
")",
"# Got answer to request, update properties.",
"self",
".",
"version",
"=",
"status_response",
".",
"version",
".",
"name",
"self",
".",
"protocol_version",
"=",
"status_response",
".",
"version",
".",
"protocol",
"self",
".",
"players_online",
"=",
"status_response",
".",
"players",
".",
"online",
"self",
".",
"players_max",
"=",
"status_response",
".",
"players",
".",
"max",
"self",
".",
"latency_time",
"=",
"status_response",
".",
"latency",
"self",
".",
"players_list",
"=",
"[",
"]",
"if",
"status_response",
".",
"players",
".",
"sample",
"is",
"not",
"None",
":",
"for",
"player",
"in",
"status_response",
".",
"players",
".",
"sample",
":",
"self",
".",
"players_list",
".",
"append",
"(",
"player",
".",
"name",
")",
"self",
".",
"players_list",
".",
"sort",
"(",
")",
"# Inform user once about successful update if necessary.",
"if",
"self",
".",
"_last_status_request_failed",
":",
"_LOGGER",
".",
"info",
"(",
"\"Updating the properties of '%s:%s' succeeded again\"",
",",
"self",
".",
"host",
",",
"self",
".",
"port",
",",
")",
"self",
".",
"_last_status_request_failed",
"=",
"False",
"except",
"OSError",
"as",
"error",
":",
"# No answer to request, set all properties to unknown.",
"self",
".",
"version",
"=",
"None",
"self",
".",
"protocol_version",
"=",
"None",
"self",
".",
"players_online",
"=",
"None",
"self",
".",
"players_max",
"=",
"None",
"self",
".",
"latency_time",
"=",
"None",
"self",
".",
"players_list",
"=",
"None",
"# Inform user once about failed update if necessary.",
"if",
"not",
"self",
".",
"_last_status_request_failed",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Updating the properties of '%s:%s' failed - OSError: %s\"",
",",
"self",
".",
"host",
",",
"self",
".",
"port",
",",
"error",
",",
")",
"self",
".",
"_last_status_request_failed",
"=",
"True"
] | [
182,
4
] | [
226,
51
] | python | en | ['en', 'en', 'en'] | True |
MinecraftServerEntity.__init__ | (
self, server: MinecraftServer, type_name: str, icon: str, device_class: str
) | Initialize base entity. | Initialize base entity. | def __init__(
self, server: MinecraftServer, type_name: str, icon: str, device_class: str
) -> None:
"""Initialize base entity."""
self._server = server
self._name = f"{server.name} {type_name}"
self._icon = icon
self._unique_id = f"{self._server.unique_id}-{type_name}"
self._device_info = {
"identifiers": {(DOMAIN, self._server.unique_id)},
"name": self._server.name,
"manufacturer": MANUFACTURER,
"model": f"Minecraft Server ({self._server.version})",
"sw_version": self._server.protocol_version,
}
self._device_class = device_class
self._device_state_attributes = None
self._disconnect_dispatcher = None | [
"def",
"__init__",
"(",
"self",
",",
"server",
":",
"MinecraftServer",
",",
"type_name",
":",
"str",
",",
"icon",
":",
"str",
",",
"device_class",
":",
"str",
")",
"->",
"None",
":",
"self",
".",
"_server",
"=",
"server",
"self",
".",
"_name",
"=",
"f\"{server.name} {type_name}\"",
"self",
".",
"_icon",
"=",
"icon",
"self",
".",
"_unique_id",
"=",
"f\"{self._server.unique_id}-{type_name}\"",
"self",
".",
"_device_info",
"=",
"{",
"\"identifiers\"",
":",
"{",
"(",
"DOMAIN",
",",
"self",
".",
"_server",
".",
"unique_id",
")",
"}",
",",
"\"name\"",
":",
"self",
".",
"_server",
".",
"name",
",",
"\"manufacturer\"",
":",
"MANUFACTURER",
",",
"\"model\"",
":",
"f\"Minecraft Server ({self._server.version})\"",
",",
"\"sw_version\"",
":",
"self",
".",
"_server",
".",
"protocol_version",
",",
"}",
"self",
".",
"_device_class",
"=",
"device_class",
"self",
".",
"_device_state_attributes",
"=",
"None",
"self",
".",
"_disconnect_dispatcher",
"=",
"None"
] | [
232,
4
] | [
249,
42
] | python | es | ['es', 'zu', 'it'] | False |
MinecraftServerEntity.name | (self) | Return name. | Return name. | def name(self) -> str:
"""Return name."""
return self._name | [
"def",
"name",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"_name"
] | [
252,
4
] | [
254,
25
] | python | en | ['en', 'ig', 'en'] | False |
MinecraftServerEntity.unique_id | (self) | Return unique ID. | Return unique ID. | def unique_id(self) -> str:
"""Return unique ID."""
return self._unique_id | [
"def",
"unique_id",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"_unique_id"
] | [
257,
4
] | [
259,
30
] | python | en | ['fr', 'la', 'en'] | False |
MinecraftServerEntity.device_info | (self) | Return device information. | Return device information. | def device_info(self) -> Dict[str, Any]:
"""Return device information."""
return self._device_info | [
"def",
"device_info",
"(",
"self",
")",
"->",
"Dict",
"[",
"str",
",",
"Any",
"]",
":",
"return",
"self",
".",
"_device_info"
] | [
262,
4
] | [
264,
32
] | python | da | ['es', 'da', 'en'] | False |
MinecraftServerEntity.device_class | (self) | Return device class. | Return device class. | def device_class(self) -> str:
"""Return device class."""
return self._device_class | [
"def",
"device_class",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"_device_class"
] | [
267,
4
] | [
269,
33
] | python | en | ['es', 'zh', 'en'] | False |
MinecraftServerEntity.icon | (self) | Return icon. | Return icon. | def icon(self) -> str:
"""Return icon."""
return self._icon | [
"def",
"icon",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"_icon"
] | [
272,
4
] | [
274,
25
] | python | en | ['en', 'la', 'en'] | False |
MinecraftServerEntity.should_poll | (self) | Disable polling. | Disable polling. | def should_poll(self) -> bool:
"""Disable polling."""
return False | [
"def",
"should_poll",
"(",
"self",
")",
"->",
"bool",
":",
"return",
"False"
] | [
277,
4
] | [
279,
20
] | python | en | ['fr', 'en', 'en'] | False |
MinecraftServerEntity.async_update | (self) | Fetch data from the server. | Fetch data from the server. | async def async_update(self) -> None:
"""Fetch data from the server."""
raise NotImplementedError() | [
"async",
"def",
"async_update",
"(",
"self",
")",
"->",
"None",
":",
"raise",
"NotImplementedError",
"(",
")"
] | [
281,
4
] | [
283,
35
] | python | en | ['en', 'en', 'en'] | True |
MinecraftServerEntity.async_added_to_hass | (self) | Connect dispatcher to signal from server. | Connect dispatcher to signal from server. | async def async_added_to_hass(self) -> None:
"""Connect dispatcher to signal from server."""
self._disconnect_dispatcher = async_dispatcher_connect(
self.hass, self._server.signal_name, self._update_callback
) | [
"async",
"def",
"async_added_to_hass",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_disconnect_dispatcher",
"=",
"async_dispatcher_connect",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_server",
".",
"signal_name",
",",
"self",
".",
"_update_callback",
")"
] | [
285,
4
] | [
289,
9
] | python | en | ['en', 'en', 'en'] | True |
MinecraftServerEntity.async_will_remove_from_hass | (self) | Disconnect dispatcher before removal. | Disconnect dispatcher before removal. | async def async_will_remove_from_hass(self) -> None:
"""Disconnect dispatcher before removal."""
self._disconnect_dispatcher() | [
"async",
"def",
"async_will_remove_from_hass",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_disconnect_dispatcher",
"(",
")"
] | [
291,
4
] | [
293,
37
] | python | en | ['en', 'en', 'en'] | True |
MinecraftServerEntity._update_callback | (self) | Triggers update of properties after receiving signal from server. | Triggers update of properties after receiving signal from server. | def _update_callback(self) -> None:
"""Triggers update of properties after receiving signal from server."""
self.async_schedule_update_ha_state(force_refresh=True) | [
"def",
"_update_callback",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"async_schedule_update_ha_state",
"(",
"force_refresh",
"=",
"True",
")"
] | [
296,
4
] | [
298,
63
] | python | en | ['en', 'en', 'en'] | True |
pyorbit_polychord | (config_in, input_datasets=None, return_output=None) | A dummy file is created to let the cpulimit script proceed with the next step | A dummy file is created to let the cpulimit script proceed with the next step | def pyorbit_polychord(config_in, input_datasets=None, return_output=None):
output_directory = './' + config_in['output'] + '/polychord/'
mc = ModelContainerPolyChord()
pars_input(config_in, mc, input_datasets)
if mc.nested_sampling_parameters['shutdown_jitter']:
for dataset_name, dataset in mc.dataset_dict.items():
dataset.shutdown_jitter()
mc.model_setup()
mc.create_variables_bounds()
mc.initialize_logchi2()
mc.create_starting_point()
results_analysis.results_resumen(mc, None, skip_theta=True)
mc.output_directory = output_directory
# os.system("mkdir -p " + output_directory + "/clusters")
# os.system("mkdir -p " +output_directory + "chains/clusters")
print()
print('Reference Time Tref: ', mc.Tref)
print()
print('*************************************************************')
print()
settings = PolyChordSettings(nDims=mc.ndim, nDerived=0)
settings.file_root = 'pyorbit'
settings.base_dir = output_directory
for key_name, key_value in mc.nested_sampling_parameters.items():
if hasattr(settings, key_name):
setattr(settings, key_name, key_value)
if 'nlive_mult' in mc.nested_sampling_parameters:
setattr(settings, 'nlive', mc.ndim * mc.nested_sampling_parameters['nlive_mult'])
if 'num_repeats_mult' in mc.nested_sampling_parameters:
setattr(settings, 'num_repeats', mc.ndim * mc.nested_sampling_parameters['num_repeats_mult'])
if 'include_priors' in mc.nested_sampling_parameters:
mc.include_priors = mc.nested_sampling_parameters['include_priors']
output = pypolychord.run_polychord(mc.polychord_call, nDims=mc.ndim, nDerived=0, settings=settings,
prior=mc.polychord_priors, dumper=dumper)
paramnames = [('p%i' % i, r'\theta_%i' % i) for i in range(mc.ndim)]
paramnames += [('r*', 'r')]
output.make_paramnames_files(paramnames)
nested_sampling_save_to_cpickle(mc)
print()
print('PolyChord COMPLETED')
print()
""" A dummy file is created to let the cpulimit script to proceed with the next step"""
nested_sampling_create_dummy_file(mc)
if return_output:
return mc
else:
return | [
"def",
"pyorbit_polychord",
"(",
"config_in",
",",
"input_datasets",
"=",
"None",
",",
"return_output",
"=",
"None",
")",
":",
"output_directory",
"=",
"'./'",
"+",
"config_in",
"[",
"'output'",
"]",
"+",
"'/polychord/'",
"mc",
"=",
"ModelContainerPolyChord",
"(",
")",
"pars_input",
"(",
"config_in",
",",
"mc",
",",
"input_datasets",
")",
"if",
"mc",
".",
"nested_sampling_parameters",
"[",
"'shutdown_jitter'",
"]",
":",
"for",
"dataset_name",
",",
"dataset",
"in",
"mc",
".",
"dataset_dict",
".",
"items",
"(",
")",
":",
"dataset",
".",
"shutdown_jitter",
"(",
")",
"mc",
".",
"model_setup",
"(",
")",
"mc",
".",
"create_variables_bounds",
"(",
")",
"mc",
".",
"initialize_logchi2",
"(",
")",
"mc",
".",
"create_starting_point",
"(",
")",
"results_analysis",
".",
"results_resumen",
"(",
"mc",
",",
"None",
",",
"skip_theta",
"=",
"True",
")",
"mc",
".",
"output_directory",
"=",
"output_directory",
"# os.system(\"mkdir -p \" + output_directory + \"/clusters\")",
"# os.system(\"mkdir -p \" +output_directory + \"chains/clusters\")",
"print",
"(",
")",
"print",
"(",
"'Reference Time Tref: '",
",",
"mc",
".",
"Tref",
")",
"print",
"(",
")",
"print",
"(",
"'*************************************************************'",
")",
"print",
"(",
")",
"settings",
"=",
"PolyChordSettings",
"(",
"nDims",
"=",
"mc",
".",
"ndim",
",",
"nDerived",
"=",
"0",
")",
"settings",
".",
"file_root",
"=",
"'pyorbit'",
"settings",
".",
"base_dir",
"=",
"output_directory",
"for",
"key_name",
",",
"key_value",
"in",
"mc",
".",
"nested_sampling_parameters",
".",
"items",
"(",
")",
":",
"if",
"hasattr",
"(",
"settings",
",",
"key_name",
")",
":",
"setattr",
"(",
"settings",
",",
"key_name",
",",
"key_value",
")",
"if",
"'nlive_mult'",
"in",
"mc",
".",
"nested_sampling_parameters",
":",
"setattr",
"(",
"settings",
",",
"'nlive'",
",",
"mc",
".",
"ndim",
"*",
"mc",
".",
"nested_sampling_parameters",
"[",
"'nlive_mult'",
"]",
")",
"if",
"'num_repeats_mult'",
"in",
"mc",
".",
"nested_sampling_parameters",
":",
"setattr",
"(",
"settings",
",",
"'num_repeats'",
",",
"mc",
".",
"ndim",
"*",
"mc",
".",
"nested_sampling_parameters",
"[",
"'num_repeats_mult'",
"]",
")",
"if",
"'include_priors'",
"in",
"mc",
".",
"nested_sampling_parameters",
":",
"mc",
".",
"include_priors",
"=",
"mc",
".",
"nested_sampling_parameters",
"[",
"'include_priors'",
"]",
"output",
"=",
"pypolychord",
".",
"run_polychord",
"(",
"mc",
".",
"polychord_call",
",",
"nDims",
"=",
"mc",
".",
"ndim",
",",
"nDerived",
"=",
"0",
",",
"settings",
"=",
"settings",
",",
"prior",
"=",
"mc",
".",
"polychord_priors",
",",
"dumper",
"=",
"dumper",
")",
"paramnames",
"=",
"[",
"(",
"'p%i'",
"%",
"i",
",",
"r'\\theta_%i'",
"%",
"i",
")",
"for",
"i",
"in",
"range",
"(",
"mc",
".",
"ndim",
")",
"]",
"paramnames",
"+=",
"[",
"(",
"'r*'",
",",
"'r'",
")",
"]",
"output",
".",
"make_paramnames_files",
"(",
"paramnames",
")",
"nested_sampling_save_to_cpickle",
"(",
"mc",
")",
"print",
"(",
")",
"print",
"(",
"'PolyChord COMPLETED'",
")",
"print",
"(",
")",
"nested_sampling_create_dummy_file",
"(",
"mc",
")",
"if",
"return_output",
":",
"return",
"mc",
"else",
":",
"return"
] | [
27,
0
] | [
94,
14
] | python | en | ['en', 'en', 'en'] | True |
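The dumper callback handed to pypolychord.run_polychord above is defined elsewhere in the module. PolyChord invokes it periodically with the current live and dead points; a minimal sketch with the signature used in the pypolychord examples (treat the exact signature as an assumption, since it varies across releases):

def dumper(live, dead, logweights, logZ, logZerr):
    """Report progress each time PolyChord dumps its state."""
    print("Last live point:", live[-1])
    print("log-evidence estimate:", logZ, "+/-", logZerr)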
async_get_engine | (hass, config, discovery_info=None) | Set up Google Cloud TTS component. | Set up Google Cloud TTS component. | async def async_get_engine(hass, config, discovery_info=None):
"""Set up Google Cloud TTS component."""
key_file = config.get(CONF_KEY_FILE)
if key_file:
key_file = hass.config.path(key_file)
if not os.path.isfile(key_file):
_LOGGER.error("File %s doesn't exist", key_file)
return None
return GoogleCloudTTSProvider(
hass,
key_file,
config.get(CONF_LANG),
config.get(CONF_GENDER),
config.get(CONF_VOICE),
config.get(CONF_ENCODING),
config.get(CONF_SPEED),
config.get(CONF_PITCH),
config.get(CONF_GAIN),
config.get(CONF_PROFILES),
) | [
"async",
"def",
"async_get_engine",
"(",
"hass",
",",
"config",
",",
"discovery_info",
"=",
"None",
")",
":",
"key_file",
"=",
"config",
".",
"get",
"(",
"CONF_KEY_FILE",
")",
"if",
"key_file",
":",
"key_file",
"=",
"hass",
".",
"config",
".",
"path",
"(",
"key_file",
")",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"key_file",
")",
":",
"_LOGGER",
".",
"error",
"(",
"\"File %s doesn't exist\"",
",",
"key_file",
")",
"return",
"None",
"return",
"GoogleCloudTTSProvider",
"(",
"hass",
",",
"key_file",
",",
"config",
".",
"get",
"(",
"CONF_LANG",
")",
",",
"config",
".",
"get",
"(",
"CONF_GENDER",
")",
",",
"config",
".",
"get",
"(",
"CONF_VOICE",
")",
",",
"config",
".",
"get",
"(",
"CONF_ENCODING",
")",
",",
"config",
".",
"get",
"(",
"CONF_SPEED",
")",
",",
"config",
".",
"get",
"(",
"CONF_PITCH",
")",
",",
"config",
".",
"get",
"(",
"CONF_GAIN",
")",
",",
"config",
".",
"get",
"(",
"CONF_PROFILES",
")",
",",
")"
] | [
134,
0
] | [
154,
5
] | python | en | ['en', 'ca', 'en'] | True |
GoogleCloudTTSProvider.__init__ | (
self,
hass,
key_file=None,
language=DEFAULT_LANG,
gender=DEFAULT_GENDER,
voice=DEFAULT_VOICE,
encoding=DEFAULT_ENCODING,
speed=1.0,
pitch=0,
gain=0,
profiles=None,
) | Init Google Cloud TTS service. | Init Google Cloud TTS service. | def __init__(
self,
hass,
key_file=None,
language=DEFAULT_LANG,
gender=DEFAULT_GENDER,
voice=DEFAULT_VOICE,
encoding=DEFAULT_ENCODING,
speed=1.0,
pitch=0,
gain=0,
profiles=None,
):
"""Init Google Cloud TTS service."""
self.hass = hass
self.name = "Google Cloud TTS"
self._language = language
self._gender = gender
self._voice = voice
self._encoding = encoding
self._speed = speed
self._pitch = pitch
self._gain = gain
self._profiles = profiles
if key_file:
self._client = texttospeech.TextToSpeechClient.from_service_account_json(
key_file
)
else:
self._client = texttospeech.TextToSpeechClient() | [
"def",
"__init__",
"(",
"self",
",",
"hass",
",",
"key_file",
"=",
"None",
",",
"language",
"=",
"DEFAULT_LANG",
",",
"gender",
"=",
"DEFAULT_GENDER",
",",
"voice",
"=",
"DEFAULT_VOICE",
",",
"encoding",
"=",
"DEFAULT_ENCODING",
",",
"speed",
"=",
"1.0",
",",
"pitch",
"=",
"0",
",",
"gain",
"=",
"0",
",",
"profiles",
"=",
"None",
",",
")",
":",
"self",
".",
"hass",
"=",
"hass",
"self",
".",
"name",
"=",
"\"Google Cloud TTS\"",
"self",
".",
"_language",
"=",
"language",
"self",
".",
"_gender",
"=",
"gender",
"self",
".",
"_voice",
"=",
"voice",
"self",
".",
"_encoding",
"=",
"encoding",
"self",
".",
"_speed",
"=",
"speed",
"self",
".",
"_pitch",
"=",
"pitch",
"self",
".",
"_gain",
"=",
"gain",
"self",
".",
"_profiles",
"=",
"profiles",
"if",
"key_file",
":",
"self",
".",
"_client",
"=",
"texttospeech",
".",
"TextToSpeechClient",
".",
"from_service_account_json",
"(",
"key_file",
")",
"else",
":",
"self",
".",
"_client",
"=",
"texttospeech",
".",
"TextToSpeechClient",
"(",
")"
] | [
160,
4
] | [
190,
60
] | python | ca | ['nl', 'ca', 'en'] | False |
GoogleCloudTTSProvider.supported_languages | (self) | Return list of supported languages. | Return list of supported languages. | def supported_languages(self):
"""Return list of supported languages."""
return SUPPORTED_LANGUAGES | [
"def",
"supported_languages",
"(",
"self",
")",
":",
"return",
"SUPPORTED_LANGUAGES"
] | [
193,
4
] | [
195,
34
] | python | en | ['en', 'en', 'en'] | True |
GoogleCloudTTSProvider.default_language | (self) | Return the default language. | Return the default language. | def default_language(self):
"""Return the default language."""
return self._language | [
"def",
"default_language",
"(",
"self",
")",
":",
"return",
"self",
".",
"_language"
] | [
198,
4
] | [
200,
29
] | python | en | ['en', 'et', 'en'] | True |
GoogleCloudTTSProvider.supported_options | (self) | Return a list of supported options. | Return a list of supported options. | def supported_options(self):
"""Return a list of supported options."""
return SUPPORTED_OPTIONS | [
"def",
"supported_options",
"(",
"self",
")",
":",
"return",
"SUPPORTED_OPTIONS"
] | [
203,
4
] | [
205,
32
] | python | en | ['en', 'en', 'en'] | True |
GoogleCloudTTSProvider.default_options | (self) | Return a dict including default options. | Return a dict including default options. | def default_options(self):
"""Return a dict including default options."""
return {
CONF_GENDER: self._gender,
CONF_VOICE: self._voice,
CONF_ENCODING: self._encoding,
CONF_SPEED: self._speed,
CONF_PITCH: self._pitch,
CONF_GAIN: self._gain,
CONF_PROFILES: self._profiles,
} | [
"def",
"default_options",
"(",
"self",
")",
":",
"return",
"{",
"CONF_GENDER",
":",
"self",
".",
"_gender",
",",
"CONF_VOICE",
":",
"self",
".",
"_voice",
",",
"CONF_ENCODING",
":",
"self",
".",
"_encoding",
",",
"CONF_SPEED",
":",
"self",
".",
"_speed",
",",
"CONF_PITCH",
":",
"self",
".",
"_pitch",
",",
"CONF_GAIN",
":",
"self",
".",
"_gain",
",",
"CONF_PROFILES",
":",
"self",
".",
"_profiles",
",",
"}"
] | [
208,
4
] | [
218,
9
] | python | ca | ['id', 'ca', 'en'] | False |
GoogleCloudTTSProvider.async_get_tts_audio | (self, message, language, options=None) | Load TTS from google. | Load TTS from google. | async def async_get_tts_audio(self, message, language, options=None):
"""Load TTS from google."""
options_schema = vol.Schema(
{
vol.Optional(CONF_GENDER, default=self._gender): GENDER_SCHEMA,
vol.Optional(CONF_VOICE, default=self._voice): VOICE_SCHEMA,
vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): SCHEMA_ENCODING,
vol.Optional(CONF_SPEED, default=self._speed): SPEED_SCHEMA,
vol.Optional(CONF_PITCH, default=self._pitch): SPEED_SCHEMA,
vol.Optional(CONF_GAIN, default=DEFAULT_GAIN): GAIN_SCHEMA,
vol.Optional(CONF_PROFILES, default=[]): PROFILES_SCHEMA,
}
)
options = options_schema(options)
_encoding = options[CONF_ENCODING]
_voice = options[CONF_VOICE]
if _voice and not _voice.startswith(language):
language = _voice[:5]
try:
# pylint: disable=no-member
synthesis_input = texttospeech.types.SynthesisInput(text=message)
voice = texttospeech.types.VoiceSelectionParams(
language_code=language,
ssml_gender=texttospeech.enums.SsmlVoiceGender[options[CONF_GENDER]],
name=_voice,
)
audio_config = texttospeech.types.AudioConfig(
audio_encoding=texttospeech.enums.AudioEncoding[_encoding],
speaking_rate=options.get(CONF_SPEED),
pitch=options.get(CONF_PITCH),
volume_gain_db=options.get(CONF_GAIN),
effects_profile_id=options.get(CONF_PROFILES),
)
# pylint: enable=no-member
with async_timeout.timeout(10, loop=self.hass.loop):
response = await self.hass.async_add_executor_job(
self._client.synthesize_speech, synthesis_input, voice, audio_config
)
return _encoding, response.audio_content
except asyncio.TimeoutError as ex:
_LOGGER.error("Timeout for Google Cloud TTS call: %s", ex)
except Exception as ex: # pylint: disable=broad-except
_LOGGER.exception("Error occurred during Google Cloud TTS call: %s", ex)
return None, None | [
"async",
"def",
"async_get_tts_audio",
"(",
"self",
",",
"message",
",",
"language",
",",
"options",
"=",
"None",
")",
":",
"options_schema",
"=",
"vol",
".",
"Schema",
"(",
"{",
"vol",
".",
"Optional",
"(",
"CONF_GENDER",
",",
"default",
"=",
"self",
".",
"_gender",
")",
":",
"GENDER_SCHEMA",
",",
"vol",
".",
"Optional",
"(",
"CONF_VOICE",
",",
"default",
"=",
"self",
".",
"_voice",
")",
":",
"VOICE_SCHEMA",
",",
"vol",
".",
"Optional",
"(",
"CONF_ENCODING",
",",
"default",
"=",
"DEFAULT_ENCODING",
")",
":",
"SCHEMA_ENCODING",
",",
"vol",
".",
"Optional",
"(",
"CONF_SPEED",
",",
"default",
"=",
"self",
".",
"_speed",
")",
":",
"SPEED_SCHEMA",
",",
"vol",
".",
"Optional",
"(",
"CONF_PITCH",
",",
"default",
"=",
"self",
".",
"_speed",
")",
":",
"SPEED_SCHEMA",
",",
"vol",
".",
"Optional",
"(",
"CONF_GAIN",
",",
"default",
"=",
"DEFAULT_GAIN",
")",
":",
"GAIN_SCHEMA",
",",
"vol",
".",
"Optional",
"(",
"CONF_PROFILES",
",",
"default",
"=",
"[",
"]",
")",
":",
"PROFILES_SCHEMA",
",",
"}",
")",
"options",
"=",
"options_schema",
"(",
"options",
")",
"_encoding",
"=",
"options",
"[",
"CONF_ENCODING",
"]",
"_voice",
"=",
"options",
"[",
"CONF_VOICE",
"]",
"if",
"_voice",
"and",
"not",
"_voice",
".",
"startswith",
"(",
"language",
")",
":",
"language",
"=",
"_voice",
"[",
":",
"5",
"]",
"try",
":",
"# pylint: disable=no-member",
"synthesis_input",
"=",
"texttospeech",
".",
"types",
".",
"SynthesisInput",
"(",
"text",
"=",
"message",
")",
"voice",
"=",
"texttospeech",
".",
"types",
".",
"VoiceSelectionParams",
"(",
"language_code",
"=",
"language",
",",
"ssml_gender",
"=",
"texttospeech",
".",
"enums",
".",
"SsmlVoiceGender",
"[",
"options",
"[",
"CONF_GENDER",
"]",
"]",
",",
"name",
"=",
"_voice",
",",
")",
"audio_config",
"=",
"texttospeech",
".",
"types",
".",
"AudioConfig",
"(",
"audio_encoding",
"=",
"texttospeech",
".",
"enums",
".",
"AudioEncoding",
"[",
"_encoding",
"]",
",",
"speaking_rate",
"=",
"options",
".",
"get",
"(",
"CONF_SPEED",
")",
",",
"pitch",
"=",
"options",
".",
"get",
"(",
"CONF_PITCH",
")",
",",
"volume_gain_db",
"=",
"options",
".",
"get",
"(",
"CONF_GAIN",
")",
",",
"effects_profile_id",
"=",
"options",
".",
"get",
"(",
"CONF_PROFILES",
")",
",",
")",
"# pylint: enable=no-member",
"with",
"async_timeout",
".",
"timeout",
"(",
"10",
",",
"loop",
"=",
"self",
".",
"hass",
".",
"loop",
")",
":",
"response",
"=",
"await",
"self",
".",
"hass",
".",
"async_add_executor_job",
"(",
"self",
".",
"_client",
".",
"synthesize_speech",
",",
"synthesis_input",
",",
"voice",
",",
"audio_config",
")",
"return",
"_encoding",
",",
"response",
".",
"audio_content",
"except",
"asyncio",
".",
"TimeoutError",
"as",
"ex",
":",
"_LOGGER",
".",
"error",
"(",
"\"Timeout for Google Cloud TTS call: %s\"",
",",
"ex",
")",
"except",
"Exception",
"as",
"ex",
":",
"# pylint: disable=broad-except",
"_LOGGER",
".",
"exception",
"(",
"\"Error occurred during Google Cloud TTS call: %s\"",
",",
"ex",
")",
"return",
"None",
",",
"None"
] | [
220,
4
] | [
270,
25
] | python | en | ['en', 'en', 'en'] | True |
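
One subtle step above is the language/voice reconciliation: when an explicit voice name does not start with the requested language code, the language is re-derived from the first five characters of the voice name. A standalone sketch of that rule (Google Cloud voice names begin with a BCP-47 tag such as "en-US"):

def reconcile_language(language: str, voice: str) -> str:
    # Mirrors the `_voice[:5]` logic above: trust the voice's own locale.
    if voice and not voice.startswith(language):
        return voice[:5]
    return language

assert reconcile_language("en-US", "nl-NL-Wavenet-B") == "nl-NL"
assert reconcile_language("en-US", "en-US-Wavenet-D") == "en-US"
assert reconcile_language("en-US", "") == "en-US"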
device_reg | (hass) | Return an empty, loaded, registry. | Return an empty, loaded, registry. | def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass) | [
"def",
"device_reg",
"(",
"hass",
")",
":",
"return",
"mock_device_registry",
"(",
"hass",
")"
] | [
24,
0
] | [
26,
37
] | python | en | ['en', 'fy', 'en'] | True |
entity_reg | (hass) | Return an empty, loaded, registry. | Return an empty, loaded, registry. | def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass) | [
"def",
"entity_reg",
"(",
"hass",
")",
":",
"return",
"mock_registry",
"(",
"hass",
")"
] | [
30,
0
] | [
32,
30
] | python | en | ['en', 'fy', 'en'] | True |
calls | (hass) | Track calls to a mock service. | Track calls to a mock service. | def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation") | [
"def",
"calls",
"(",
"hass",
")",
":",
"return",
"async_mock_service",
"(",
"hass",
",",
"\"test\"",
",",
"\"automation\"",
")"
] | [
36,
0
] | [
38,
57
] | python | en | ['en', 'en', 'en'] | True |
test_get_conditions | (hass, device_reg, entity_reg) | Test we get the expected conditions from a vacuum. | Test we get the expected conditions from a vacuum. | async def test_get_conditions(hass, device_reg, entity_reg):
"""Test we get the expected conditions from a vacuum."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": "is_cleaning",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"condition": "device",
"domain": DOMAIN,
"type": "is_docked",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
]
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert_lists_same(conditions, expected_conditions) | [
"async",
"def",
"test_get_conditions",
"(",
"hass",
",",
"device_reg",
",",
"entity_reg",
")",
":",
"config_entry",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"\"test\"",
",",
"data",
"=",
"{",
"}",
")",
"config_entry",
".",
"add_to_hass",
"(",
"hass",
")",
"device_entry",
"=",
"device_reg",
".",
"async_get_or_create",
"(",
"config_entry_id",
"=",
"config_entry",
".",
"entry_id",
",",
"connections",
"=",
"{",
"(",
"device_registry",
".",
"CONNECTION_NETWORK_MAC",
",",
"\"12:34:56:AB:CD:EF\"",
")",
"}",
",",
")",
"entity_reg",
".",
"async_get_or_create",
"(",
"DOMAIN",
",",
"\"test\"",
",",
"\"5678\"",
",",
"device_id",
"=",
"device_entry",
".",
"id",
")",
"expected_conditions",
"=",
"[",
"{",
"\"condition\"",
":",
"\"device\"",
",",
"\"domain\"",
":",
"DOMAIN",
",",
"\"type\"",
":",
"\"is_cleaning\"",
",",
"\"device_id\"",
":",
"device_entry",
".",
"id",
",",
"\"entity_id\"",
":",
"f\"{DOMAIN}.test_5678\"",
",",
"}",
",",
"{",
"\"condition\"",
":",
"\"device\"",
",",
"\"domain\"",
":",
"DOMAIN",
",",
"\"type\"",
":",
"\"is_docked\"",
",",
"\"device_id\"",
":",
"device_entry",
".",
"id",
",",
"\"entity_id\"",
":",
"f\"{DOMAIN}.test_5678\"",
",",
"}",
",",
"]",
"conditions",
"=",
"await",
"async_get_device_automations",
"(",
"hass",
",",
"\"condition\"",
",",
"device_entry",
".",
"id",
")",
"assert_lists_same",
"(",
"conditions",
",",
"expected_conditions",
")"
] | [
41,
0
] | [
67,
54
] | python | en | ['en', 'en', 'en'] | True |
test_if_state | (hass, calls) | Test for is_cleaning and is_docked conditions. | Test for is_cleaning and is_docked conditions. | async def test_if_state(hass, calls):
    """Test for is_cleaning and is_docked conditions."""
hass.states.async_set("vacuum.entity", STATE_DOCKED)
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "vacuum.entity",
"type": "is_cleaning",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_cleaning - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event2"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "vacuum.entity",
"type": "is_docked",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_docked - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
]
},
)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_docked - event - test_event2"
hass.states.async_set("vacuum.entity", STATE_CLEANING)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "is_cleaning - event - test_event1"
# Returning means it's still cleaning
hass.states.async_set("vacuum.entity", STATE_RETURNING)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 3
assert calls[2].data["some"] == "is_cleaning - event - test_event1" | [
"async",
"def",
"test_if_state",
"(",
"hass",
",",
"calls",
")",
":",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"vacuum.entity\"",
",",
"STATE_DOCKED",
")",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"automation",
".",
"DOMAIN",
",",
"{",
"automation",
".",
"DOMAIN",
":",
"[",
"{",
"\"trigger\"",
":",
"{",
"\"platform\"",
":",
"\"event\"",
",",
"\"event_type\"",
":",
"\"test_event1\"",
"}",
",",
"\"condition\"",
":",
"[",
"{",
"\"condition\"",
":",
"\"device\"",
",",
"\"domain\"",
":",
"DOMAIN",
",",
"\"device_id\"",
":",
"\"\"",
",",
"\"entity_id\"",
":",
"\"vacuum.entity\"",
",",
"\"type\"",
":",
"\"is_cleaning\"",
",",
"}",
"]",
",",
"\"action\"",
":",
"{",
"\"service\"",
":",
"\"test.automation\"",
",",
"\"data_template\"",
":",
"{",
"\"some\"",
":",
"\"is_cleaning - {{ trigger.platform }} - {{ trigger.event.event_type }}\"",
"}",
",",
"}",
",",
"}",
",",
"{",
"\"trigger\"",
":",
"{",
"\"platform\"",
":",
"\"event\"",
",",
"\"event_type\"",
":",
"\"test_event2\"",
"}",
",",
"\"condition\"",
":",
"[",
"{",
"\"condition\"",
":",
"\"device\"",
",",
"\"domain\"",
":",
"DOMAIN",
",",
"\"device_id\"",
":",
"\"\"",
",",
"\"entity_id\"",
":",
"\"vacuum.entity\"",
",",
"\"type\"",
":",
"\"is_docked\"",
",",
"}",
"]",
",",
"\"action\"",
":",
"{",
"\"service\"",
":",
"\"test.automation\"",
",",
"\"data_template\"",
":",
"{",
"\"some\"",
":",
"\"is_docked - {{ trigger.platform }} - {{ trigger.event.event_type }}\"",
"}",
",",
"}",
",",
"}",
",",
"]",
"}",
",",
")",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"test_event1\"",
")",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"test_event2\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"len",
"(",
"calls",
")",
"==",
"1",
"assert",
"calls",
"[",
"0",
"]",
".",
"data",
"[",
"\"some\"",
"]",
"==",
"\"is_docked - event - test_event2\"",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"vacuum.entity\"",
",",
"STATE_CLEANING",
")",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"test_event1\"",
")",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"test_event2\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"len",
"(",
"calls",
")",
"==",
"2",
"assert",
"calls",
"[",
"1",
"]",
".",
"data",
"[",
"\"some\"",
"]",
"==",
"\"is_cleaning - event - test_event1\"",
"# Returning means it's still cleaning",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"vacuum.entity\"",
",",
"STATE_RETURNING",
")",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"test_event1\"",
")",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"test_event2\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"len",
"(",
"calls",
")",
"==",
"3",
"assert",
"calls",
"[",
"2",
"]",
".",
"data",
"[",
"\"some\"",
"]",
"==",
"\"is_cleaning - event - test_event1\""
] | [
70,
0
] | [
137,
71
] | python | en | ['en', 'en', 'en'] | True |
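
The last assertions encode a state-to-condition mapping worth spelling out: "returning" still satisfies is_cleaning. A standalone sketch inferred from the assertions above (the real mapping lives in the vacuum device-condition implementation):

def is_cleaning(state: str) -> bool:
    # "returning" counts as cleaning, per the test's third event round.
    return state in {"cleaning", "returning"}

def is_docked(state: str) -> bool:
    return state == "docked"

assert is_cleaning("returning") and not is_docked("returning")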
RefCOCO.__init__ | (self, image_set, root_path, data_path, boxes='gt', proposal_source='official',
transform=None, test_mode=False,
zip_mode=False, cache_mode=False, cache_db=False, ignore_db_cache=True,
tokenizer=None, pretrained_model_name=None,
add_image_as_a_box=False, mask_size=(14, 14),
aspect_grouping=False, **kwargs) |
RefCOCO+ Dataset
:param image_set: image folder name
:param root_path: root path to cache database loaded from annotation file
:param data_path: path to dataset
:param boxes: boxes to use, 'gt' or 'proposal'
:param transform: transform
:param test_mode: test mode means no labels available
:param zip_mode: reading images and metadata in zip archive
    :param cache_mode: cache whole dataset to RAM first, then __getitem__ reads them from RAM
:param ignore_db_cache: ignore previous cached database, reload it from annotation file
:param tokenizer: default is BertTokenizer from pytorch_pretrained_bert
:param add_image_as_a_box: add whole image as a box
:param mask_size: size of instance mask of each object
    :param aspect_grouping: whether to group images by their aspect ratio
:param kwargs:
|
RefCOCO+ Dataset | def __init__(self, image_set, root_path, data_path, boxes='gt', proposal_source='official',
transform=None, test_mode=False,
zip_mode=False, cache_mode=False, cache_db=False, ignore_db_cache=True,
tokenizer=None, pretrained_model_name=None,
add_image_as_a_box=False, mask_size=(14, 14),
aspect_grouping=False, **kwargs):
"""
RefCOCO+ Dataset
:param image_set: image folder name
:param root_path: root path to cache database loaded from annotation file
:param data_path: path to dataset
:param boxes: boxes to use, 'gt' or 'proposal'
:param transform: transform
:param test_mode: test mode means no labels available
:param zip_mode: reading images and metadata in zip archive
        :param cache_mode: cache whole dataset to RAM first, then __getitem__ reads them from RAM
:param ignore_db_cache: ignore previous cached database, reload it from annotation file
:param tokenizer: default is BertTokenizer from pytorch_pretrained_bert
:param add_image_as_a_box: add whole image as a box
:param mask_size: size of instance mask of each object
        :param aspect_grouping: whether to group images by their aspect ratio
:param kwargs:
"""
super(RefCOCO, self).__init__()
        assert not cache_mode, 'cache mode is currently not supported!'
categories = ['__background__', 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck',
'boat',
'trafficlight', 'firehydrant', 'stopsign', 'parkingmeter', 'bench', 'bird', 'cat', 'dog', 'horse',
'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie',
'suitcase', 'frisbee', 'skis', 'snowboard', 'sportsball', 'kite', 'baseballbat', 'baseballglove',
'skateboard', 'surfboard', 'tennisracket', 'bottle', 'wineglass', 'cup', 'fork', 'knife', 'spoon',
'bowl', 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', 'hotdog', 'pizza', 'donut',
'cake', 'chair', 'couch', 'pottedplant', 'bed', 'diningtable', 'toilet', 'tv', 'laptop', 'mouse',
'remote', 'keyboard', 'cellphone', 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book',
'clock', 'vase', 'scissors', 'teddybear', 'hairdrier', 'toothbrush']
coco_annot_files = {
"train2014": "annotations/instances_train2014.json",
"val2014": "annotations/instances_val2014.json",
"test2015": "annotations/image_info_test2015.json",
}
proposal_dets = 'refcoco+/proposal/res101_coco_minus_refer_notime_dets.json'
proposal_masks = 'refcoco+/proposal/res101_coco_minus_refer_notime_masks.json'
self.vg_proposal = ("vgbua_res101_precomputed", "trainval2014_resnet101_faster_rcnn_genome")
self.proposal_source = proposal_source
self.boxes = boxes
self.test_mode = test_mode
self.category_to_idx = {c: i for i, c in enumerate(categories)}
self.data_path = data_path
self.root_path = root_path
self.transform = transform
self.image_sets = [iset.strip() for iset in image_set.split('+')]
self.coco = COCO(annotation_file=os.path.join(data_path, coco_annot_files['train2014']))
self.refer = REFER(data_path, dataset='refcoco+', splitBy='unc')
self.refer_ids = []
for iset in self.image_sets:
self.refer_ids.extend(self.refer.getRefIds(split=iset))
self.refs = self.refer.loadRefs(ref_ids=self.refer_ids)
if 'proposal' in boxes:
with open(os.path.join(data_path, proposal_dets), 'r') as f:
proposal_list = json.load(f)
self.proposals = {}
for proposal in proposal_list:
image_id = proposal['image_id']
if image_id in self.proposals:
self.proposals[image_id].append(proposal['box'])
else:
self.proposals[image_id] = [proposal['box']]
self.zip_mode = zip_mode
self.cache_mode = cache_mode
self.cache_db = cache_db
self.ignore_db_cache = ignore_db_cache
self.aspect_grouping = aspect_grouping
self.cache_dir = os.path.join(root_path, 'cache')
self.add_image_as_a_box = add_image_as_a_box
self.mask_size = mask_size
if not os.path.exists(self.cache_dir):
makedirsExist(self.cache_dir)
self.tokenizer = tokenizer if tokenizer is not None \
else BertTokenizer.from_pretrained(
'bert-base-uncased' if pretrained_model_name is None else pretrained_model_name,
cache_dir=self.cache_dir)
if zip_mode:
self.zipreader = ZipReader()
self.database = self.load_annotations()
if self.aspect_grouping:
self.group_ids = self.group_aspect(self.database) | [
"def",
"__init__",
"(",
"self",
",",
"image_set",
",",
"root_path",
",",
"data_path",
",",
"boxes",
"=",
"'gt'",
",",
"proposal_source",
"=",
"'official'",
",",
"transform",
"=",
"None",
",",
"test_mode",
"=",
"False",
",",
"zip_mode",
"=",
"False",
",",
"cache_mode",
"=",
"False",
",",
"cache_db",
"=",
"False",
",",
"ignore_db_cache",
"=",
"True",
",",
"tokenizer",
"=",
"None",
",",
"pretrained_model_name",
"=",
"None",
",",
"add_image_as_a_box",
"=",
"False",
",",
"mask_size",
"=",
"(",
"14",
",",
"14",
")",
",",
"aspect_grouping",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"super",
"(",
"RefCOCO",
",",
"self",
")",
".",
"__init__",
"(",
")",
"assert",
"not",
"cache_mode",
",",
"'currently not support cache mode!'",
"categories",
"=",
"[",
"'__background__'",
",",
"'person'",
",",
"'bicycle'",
",",
"'car'",
",",
"'motorcycle'",
",",
"'airplane'",
",",
"'bus'",
",",
"'train'",
",",
"'truck'",
",",
"'boat'",
",",
"'trafficlight'",
",",
"'firehydrant'",
",",
"'stopsign'",
",",
"'parkingmeter'",
",",
"'bench'",
",",
"'bird'",
",",
"'cat'",
",",
"'dog'",
",",
"'horse'",
",",
"'sheep'",
",",
"'cow'",
",",
"'elephant'",
",",
"'bear'",
",",
"'zebra'",
",",
"'giraffe'",
",",
"'backpack'",
",",
"'umbrella'",
",",
"'handbag'",
",",
"'tie'",
",",
"'suitcase'",
",",
"'frisbee'",
",",
"'skis'",
",",
"'snowboard'",
",",
"'sportsball'",
",",
"'kite'",
",",
"'baseballbat'",
",",
"'baseballglove'",
",",
"'skateboard'",
",",
"'surfboard'",
",",
"'tennisracket'",
",",
"'bottle'",
",",
"'wineglass'",
",",
"'cup'",
",",
"'fork'",
",",
"'knife'",
",",
"'spoon'",
",",
"'bowl'",
",",
"'banana'",
",",
"'apple'",
",",
"'sandwich'",
",",
"'orange'",
",",
"'broccoli'",
",",
"'carrot'",
",",
"'hotdog'",
",",
"'pizza'",
",",
"'donut'",
",",
"'cake'",
",",
"'chair'",
",",
"'couch'",
",",
"'pottedplant'",
",",
"'bed'",
",",
"'diningtable'",
",",
"'toilet'",
",",
"'tv'",
",",
"'laptop'",
",",
"'mouse'",
",",
"'remote'",
",",
"'keyboard'",
",",
"'cellphone'",
",",
"'microwave'",
",",
"'oven'",
",",
"'toaster'",
",",
"'sink'",
",",
"'refrigerator'",
",",
"'book'",
",",
"'clock'",
",",
"'vase'",
",",
"'scissors'",
",",
"'teddybear'",
",",
"'hairdrier'",
",",
"'toothbrush'",
"]",
"coco_annot_files",
"=",
"{",
"\"train2014\"",
":",
"\"annotations/instances_train2014.json\"",
",",
"\"val2014\"",
":",
"\"annotations/instances_val2014.json\"",
",",
"\"test2015\"",
":",
"\"annotations/image_info_test2015.json\"",
",",
"}",
"proposal_dets",
"=",
"'refcoco+/proposal/res101_coco_minus_refer_notime_dets.json'",
"proposal_masks",
"=",
"'refcoco+/proposal/res101_coco_minus_refer_notime_masks.json'",
"self",
".",
"vg_proposal",
"=",
"(",
"\"vgbua_res101_precomputed\"",
",",
"\"trainval2014_resnet101_faster_rcnn_genome\"",
")",
"self",
".",
"proposal_source",
"=",
"proposal_source",
"self",
".",
"boxes",
"=",
"boxes",
"self",
".",
"test_mode",
"=",
"test_mode",
"self",
".",
"category_to_idx",
"=",
"{",
"c",
":",
"i",
"for",
"i",
",",
"c",
"in",
"enumerate",
"(",
"categories",
")",
"}",
"self",
".",
"data_path",
"=",
"data_path",
"self",
".",
"root_path",
"=",
"root_path",
"self",
".",
"transform",
"=",
"transform",
"self",
".",
"image_sets",
"=",
"[",
"iset",
".",
"strip",
"(",
")",
"for",
"iset",
"in",
"image_set",
".",
"split",
"(",
"'+'",
")",
"]",
"self",
".",
"coco",
"=",
"COCO",
"(",
"annotation_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"data_path",
",",
"coco_annot_files",
"[",
"'train2014'",
"]",
")",
")",
"self",
".",
"refer",
"=",
"REFER",
"(",
"data_path",
",",
"dataset",
"=",
"'refcoco+'",
",",
"splitBy",
"=",
"'unc'",
")",
"self",
".",
"refer_ids",
"=",
"[",
"]",
"for",
"iset",
"in",
"self",
".",
"image_sets",
":",
"self",
".",
"refer_ids",
".",
"extend",
"(",
"self",
".",
"refer",
".",
"getRefIds",
"(",
"split",
"=",
"iset",
")",
")",
"self",
".",
"refs",
"=",
"self",
".",
"refer",
".",
"loadRefs",
"(",
"ref_ids",
"=",
"self",
".",
"refer_ids",
")",
"if",
"'proposal'",
"in",
"boxes",
":",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"data_path",
",",
"proposal_dets",
")",
",",
"'r'",
")",
"as",
"f",
":",
"proposal_list",
"=",
"json",
".",
"load",
"(",
"f",
")",
"self",
".",
"proposals",
"=",
"{",
"}",
"for",
"proposal",
"in",
"proposal_list",
":",
"image_id",
"=",
"proposal",
"[",
"'image_id'",
"]",
"if",
"image_id",
"in",
"self",
".",
"proposals",
":",
"self",
".",
"proposals",
"[",
"image_id",
"]",
".",
"append",
"(",
"proposal",
"[",
"'box'",
"]",
")",
"else",
":",
"self",
".",
"proposals",
"[",
"image_id",
"]",
"=",
"[",
"proposal",
"[",
"'box'",
"]",
"]",
"self",
".",
"zip_mode",
"=",
"zip_mode",
"self",
".",
"cache_mode",
"=",
"cache_mode",
"self",
".",
"cache_db",
"=",
"cache_db",
"self",
".",
"ignore_db_cache",
"=",
"ignore_db_cache",
"self",
".",
"aspect_grouping",
"=",
"aspect_grouping",
"self",
".",
"cache_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"root_path",
",",
"'cache'",
")",
"self",
".",
"add_image_as_a_box",
"=",
"add_image_as_a_box",
"self",
".",
"mask_size",
"=",
"mask_size",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"cache_dir",
")",
":",
"makedirsExist",
"(",
"self",
".",
"cache_dir",
")",
"self",
".",
"tokenizer",
"=",
"tokenizer",
"if",
"tokenizer",
"is",
"not",
"None",
"else",
"BertTokenizer",
".",
"from_pretrained",
"(",
"'bert-base-uncased'",
"if",
"pretrained_model_name",
"is",
"None",
"else",
"pretrained_model_name",
",",
"cache_dir",
"=",
"self",
".",
"cache_dir",
")",
"if",
"zip_mode",
":",
"self",
".",
"zipreader",
"=",
"ZipReader",
"(",
")",
"self",
".",
"database",
"=",
"self",
".",
"load_annotations",
"(",
")",
"if",
"self",
".",
"aspect_grouping",
":",
"self",
".",
"group_ids",
"=",
"self",
".",
"group_aspect",
"(",
"self",
".",
"database",
")"
] | [
22,
4
] | [
113,
61
] | python | en | ['en', 'error', 'th'] | False |
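
The proposal-grouping loop above (building self.proposals image by image) is a common dict-of-lists idiom; collections.defaultdict expresses the same thing without the explicit membership test. A standalone, runnable sketch with made-up boxes:

from collections import defaultdict

proposal_list = [
    {"image_id": 1, "box": [0, 0, 10, 10]},
    {"image_id": 1, "box": [5, 5, 20, 20]},
    {"image_id": 2, "box": [1, 2, 3, 4]},
]

proposals = defaultdict(list)
for proposal in proposal_list:
    proposals[proposal["image_id"]].append(proposal["box"])

assert len(proposals[1]) == 2 and proposals[2] == [[1, 2, 3, 4]]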
create_influx_url | (conf: Dict) | Build URL used from config inputs and default when necessary. | Build URL used from config inputs and default when necessary. | def create_influx_url(conf: Dict) -> Dict:
"""Build URL used from config inputs and default when necessary."""
if conf[CONF_API_VERSION] == API_VERSION_2:
if CONF_SSL not in conf:
conf[CONF_SSL] = DEFAULT_SSL_V2
if CONF_HOST not in conf:
conf[CONF_HOST] = DEFAULT_HOST_V2
url = conf[CONF_HOST]
if conf[CONF_SSL]:
url = f"https://{url}"
else:
url = f"http://{url}"
if CONF_PORT in conf:
url = f"{url}:{conf[CONF_PORT]}"
if CONF_PATH in conf:
url = f"{url}{conf[CONF_PATH]}"
conf[CONF_URL] = url
return conf | [
"def",
"create_influx_url",
"(",
"conf",
":",
"Dict",
")",
"->",
"Dict",
":",
"if",
"conf",
"[",
"CONF_API_VERSION",
"]",
"==",
"API_VERSION_2",
":",
"if",
"CONF_SSL",
"not",
"in",
"conf",
":",
"conf",
"[",
"CONF_SSL",
"]",
"=",
"DEFAULT_SSL_V2",
"if",
"CONF_HOST",
"not",
"in",
"conf",
":",
"conf",
"[",
"CONF_HOST",
"]",
"=",
"DEFAULT_HOST_V2",
"url",
"=",
"conf",
"[",
"CONF_HOST",
"]",
"if",
"conf",
"[",
"CONF_SSL",
"]",
":",
"url",
"=",
"f\"https://{url}\"",
"else",
":",
"url",
"=",
"f\"http://{url}\"",
"if",
"CONF_PORT",
"in",
"conf",
":",
"url",
"=",
"f\"{url}:{conf[CONF_PORT]}\"",
"if",
"CONF_PATH",
"in",
"conf",
":",
"url",
"=",
"f\"{url}{conf[CONF_PATH]}\"",
"conf",
"[",
"CONF_URL",
"]",
"=",
"url",
"return",
"conf"
] | [
101,
0
] | [
123,
15
] | python | en | ['en', 'en', 'en'] | True |
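
A worked example of the URL assembly above, with the CONF_* constants replaced by plain string keys so the snippet runs standalone (the default host is a placeholder, not the component's DEFAULT_HOST_V2):

def build_url(conf: dict) -> str:
    # Same precedence as create_influx_url: scheme, then port, then path.
    url = conf.get("host", "example.invalid")  # stand-in for DEFAULT_HOST_V2
    url = f"https://{url}" if conf.get("ssl", True) else f"http://{url}"
    if "port" in conf:
        url = f"{url}:{conf['port']}"
    if "path" in conf:
        url = f"{url}{conf['path']}"
    return url

assert build_url({"host": "influx.local", "ssl": False, "port": 8086}) == "http://influx.local:8086"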
validate_version_specific_config | (conf: Dict) | Ensure correct config fields are provided based on API version used. | Ensure correct config fields are provided based on API version used. | def validate_version_specific_config(conf: Dict) -> Dict:
"""Ensure correct config fields are provided based on API version used."""
if conf[CONF_API_VERSION] == API_VERSION_2:
if CONF_TOKEN not in conf:
raise vol.Invalid(
f"{CONF_TOKEN} and {CONF_BUCKET} are required when {CONF_API_VERSION} is {API_VERSION_2}"
)
if CONF_USERNAME in conf:
raise vol.Invalid(
f"{CONF_USERNAME} and {CONF_PASSWORD} are only allowed when {CONF_API_VERSION} is {DEFAULT_API_VERSION}"
)
else:
if CONF_TOKEN in conf:
raise vol.Invalid(
f"{CONF_TOKEN} and {CONF_BUCKET} are only allowed when {CONF_API_VERSION} is {API_VERSION_2}"
)
return conf | [
"def",
"validate_version_specific_config",
"(",
"conf",
":",
"Dict",
")",
"->",
"Dict",
":",
"if",
"conf",
"[",
"CONF_API_VERSION",
"]",
"==",
"API_VERSION_2",
":",
"if",
"CONF_TOKEN",
"not",
"in",
"conf",
":",
"raise",
"vol",
".",
"Invalid",
"(",
"f\"{CONF_TOKEN} and {CONF_BUCKET} are required when {CONF_API_VERSION} is {API_VERSION_2}\"",
")",
"if",
"CONF_USERNAME",
"in",
"conf",
":",
"raise",
"vol",
".",
"Invalid",
"(",
"f\"{CONF_USERNAME} and {CONF_PASSWORD} are only allowed when {CONF_API_VERSION} is {DEFAULT_API_VERSION}\"",
")",
"else",
":",
"if",
"CONF_TOKEN",
"in",
"conf",
":",
"raise",
"vol",
".",
"Invalid",
"(",
"f\"{CONF_TOKEN} and {CONF_BUCKET} are only allowed when {CONF_API_VERSION} is {API_VERSION_2}\"",
")",
"return",
"conf"
] | [
126,
0
] | [
145,
15
] | python | en | ['en', 'en', 'en'] | True |
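
The rules above form a small compatibility matrix: token/bucket belong to API v2, username/password to v1. A plain-dict sketch of the same checks (stand-in keys; the real validator raises vol.Invalid rather than returning strings):

def check(conf: dict) -> str:
    if conf.get("api_version") == "2":
        if "token" not in conf:
            return "error: token/bucket required for v2"
        if "username" in conf:
            return "error: username/password are v1-only"
    elif "token" in conf:
        return "error: token/bucket are v2-only"
    return "ok"

assert check({"api_version": "2", "token": "t"}) == "ok"
assert check({"api_version": "1", "username": "u"}) == "ok"
assert check({"api_version": "1", "token": "t"}).startswith("error")
assert check({"api_version": "2"}).startswith("error")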
_generate_event_to_json | (conf: Dict) | Build event to json converter and add to config. | Build event to json converter and add to config. | def _generate_event_to_json(conf: Dict) -> Callable[[Dict], str]:
"""Build event to json converter and add to config."""
entity_filter = convert_include_exclude_filter(conf)
tags = conf.get(CONF_TAGS)
tags_attributes = conf.get(CONF_TAGS_ATTRIBUTES)
default_measurement = conf.get(CONF_DEFAULT_MEASUREMENT)
measurement_attr = conf.get(CONF_MEASUREMENT_ATTR)
override_measurement = conf.get(CONF_OVERRIDE_MEASUREMENT)
global_ignore_attributes = set(conf[CONF_IGNORE_ATTRIBUTES])
component_config = EntityValues(
conf[CONF_COMPONENT_CONFIG],
conf[CONF_COMPONENT_CONFIG_DOMAIN],
conf[CONF_COMPONENT_CONFIG_GLOB],
)
def event_to_json(event: Dict) -> str:
"""Convert event into json in format Influx expects."""
state = event.data.get(EVENT_NEW_STATE)
if (
state is None
or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE)
or not entity_filter(state.entity_id)
):
return
try:
_include_state = _include_value = False
_state_as_value = float(state.state)
_include_value = True
except ValueError:
try:
_state_as_value = float(state_helper.state_as_number(state))
_include_state = _include_value = True
except ValueError:
_include_state = True
include_uom = True
include_dc = True
entity_config = component_config.get(state.entity_id)
measurement = entity_config.get(CONF_OVERRIDE_MEASUREMENT)
if measurement in (None, ""):
if override_measurement:
measurement = override_measurement
else:
if measurement_attr == "entity_id":
measurement = state.entity_id
elif measurement_attr == "domain__device_class":
device_class = state.attributes.get("device_class")
if device_class is None:
# This entity doesn't have a device_class set, use only domain
measurement = state.domain
else:
measurement = f"{state.domain}__{device_class}"
include_dc = False
else:
measurement = state.attributes.get(measurement_attr)
if measurement in (None, ""):
if default_measurement:
measurement = default_measurement
else:
measurement = state.entity_id
else:
include_uom = measurement_attr != "unit_of_measurement"
json = {
INFLUX_CONF_MEASUREMENT: measurement,
INFLUX_CONF_TAGS: {
CONF_DOMAIN: state.domain,
CONF_ENTITY_ID: state.object_id,
},
INFLUX_CONF_TIME: event.time_fired,
INFLUX_CONF_FIELDS: {},
}
if _include_state:
json[INFLUX_CONF_FIELDS][INFLUX_CONF_STATE] = state.state
if _include_value:
json[INFLUX_CONF_FIELDS][INFLUX_CONF_VALUE] = _state_as_value
ignore_attributes = set(entity_config.get(CONF_IGNORE_ATTRIBUTES, []))
ignore_attributes.update(global_ignore_attributes)
for key, value in state.attributes.items():
if key in tags_attributes:
json[INFLUX_CONF_TAGS][key] = value
elif (
(key != CONF_UNIT_OF_MEASUREMENT or include_uom)
and (key != "device_class" or include_dc)
and key not in ignore_attributes
):
# If the key is already in fields
if key in json[INFLUX_CONF_FIELDS]:
key = f"{key}_"
                # Prevent column data errors in InfluxDB.
                # For each value we try to cast it as a float;
                # if we cannot, we store the value as a string
                # and add a "_str" suffix to the field key.
try:
json[INFLUX_CONF_FIELDS][key] = float(value)
except (ValueError, TypeError):
new_key = f"{key}_str"
new_value = str(value)
json[INFLUX_CONF_FIELDS][new_key] = new_value
if RE_DIGIT_TAIL.match(new_value):
json[INFLUX_CONF_FIELDS][key] = float(
RE_DECIMAL.sub("", new_value)
)
# Infinity and NaN are not valid floats in InfluxDB
try:
if not math.isfinite(json[INFLUX_CONF_FIELDS][key]):
del json[INFLUX_CONF_FIELDS][key]
except (KeyError, TypeError):
pass
json[INFLUX_CONF_TAGS].update(tags)
return json
return event_to_json | [
"def",
"_generate_event_to_json",
"(",
"conf",
":",
"Dict",
")",
"->",
"Callable",
"[",
"[",
"Dict",
"]",
",",
"str",
"]",
":",
"entity_filter",
"=",
"convert_include_exclude_filter",
"(",
"conf",
")",
"tags",
"=",
"conf",
".",
"get",
"(",
"CONF_TAGS",
")",
"tags_attributes",
"=",
"conf",
".",
"get",
"(",
"CONF_TAGS_ATTRIBUTES",
")",
"default_measurement",
"=",
"conf",
".",
"get",
"(",
"CONF_DEFAULT_MEASUREMENT",
")",
"measurement_attr",
"=",
"conf",
".",
"get",
"(",
"CONF_MEASUREMENT_ATTR",
")",
"override_measurement",
"=",
"conf",
".",
"get",
"(",
"CONF_OVERRIDE_MEASUREMENT",
")",
"global_ignore_attributes",
"=",
"set",
"(",
"conf",
"[",
"CONF_IGNORE_ATTRIBUTES",
"]",
")",
"component_config",
"=",
"EntityValues",
"(",
"conf",
"[",
"CONF_COMPONENT_CONFIG",
"]",
",",
"conf",
"[",
"CONF_COMPONENT_CONFIG_DOMAIN",
"]",
",",
"conf",
"[",
"CONF_COMPONENT_CONFIG_GLOB",
"]",
",",
")",
"def",
"event_to_json",
"(",
"event",
":",
"Dict",
")",
"->",
"str",
":",
"\"\"\"Convert event into json in format Influx expects.\"\"\"",
"state",
"=",
"event",
".",
"data",
".",
"get",
"(",
"EVENT_NEW_STATE",
")",
"if",
"(",
"state",
"is",
"None",
"or",
"state",
".",
"state",
"in",
"(",
"STATE_UNKNOWN",
",",
"\"\"",
",",
"STATE_UNAVAILABLE",
")",
"or",
"not",
"entity_filter",
"(",
"state",
".",
"entity_id",
")",
")",
":",
"return",
"try",
":",
"_include_state",
"=",
"_include_value",
"=",
"False",
"_state_as_value",
"=",
"float",
"(",
"state",
".",
"state",
")",
"_include_value",
"=",
"True",
"except",
"ValueError",
":",
"try",
":",
"_state_as_value",
"=",
"float",
"(",
"state_helper",
".",
"state_as_number",
"(",
"state",
")",
")",
"_include_state",
"=",
"_include_value",
"=",
"True",
"except",
"ValueError",
":",
"_include_state",
"=",
"True",
"include_uom",
"=",
"True",
"include_dc",
"=",
"True",
"entity_config",
"=",
"component_config",
".",
"get",
"(",
"state",
".",
"entity_id",
")",
"measurement",
"=",
"entity_config",
".",
"get",
"(",
"CONF_OVERRIDE_MEASUREMENT",
")",
"if",
"measurement",
"in",
"(",
"None",
",",
"\"\"",
")",
":",
"if",
"override_measurement",
":",
"measurement",
"=",
"override_measurement",
"else",
":",
"if",
"measurement_attr",
"==",
"\"entity_id\"",
":",
"measurement",
"=",
"state",
".",
"entity_id",
"elif",
"measurement_attr",
"==",
"\"domain__device_class\"",
":",
"device_class",
"=",
"state",
".",
"attributes",
".",
"get",
"(",
"\"device_class\"",
")",
"if",
"device_class",
"is",
"None",
":",
"# This entity doesn't have a device_class set, use only domain",
"measurement",
"=",
"state",
".",
"domain",
"else",
":",
"measurement",
"=",
"f\"{state.domain}__{device_class}\"",
"include_dc",
"=",
"False",
"else",
":",
"measurement",
"=",
"state",
".",
"attributes",
".",
"get",
"(",
"measurement_attr",
")",
"if",
"measurement",
"in",
"(",
"None",
",",
"\"\"",
")",
":",
"if",
"default_measurement",
":",
"measurement",
"=",
"default_measurement",
"else",
":",
"measurement",
"=",
"state",
".",
"entity_id",
"else",
":",
"include_uom",
"=",
"measurement_attr",
"!=",
"\"unit_of_measurement\"",
"json",
"=",
"{",
"INFLUX_CONF_MEASUREMENT",
":",
"measurement",
",",
"INFLUX_CONF_TAGS",
":",
"{",
"CONF_DOMAIN",
":",
"state",
".",
"domain",
",",
"CONF_ENTITY_ID",
":",
"state",
".",
"object_id",
",",
"}",
",",
"INFLUX_CONF_TIME",
":",
"event",
".",
"time_fired",
",",
"INFLUX_CONF_FIELDS",
":",
"{",
"}",
",",
"}",
"if",
"_include_state",
":",
"json",
"[",
"INFLUX_CONF_FIELDS",
"]",
"[",
"INFLUX_CONF_STATE",
"]",
"=",
"state",
".",
"state",
"if",
"_include_value",
":",
"json",
"[",
"INFLUX_CONF_FIELDS",
"]",
"[",
"INFLUX_CONF_VALUE",
"]",
"=",
"_state_as_value",
"ignore_attributes",
"=",
"set",
"(",
"entity_config",
".",
"get",
"(",
"CONF_IGNORE_ATTRIBUTES",
",",
"[",
"]",
")",
")",
"ignore_attributes",
".",
"update",
"(",
"global_ignore_attributes",
")",
"for",
"key",
",",
"value",
"in",
"state",
".",
"attributes",
".",
"items",
"(",
")",
":",
"if",
"key",
"in",
"tags_attributes",
":",
"json",
"[",
"INFLUX_CONF_TAGS",
"]",
"[",
"key",
"]",
"=",
"value",
"elif",
"(",
"(",
"key",
"!=",
"CONF_UNIT_OF_MEASUREMENT",
"or",
"include_uom",
")",
"and",
"(",
"key",
"!=",
"\"device_class\"",
"or",
"include_dc",
")",
"and",
"key",
"not",
"in",
"ignore_attributes",
")",
":",
"# If the key is already in fields",
"if",
"key",
"in",
"json",
"[",
"INFLUX_CONF_FIELDS",
"]",
":",
"key",
"=",
"f\"{key}_\"",
"# Prevent column data errors in influxDB.",
"# For each value we try to cast it as float",
"# But if we can not do it we store the value",
"# as string add \"_str\" postfix to the field key",
"try",
":",
"json",
"[",
"INFLUX_CONF_FIELDS",
"]",
"[",
"key",
"]",
"=",
"float",
"(",
"value",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"new_key",
"=",
"f\"{key}_str\"",
"new_value",
"=",
"str",
"(",
"value",
")",
"json",
"[",
"INFLUX_CONF_FIELDS",
"]",
"[",
"new_key",
"]",
"=",
"new_value",
"if",
"RE_DIGIT_TAIL",
".",
"match",
"(",
"new_value",
")",
":",
"json",
"[",
"INFLUX_CONF_FIELDS",
"]",
"[",
"key",
"]",
"=",
"float",
"(",
"RE_DECIMAL",
".",
"sub",
"(",
"\"\"",
",",
"new_value",
")",
")",
"# Infinity and NaN are not valid floats in InfluxDB",
"try",
":",
"if",
"not",
"math",
".",
"isfinite",
"(",
"json",
"[",
"INFLUX_CONF_FIELDS",
"]",
"[",
"key",
"]",
")",
":",
"del",
"json",
"[",
"INFLUX_CONF_FIELDS",
"]",
"[",
"key",
"]",
"except",
"(",
"KeyError",
",",
"TypeError",
")",
":",
"pass",
"json",
"[",
"INFLUX_CONF_TAGS",
"]",
".",
"update",
"(",
"tags",
")",
"return",
"json",
"return",
"event_to_json"
] | [
194,
0
] | [
313,
24
] | python | en | ['en', 'en', 'en'] | True |
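
For orientation, here is the shape of one point that event_to_json would emit for a numeric sensor state, assuming the INFLUX_CONF_* constants map to the conventional keys shown; the concrete values are invented. Note how the non-numeric unit attribute becomes a *_str field:

point = {
    "measurement": "sensor.living_room_temperature",
    "tags": {"domain": "sensor", "entity_id": "living_room_temperature"},
    "time": "2021-01-01T00:00:00+00:00",
    "fields": {
        "value": 21.5,                    # numeric state -> float field
        "unit_of_measurement_str": "°C",  # float() failed -> "_str" field
    },
}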
get_influx_connection | (conf, test_write=False, test_read=False) | Create the correct influx connection for the API version. | Create the correct influx connection for the API version. | def get_influx_connection(conf, test_write=False, test_read=False):
"""Create the correct influx connection for the API version."""
kwargs = {
CONF_TIMEOUT: TIMEOUT,
}
precision = conf.get(CONF_PRECISION)
if conf[CONF_API_VERSION] == API_VERSION_2:
kwargs[CONF_URL] = conf[CONF_URL]
kwargs[CONF_TOKEN] = conf[CONF_TOKEN]
kwargs[INFLUX_CONF_ORG] = conf[CONF_ORG]
bucket = conf.get(CONF_BUCKET)
influx = InfluxDBClientV2(**kwargs)
query_api = influx.query_api()
initial_write_mode = SYNCHRONOUS if test_write else ASYNCHRONOUS
write_api = influx.write_api(write_options=initial_write_mode)
def write_v2(json):
"""Write data to V2 influx."""
data = {"bucket": bucket, "record": json}
if precision is not None:
data["write_precision"] = precision
try:
write_api.write(**data)
except (urllib3.exceptions.HTTPError, OSError) as exc:
raise ConnectionError(CONNECTION_ERROR % exc) from exc
except ApiException as exc:
if exc.status == CODE_INVALID_INPUTS:
raise ValueError(WRITE_ERROR % (json, exc)) from exc
raise ConnectionError(CLIENT_ERROR_V2 % exc) from exc
def query_v2(query, _=None):
"""Query V2 influx."""
try:
return query_api.query(query)
except (urllib3.exceptions.HTTPError, OSError) as exc:
raise ConnectionError(CONNECTION_ERROR % exc) from exc
except ApiException as exc:
if exc.status == CODE_INVALID_INPUTS:
raise ValueError(QUERY_ERROR % (query, exc)) from exc
raise ConnectionError(CLIENT_ERROR_V2 % exc) from exc
def close_v2():
"""Close V2 influx client."""
influx.close()
buckets = []
if test_write:
# Try to write b"" to influx. If we can connect and creds are valid
# Then invalid inputs is returned. Anything else is a broken config
try:
write_v2(b"")
except ValueError:
pass
write_api = influx.write_api(write_options=ASYNCHRONOUS)
if test_read:
tables = query_v2(TEST_QUERY_V2)
if tables and tables[0].records:
buckets = [bucket.values["name"] for bucket in tables[0].records]
else:
buckets = []
return InfluxClient(buckets, write_v2, query_v2, close_v2)
# Else it's a V1 client
kwargs[CONF_VERIFY_SSL] = conf[CONF_VERIFY_SSL]
if CONF_DB_NAME in conf:
kwargs[CONF_DB_NAME] = conf[CONF_DB_NAME]
if CONF_USERNAME in conf:
kwargs[CONF_USERNAME] = conf[CONF_USERNAME]
if CONF_PASSWORD in conf:
kwargs[CONF_PASSWORD] = conf[CONF_PASSWORD]
if CONF_HOST in conf:
kwargs[CONF_HOST] = conf[CONF_HOST]
if CONF_PATH in conf:
kwargs[CONF_PATH] = conf[CONF_PATH]
if CONF_PORT in conf:
kwargs[CONF_PORT] = conf[CONF_PORT]
if CONF_SSL in conf:
kwargs[CONF_SSL] = conf[CONF_SSL]
influx = InfluxDBClient(**kwargs)
def write_v1(json):
"""Write data to V1 influx."""
try:
influx.write_points(json, time_precision=precision)
except (
requests.exceptions.RequestException,
exceptions.InfluxDBServerError,
OSError,
) as exc:
raise ConnectionError(CONNECTION_ERROR % exc) from exc
except exceptions.InfluxDBClientError as exc:
if exc.code == CODE_INVALID_INPUTS:
raise ValueError(WRITE_ERROR % (json, exc)) from exc
raise ConnectionError(CLIENT_ERROR_V1 % exc) from exc
def query_v1(query, database=None):
"""Query V1 influx."""
try:
return list(influx.query(query, database=database).get_points())
except (
requests.exceptions.RequestException,
exceptions.InfluxDBServerError,
OSError,
) as exc:
raise ConnectionError(CONNECTION_ERROR % exc) from exc
except exceptions.InfluxDBClientError as exc:
if exc.code == CODE_INVALID_INPUTS:
raise ValueError(QUERY_ERROR % (query, exc)) from exc
raise ConnectionError(CLIENT_ERROR_V1 % exc) from exc
def close_v1():
"""Close the V1 Influx client."""
influx.close()
databases = []
if test_write:
write_v1([])
if test_read:
databases = [db["name"] for db in query_v1(TEST_QUERY_V1)]
return InfluxClient(databases, write_v1, query_v1, close_v1) | [
"def",
"get_influx_connection",
"(",
"conf",
",",
"test_write",
"=",
"False",
",",
"test_read",
"=",
"False",
")",
":",
"kwargs",
"=",
"{",
"CONF_TIMEOUT",
":",
"TIMEOUT",
",",
"}",
"precision",
"=",
"conf",
".",
"get",
"(",
"CONF_PRECISION",
")",
"if",
"conf",
"[",
"CONF_API_VERSION",
"]",
"==",
"API_VERSION_2",
":",
"kwargs",
"[",
"CONF_URL",
"]",
"=",
"conf",
"[",
"CONF_URL",
"]",
"kwargs",
"[",
"CONF_TOKEN",
"]",
"=",
"conf",
"[",
"CONF_TOKEN",
"]",
"kwargs",
"[",
"INFLUX_CONF_ORG",
"]",
"=",
"conf",
"[",
"CONF_ORG",
"]",
"bucket",
"=",
"conf",
".",
"get",
"(",
"CONF_BUCKET",
")",
"influx",
"=",
"InfluxDBClientV2",
"(",
"*",
"*",
"kwargs",
")",
"query_api",
"=",
"influx",
".",
"query_api",
"(",
")",
"initial_write_mode",
"=",
"SYNCHRONOUS",
"if",
"test_write",
"else",
"ASYNCHRONOUS",
"write_api",
"=",
"influx",
".",
"write_api",
"(",
"write_options",
"=",
"initial_write_mode",
")",
"def",
"write_v2",
"(",
"json",
")",
":",
"\"\"\"Write data to V2 influx.\"\"\"",
"data",
"=",
"{",
"\"bucket\"",
":",
"bucket",
",",
"\"record\"",
":",
"json",
"}",
"if",
"precision",
"is",
"not",
"None",
":",
"data",
"[",
"\"write_precision\"",
"]",
"=",
"precision",
"try",
":",
"write_api",
".",
"write",
"(",
"*",
"*",
"data",
")",
"except",
"(",
"urllib3",
".",
"exceptions",
".",
"HTTPError",
",",
"OSError",
")",
"as",
"exc",
":",
"raise",
"ConnectionError",
"(",
"CONNECTION_ERROR",
"%",
"exc",
")",
"from",
"exc",
"except",
"ApiException",
"as",
"exc",
":",
"if",
"exc",
".",
"status",
"==",
"CODE_INVALID_INPUTS",
":",
"raise",
"ValueError",
"(",
"WRITE_ERROR",
"%",
"(",
"json",
",",
"exc",
")",
")",
"from",
"exc",
"raise",
"ConnectionError",
"(",
"CLIENT_ERROR_V2",
"%",
"exc",
")",
"from",
"exc",
"def",
"query_v2",
"(",
"query",
",",
"_",
"=",
"None",
")",
":",
"\"\"\"Query V2 influx.\"\"\"",
"try",
":",
"return",
"query_api",
".",
"query",
"(",
"query",
")",
"except",
"(",
"urllib3",
".",
"exceptions",
".",
"HTTPError",
",",
"OSError",
")",
"as",
"exc",
":",
"raise",
"ConnectionError",
"(",
"CONNECTION_ERROR",
"%",
"exc",
")",
"from",
"exc",
"except",
"ApiException",
"as",
"exc",
":",
"if",
"exc",
".",
"status",
"==",
"CODE_INVALID_INPUTS",
":",
"raise",
"ValueError",
"(",
"QUERY_ERROR",
"%",
"(",
"query",
",",
"exc",
")",
")",
"from",
"exc",
"raise",
"ConnectionError",
"(",
"CLIENT_ERROR_V2",
"%",
"exc",
")",
"from",
"exc",
"def",
"close_v2",
"(",
")",
":",
"\"\"\"Close V2 influx client.\"\"\"",
"influx",
".",
"close",
"(",
")",
"buckets",
"=",
"[",
"]",
"if",
"test_write",
":",
"# Try to write b\"\" to influx. If we can connect and creds are valid",
"# Then invalid inputs is returned. Anything else is a broken config",
"try",
":",
"write_v2",
"(",
"b\"\"",
")",
"except",
"ValueError",
":",
"pass",
"write_api",
"=",
"influx",
".",
"write_api",
"(",
"write_options",
"=",
"ASYNCHRONOUS",
")",
"if",
"test_read",
":",
"tables",
"=",
"query_v2",
"(",
"TEST_QUERY_V2",
")",
"if",
"tables",
"and",
"tables",
"[",
"0",
"]",
".",
"records",
":",
"buckets",
"=",
"[",
"bucket",
".",
"values",
"[",
"\"name\"",
"]",
"for",
"bucket",
"in",
"tables",
"[",
"0",
"]",
".",
"records",
"]",
"else",
":",
"buckets",
"=",
"[",
"]",
"return",
"InfluxClient",
"(",
"buckets",
",",
"write_v2",
",",
"query_v2",
",",
"close_v2",
")",
"# Else it's a V1 client",
"kwargs",
"[",
"CONF_VERIFY_SSL",
"]",
"=",
"conf",
"[",
"CONF_VERIFY_SSL",
"]",
"if",
"CONF_DB_NAME",
"in",
"conf",
":",
"kwargs",
"[",
"CONF_DB_NAME",
"]",
"=",
"conf",
"[",
"CONF_DB_NAME",
"]",
"if",
"CONF_USERNAME",
"in",
"conf",
":",
"kwargs",
"[",
"CONF_USERNAME",
"]",
"=",
"conf",
"[",
"CONF_USERNAME",
"]",
"if",
"CONF_PASSWORD",
"in",
"conf",
":",
"kwargs",
"[",
"CONF_PASSWORD",
"]",
"=",
"conf",
"[",
"CONF_PASSWORD",
"]",
"if",
"CONF_HOST",
"in",
"conf",
":",
"kwargs",
"[",
"CONF_HOST",
"]",
"=",
"conf",
"[",
"CONF_HOST",
"]",
"if",
"CONF_PATH",
"in",
"conf",
":",
"kwargs",
"[",
"CONF_PATH",
"]",
"=",
"conf",
"[",
"CONF_PATH",
"]",
"if",
"CONF_PORT",
"in",
"conf",
":",
"kwargs",
"[",
"CONF_PORT",
"]",
"=",
"conf",
"[",
"CONF_PORT",
"]",
"if",
"CONF_SSL",
"in",
"conf",
":",
"kwargs",
"[",
"CONF_SSL",
"]",
"=",
"conf",
"[",
"CONF_SSL",
"]",
"influx",
"=",
"InfluxDBClient",
"(",
"*",
"*",
"kwargs",
")",
"def",
"write_v1",
"(",
"json",
")",
":",
"\"\"\"Write data to V1 influx.\"\"\"",
"try",
":",
"influx",
".",
"write_points",
"(",
"json",
",",
"time_precision",
"=",
"precision",
")",
"except",
"(",
"requests",
".",
"exceptions",
".",
"RequestException",
",",
"exceptions",
".",
"InfluxDBServerError",
",",
"OSError",
",",
")",
"as",
"exc",
":",
"raise",
"ConnectionError",
"(",
"CONNECTION_ERROR",
"%",
"exc",
")",
"from",
"exc",
"except",
"exceptions",
".",
"InfluxDBClientError",
"as",
"exc",
":",
"if",
"exc",
".",
"code",
"==",
"CODE_INVALID_INPUTS",
":",
"raise",
"ValueError",
"(",
"WRITE_ERROR",
"%",
"(",
"json",
",",
"exc",
")",
")",
"from",
"exc",
"raise",
"ConnectionError",
"(",
"CLIENT_ERROR_V1",
"%",
"exc",
")",
"from",
"exc",
"def",
"query_v1",
"(",
"query",
",",
"database",
"=",
"None",
")",
":",
"\"\"\"Query V1 influx.\"\"\"",
"try",
":",
"return",
"list",
"(",
"influx",
".",
"query",
"(",
"query",
",",
"database",
"=",
"database",
")",
".",
"get_points",
"(",
")",
")",
"except",
"(",
"requests",
".",
"exceptions",
".",
"RequestException",
",",
"exceptions",
".",
"InfluxDBServerError",
",",
"OSError",
",",
")",
"as",
"exc",
":",
"raise",
"ConnectionError",
"(",
"CONNECTION_ERROR",
"%",
"exc",
")",
"from",
"exc",
"except",
"exceptions",
".",
"InfluxDBClientError",
"as",
"exc",
":",
"if",
"exc",
".",
"code",
"==",
"CODE_INVALID_INPUTS",
":",
"raise",
"ValueError",
"(",
"QUERY_ERROR",
"%",
"(",
"query",
",",
"exc",
")",
")",
"from",
"exc",
"raise",
"ConnectionError",
"(",
"CLIENT_ERROR_V1",
"%",
"exc",
")",
"from",
"exc",
"def",
"close_v1",
"(",
")",
":",
"\"\"\"Close the V1 Influx client.\"\"\"",
"influx",
".",
"close",
"(",
")",
"databases",
"=",
"[",
"]",
"if",
"test_write",
":",
"write_v1",
"(",
"[",
"]",
")",
"if",
"test_read",
":",
"databases",
"=",
"[",
"db",
"[",
"\"name\"",
"]",
"for",
"db",
"in",
"query_v1",
"(",
"TEST_QUERY_V1",
")",
"]",
"return",
"InfluxClient",
"(",
"databases",
",",
"write_v1",
",",
"query_v1",
",",
"close_v1",
")"
] | [
326,
0
] | [
460,
64
] | python | en | ['en', 'en', 'en'] | True |
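
The factory returns an InfluxClient holding closures, so callers never branch on the API version again. A standalone sketch of that design choice (the record fields here are illustrative, not the component's actual InfluxClient definition):

from collections import namedtuple

Client = namedtuple("Client", ["write", "query", "close"])

def make_client(version: int) -> Client:
    def write(point):
        print(f"v{version} write:", point)

    def query(q):
        return f"v{version} result for {q!r}"

    def close():
        print(f"v{version} connection closed")

    return Client(write, query, close)

client = make_client(2)
client.write({"measurement": "m", "fields": {"value": 1.0}})
print(client.query("buckets()"))
client.close()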
setup | (hass, config) | Set up the InfluxDB component. | Set up the InfluxDB component. | def setup(hass, config):
"""Set up the InfluxDB component."""
conf = config[DOMAIN]
try:
influx = get_influx_connection(conf, test_write=True)
except ConnectionError as exc:
_LOGGER.error(RETRY_MESSAGE, exc)
event_helper.call_later(hass, RETRY_INTERVAL, lambda _: setup(hass, config))
return True
event_to_json = _generate_event_to_json(conf)
max_tries = conf.get(CONF_RETRY_COUNT)
instance = hass.data[DOMAIN] = InfluxThread(hass, influx, event_to_json, max_tries)
instance.start()
def shutdown(event):
"""Shut down the thread."""
instance.queue.put(None)
instance.join()
influx.close()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)
return True | [
"def",
"setup",
"(",
"hass",
",",
"config",
")",
":",
"conf",
"=",
"config",
"[",
"DOMAIN",
"]",
"try",
":",
"influx",
"=",
"get_influx_connection",
"(",
"conf",
",",
"test_write",
"=",
"True",
")",
"except",
"ConnectionError",
"as",
"exc",
":",
"_LOGGER",
".",
"error",
"(",
"RETRY_MESSAGE",
",",
"exc",
")",
"event_helper",
".",
"call_later",
"(",
"hass",
",",
"RETRY_INTERVAL",
",",
"lambda",
"_",
":",
"setup",
"(",
"hass",
",",
"config",
")",
")",
"return",
"True",
"event_to_json",
"=",
"_generate_event_to_json",
"(",
"conf",
")",
"max_tries",
"=",
"conf",
".",
"get",
"(",
"CONF_RETRY_COUNT",
")",
"instance",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"=",
"InfluxThread",
"(",
"hass",
",",
"influx",
",",
"event_to_json",
",",
"max_tries",
")",
"instance",
".",
"start",
"(",
")",
"def",
"shutdown",
"(",
"event",
")",
":",
"\"\"\"Shut down the thread.\"\"\"",
"instance",
".",
"queue",
".",
"put",
"(",
"None",
")",
"instance",
".",
"join",
"(",
")",
"influx",
".",
"close",
"(",
")",
"hass",
".",
"bus",
".",
"listen_once",
"(",
"EVENT_HOMEASSISTANT_STOP",
",",
"shutdown",
")",
"return",
"True"
] | [
463,
0
] | [
486,
15
] | python | en | ['en', 'en', 'en'] | True |
InfluxThread.__init__ | (self, hass, influx, event_to_json, max_tries) | Initialize the listener. | Initialize the listener. | def __init__(self, hass, influx, event_to_json, max_tries):
"""Initialize the listener."""
threading.Thread.__init__(self, name=DOMAIN)
self.queue = queue.Queue()
self.influx = influx
self.event_to_json = event_to_json
self.max_tries = max_tries
self.write_errors = 0
self.shutdown = False
hass.bus.listen(EVENT_STATE_CHANGED, self._event_listener) | [
"def",
"__init__",
"(",
"self",
",",
"hass",
",",
"influx",
",",
"event_to_json",
",",
"max_tries",
")",
":",
"threading",
".",
"Thread",
".",
"__init__",
"(",
"self",
",",
"name",
"=",
"DOMAIN",
")",
"self",
".",
"queue",
"=",
"queue",
".",
"Queue",
"(",
")",
"self",
".",
"influx",
"=",
"influx",
"self",
".",
"event_to_json",
"=",
"event_to_json",
"self",
".",
"max_tries",
"=",
"max_tries",
"self",
".",
"write_errors",
"=",
"0",
"self",
".",
"shutdown",
"=",
"False",
"hass",
".",
"bus",
".",
"listen",
"(",
"EVENT_STATE_CHANGED",
",",
"self",
".",
"_event_listener",
")"
] | [
492,
4
] | [
501,
66
] | python | en | ['en', 'en', 'en'] | True |
InfluxThread._event_listener | (self, event) | Listen for new messages on the bus and queue them for Influx. | Listen for new messages on the bus and queue them for Influx. | def _event_listener(self, event):
"""Listen for new messages on the bus and queue them for Influx."""
item = (time.monotonic(), event)
self.queue.put(item) | [
"def",
"_event_listener",
"(",
"self",
",",
"event",
")",
":",
"item",
"=",
"(",
"time",
".",
"monotonic",
"(",
")",
",",
"event",
")",
"self",
".",
"queue",
".",
"put",
"(",
"item",
")"
] | [
504,
4
] | [
507,
28
] | python | en | ['en', 'en', 'en'] | True |
InfluxThread.batch_timeout | () | Return number of seconds to wait for more events. | Return number of seconds to wait for more events. | def batch_timeout():
"""Return number of seconds to wait for more events."""
return BATCH_TIMEOUT | [
"def",
"batch_timeout",
"(",
")",
":",
"return",
"BATCH_TIMEOUT"
] | [
510,
4
] | [
512,
28
] | python | en | ['en', 'en', 'en'] | True |
InfluxThread.get_events_json | (self) | Return a batch of events formatted for writing. | Return a batch of events formatted for writing. | def get_events_json(self):
"""Return a batch of events formatted for writing."""
queue_seconds = QUEUE_BACKLOG_SECONDS + self.max_tries * RETRY_DELAY
count = 0
json = []
dropped = 0
try:
while len(json) < BATCH_BUFFER_SIZE and not self.shutdown:
timeout = None if count == 0 else self.batch_timeout()
item = self.queue.get(timeout=timeout)
count += 1
if item is None:
self.shutdown = True
else:
timestamp, event = item
age = time.monotonic() - timestamp
if age < queue_seconds:
event_json = self.event_to_json(event)
if event_json:
json.append(event_json)
else:
dropped += 1
except queue.Empty:
pass
if dropped:
_LOGGER.warning(CATCHING_UP_MESSAGE, dropped)
return count, json | [
"def",
"get_events_json",
"(",
"self",
")",
":",
"queue_seconds",
"=",
"QUEUE_BACKLOG_SECONDS",
"+",
"self",
".",
"max_tries",
"*",
"RETRY_DELAY",
"count",
"=",
"0",
"json",
"=",
"[",
"]",
"dropped",
"=",
"0",
"try",
":",
"while",
"len",
"(",
"json",
")",
"<",
"BATCH_BUFFER_SIZE",
"and",
"not",
"self",
".",
"shutdown",
":",
"timeout",
"=",
"None",
"if",
"count",
"==",
"0",
"else",
"self",
".",
"batch_timeout",
"(",
")",
"item",
"=",
"self",
".",
"queue",
".",
"get",
"(",
"timeout",
"=",
"timeout",
")",
"count",
"+=",
"1",
"if",
"item",
"is",
"None",
":",
"self",
".",
"shutdown",
"=",
"True",
"else",
":",
"timestamp",
",",
"event",
"=",
"item",
"age",
"=",
"time",
".",
"monotonic",
"(",
")",
"-",
"timestamp",
"if",
"age",
"<",
"queue_seconds",
":",
"event_json",
"=",
"self",
".",
"event_to_json",
"(",
"event",
")",
"if",
"event_json",
":",
"json",
".",
"append",
"(",
"event_json",
")",
"else",
":",
"dropped",
"+=",
"1",
"except",
"queue",
".",
"Empty",
":",
"pass",
"if",
"dropped",
":",
"_LOGGER",
".",
"warning",
"(",
"CATCHING_UP_MESSAGE",
",",
"dropped",
")",
"return",
"count",
",",
"json"
] | [
514,
4
] | [
548,
26
] | python | en | ['en', 'en', 'en'] | True |
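
The batching trick above is the timeout switch: the first get blocks indefinitely, later gets use the short batch timeout, so writes are coalesced without busy-waiting. A standalone sketch of that loop:

import queue

def drain(q: "queue.Queue", batch_timeout: float = 1.0, max_items: int = 100) -> list:
    items = []
    while len(items) < max_items:
        timeout = None if not items else batch_timeout  # block only for the first item
        try:
            items.append(q.get(timeout=timeout))
        except queue.Empty:
            break
    return items

q = queue.Queue()
for i in range(3):
    q.put(i)
assert drain(q, batch_timeout=0.01) == [0, 1, 2]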
InfluxThread.write_to_influxdb | (self, json) | Write preprocessed events to influxdb, with retry. | Write preprocessed events to influxdb, with retry. | def write_to_influxdb(self, json):
"""Write preprocessed events to influxdb, with retry."""
for retry in range(self.max_tries + 1):
try:
self.influx.write(json)
if self.write_errors:
_LOGGER.error(RESUMED_MESSAGE, self.write_errors)
self.write_errors = 0
_LOGGER.debug(WROTE_MESSAGE, len(json))
break
except ValueError as err:
_LOGGER.error(err)
break
except ConnectionError as err:
if retry < self.max_tries:
time.sleep(RETRY_DELAY)
else:
if not self.write_errors:
_LOGGER.error(err)
self.write_errors += len(json) | [
"def",
"write_to_influxdb",
"(",
"self",
",",
"json",
")",
":",
"for",
"retry",
"in",
"range",
"(",
"self",
".",
"max_tries",
"+",
"1",
")",
":",
"try",
":",
"self",
".",
"influx",
".",
"write",
"(",
"json",
")",
"if",
"self",
".",
"write_errors",
":",
"_LOGGER",
".",
"error",
"(",
"RESUMED_MESSAGE",
",",
"self",
".",
"write_errors",
")",
"self",
".",
"write_errors",
"=",
"0",
"_LOGGER",
".",
"debug",
"(",
"WROTE_MESSAGE",
",",
"len",
"(",
"json",
")",
")",
"break",
"except",
"ValueError",
"as",
"err",
":",
"_LOGGER",
".",
"error",
"(",
"err",
")",
"break",
"except",
"ConnectionError",
"as",
"err",
":",
"if",
"retry",
"<",
"self",
".",
"max_tries",
":",
"time",
".",
"sleep",
"(",
"RETRY_DELAY",
")",
"else",
":",
"if",
"not",
"self",
".",
"write_errors",
":",
"_LOGGER",
".",
"error",
"(",
"err",
")",
"self",
".",
"write_errors",
"+=",
"len",
"(",
"json",
")"
] | [
550,
4
] | [
571,
50
] | python | en | ['en', 'en', 'en'] | True |
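
The retry policy above separates permanent failures (ValueError, i.e. a rejected payload) from transient ones (ConnectionError). A generic, standalone sketch of the same bounded-retry pattern:

import time

def with_retries(action, max_tries: int = 3, delay: float = 0.1):
    for attempt in range(max_tries + 1):
        try:
            return action()
        except ValueError:
            raise                  # bad payload: retrying cannot help
        except ConnectionError:
            if attempt < max_tries:
                time.sleep(delay)  # transient: wait, then try again
            else:
                raise

assert with_retries(lambda: "ok") == "ok"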
InfluxThread.run | (self) | Process incoming events. | Process incoming events. | def run(self):
"""Process incoming events."""
while not self.shutdown:
count, json = self.get_events_json()
if json:
self.write_to_influxdb(json)
for _ in range(count):
self.queue.task_done() | [
"def",
"run",
"(",
"self",
")",
":",
"while",
"not",
"self",
".",
"shutdown",
":",
"count",
",",
"json",
"=",
"self",
".",
"get_events_json",
"(",
")",
"if",
"json",
":",
"self",
".",
"write_to_influxdb",
"(",
"json",
")",
"for",
"_",
"in",
"range",
"(",
"count",
")",
":",
"self",
".",
"queue",
".",
"task_done",
"(",
")"
] | [
573,
4
] | [
580,
38
] | python | en | ['en', 'en', 'en'] | True |
InfluxThread.block_till_done | (self) | Block till all events processed. | Block till all events processed. | def block_till_done(self):
"""Block till all events processed."""
self.queue.join() | [
"def",
"block_till_done",
"(",
"self",
")",
":",
"self",
".",
"queue",
".",
"join",
"(",
")"
] | [
582,
4
] | [
584,
25
] | python | en | ['sv', 'en', 'en'] | True |
async_assert_state_equals | (
entity_id: str, state_obj: State, expected: Any, attribute: WithingsAttribute
    ) | Assert the given state matches what is expected. | Assert the given state matches what is expected. | def async_assert_state_equals(
entity_id: str, state_obj: State, expected: Any, attribute: WithingsAttribute
) -> None:
"""Assert at given state matches what is expected."""
assert state_obj, f"Expected entity {entity_id} to exist but it did not"
assert state_obj.state == str(expected), (
f"Expected {expected} but was {state_obj.state} "
f"for measure {attribute.measurement}, {entity_id}"
) | [
"def",
"async_assert_state_equals",
"(",
"entity_id",
":",
"str",
",",
"state_obj",
":",
"State",
",",
"expected",
":",
"Any",
",",
"attribute",
":",
"WithingsAttribute",
")",
"->",
"None",
":",
"assert",
"state_obj",
",",
"f\"Expected entity {entity_id} to exist but it did not\"",
"assert",
"state_obj",
".",
"state",
"==",
"str",
"(",
"expected",
")",
",",
"(",
"f\"Expected {expected} but was {state_obj.state} \"",
"f\"for measure {attribute.measurement}, {entity_id}\"",
")"
] | [
290,
0
] | [
299,
5
] | python | en | ['en', 'en', 'en'] | True |
test_sensor_default_enabled_entities | (
hass: HomeAssistant, component_factory: ComponentFactory
) | Test entities enabled by default. | Test entities enabled by default. | async def test_sensor_default_enabled_entities(
hass: HomeAssistant, component_factory: ComponentFactory
) -> None:
"""Test entities enabled by default."""
entity_registry: EntityRegistry = (
await hass.helpers.entity_registry.async_get_registry()
)
await component_factory.configure_component(profile_configs=(PERSON0,))
# Assert entities should not exist yet.
for attribute in get_platform_attributes(SENSOR_DOMAIN):
assert not await async_get_entity_id(hass, attribute, PERSON0.user_id)
# person 0
await component_factory.setup_profile(PERSON0.user_id)
# Assert entities should exist.
for attribute in get_platform_attributes(SENSOR_DOMAIN):
entity_id = await async_get_entity_id(hass, attribute, PERSON0.user_id)
assert entity_id
assert entity_registry.async_is_registered(entity_id)
resp = await component_factory.call_webhook(PERSON0.user_id, NotifyAppli.SLEEP)
assert resp.message_code == 0
resp = await component_factory.call_webhook(PERSON0.user_id, NotifyAppli.WEIGHT)
assert resp.message_code == 0
for person, measurement, expected in EXPECTED_DATA:
attribute = WITHINGS_MEASUREMENTS_MAP[measurement]
entity_id = await async_get_entity_id(hass, attribute, person.user_id)
state_obj = hass.states.get(entity_id)
if attribute.enabled_by_default:
async_assert_state_equals(entity_id, state_obj, expected, attribute)
else:
assert state_obj is None
# Unload
await component_factory.unload(PERSON0) | [
"async",
"def",
"test_sensor_default_enabled_entities",
"(",
"hass",
":",
"HomeAssistant",
",",
"component_factory",
":",
"ComponentFactory",
")",
"->",
"None",
":",
"entity_registry",
":",
"EntityRegistry",
"=",
"(",
"await",
"hass",
".",
"helpers",
".",
"entity_registry",
".",
"async_get_registry",
"(",
")",
")",
"await",
"component_factory",
".",
"configure_component",
"(",
"profile_configs",
"=",
"(",
"PERSON0",
",",
")",
")",
"# Assert entities should not exist yet.",
"for",
"attribute",
"in",
"get_platform_attributes",
"(",
"SENSOR_DOMAIN",
")",
":",
"assert",
"not",
"await",
"async_get_entity_id",
"(",
"hass",
",",
"attribute",
",",
"PERSON0",
".",
"user_id",
")",
"# person 0",
"await",
"component_factory",
".",
"setup_profile",
"(",
"PERSON0",
".",
"user_id",
")",
"# Assert entities should exist.",
"for",
"attribute",
"in",
"get_platform_attributes",
"(",
"SENSOR_DOMAIN",
")",
":",
"entity_id",
"=",
"await",
"async_get_entity_id",
"(",
"hass",
",",
"attribute",
",",
"PERSON0",
".",
"user_id",
")",
"assert",
"entity_id",
"assert",
"entity_registry",
".",
"async_is_registered",
"(",
"entity_id",
")",
"resp",
"=",
"await",
"component_factory",
".",
"call_webhook",
"(",
"PERSON0",
".",
"user_id",
",",
"NotifyAppli",
".",
"SLEEP",
")",
"assert",
"resp",
".",
"message_code",
"==",
"0",
"resp",
"=",
"await",
"component_factory",
".",
"call_webhook",
"(",
"PERSON0",
".",
"user_id",
",",
"NotifyAppli",
".",
"WEIGHT",
")",
"assert",
"resp",
".",
"message_code",
"==",
"0",
"for",
"person",
",",
"measurement",
",",
"expected",
"in",
"EXPECTED_DATA",
":",
"attribute",
"=",
"WITHINGS_MEASUREMENTS_MAP",
"[",
"measurement",
"]",
"entity_id",
"=",
"await",
"async_get_entity_id",
"(",
"hass",
",",
"attribute",
",",
"person",
".",
"user_id",
")",
"state_obj",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"if",
"attribute",
".",
"enabled_by_default",
":",
"async_assert_state_equals",
"(",
"entity_id",
",",
"state_obj",
",",
"expected",
",",
"attribute",
")",
"else",
":",
"assert",
"state_obj",
"is",
"None",
"# Unload",
"await",
"component_factory",
".",
"unload",
"(",
"PERSON0",
")"
] | [
302,
0
] | [
342,
43
] | python | en | ['en', 'en', 'en'] | True |
test_all_entities | (
hass: HomeAssistant, component_factory: ComponentFactory
) | Test all entities. | Test all entities. | async def test_all_entities(
    hass: HomeAssistant, component_factory: ComponentFactory
) -> None:
    """Test all entities."""
    entity_registry: EntityRegistry = (
        await hass.helpers.entity_registry.async_get_registry()
    )
    with patch(
        "homeassistant.components.withings.sensor.BaseWithingsSensor.entity_registry_enabled_default"
    ) as enabled_by_default_mock:
        enabled_by_default_mock.return_value = True
        await component_factory.configure_component(profile_configs=(PERSON0,))
        # Assert entities should not exist yet.
        for attribute in get_platform_attributes(SENSOR_DOMAIN):
            assert not await async_get_entity_id(hass, attribute, PERSON0.user_id)
        # person 0
        await component_factory.setup_profile(PERSON0.user_id)
        # Assert entities should exist.
        for attribute in get_platform_attributes(SENSOR_DOMAIN):
            entity_id = await async_get_entity_id(hass, attribute, PERSON0.user_id)
            assert entity_id
            assert entity_registry.async_is_registered(entity_id)
        resp = await component_factory.call_webhook(PERSON0.user_id, NotifyAppli.SLEEP)
        assert resp.message_code == 0
        resp = await component_factory.call_webhook(PERSON0.user_id, NotifyAppli.WEIGHT)
        assert resp.message_code == 0
        for person, measurement, expected in EXPECTED_DATA:
            attribute = WITHINGS_MEASUREMENTS_MAP[measurement]
            entity_id = await async_get_entity_id(hass, attribute, person.user_id)
            state_obj = hass.states.get(entity_id)
            async_assert_state_equals(entity_id, state_obj, expected, attribute)
    # Unload
    await component_factory.unload(PERSON0) | [
"async",
"def",
"test_all_entities",
"(",
"hass",
":",
"HomeAssistant",
",",
"component_factory",
":",
"ComponentFactory",
")",
"->",
"None",
":",
"entity_registry",
":",
"EntityRegistry",
"=",
"(",
"await",
"hass",
".",
"helpers",
".",
"entity_registry",
".",
"async_get_registry",
"(",
")",
")",
"with",
"patch",
"(",
"\"homeassistant.components.withings.sensor.BaseWithingsSensor.entity_registry_enabled_default\"",
")",
"as",
"enabled_by_default_mock",
":",
"enabled_by_default_mock",
".",
"return_value",
"=",
"True",
"await",
"component_factory",
".",
"configure_component",
"(",
"profile_configs",
"=",
"(",
"PERSON0",
",",
")",
")",
"# Assert entities should not exist yet.",
"for",
"attribute",
"in",
"get_platform_attributes",
"(",
"SENSOR_DOMAIN",
")",
":",
"assert",
"not",
"await",
"async_get_entity_id",
"(",
"hass",
",",
"attribute",
",",
"PERSON0",
".",
"user_id",
")",
"# person 0",
"await",
"component_factory",
".",
"setup_profile",
"(",
"PERSON0",
".",
"user_id",
")",
"# Assert entities should exist.",
"for",
"attribute",
"in",
"get_platform_attributes",
"(",
"SENSOR_DOMAIN",
")",
":",
"entity_id",
"=",
"await",
"async_get_entity_id",
"(",
"hass",
",",
"attribute",
",",
"PERSON0",
".",
"user_id",
")",
"assert",
"entity_id",
"assert",
"entity_registry",
".",
"async_is_registered",
"(",
"entity_id",
")",
"resp",
"=",
"await",
"component_factory",
".",
"call_webhook",
"(",
"PERSON0",
".",
"user_id",
",",
"NotifyAppli",
".",
"SLEEP",
")",
"assert",
"resp",
".",
"message_code",
"==",
"0",
"resp",
"=",
"await",
"component_factory",
".",
"call_webhook",
"(",
"PERSON0",
".",
"user_id",
",",
"NotifyAppli",
".",
"WEIGHT",
")",
"assert",
"resp",
".",
"message_code",
"==",
"0",
"for",
"person",
",",
"measurement",
",",
"expected",
"in",
"EXPECTED_DATA",
":",
"attribute",
"=",
"WITHINGS_MEASUREMENTS_MAP",
"[",
"measurement",
"]",
"entity_id",
"=",
"await",
"async_get_entity_id",
"(",
"hass",
",",
"attribute",
",",
"person",
".",
"user_id",
")",
"state_obj",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"async_assert_state_equals",
"(",
"entity_id",
",",
"state_obj",
",",
"expected",
",",
"attribute",
")",
"# Unload",
"await",
"component_factory",
".",
"unload",
"(",
"PERSON0",
")"
] | [
345,
0
] | [
387,
43
] | python | en | ['sv', 'en', 'en'] | True |
test_hls_stream | (hass, hass_client) |
Test hls stream.
Purposefully not mocking anything here to test full
integration with the stream component.
|
Test hls stream. | async def test_hls_stream(hass, hass_client):
"""
Test hls stream.
Purposefully not mocking anything here to test full
integration with the stream component.
"""
await async_setup_component(hass, "stream", {"stream": {}})
# Setup demo HLS track
source = generate_h264_video()
stream = preload_stream(hass, source)
stream.add_provider("hls")
# Request stream
url = request_stream(hass, source)
http_client = await hass_client()
# Fetch playlist
parsed_url = urlparse(url)
playlist_response = await http_client.get(parsed_url.path)
assert playlist_response.status == 200
# Fetch init
playlist = await playlist_response.text()
playlist_url = "/".join(parsed_url.path.split("/")[:-1])
init_url = playlist_url + "/init.mp4"
init_response = await http_client.get(init_url)
assert init_response.status == 200
# Fetch segment
playlist = await playlist_response.text()
playlist_url = "/".join(parsed_url.path.split("/")[:-1])
segment_url = playlist_url + playlist.splitlines()[-1][1:]
segment_response = await http_client.get(segment_url)
assert segment_response.status == 200
# Stop stream, if it hasn't quit already
stream.stop()
# Ensure playlist not accessible after stream ends
fail_response = await http_client.get(parsed_url.path)
assert fail_response.status == HTTP_NOT_FOUND | [
"async",
"def",
"test_hls_stream",
"(",
"hass",
",",
"hass_client",
")",
":",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"stream\"",
",",
"{",
"\"stream\"",
":",
"{",
"}",
"}",
")",
"# Setup demo HLS track",
"source",
"=",
"generate_h264_video",
"(",
")",
"stream",
"=",
"preload_stream",
"(",
"hass",
",",
"source",
")",
"stream",
".",
"add_provider",
"(",
"\"hls\"",
")",
"# Request stream",
"url",
"=",
"request_stream",
"(",
"hass",
",",
"source",
")",
"http_client",
"=",
"await",
"hass_client",
"(",
")",
"# Fetch playlist",
"parsed_url",
"=",
"urlparse",
"(",
"url",
")",
"playlist_response",
"=",
"await",
"http_client",
".",
"get",
"(",
"parsed_url",
".",
"path",
")",
"assert",
"playlist_response",
".",
"status",
"==",
"200",
"# Fetch init",
"playlist",
"=",
"await",
"playlist_response",
".",
"text",
"(",
")",
"playlist_url",
"=",
"\"/\"",
".",
"join",
"(",
"parsed_url",
".",
"path",
".",
"split",
"(",
"\"/\"",
")",
"[",
":",
"-",
"1",
"]",
")",
"init_url",
"=",
"playlist_url",
"+",
"\"/init.mp4\"",
"init_response",
"=",
"await",
"http_client",
".",
"get",
"(",
"init_url",
")",
"assert",
"init_response",
".",
"status",
"==",
"200",
"# Fetch segment",
"playlist",
"=",
"await",
"playlist_response",
".",
"text",
"(",
")",
"playlist_url",
"=",
"\"/\"",
".",
"join",
"(",
"parsed_url",
".",
"path",
".",
"split",
"(",
"\"/\"",
")",
"[",
":",
"-",
"1",
"]",
")",
"segment_url",
"=",
"playlist_url",
"+",
"playlist",
".",
"splitlines",
"(",
")",
"[",
"-",
"1",
"]",
"[",
"1",
":",
"]",
"segment_response",
"=",
"await",
"http_client",
".",
"get",
"(",
"segment_url",
")",
"assert",
"segment_response",
".",
"status",
"==",
"200",
"# Stop stream, if it hasn't quit already",
"stream",
".",
"stop",
"(",
")",
"# Ensure playlist not accessible after stream ends",
"fail_response",
"=",
"await",
"http_client",
".",
"get",
"(",
"parsed_url",
".",
"path",
")",
"assert",
"fail_response",
".",
"status",
"==",
"HTTP_NOT_FOUND"
] | [
18,
0
] | [
61,
49
] | python | en | ['en', 'error', 'th'] | False |
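The segment fetch in test_hls_stream hinges on two string manipulations: stripping the playlist filename from the request path, and taking the playlist's last line minus its leading dot as a relative segment URL. A runnable sketch with a hand-written playlist (the URL and segment names below are made up, not real output of the stream component):

from urllib.parse import urlparse

playlist = "\n".join([
    "#EXTM3U",
    "#EXT-X-VERSION:7",
    '#EXT-X-MAP:URI="init.mp4"',
    "#EXTINF:4.0000,",
    "./segment_1.m4s",
])
parsed_url = urlparse("http://127.0.0.1:8123/api/hls/some_token/playlist.m3u8")
playlist_url = "/".join(parsed_url.path.split("/")[:-1])    # drop the filename
segment_url = playlist_url + playlist.splitlines()[-1][1:]  # drop the leading "."
assert segment_url == "/api/hls/some_token/segment_1.m4s"
print(segment_url)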
test_stream_timeout | (hass, hass_client) | Test hls stream timeout. | Test hls stream timeout. | async def test_stream_timeout(hass, hass_client):
"""Test hls stream timeout."""
await async_setup_component(hass, "stream", {"stream": {}})
# Setup demo HLS track
source = generate_h264_video()
stream = preload_stream(hass, source)
stream.add_provider("hls")
# Request stream
url = request_stream(hass, source)
http_client = await hass_client()
# Fetch playlist
parsed_url = urlparse(url)
playlist_response = await http_client.get(parsed_url.path)
assert playlist_response.status == 200
# Wait a minute
future = dt_util.utcnow() + timedelta(minutes=1)
async_fire_time_changed(hass, future)
# Fetch again to reset timer
playlist_response = await http_client.get(parsed_url.path)
assert playlist_response.status == 200
# Wait 5 minutes
future = dt_util.utcnow() + timedelta(minutes=5)
async_fire_time_changed(hass, future)
# Ensure playlist not accessible
fail_response = await http_client.get(parsed_url.path)
assert fail_response.status == HTTP_NOT_FOUND | [
"async",
"def",
"test_stream_timeout",
"(",
"hass",
",",
"hass_client",
")",
":",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"stream\"",
",",
"{",
"\"stream\"",
":",
"{",
"}",
"}",
")",
"# Setup demo HLS track",
"source",
"=",
"generate_h264_video",
"(",
")",
"stream",
"=",
"preload_stream",
"(",
"hass",
",",
"source",
")",
"stream",
".",
"add_provider",
"(",
"\"hls\"",
")",
"# Request stream",
"url",
"=",
"request_stream",
"(",
"hass",
",",
"source",
")",
"http_client",
"=",
"await",
"hass_client",
"(",
")",
"# Fetch playlist",
"parsed_url",
"=",
"urlparse",
"(",
"url",
")",
"playlist_response",
"=",
"await",
"http_client",
".",
"get",
"(",
"parsed_url",
".",
"path",
")",
"assert",
"playlist_response",
".",
"status",
"==",
"200",
"# Wait a minute",
"future",
"=",
"dt_util",
".",
"utcnow",
"(",
")",
"+",
"timedelta",
"(",
"minutes",
"=",
"1",
")",
"async_fire_time_changed",
"(",
"hass",
",",
"future",
")",
"# Fetch again to reset timer",
"playlist_response",
"=",
"await",
"http_client",
".",
"get",
"(",
"parsed_url",
".",
"path",
")",
"assert",
"playlist_response",
".",
"status",
"==",
"200",
"# Wait 5 minutes",
"future",
"=",
"dt_util",
".",
"utcnow",
"(",
")",
"+",
"timedelta",
"(",
"minutes",
"=",
"5",
")",
"async_fire_time_changed",
"(",
"hass",
",",
"future",
")",
"# Ensure playlist not accessible",
"fail_response",
"=",
"await",
"http_client",
".",
"get",
"(",
"parsed_url",
".",
"path",
")",
"assert",
"fail_response",
".",
"status",
"==",
"HTTP_NOT_FOUND"
] | [
65,
0
] | [
98,
49
] | python | en | ['en', 'en', 'en'] | True |
test_stream_ended | (hass) | Test hls stream packets ended. | Test hls stream packets ended. | async def test_stream_ended(hass):
"""Test hls stream packets ended."""
await async_setup_component(hass, "stream", {"stream": {}})
# Setup demo HLS track
source = generate_h264_video()
stream = preload_stream(hass, source)
track = stream.add_provider("hls")
# Request stream
request_stream(hass, source)
# Run it dead
while True:
segment = await track.recv()
if segment is None:
break
segments = segment.sequence
assert segments > 1
assert not track.get_segment()
# Stop stream, if it hasn't quit already
stream.stop() | [
"async",
"def",
"test_stream_ended",
"(",
"hass",
")",
":",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"stream\"",
",",
"{",
"\"stream\"",
":",
"{",
"}",
"}",
")",
"# Setup demo HLS track",
"source",
"=",
"generate_h264_video",
"(",
")",
"stream",
"=",
"preload_stream",
"(",
"hass",
",",
"source",
")",
"track",
"=",
"stream",
".",
"add_provider",
"(",
"\"hls\"",
")",
"# Request stream",
"request_stream",
"(",
"hass",
",",
"source",
")",
"# Run it dead",
"while",
"True",
":",
"segment",
"=",
"await",
"track",
".",
"recv",
"(",
")",
"if",
"segment",
"is",
"None",
":",
"break",
"segments",
"=",
"segment",
".",
"sequence",
"assert",
"segments",
">",
"1",
"assert",
"not",
"track",
".",
"get_segment",
"(",
")",
"# Stop stream, if it hasn't quit already",
"stream",
".",
"stop",
"(",
")"
] | [
102,
0
] | [
125,
17
] | python | en | ['en', 'en', 'en'] | True |
test_stream_keepalive | (hass) | Test hls stream retries the stream when keepalive=True. | Test hls stream retries the stream when keepalive=True. | async def test_stream_keepalive(hass):
"""Test hls stream retries the stream when keepalive=True."""
await async_setup_component(hass, "stream", {"stream": {}})
# Setup demo HLS track
source = "test_stream_keepalive_source"
stream = preload_stream(hass, source)
track = stream.add_provider("hls")
track.num_segments = 2
cur_time = 0
def time_side_effect():
nonlocal cur_time
if cur_time >= 80:
stream.keepalive = False # Thread should exit and be joinable.
cur_time += 40
return cur_time
with patch("av.open") as av_open, patch(
"homeassistant.components.stream.worker.time"
) as mock_time, patch(
"homeassistant.components.stream.worker.STREAM_RESTART_INCREMENT", 0
):
av_open.side_effect = av.error.InvalidDataError(-2, "error")
mock_time.time.side_effect = time_side_effect
# Request stream
request_stream(hass, source, keepalive=True)
stream._thread.join()
stream._thread = None
assert av_open.call_count == 2
# Stop stream, if it hasn't quit already
stream.stop() | [
"async",
"def",
"test_stream_keepalive",
"(",
"hass",
")",
":",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"stream\"",
",",
"{",
"\"stream\"",
":",
"{",
"}",
"}",
")",
"# Setup demo HLS track",
"source",
"=",
"\"test_stream_keepalive_source\"",
"stream",
"=",
"preload_stream",
"(",
"hass",
",",
"source",
")",
"track",
"=",
"stream",
".",
"add_provider",
"(",
"\"hls\"",
")",
"track",
".",
"num_segments",
"=",
"2",
"cur_time",
"=",
"0",
"def",
"time_side_effect",
"(",
")",
":",
"nonlocal",
"cur_time",
"if",
"cur_time",
">=",
"80",
":",
"stream",
".",
"keepalive",
"=",
"False",
"# Thread should exit and be joinable.",
"cur_time",
"+=",
"40",
"return",
"cur_time",
"with",
"patch",
"(",
"\"av.open\"",
")",
"as",
"av_open",
",",
"patch",
"(",
"\"homeassistant.components.stream.worker.time\"",
")",
"as",
"mock_time",
",",
"patch",
"(",
"\"homeassistant.components.stream.worker.STREAM_RESTART_INCREMENT\"",
",",
"0",
")",
":",
"av_open",
".",
"side_effect",
"=",
"av",
".",
"error",
".",
"InvalidDataError",
"(",
"-",
"2",
",",
"\"error\"",
")",
"mock_time",
".",
"time",
".",
"side_effect",
"=",
"time_side_effect",
"# Request stream",
"request_stream",
"(",
"hass",
",",
"source",
",",
"keepalive",
"=",
"True",
")",
"stream",
".",
"_thread",
".",
"join",
"(",
")",
"stream",
".",
"_thread",
"=",
"None",
"assert",
"av_open",
".",
"call_count",
"==",
"2",
"# Stop stream, if it hasn't quit already",
"stream",
".",
"stop",
"(",
")"
] | [
128,
0
] | [
161,
17
] | python | en | ['en', 'de', 'en'] | True |
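test_stream_keepalive never sleeps: it fakes the clock with a side_effect function so each poll of time.time() jumps 40 seconds. The same trick in isolation (a standalone sketch, no Home Assistant required):

import time
from unittest.mock import patch

cur_time = 0

def time_side_effect():
    # Advance the fake clock by 40 "seconds" per call.
    global cur_time
    cur_time += 40
    return cur_time

with patch("time.time", side_effect=time_side_effect):
    assert time.time() == 40
    assert time.time() == 80  # a retry loop polling the clock sees time pass instantly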
get_log_path | (experiment_id) | generate stdout and stderr log path | generate stdout and stderr log path | def get_log_path(experiment_id):
    '''generate stdout and stderr log path'''
    os.makedirs(os.path.join(NNI_HOME_DIR, experiment_id, 'log'), exist_ok=True)
    stdout_full_path = os.path.join(NNI_HOME_DIR, experiment_id, 'log', 'nnictl_stdout.log')
    stderr_full_path = os.path.join(NNI_HOME_DIR, experiment_id, 'log', 'nnictl_stderr.log')
    return stdout_full_path, stderr_full_path | [
"def",
"get_log_path",
"(",
"experiment_id",
")",
":",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"join",
"(",
"NNI_HOME_DIR",
",",
"experiment_id",
",",
"'log'",
")",
",",
"exist_ok",
"=",
"True",
")",
"stdout_full_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"NNI_HOME_DIR",
",",
"experiment_id",
",",
"'log'",
",",
"'nnictl_stdout.log'",
")",
"stderr_full_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"NNI_HOME_DIR",
",",
"experiment_id",
",",
"'log'",
",",
"'nnictl_stderr.log'",
")",
"return",
"stdout_full_path",
",",
"stderr_full_path"
] | [
29,
0
] | [
34,
45
] | python | en | ['en', 'en', 'en'] | True |
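Because of exist_ok=True the directory creation in get_log_path is idempotent, so repeated calls are safe. A self-contained rerun of the same logic with NNI_HOME_DIR swapped for a temporary directory (the experiment id is a placeholder, not a real one):

import os
import tempfile

NNI_HOME_DIR = tempfile.mkdtemp()  # stand-in for the real NNI home directory

def get_log_path(experiment_id):
    os.makedirs(os.path.join(NNI_HOME_DIR, experiment_id, 'log'), exist_ok=True)
    return (os.path.join(NNI_HOME_DIR, experiment_id, 'log', 'nnictl_stdout.log'),
            os.path.join(NNI_HOME_DIR, experiment_id, 'log', 'nnictl_stderr.log'))

stdout_path, stderr_path = get_log_path('GcfikwzP')  # hypothetical experiment id
print(stdout_path)  # .../GcfikwzP/log/nnictl_stdout.log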
print_log_content | (config_file_name) | print log information | print log information | def print_log_content(config_file_name):
    '''print log information'''
    stdout_full_path, stderr_full_path = get_log_path(config_file_name)
    print_normal(' Stdout:')
    print(check_output_command(stdout_full_path))
    print('\n\n')
    print_normal(' Stderr:')
    print(check_output_command(stderr_full_path)) | [
"def",
"print_log_content",
"(",
"config_file_name",
")",
":",
"stdout_full_path",
",",
"stderr_full_path",
"=",
"get_log_path",
"(",
"config_file_name",
")",
"print_normal",
"(",
"' Stdout:'",
")",
"print",
"(",
"check_output_command",
"(",
"stdout_full_path",
")",
")",
"print",
"(",
"'\\n\\n'",
")",
"print_normal",
"(",
"' Stderr:'",
")",
"print",
"(",
"check_output_command",
"(",
"stderr_full_path",
")",
")"
] | [
36,
0
] | [
43,
49
] | python | en | ['fr', 'jv', 'en'] | False |
start_rest_server | (port, platform, mode, experiment_id, foreground=False, log_dir=None, log_level=None, url_prefix=None) | Run nni manager process | Run nni manager process | def start_rest_server(port, platform, mode, experiment_id, foreground=False, log_dir=None, log_level=None, url_prefix=None):
    '''Run nni manager process'''
    if detect_port(port):
        print_error('Port %s is used by another process, please reset the port!\n' \
                    'You could use \'nnictl create --help\' to get help information' % port)
        exit(1)
    if (platform not in ['local', 'aml']) and detect_port(int(port) + 1):
        print_error('%s mode needs an additional adjacent port %d, and the port %d is used by another process!\n' \
                    'You could set another port to start experiment!\n' \
                    'You could use \'nnictl create --help\' to get help information' % (platform, (int(port) + 1), (int(port) + 1)))
        exit(1)
    print_normal('Starting restful server...')
    entry_dir = nni_node.__path__[0]
    if (not entry_dir) or (not os.path.exists(entry_dir)):
        print_error('Fail to find nni under python library')
        exit(1)
    entry_file = os.path.join(entry_dir, 'main.js')
    if sys.platform == 'win32':
        node_command = os.path.join(entry_dir, 'node.exe')
    else:
        node_command = os.path.join(entry_dir, 'node')
    cmds = [node_command, '--max-old-space-size=4096', entry_file, '--port', str(port), '--mode', platform, \
            '--experiment_id', experiment_id]
    if mode == 'view':
        cmds += ['--start_mode', 'resume']
        cmds += ['--readonly', 'true']
    else:
        cmds += ['--start_mode', mode]
    if log_dir is not None:
        cmds += ['--log_dir', log_dir]
    if log_level is not None:
        cmds += ['--log_level', log_level]
    if foreground:
        cmds += ['--foreground', 'true']
    if url_prefix:
        _validate_prefix_path(url_prefix)
        set_prefix_url(url_prefix)
        cmds += ['--url_prefix', url_prefix]
    stdout_full_path, stderr_full_path = get_log_path(experiment_id)
    with open(stdout_full_path, 'a+') as stdout_file, open(stderr_full_path, 'a+') as stderr_file:
        start_time = time.time()
        time_now = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(start_time))
        #add time information in the header of log files
        log_header = LOG_HEADER % str(time_now)
        stdout_file.write(log_header)
        stderr_file.write(log_header)
        if sys.platform == 'win32':
            from subprocess import CREATE_NEW_PROCESS_GROUP
            if foreground:
                process = Popen(cmds, cwd=entry_dir, stdout=PIPE, stderr=STDOUT, creationflags=CREATE_NEW_PROCESS_GROUP)
            else:
                process = Popen(cmds, cwd=entry_dir, stdout=stdout_file, stderr=stderr_file, creationflags=CREATE_NEW_PROCESS_GROUP)
        else:
            if foreground:
                process = Popen(cmds, cwd=entry_dir, stdout=PIPE, stderr=PIPE)
            else:
                process = Popen(cmds, cwd=entry_dir, stdout=stdout_file, stderr=stderr_file)
    return process, int(start_time * 1000) | [
"def",
"start_rest_server",
"(",
"port",
",",
"platform",
",",
"mode",
",",
"experiment_id",
",",
"foreground",
"=",
"False",
",",
"log_dir",
"=",
"None",
",",
"log_level",
"=",
"None",
",",
"url_prefix",
"=",
"None",
")",
":",
"if",
"detect_port",
"(",
"port",
")",
":",
"print_error",
"(",
"'Port %s is used by another process, please reset the port!\\n'",
"'You could use \\'nnictl create --help\\' to get help information'",
"%",
"port",
")",
"exit",
"(",
"1",
")",
"if",
"(",
"platform",
"not",
"in",
"[",
"'local'",
",",
"'aml'",
"]",
")",
"and",
"detect_port",
"(",
"int",
"(",
"port",
")",
"+",
"1",
")",
":",
"print_error",
"(",
"'%s mode need an additional adjacent port %d, and the port %d is used by another process!\\n'",
"'You could set another port to start experiment!\\n'",
"'You could use \\'nnictl create --help\\' to get help information'",
"%",
"(",
"platform",
",",
"(",
"int",
"(",
"port",
")",
"+",
"1",
")",
",",
"(",
"int",
"(",
"port",
")",
"+",
"1",
")",
")",
")",
"exit",
"(",
"1",
")",
"print_normal",
"(",
"'Starting restful server...'",
")",
"entry_dir",
"=",
"nni_node",
".",
"__path__",
"[",
"0",
"]",
"if",
"(",
"not",
"entry_dir",
")",
"or",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"entry_dir",
")",
")",
":",
"print_error",
"(",
"'Fail to find nni under python library'",
")",
"exit",
"(",
"1",
")",
"entry_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"entry_dir",
",",
"'main.js'",
")",
"if",
"sys",
".",
"platform",
"==",
"'win32'",
":",
"node_command",
"=",
"os",
".",
"path",
".",
"join",
"(",
"entry_dir",
",",
"'node.exe'",
")",
"else",
":",
"node_command",
"=",
"os",
".",
"path",
".",
"join",
"(",
"entry_dir",
",",
"'node'",
")",
"cmds",
"=",
"[",
"node_command",
",",
"'--max-old-space-size=4096'",
",",
"entry_file",
",",
"'--port'",
",",
"str",
"(",
"port",
")",
",",
"'--mode'",
",",
"platform",
",",
"'--experiment_id'",
",",
"experiment_id",
"]",
"if",
"mode",
"==",
"'view'",
":",
"cmds",
"+=",
"[",
"'--start_mode'",
",",
"'resume'",
"]",
"cmds",
"+=",
"[",
"'--readonly'",
",",
"'true'",
"]",
"else",
":",
"cmds",
"+=",
"[",
"'--start_mode'",
",",
"mode",
"]",
"if",
"log_dir",
"is",
"not",
"None",
":",
"cmds",
"+=",
"[",
"'--log_dir'",
",",
"log_dir",
"]",
"if",
"log_level",
"is",
"not",
"None",
":",
"cmds",
"+=",
"[",
"'--log_level'",
",",
"log_level",
"]",
"if",
"foreground",
":",
"cmds",
"+=",
"[",
"'--foreground'",
",",
"'true'",
"]",
"if",
"url_prefix",
":",
"_validate_prefix_path",
"(",
"url_prefix",
")",
"set_prefix_url",
"(",
"url_prefix",
")",
"cmds",
"+=",
"[",
"'--url_prefix'",
",",
"url_prefix",
"]",
"stdout_full_path",
",",
"stderr_full_path",
"=",
"get_log_path",
"(",
"experiment_id",
")",
"with",
"open",
"(",
"stdout_full_path",
",",
"'a+'",
")",
"as",
"stdout_file",
",",
"open",
"(",
"stderr_full_path",
",",
"'a+'",
")",
"as",
"stderr_file",
":",
"start_time",
"=",
"time",
".",
"time",
"(",
")",
"time_now",
"=",
"time",
".",
"strftime",
"(",
"'%Y-%m-%d %H:%M:%S'",
",",
"time",
".",
"localtime",
"(",
"start_time",
")",
")",
"#add time information in the header of log files",
"log_header",
"=",
"LOG_HEADER",
"%",
"str",
"(",
"time_now",
")",
"stdout_file",
".",
"write",
"(",
"log_header",
")",
"stderr_file",
".",
"write",
"(",
"log_header",
")",
"if",
"sys",
".",
"platform",
"==",
"'win32'",
":",
"from",
"subprocess",
"import",
"CREATE_NEW_PROCESS_GROUP",
"if",
"foreground",
":",
"process",
"=",
"Popen",
"(",
"cmds",
",",
"cwd",
"=",
"entry_dir",
",",
"stdout",
"=",
"PIPE",
",",
"stderr",
"=",
"STDOUT",
",",
"creationflags",
"=",
"CREATE_NEW_PROCESS_GROUP",
")",
"else",
":",
"process",
"=",
"Popen",
"(",
"cmds",
",",
"cwd",
"=",
"entry_dir",
",",
"stdout",
"=",
"stdout_file",
",",
"stderr",
"=",
"stderr_file",
",",
"creationflags",
"=",
"CREATE_NEW_PROCESS_GROUP",
")",
"else",
":",
"if",
"foreground",
":",
"process",
"=",
"Popen",
"(",
"cmds",
",",
"cwd",
"=",
"entry_dir",
",",
"stdout",
"=",
"PIPE",
",",
"stderr",
"=",
"PIPE",
")",
"else",
":",
"process",
"=",
"Popen",
"(",
"cmds",
",",
"cwd",
"=",
"entry_dir",
",",
"stdout",
"=",
"stdout_file",
",",
"stderr",
"=",
"stderr_file",
")",
"return",
"process",
",",
"int",
"(",
"start_time",
"*",
"1000",
")"
] | [
45,
0
] | [
107,
42
] | python | da | ['da', 'ha', 'en'] | False |
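Putting the pieces together, the argv that start_rest_server hands to Popen for a fresh local experiment looks roughly like the sketch below; the paths and experiment id are placeholders, and 'new' is the start_mode used when creating rather than resuming or viewing:

node_command = "/path/to/nni_node/node"  # node.exe on Windows
entry_file = "/path/to/nni_node/main.js"
cmds = [node_command, "--max-old-space-size=4096", entry_file,
        "--port", "8080", "--mode", "local",
        "--experiment_id", "GcfikwzP",   # placeholder id
        "--start_mode", "new"]
print(" ".join(cmds))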
set_trial_config | (experiment_config, port, config_file_name) | set trial configuration | set trial configuration | def set_trial_config(experiment_config, port, config_file_name):
    '''set trial configuration'''
    request_data = dict()
    request_data['trial_config'] = experiment_config['trial']
    response = rest_put(cluster_metadata_url(port), json.dumps(request_data), REST_TIME_OUT)
    if check_response(response):
        return True
    else:
        print('Error message is {}'.format(response.text))
        _, stderr_full_path = get_log_path(config_file_name)
        if response:
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(response.text), indent=4, sort_keys=True, separators=(',', ':')))
        return False | [
"def",
"set_trial_config",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
":",
"request_data",
"=",
"dict",
"(",
")",
"request_data",
"[",
"'trial_config'",
"]",
"=",
"experiment_config",
"[",
"'trial'",
"]",
"response",
"=",
"rest_put",
"(",
"cluster_metadata_url",
"(",
"port",
")",
",",
"json",
".",
"dumps",
"(",
"request_data",
")",
",",
"REST_TIME_OUT",
")",
"if",
"check_response",
"(",
"response",
")",
":",
"return",
"True",
"else",
":",
"print",
"(",
"'Error message is {}'",
".",
"format",
"(",
"response",
".",
"text",
")",
")",
"_",
",",
"stderr_full_path",
"=",
"get_log_path",
"(",
"config_file_name",
")",
"if",
"response",
":",
"with",
"open",
"(",
"stderr_full_path",
",",
"'a+'",
")",
"as",
"fout",
":",
"fout",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"json",
".",
"loads",
"(",
"response",
".",
"text",
")",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True",
",",
"separators",
"=",
"(",
"','",
",",
"':'",
")",
")",
")",
"return",
"False"
] | [
109,
0
] | [
122,
20
] | python | en | ['en', 'fr', 'en'] | True |
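The body set_trial_config PUTs to the cluster-metadata endpoint is a one-key JSON envelope around the experiment's trial section. A sketch with illustrative values (hypothetical, not defaults from any real config):

import json

request_data = {
    "trial_config": {
        "command": "python3 mnist.py",  # hypothetical trial command
        "codeDir": ".",
        "gpuNum": 0,
    }
}
print(json.dumps(request_data, indent=2))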
set_adl_config | (experiment_config, port, config_file_name) | set adl configuration | set adl configuration | def set_adl_config(experiment_config, port, config_file_name):
    '''set adl configuration'''
    adl_config_data = dict()
    # hack for supporting v2 config, need refactor
    adl_config_data['adl_config'] = {}
    response = rest_put(cluster_metadata_url(port), json.dumps(adl_config_data), REST_TIME_OUT)
    err_message = None
    if not response or not response.status_code == 200:
        if response is not None:
            err_message = response.text
            _, stderr_full_path = get_log_path(config_file_name)
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
        return False, err_message
    set_V1_common_config(experiment_config, port, config_file_name)
    result, message = setNNIManagerIp(experiment_config, port, config_file_name)
    if not result:
        return result, message
    #set trial_config
    return set_trial_config(experiment_config, port, config_file_name), None | [
"def",
"set_adl_config",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
":",
"adl_config_data",
"=",
"dict",
"(",
")",
"# hack for supporting v2 config, need refactor",
"adl_config_data",
"[",
"'adl_config'",
"]",
"=",
"{",
"}",
"response",
"=",
"rest_put",
"(",
"cluster_metadata_url",
"(",
"port",
")",
",",
"json",
".",
"dumps",
"(",
"adl_config_data",
")",
",",
"REST_TIME_OUT",
")",
"err_message",
"=",
"None",
"if",
"not",
"response",
"or",
"not",
"response",
".",
"status_code",
"==",
"200",
":",
"if",
"response",
"is",
"not",
"None",
":",
"err_message",
"=",
"response",
".",
"text",
"_",
",",
"stderr_full_path",
"=",
"get_log_path",
"(",
"config_file_name",
")",
"with",
"open",
"(",
"stderr_full_path",
",",
"'a+'",
")",
"as",
"fout",
":",
"fout",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"json",
".",
"loads",
"(",
"err_message",
")",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True",
",",
"separators",
"=",
"(",
"','",
",",
"':'",
")",
")",
")",
"return",
"False",
",",
"err_message",
"set_V1_common_config",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
"result",
",",
"message",
"=",
"setNNIManagerIp",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
"if",
"not",
"result",
":",
"return",
"result",
",",
"message",
"#set trial_config",
"return",
"set_trial_config",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
",",
"None"
] | [
124,
0
] | [
143,
76
] | python | en | ['en', 'fr', 'en'] | True |
setNNIManagerIp | (experiment_config, port, config_file_name) | set nniManagerIp | set nniManagerIp | def setNNIManagerIp(experiment_config, port, config_file_name):
    '''set nniManagerIp'''
    if experiment_config.get('nniManagerIp') is None:
        return True, None
    ip_config_dict = dict()
    ip_config_dict['nni_manager_ip'] = {'nniManagerIp': experiment_config['nniManagerIp']}
    response = rest_put(cluster_metadata_url(port), json.dumps(ip_config_dict), REST_TIME_OUT)
    err_message = None
    if not response or not response.status_code == 200:
        if response is not None:
            err_message = response.text
            _, stderr_full_path = get_log_path(config_file_name)
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
        return False, err_message
    return True, None | [
"def",
"setNNIManagerIp",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
":",
"if",
"experiment_config",
".",
"get",
"(",
"'nniManagerIp'",
")",
"is",
"None",
":",
"return",
"True",
",",
"None",
"ip_config_dict",
"=",
"dict",
"(",
")",
"ip_config_dict",
"[",
"'nni_manager_ip'",
"]",
"=",
"{",
"'nniManagerIp'",
":",
"experiment_config",
"[",
"'nniManagerIp'",
"]",
"}",
"response",
"=",
"rest_put",
"(",
"cluster_metadata_url",
"(",
"port",
")",
",",
"json",
".",
"dumps",
"(",
"ip_config_dict",
")",
",",
"REST_TIME_OUT",
")",
"err_message",
"=",
"None",
"if",
"not",
"response",
"or",
"not",
"response",
".",
"status_code",
"==",
"200",
":",
"if",
"response",
"is",
"not",
"None",
":",
"err_message",
"=",
"response",
".",
"text",
"_",
",",
"stderr_full_path",
"=",
"get_log_path",
"(",
"config_file_name",
")",
"with",
"open",
"(",
"stderr_full_path",
",",
"'a+'",
")",
"as",
"fout",
":",
"fout",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"json",
".",
"loads",
"(",
"err_message",
")",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True",
",",
"separators",
"=",
"(",
"','",
",",
"':'",
")",
")",
")",
"return",
"False",
",",
"err_message",
"return",
"True",
",",
"None"
] | [
172,
0
] | [
187,
21
] | python | da | ['en', 'da', 'sw'] | False |
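setNNIManagerIp follows the same envelope convention as the other metadata setters; when nniManagerIp is configured, the PUT body is simply the nested dict below (the IP value is illustrative):

import json
ip_config_dict = {'nni_manager_ip': {'nniManagerIp': '10.0.0.5'}}
print(json.dumps(ip_config_dict))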
set_kubeflow_config | (experiment_config, port, config_file_name) | set kubeflow configuration | set kubeflow configuration | def set_kubeflow_config(experiment_config, port, config_file_name):
    '''set kubeflow configuration'''
    kubeflow_config_data = dict()
    kubeflow_config_data['kubeflow_config'] = experiment_config['kubeflowConfig']
    response = rest_put(cluster_metadata_url(port), json.dumps(kubeflow_config_data), REST_TIME_OUT)
    err_message = None
    if not response or not response.status_code == 200:
        if response is not None:
            err_message = response.text
            _, stderr_full_path = get_log_path(config_file_name)
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
        return False, err_message
    set_V1_common_config(experiment_config, port, config_file_name)
    result, message = setNNIManagerIp(experiment_config, port, config_file_name)
    if not result:
        return result, message
    #set trial_config
    return set_trial_config(experiment_config, port, config_file_name), err_message | [
"def",
"set_kubeflow_config",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
":",
"kubeflow_config_data",
"=",
"dict",
"(",
")",
"kubeflow_config_data",
"[",
"'kubeflow_config'",
"]",
"=",
"experiment_config",
"[",
"'kubeflowConfig'",
"]",
"response",
"=",
"rest_put",
"(",
"cluster_metadata_url",
"(",
"port",
")",
",",
"json",
".",
"dumps",
"(",
"kubeflow_config_data",
")",
",",
"REST_TIME_OUT",
")",
"err_message",
"=",
"None",
"if",
"not",
"response",
"or",
"not",
"response",
".",
"status_code",
"==",
"200",
":",
"if",
"response",
"is",
"not",
"None",
":",
"err_message",
"=",
"response",
".",
"text",
"_",
",",
"stderr_full_path",
"=",
"get_log_path",
"(",
"config_file_name",
")",
"with",
"open",
"(",
"stderr_full_path",
",",
"'a+'",
")",
"as",
"fout",
":",
"fout",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"json",
".",
"loads",
"(",
"err_message",
")",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True",
",",
"separators",
"=",
"(",
"','",
",",
"':'",
")",
")",
")",
"return",
"False",
",",
"err_message",
"set_V1_common_config",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
"result",
",",
"message",
"=",
"setNNIManagerIp",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
"if",
"not",
"result",
":",
"return",
"result",
",",
"message",
"#set trial_config",
"return",
"set_trial_config",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
",",
"err_message"
] | [
189,
0
] | [
207,
83
] | python | en | ['en', 'xh', 'ur'] | False |
set_frameworkcontroller_config | (experiment_config, port, config_file_name) | set frameworkcontroller configuration | set frameworkcontroller configuration | def set_frameworkcontroller_config(experiment_config, port, config_file_name):
    '''set frameworkcontroller configuration'''
    frameworkcontroller_config_data = dict()
    frameworkcontroller_config_data['frameworkcontroller_config'] = experiment_config['frameworkcontrollerConfig']
    response = rest_put(cluster_metadata_url(port), json.dumps(frameworkcontroller_config_data), REST_TIME_OUT)
    err_message = None
    if not response or not response.status_code == 200:
        if response is not None:
            err_message = response.text
            _, stderr_full_path = get_log_path(config_file_name)
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
        return False, err_message
    set_V1_common_config(experiment_config, port, config_file_name)
    result, message = setNNIManagerIp(experiment_config, port, config_file_name)
    if not result:
        return result, message
    #set trial_config
    return set_trial_config(experiment_config, port, config_file_name), err_message | [
"def",
"set_frameworkcontroller_config",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
":",
"frameworkcontroller_config_data",
"=",
"dict",
"(",
")",
"frameworkcontroller_config_data",
"[",
"'frameworkcontroller_config'",
"]",
"=",
"experiment_config",
"[",
"'frameworkcontrollerConfig'",
"]",
"response",
"=",
"rest_put",
"(",
"cluster_metadata_url",
"(",
"port",
")",
",",
"json",
".",
"dumps",
"(",
"frameworkcontroller_config_data",
")",
",",
"REST_TIME_OUT",
")",
"err_message",
"=",
"None",
"if",
"not",
"response",
"or",
"not",
"response",
".",
"status_code",
"==",
"200",
":",
"if",
"response",
"is",
"not",
"None",
":",
"err_message",
"=",
"response",
".",
"text",
"_",
",",
"stderr_full_path",
"=",
"get_log_path",
"(",
"config_file_name",
")",
"with",
"open",
"(",
"stderr_full_path",
",",
"'a+'",
")",
"as",
"fout",
":",
"fout",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"json",
".",
"loads",
"(",
"err_message",
")",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True",
",",
"separators",
"=",
"(",
"','",
",",
"':'",
")",
")",
")",
"return",
"False",
",",
"err_message",
"set_V1_common_config",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
"result",
",",
"message",
"=",
"setNNIManagerIp",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
"if",
"not",
"result",
":",
"return",
"result",
",",
"message",
"#set trial_config",
"return",
"set_trial_config",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
",",
"err_message"
] | [
209,
0
] | [
227,
83
] | python | en | ['en', 'xh', 'ur'] | False |
set_experiment_v1 | (experiment_config, mode, port, config_file_name) | Call startExperiment (rest POST /experiment) with yaml file content | Call startExperiment (rest POST /experiment) with yaml file content | def set_experiment_v1(experiment_config, mode, port, config_file_name):
    '''Call startExperiment (rest POST /experiment) with yaml file content'''
    request_data = dict()
    request_data['authorName'] = experiment_config['authorName']
    request_data['experimentName'] = experiment_config['experimentName']
    request_data['trialConcurrency'] = experiment_config['trialConcurrency']
    request_data['maxExecDuration'] = experiment_config['maxExecDuration']
    request_data['maxExperimentDuration'] = str(experiment_config['maxExecDuration']) + 's'
    request_data['maxTrialNum'] = experiment_config['maxTrialNum']
    request_data['maxTrialNumber'] = experiment_config['maxTrialNum']
    request_data['searchSpace'] = experiment_config.get('searchSpace')
    request_data['trainingServicePlatform'] = experiment_config.get('trainingServicePlatform')
    # hack for hotfix, fix config.trainingService undefined error, need refactor
    request_data['trainingService'] = {'platform': experiment_config.get('trainingServicePlatform')}
    if experiment_config.get('description'):
        request_data['description'] = experiment_config['description']
    if experiment_config.get('multiPhase'):
        request_data['multiPhase'] = experiment_config.get('multiPhase')
    if experiment_config.get('multiThread'):
        request_data['multiThread'] = experiment_config.get('multiThread')
    if experiment_config.get('nniManagerIp'):
        request_data['nniManagerIp'] = experiment_config.get('nniManagerIp')
    if experiment_config.get('advisor'):
        request_data['advisor'] = experiment_config['advisor']
        if request_data['advisor'].get('gpuNum'):
            print_error('gpuNum is deprecated, please use gpuIndices instead.')
        if request_data['advisor'].get('gpuIndices') and isinstance(request_data['advisor'].get('gpuIndices'), int):
            request_data['advisor']['gpuIndices'] = str(request_data['advisor'].get('gpuIndices'))
    else:
        request_data['tuner'] = experiment_config['tuner']
        if request_data['tuner'].get('gpuNum'):
            print_error('gpuNum is deprecated, please use gpuIndices instead.')
        if request_data['tuner'].get('gpuIndices') and isinstance(request_data['tuner'].get('gpuIndices'), int):
            request_data['tuner']['gpuIndices'] = str(request_data['tuner'].get('gpuIndices'))
        if 'assessor' in experiment_config:
            request_data['assessor'] = experiment_config['assessor']
            if request_data['assessor'].get('gpuNum'):
                print_error('gpuNum is deprecated, please remove it from your config file.')
    #debug mode should disable version check
    if experiment_config.get('debug') is not None:
        request_data['versionCheck'] = not experiment_config.get('debug')
    #validate version check
    if experiment_config.get('versionCheck') is not None:
        request_data['versionCheck'] = experiment_config.get('versionCheck')
    if experiment_config.get('logCollection'):
        request_data['logCollection'] = experiment_config.get('logCollection')
    request_data['clusterMetaData'] = []
    if experiment_config['trainingServicePlatform'] == 'kubeflow':
        request_data['clusterMetaData'].append(
            {'key': 'kubeflow_config', 'value': experiment_config['kubeflowConfig']})
        request_data['clusterMetaData'].append(
            {'key': 'trial_config', 'value': experiment_config['trial']})
    elif experiment_config['trainingServicePlatform'] == 'frameworkcontroller':
        request_data['clusterMetaData'].append(
            {'key': 'frameworkcontroller_config', 'value': experiment_config['frameworkcontrollerConfig']})
        request_data['clusterMetaData'].append(
            {'key': 'trial_config', 'value': experiment_config['trial']})
    elif experiment_config['trainingServicePlatform'] == 'adl':
        request_data['clusterMetaData'].append(
            {'key': 'trial_config', 'value': experiment_config['trial']})
    response = rest_post(experiment_url(port), json.dumps(request_data), REST_TIME_OUT, show_error=True)
    if check_response(response):
        return response
    else:
        _, stderr_full_path = get_log_path(config_file_name)
        if response is not None:
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(response.text), indent=4, sort_keys=True, separators=(',', ':')))
            print_error('Setting experiment error, error message is {}'.format(response.text))
        return None | [
"def",
"set_experiment_v1",
"(",
"experiment_config",
",",
"mode",
",",
"port",
",",
"config_file_name",
")",
":",
"request_data",
"=",
"dict",
"(",
")",
"request_data",
"[",
"'authorName'",
"]",
"=",
"experiment_config",
"[",
"'authorName'",
"]",
"request_data",
"[",
"'experimentName'",
"]",
"=",
"experiment_config",
"[",
"'experimentName'",
"]",
"request_data",
"[",
"'trialConcurrency'",
"]",
"=",
"experiment_config",
"[",
"'trialConcurrency'",
"]",
"request_data",
"[",
"'maxExecDuration'",
"]",
"=",
"experiment_config",
"[",
"'maxExecDuration'",
"]",
"request_data",
"[",
"'maxExperimentDuration'",
"]",
"=",
"str",
"(",
"experiment_config",
"[",
"'maxExecDuration'",
"]",
")",
"+",
"'s'",
"request_data",
"[",
"'maxTrialNum'",
"]",
"=",
"experiment_config",
"[",
"'maxTrialNum'",
"]",
"request_data",
"[",
"'maxTrialNumber'",
"]",
"=",
"experiment_config",
"[",
"'maxTrialNum'",
"]",
"request_data",
"[",
"'searchSpace'",
"]",
"=",
"experiment_config",
".",
"get",
"(",
"'searchSpace'",
")",
"request_data",
"[",
"'trainingServicePlatform'",
"]",
"=",
"experiment_config",
".",
"get",
"(",
"'trainingServicePlatform'",
")",
"# hack for hotfix, fix config.trainingService undefined error, need refactor",
"request_data",
"[",
"'trainingService'",
"]",
"=",
"{",
"'platform'",
":",
"experiment_config",
".",
"get",
"(",
"'trainingServicePlatform'",
")",
"}",
"if",
"experiment_config",
".",
"get",
"(",
"'description'",
")",
":",
"request_data",
"[",
"'description'",
"]",
"=",
"experiment_config",
"[",
"'description'",
"]",
"if",
"experiment_config",
".",
"get",
"(",
"'multiPhase'",
")",
":",
"request_data",
"[",
"'multiPhase'",
"]",
"=",
"experiment_config",
".",
"get",
"(",
"'multiPhase'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'multiThread'",
")",
":",
"request_data",
"[",
"'multiThread'",
"]",
"=",
"experiment_config",
".",
"get",
"(",
"'multiThread'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'nniManagerIp'",
")",
":",
"request_data",
"[",
"'nniManagerIp'",
"]",
"=",
"experiment_config",
".",
"get",
"(",
"'nniManagerIp'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'advisor'",
")",
":",
"request_data",
"[",
"'advisor'",
"]",
"=",
"experiment_config",
"[",
"'advisor'",
"]",
"if",
"request_data",
"[",
"'advisor'",
"]",
".",
"get",
"(",
"'gpuNum'",
")",
":",
"print_error",
"(",
"'gpuNum is deprecated, please use gpuIndices instead.'",
")",
"if",
"request_data",
"[",
"'advisor'",
"]",
".",
"get",
"(",
"'gpuIndices'",
")",
"and",
"isinstance",
"(",
"request_data",
"[",
"'advisor'",
"]",
".",
"get",
"(",
"'gpuIndices'",
")",
",",
"int",
")",
":",
"request_data",
"[",
"'advisor'",
"]",
"[",
"'gpuIndices'",
"]",
"=",
"str",
"(",
"request_data",
"[",
"'advisor'",
"]",
".",
"get",
"(",
"'gpuIndices'",
")",
")",
"else",
":",
"request_data",
"[",
"'tuner'",
"]",
"=",
"experiment_config",
"[",
"'tuner'",
"]",
"if",
"request_data",
"[",
"'tuner'",
"]",
".",
"get",
"(",
"'gpuNum'",
")",
":",
"print_error",
"(",
"'gpuNum is deprecated, please use gpuIndices instead.'",
")",
"if",
"request_data",
"[",
"'tuner'",
"]",
".",
"get",
"(",
"'gpuIndices'",
")",
"and",
"isinstance",
"(",
"request_data",
"[",
"'tuner'",
"]",
".",
"get",
"(",
"'gpuIndices'",
")",
",",
"int",
")",
":",
"request_data",
"[",
"'tuner'",
"]",
"[",
"'gpuIndices'",
"]",
"=",
"str",
"(",
"request_data",
"[",
"'tuner'",
"]",
".",
"get",
"(",
"'gpuIndices'",
")",
")",
"if",
"'assessor'",
"in",
"experiment_config",
":",
"request_data",
"[",
"'assessor'",
"]",
"=",
"experiment_config",
"[",
"'assessor'",
"]",
"if",
"request_data",
"[",
"'assessor'",
"]",
".",
"get",
"(",
"'gpuNum'",
")",
":",
"print_error",
"(",
"'gpuNum is deprecated, please remove it from your config file.'",
")",
"#debug mode should disable version check",
"if",
"experiment_config",
".",
"get",
"(",
"'debug'",
")",
"is",
"not",
"None",
":",
"request_data",
"[",
"'versionCheck'",
"]",
"=",
"not",
"experiment_config",
".",
"get",
"(",
"'debug'",
")",
"#validate version check",
"if",
"experiment_config",
".",
"get",
"(",
"'versionCheck'",
")",
"is",
"not",
"None",
":",
"request_data",
"[",
"'versionCheck'",
"]",
"=",
"experiment_config",
".",
"get",
"(",
"'versionCheck'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'logCollection'",
")",
":",
"request_data",
"[",
"'logCollection'",
"]",
"=",
"experiment_config",
".",
"get",
"(",
"'logCollection'",
")",
"request_data",
"[",
"'clusterMetaData'",
"]",
"=",
"[",
"]",
"if",
"experiment_config",
"[",
"'trainingServicePlatform'",
"]",
"==",
"'kubeflow'",
":",
"request_data",
"[",
"'clusterMetaData'",
"]",
".",
"append",
"(",
"{",
"'key'",
":",
"'kubeflow_config'",
",",
"'value'",
":",
"experiment_config",
"[",
"'kubeflowConfig'",
"]",
"}",
")",
"request_data",
"[",
"'clusterMetaData'",
"]",
".",
"append",
"(",
"{",
"'key'",
":",
"'trial_config'",
",",
"'value'",
":",
"experiment_config",
"[",
"'trial'",
"]",
"}",
")",
"elif",
"experiment_config",
"[",
"'trainingServicePlatform'",
"]",
"==",
"'frameworkcontroller'",
":",
"request_data",
"[",
"'clusterMetaData'",
"]",
".",
"append",
"(",
"{",
"'key'",
":",
"'frameworkcontroller_config'",
",",
"'value'",
":",
"experiment_config",
"[",
"'frameworkcontrollerConfig'",
"]",
"}",
")",
"request_data",
"[",
"'clusterMetaData'",
"]",
".",
"append",
"(",
"{",
"'key'",
":",
"'trial_config'",
",",
"'value'",
":",
"experiment_config",
"[",
"'trial'",
"]",
"}",
")",
"elif",
"experiment_config",
"[",
"'trainingServicePlatform'",
"]",
"==",
"'adl'",
":",
"request_data",
"[",
"'clusterMetaData'",
"]",
".",
"append",
"(",
"{",
"'key'",
":",
"'trial_config'",
",",
"'value'",
":",
"experiment_config",
"[",
"'trial'",
"]",
"}",
")",
"response",
"=",
"rest_post",
"(",
"experiment_url",
"(",
"port",
")",
",",
"json",
".",
"dumps",
"(",
"request_data",
")",
",",
"REST_TIME_OUT",
",",
"show_error",
"=",
"True",
")",
"if",
"check_response",
"(",
"response",
")",
":",
"return",
"response",
"else",
":",
"_",
",",
"stderr_full_path",
"=",
"get_log_path",
"(",
"config_file_name",
")",
"if",
"response",
"is",
"not",
"None",
":",
"with",
"open",
"(",
"stderr_full_path",
",",
"'a+'",
")",
"as",
"fout",
":",
"fout",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"json",
".",
"loads",
"(",
"response",
".",
"text",
")",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True",
",",
"separators",
"=",
"(",
"','",
",",
"':'",
")",
")",
")",
"print_error",
"(",
"'Setting experiment error, error message is {}'",
".",
"format",
"(",
"response",
".",
"text",
")",
")",
"return",
"None"
] | [
243,
0
] | [
312,
19
] | python | en | ['en', 'en', 'en'] | True |
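Assembled from a typical local v1 config, the POST body built by set_experiment_v1 looks roughly like the sketch below. All values are placeholders rather than defaults taken from any real config file; note the duplicated legacy/new field pairs such as maxTrialNum/maxTrialNumber and maxExecDuration/maxExperimentDuration kept for compatibility:

import json

request_data = {
    "authorName": "default",
    "experimentName": "example",
    "trialConcurrency": 1,
    "maxExecDuration": 3600,
    "maxExperimentDuration": "3600s",  # same duration, string form for the newer API
    "maxTrialNum": 10,
    "maxTrialNumber": 10,
    "searchSpace": {"lr": {"_type": "uniform", "_value": [0.0001, 0.1]}},
    "trainingServicePlatform": "local",
    "trainingService": {"platform": "local"},
    "tuner": {"builtinTunerName": "TPE"},
    "clusterMetaData": [],
}
print(json.dumps(request_data, indent=2))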
set_experiment_v2 | (experiment_config, mode, port, config_file_name) | Call startExperiment (rest POST /experiment) with yaml file content | Call startExperiment (rest POST /experiment) with yaml file content | def set_experiment_v2(experiment_config, mode, port, config_file_name):
    '''Call startExperiment (rest POST /experiment) with yaml file content'''
    response = rest_post(experiment_url(port), json.dumps(experiment_config), REST_TIME_OUT, show_error=True)
    if check_response(response):
        return response
    else:
        _, stderr_full_path = get_log_path(config_file_name)
        if response is not None:
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(response.text), indent=4, sort_keys=True, separators=(',', ':')))
            print_error('Setting experiment error, error message is {}'.format(response.text))
        return None | [
"def",
"set_experiment_v2",
"(",
"experiment_config",
",",
"mode",
",",
"port",
",",
"config_file_name",
")",
":",
"response",
"=",
"rest_post",
"(",
"experiment_url",
"(",
"port",
")",
",",
"json",
".",
"dumps",
"(",
"experiment_config",
")",
",",
"REST_TIME_OUT",
",",
"show_error",
"=",
"True",
")",
"if",
"check_response",
"(",
"response",
")",
":",
"return",
"response",
"else",
":",
"_",
",",
"stderr_full_path",
"=",
"get_log_path",
"(",
"config_file_name",
")",
"if",
"response",
"is",
"not",
"None",
":",
"with",
"open",
"(",
"stderr_full_path",
",",
"'a+'",
")",
"as",
"fout",
":",
"fout",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"json",
".",
"loads",
"(",
"response",
".",
"text",
")",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True",
",",
"separators",
"=",
"(",
"','",
",",
"':'",
")",
")",
")",
"print_error",
"(",
"'Setting experiment error, error message is {}'",
".",
"format",
"(",
"response",
".",
"text",
")",
")",
"return",
"None"
] | [
314,
0
] | [
325,
19
] | python | en | ['en', 'en', 'en'] | True |
set_platform_config | (platform, experiment_config, port, config_file_name, rest_process) | call set_cluster_metadata for specific platform | call set_cluster_metadata for specific platform | def set_platform_config(platform, experiment_config, port, config_file_name, rest_process):
    '''call set_cluster_metadata for specific platform'''
    print_normal('Setting {0} config...'.format(platform))
    config_result, err_msg = None, None
    if platform == 'adl':
        config_result, err_msg = set_adl_config(experiment_config, port, config_file_name)
    elif platform == 'kubeflow':
        config_result, err_msg = set_kubeflow_config(experiment_config, port, config_file_name)
    elif platform == 'frameworkcontroller':
        config_result, err_msg = set_frameworkcontroller_config(experiment_config, port, config_file_name)
    else:
        raise Exception(ERROR_INFO % 'Unsupported platform!')
        exit(1)
    if config_result:
        config_result, err_msg = set_shared_storage(experiment_config, port, config_file_name)
    if config_result:
        print_normal('Successfully set {0} config!'.format(platform))
    else:
        print_error('Failed! Error is: {}'.format(err_msg))
        try:
            kill_command(rest_process.pid)
        except Exception:
            raise Exception(ERROR_INFO % 'Rest server stopped!')
        exit(1) | [
"def",
"set_platform_config",
"(",
"platform",
",",
"experiment_config",
",",
"port",
",",
"config_file_name",
",",
"rest_process",
")",
":",
"print_normal",
"(",
"'Setting {0} config...'",
".",
"format",
"(",
"platform",
")",
")",
"config_result",
",",
"err_msg",
"=",
"None",
",",
"None",
"if",
"platform",
"==",
"'adl'",
":",
"config_result",
",",
"err_msg",
"=",
"set_adl_config",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
"elif",
"platform",
"==",
"'kubeflow'",
":",
"config_result",
",",
"err_msg",
"=",
"set_kubeflow_config",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
"elif",
"platform",
"==",
"'frameworkcontroller'",
":",
"config_result",
",",
"err_msg",
"=",
"set_frameworkcontroller_config",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
"else",
":",
"raise",
"Exception",
"(",
"ERROR_INFO",
"%",
"'Unsupported platform!'",
")",
"exit",
"(",
"1",
")",
"if",
"config_result",
":",
"config_result",
",",
"err_msg",
"=",
"set_shared_storage",
"(",
"experiment_config",
",",
"port",
",",
"config_file_name",
")",
"if",
"config_result",
":",
"print_normal",
"(",
"'Successfully set {0} config!'",
".",
"format",
"(",
"platform",
")",
")",
"else",
":",
"print_error",
"(",
"'Failed! Error is: {}'",
".",
"format",
"(",
"err_msg",
")",
")",
"try",
":",
"kill_command",
"(",
"rest_process",
".",
"pid",
")",
"except",
"Exception",
":",
"raise",
"Exception",
"(",
"ERROR_INFO",
"%",
"'Rest server stopped!'",
")",
"exit",
"(",
"1",
")"
] | [
327,
0
] | [
350,
15
] | python | en | ['en', 'en', 'en'] | True |
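The if/elif ladder in set_platform_config could equally be written as a dispatch table (incidentally, the exit(1) directly after the raise in the original is unreachable). A sketch of that alternative; the setter names refer to the functions defined in the records above:

PLATFORM_SETTERS = {
    'adl': set_adl_config,
    'kubeflow': set_kubeflow_config,
    'frameworkcontroller': set_frameworkcontroller_config,
}

def set_platform_config_alt(platform, experiment_config, port, config_file_name):
    # Unknown platforms fail the same way as the original else branch.
    setter = PLATFORM_SETTERS.get(platform)
    if setter is None:
        raise Exception('Unsupported platform!')
    return setter(experiment_config, port, config_file_name)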
launch_experiment | (args, experiment_config, mode, experiment_id, config_version) | follow steps to start rest server and start experiment | follow steps to start rest server and start experiment | def launch_experiment(args, experiment_config, mode, experiment_id, config_version):
    '''follow steps to start rest server and start experiment'''
    # check packages for tuner
    package_name, module_name = None, None
    if experiment_config.get('tuner') and experiment_config['tuner'].get('builtinTunerName'):
        package_name = experiment_config['tuner']['builtinTunerName']
        module_name, _ = get_builtin_module_class_name('tuners', package_name)
    elif experiment_config.get('advisor') and experiment_config['advisor'].get('builtinAdvisorName'):
        package_name = experiment_config['advisor']['builtinAdvisorName']
        module_name, _ = get_builtin_module_class_name('advisors', package_name)
    if package_name and module_name:
        try:
            stdout_full_path, stderr_full_path = get_log_path(experiment_id)
            with open(stdout_full_path, 'a+') as stdout_file, open(stderr_full_path, 'a+') as stderr_file:
                check_call([sys.executable, '-c', 'import %s'%(module_name)], stdout=stdout_file, stderr=stderr_file)
        except CalledProcessError:
            print_error('some errors happen when import package %s.' %(package_name))
            print_log_content(experiment_id)
            if package_name in ['SMAC', 'BOHB', 'PPOTuner']:
                print_error(f'The dependencies for {package_name} can be installed through pip install nni[{package_name}]')
            raise
    if config_version == 1:
        log_dir = experiment_config['logDir'] if experiment_config.get('logDir') else NNI_HOME_DIR
    else:
        log_dir = experiment_config['experimentWorkingDirectory'] if experiment_config.get('experimentWorkingDirectory') else NNI_HOME_DIR
    log_level = experiment_config['logLevel'] if experiment_config.get('logLevel') else 'info'
    #view experiment mode do not need debug function, when view an experiment, there will be no new logs created
    foreground = False
    if mode != 'view':
        foreground = args.foreground
        if log_level not in ['trace', 'debug'] and (args.debug or experiment_config.get('debug') is True):
            log_level = 'debug'
    # start rest server
    if config_version == 1:
        platform = experiment_config['trainingServicePlatform']
    elif isinstance(experiment_config['trainingService'], list):
        platform = 'hybrid'
    else:
        platform = experiment_config['trainingService']['platform']
    rest_process, start_time = start_rest_server(args.port, platform, \
                                                 mode, experiment_id, foreground, log_dir, log_level, args.url_prefix)
    # save experiment information
    Experiments().add_experiment(experiment_id, args.port, start_time,
                                 platform,
                                 experiment_config.get('experimentName', 'N/A')
                                 , pid=rest_process.pid, logDir=log_dir, prefixUrl=args.url_prefix)
    # Deal with annotation
    if experiment_config.get('useAnnotation'):
        path = os.path.join(tempfile.gettempdir(), get_user(), 'nni', 'annotation')
        if not os.path.isdir(path):
            os.makedirs(path)
        path = tempfile.mkdtemp(dir=path)
        nas_mode = experiment_config['trial'].get('nasMode', 'classic_mode')
        code_dir = expand_annotations(experiment_config['trial']['codeDir'], path, nas_mode=nas_mode)
        experiment_config['trial']['codeDir'] = code_dir
        search_space = generate_search_space(code_dir)
        experiment_config['searchSpace'] = search_space
        assert search_space, ERROR_INFO % 'Generated search space is empty'
    elif config_version == 1:
        if experiment_config.get('searchSpacePath'):
            search_space = get_json_content(experiment_config.get('searchSpacePath'))
            experiment_config['searchSpace'] = search_space
        else:
            experiment_config['searchSpace'] = ''
    # check rest server
    running, _ = check_rest_server(args.port)
    if running:
        print_normal('Successfully started Restful server!')
    else:
        print_error('Restful server start failed!')
        print_log_content(experiment_id)
        try:
            kill_command(rest_process.pid)
        except Exception:
            raise Exception(ERROR_INFO % 'Rest server stopped!')
        exit(1)
    if config_version == 1 and mode != 'view':
        # set platform configuration
        set_platform_config(experiment_config['trainingServicePlatform'], experiment_config, args.port,\
                            experiment_id, rest_process)
    # start a new experiment
    print_normal('Starting experiment...')
    # set debug configuration
    if mode != 'view' and experiment_config.get('debug') is None:
        experiment_config['debug'] = args.debug
    if config_version == 1:
        response = set_experiment_v1(experiment_config, mode, args.port, experiment_id)
    else:
        response = set_experiment_v2(experiment_config, mode, args.port, experiment_id)
    if response:
        if experiment_id is None:
            experiment_id = json.loads(response.text).get('experiment_id')
    else:
        print_error('Start experiment failed!')
        print_log_content(experiment_id)
        try:
            kill_command(rest_process.pid)
        except Exception:
            raise Exception(ERROR_INFO % 'Restful server stopped!')
        exit(1)
    url_prefix_format = '' if args.url_prefix is None else '/{0}'.format(args.url_prefix)
    if experiment_config.get('nniManagerIp'):
        web_ui_url_list = ['http://{0}:{1}{2}'.format(experiment_config['nniManagerIp'], str(args.port), url_prefix_format)]
    else:
        web_ui_url_list = get_local_urls(args.port, url_prefix_format)
    Experiments().update_experiment(experiment_id, 'webuiUrl', web_ui_url_list)
    print_normal(EXPERIMENT_SUCCESS_INFO % (experiment_id, ' '.join(web_ui_url_list)))
    if mode != 'view' and args.foreground:
        try:
            while True:
                log_content = rest_process.stdout.readline().strip().decode('utf-8')
                print(log_content)
        except KeyboardInterrupt:
            kill_command(rest_process.pid)
            print_normal('Stopping experiment...') | [
"def",
"launch_experiment",
"(",
"args",
",",
"experiment_config",
",",
"mode",
",",
"experiment_id",
",",
"config_version",
")",
":",
"# check packages for tuner",
"package_name",
",",
"module_name",
"=",
"None",
",",
"None",
"if",
"experiment_config",
".",
"get",
"(",
"'tuner'",
")",
"and",
"experiment_config",
"[",
"'tuner'",
"]",
".",
"get",
"(",
"'builtinTunerName'",
")",
":",
"package_name",
"=",
"experiment_config",
"[",
"'tuner'",
"]",
"[",
"'builtinTunerName'",
"]",
"module_name",
",",
"_",
"=",
"get_builtin_module_class_name",
"(",
"'tuners'",
",",
"package_name",
")",
"elif",
"experiment_config",
".",
"get",
"(",
"'advisor'",
")",
"and",
"experiment_config",
"[",
"'advisor'",
"]",
".",
"get",
"(",
"'builtinAdvisorName'",
")",
":",
"package_name",
"=",
"experiment_config",
"[",
"'advisor'",
"]",
"[",
"'builtinAdvisorName'",
"]",
"module_name",
",",
"_",
"=",
"get_builtin_module_class_name",
"(",
"'advisors'",
",",
"package_name",
")",
"if",
"package_name",
"and",
"module_name",
":",
"try",
":",
"stdout_full_path",
",",
"stderr_full_path",
"=",
"get_log_path",
"(",
"experiment_id",
")",
"with",
"open",
"(",
"stdout_full_path",
",",
"'a+'",
")",
"as",
"stdout_file",
",",
"open",
"(",
"stderr_full_path",
",",
"'a+'",
")",
"as",
"stderr_file",
":",
"check_call",
"(",
"[",
"sys",
".",
"executable",
",",
"'-c'",
",",
"'import %s'",
"%",
"(",
"module_name",
")",
"]",
",",
"stdout",
"=",
"stdout_file",
",",
"stderr",
"=",
"stderr_file",
")",
"except",
"CalledProcessError",
":",
"print_error",
"(",
"'some errors happen when import package %s.'",
"%",
"(",
"package_name",
")",
")",
"print_log_content",
"(",
"experiment_id",
")",
"if",
"package_name",
"in",
"[",
"'SMAC'",
",",
"'BOHB'",
",",
"'PPOTuner'",
"]",
":",
"print_error",
"(",
"f'The dependencies for {package_name} can be installed through pip install nni[{package_name}]'",
")",
"raise",
"if",
"config_version",
"==",
"1",
":",
"log_dir",
"=",
"experiment_config",
"[",
"'logDir'",
"]",
"if",
"experiment_config",
".",
"get",
"(",
"'logDir'",
")",
"else",
"NNI_HOME_DIR",
"else",
":",
"log_dir",
"=",
"experiment_config",
"[",
"'experimentWorkingDirectory'",
"]",
"if",
"experiment_config",
".",
"get",
"(",
"'experimentWorkingDirectory'",
")",
"else",
"NNI_HOME_DIR",
"log_level",
"=",
"experiment_config",
"[",
"'logLevel'",
"]",
"if",
"experiment_config",
".",
"get",
"(",
"'logLevel'",
")",
"else",
"'info'",
"#view experiment mode do not need debug function, when view an experiment, there will be no new logs created",
"foreground",
"=",
"False",
"if",
"mode",
"!=",
"'view'",
":",
"foreground",
"=",
"args",
".",
"foreground",
"if",
"log_level",
"not",
"in",
"[",
"'trace'",
",",
"'debug'",
"]",
"and",
"(",
"args",
".",
"debug",
"or",
"experiment_config",
".",
"get",
"(",
"'debug'",
")",
"is",
"True",
")",
":",
"log_level",
"=",
"'debug'",
"# start rest server",
"if",
"config_version",
"==",
"1",
":",
"platform",
"=",
"experiment_config",
"[",
"'trainingServicePlatform'",
"]",
"elif",
"isinstance",
"(",
"experiment_config",
"[",
"'trainingService'",
"]",
",",
"list",
")",
":",
"platform",
"=",
"'hybrid'",
"else",
":",
"platform",
"=",
"experiment_config",
"[",
"'trainingService'",
"]",
"[",
"'platform'",
"]",
"rest_process",
",",
"start_time",
"=",
"start_rest_server",
"(",
"args",
".",
"port",
",",
"platform",
",",
"mode",
",",
"experiment_id",
",",
"foreground",
",",
"log_dir",
",",
"log_level",
",",
"args",
".",
"url_prefix",
")",
"# save experiment information",
"Experiments",
"(",
")",
".",
"add_experiment",
"(",
"experiment_id",
",",
"args",
".",
"port",
",",
"start_time",
",",
"platform",
",",
"experiment_config",
".",
"get",
"(",
"'experimentName'",
",",
"'N/A'",
")",
",",
"pid",
"=",
"rest_process",
".",
"pid",
",",
"logDir",
"=",
"log_dir",
",",
"prefixUrl",
"=",
"args",
".",
"url_prefix",
")",
"# Deal with annotation",
"if",
"experiment_config",
".",
"get",
"(",
"'useAnnotation'",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"tempfile",
".",
"gettempdir",
"(",
")",
",",
"get_user",
"(",
")",
",",
"'nni'",
",",
"'annotation'",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"os",
".",
"makedirs",
"(",
"path",
")",
"path",
"=",
"tempfile",
".",
"mkdtemp",
"(",
"dir",
"=",
"path",
")",
"nas_mode",
"=",
"experiment_config",
"[",
"'trial'",
"]",
".",
"get",
"(",
"'nasMode'",
",",
"'classic_mode'",
")",
"code_dir",
"=",
"expand_annotations",
"(",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'codeDir'",
"]",
",",
"path",
",",
"nas_mode",
"=",
"nas_mode",
")",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'codeDir'",
"]",
"=",
"code_dir",
"search_space",
"=",
"generate_search_space",
"(",
"code_dir",
")",
"experiment_config",
"[",
"'searchSpace'",
"]",
"=",
"search_space",
"assert",
"search_space",
",",
"ERROR_INFO",
"%",
"'Generated search space is empty'",
"elif",
"config_version",
"==",
"1",
":",
"if",
"experiment_config",
".",
"get",
"(",
"'searchSpacePath'",
")",
":",
"search_space",
"=",
"get_json_content",
"(",
"experiment_config",
".",
"get",
"(",
"'searchSpacePath'",
")",
")",
"experiment_config",
"[",
"'searchSpace'",
"]",
"=",
"search_space",
"else",
":",
"experiment_config",
"[",
"'searchSpace'",
"]",
"=",
"''",
"# check rest server",
"running",
",",
"_",
"=",
"check_rest_server",
"(",
"args",
".",
"port",
")",
"if",
"running",
":",
"print_normal",
"(",
"'Successfully started Restful server!'",
")",
"else",
":",
"print_error",
"(",
"'Restful server start failed!'",
")",
"print_log_content",
"(",
"experiment_id",
")",
"try",
":",
"kill_command",
"(",
"rest_process",
".",
"pid",
")",
"except",
"Exception",
":",
"raise",
"Exception",
"(",
"ERROR_INFO",
"%",
"'Rest server stopped!'",
")",
"exit",
"(",
"1",
")",
"if",
"config_version",
"==",
"1",
"and",
"mode",
"!=",
"'view'",
":",
"# set platform configuration",
"set_platform_config",
"(",
"experiment_config",
"[",
"'trainingServicePlatform'",
"]",
",",
"experiment_config",
",",
"args",
".",
"port",
",",
"experiment_id",
",",
"rest_process",
")",
"# start a new experiment",
"print_normal",
"(",
"'Starting experiment...'",
")",
"# set debug configuration",
"if",
"mode",
"!=",
"'view'",
"and",
"experiment_config",
".",
"get",
"(",
"'debug'",
")",
"is",
"None",
":",
"experiment_config",
"[",
"'debug'",
"]",
"=",
"args",
".",
"debug",
"if",
"config_version",
"==",
"1",
":",
"response",
"=",
"set_experiment_v1",
"(",
"experiment_config",
",",
"mode",
",",
"args",
".",
"port",
",",
"experiment_id",
")",
"else",
":",
"response",
"=",
"set_experiment_v2",
"(",
"experiment_config",
",",
"mode",
",",
"args",
".",
"port",
",",
"experiment_id",
")",
"if",
"response",
":",
"if",
"experiment_id",
"is",
"None",
":",
"experiment_id",
"=",
"json",
".",
"loads",
"(",
"response",
".",
"text",
")",
".",
"get",
"(",
"'experiment_id'",
")",
"else",
":",
"print_error",
"(",
"'Start experiment failed!'",
")",
"print_log_content",
"(",
"experiment_id",
")",
"try",
":",
"kill_command",
"(",
"rest_process",
".",
"pid",
")",
"except",
"Exception",
":",
"raise",
"Exception",
"(",
"ERROR_INFO",
"%",
"'Restful server stopped!'",
")",
"exit",
"(",
"1",
")",
"url_prefix_format",
"=",
"''",
"if",
"args",
".",
"url_prefix",
"is",
"None",
"else",
"'/{0}'",
".",
"format",
"(",
"args",
".",
"url_prefix",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'nniManagerIp'",
")",
":",
"web_ui_url_list",
"=",
"[",
"'http://{0}:{1}{2}'",
".",
"format",
"(",
"experiment_config",
"[",
"'nniManagerIp'",
"]",
",",
"str",
"(",
"args",
".",
"port",
")",
",",
"url_prefix_format",
")",
"]",
"else",
":",
"web_ui_url_list",
"=",
"get_local_urls",
"(",
"args",
".",
"port",
",",
"url_prefix_format",
")",
"Experiments",
"(",
")",
".",
"update_experiment",
"(",
"experiment_id",
",",
"'webuiUrl'",
",",
"web_ui_url_list",
")",
"print_normal",
"(",
"EXPERIMENT_SUCCESS_INFO",
"%",
"(",
"experiment_id",
",",
"' '",
".",
"join",
"(",
"web_ui_url_list",
")",
")",
")",
"if",
"mode",
"!=",
"'view'",
"and",
"args",
".",
"foreground",
":",
"try",
":",
"while",
"True",
":",
"log_content",
"=",
"rest_process",
".",
"stdout",
".",
"readline",
"(",
")",
".",
"strip",
"(",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"print",
"(",
"log_content",
")",
"except",
"KeyboardInterrupt",
":",
"kill_command",
"(",
"rest_process",
".",
"pid",
")",
"print_normal",
"(",
"'Stopping experiment...'",
")"
] | [
352,
0
] | [
470,
50
] | python | en | ['en', 'en', 'en'] | True |
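The record above ends launch_experiment, whose first step probes whether a built-in tuner's module can actually be imported before the REST server is started. A minimal sketch of that probe, assuming only the Python standard library (the module names passed in below are illustrative, and the log-file redirection from the record is omitted):

import sys
from subprocess import CalledProcessError, check_call

def tuner_module_importable(module_name):
    """Return True if module_name imports cleanly in a fresh interpreter.

    Mirrors the check_call([sys.executable, '-c', 'import <module>']) probe
    in the record above.
    """
    try:
        check_call([sys.executable, '-c', 'import %s' % module_name])
        return True
    except CalledProcessError:
        return False

print(tuner_module_importable('json'))            # True: stdlib always imports
print(tuner_module_importable('no_such_module'))  # False: child import fails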
create_experiment | (args) | start a new experiment | start a new experiment | def create_experiment(args):
'''start a new experiment'''
experiment_id = ''.join(random.sample(string.ascii_letters + string.digits, 8))
config_path = os.path.abspath(args.config)
if not os.path.exists(config_path):
print_error('Please set correct config path!')
exit(1)
config_yml = get_yml_content(config_path)
if 'trainingServicePlatform' in config_yml:
_validate_v1(config_yml, config_path)
platform = config_yml['trainingServicePlatform']
if platform in k8s_training_services:
schema = 1
config_v1 = config_yml
else:
schema = 2
config_v2 = convert.to_v2(config_yml).json()
else:
config_v2 = _validate_v2(config_yml, config_path)
schema = 2
try:
if schema == 1:
launch_experiment(args, config_v1, 'new', experiment_id, 1)
else:
launch_experiment(args, config_v2, 'new', experiment_id, 2)
except Exception as exception:
restServerPid = Experiments().get_all_experiments().get(experiment_id, {}).get('pid')
if restServerPid:
kill_command(restServerPid)
print_error(exception)
exit(1) | [
"def",
"create_experiment",
"(",
"args",
")",
":",
"experiment_id",
"=",
"''",
".",
"join",
"(",
"random",
".",
"sample",
"(",
"string",
".",
"ascii_letters",
"+",
"string",
".",
"digits",
",",
"8",
")",
")",
"config_path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"args",
".",
"config",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"config_path",
")",
":",
"print_error",
"(",
"'Please set correct config path!'",
")",
"exit",
"(",
"1",
")",
"config_yml",
"=",
"get_yml_content",
"(",
"config_path",
")",
"if",
"'trainingServicePlatform'",
"in",
"config_yml",
":",
"_validate_v1",
"(",
"config_yml",
",",
"config_path",
")",
"platform",
"=",
"config_yml",
"[",
"'trainingServicePlatform'",
"]",
"if",
"platform",
"in",
"k8s_training_services",
":",
"schema",
"=",
"1",
"config_v1",
"=",
"config_yml",
"else",
":",
"schema",
"=",
"2",
"config_v2",
"=",
"convert",
".",
"to_v2",
"(",
"config_yml",
")",
".",
"json",
"(",
")",
"else",
":",
"config_v2",
"=",
"_validate_v2",
"(",
"config_yml",
",",
"config_path",
")",
"schema",
"=",
"2",
"try",
":",
"if",
"schema",
"==",
"1",
":",
"launch_experiment",
"(",
"args",
",",
"config_v1",
",",
"'new'",
",",
"experiment_id",
",",
"1",
")",
"else",
":",
"launch_experiment",
"(",
"args",
",",
"config_v2",
",",
"'new'",
",",
"experiment_id",
",",
"2",
")",
"except",
"Exception",
"as",
"exception",
":",
"restServerPid",
"=",
"Experiments",
"(",
")",
".",
"get_all_experiments",
"(",
")",
".",
"get",
"(",
"experiment_id",
",",
"{",
"}",
")",
".",
"get",
"(",
"'pid'",
")",
"if",
"restServerPid",
":",
"kill_command",
"(",
"restServerPid",
")",
"print_error",
"(",
"exception",
")",
"exit",
"(",
"1",
")"
] | [
493,
0
] | [
525,
15
] | python | en | ['it', 'lb', 'en'] | False |
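create_experiment above dispatches on the config schema: a YAML file containing 'trainingServicePlatform' follows the legacy v1 layout and keeps it only for Kubernetes platforms, while everything else ends up on the v2 path. A simplified, self-contained sketch of that dispatch plus the 8-character id generation; the platform names in the default tuple are an assumption, and the v1-to-v2 conversion is stubbed out:

import random
import string

def new_experiment_id():
    """8 random alphanumeric characters, as in the record above."""
    return ''.join(random.sample(string.ascii_letters + string.digits, 8))

def pick_schema(config_yml, k8s_platforms=('kubeflow', 'frameworkcontroller')):
    """Simplified dispatch: 1 keeps the legacy schema, 2 means v2.

    The real helper additionally converts non-Kubernetes v1 configs to v2.
    """
    platform = config_yml.get('trainingServicePlatform')
    if platform is not None and platform in k8s_platforms:
        return 1
    return 2

print(new_experiment_id())
print(pick_schema({'trainingServicePlatform': 'kubeflow'}))     # 1
print(pick_schema({'trainingServicePlatform': 'local'}))        # 2 (converted)
print(pick_schema({'trainingService': {'platform': 'local'}}))  # 2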
manage_stopped_experiment | (args, mode) | view or resume a stopped experiment | view or resume a stopped experiment | def manage_stopped_experiment(args, mode):
'''view or resume a stopped experiment'''
update_experiment()
experiments_config = Experiments()
experiments_dict = experiments_config.get_all_experiments()
experiment_id = None
# an experiment id is required; error out if none is given
if not args.id:
print_error('Please set experiment id! \nYou could use \'nnictl {0} id\' to {0} a stopped experiment!\n' \
'You could use \'nnictl experiment list --all\' to show all experiments!'.format(mode))
exit(1)
else:
if experiments_dict.get(args.id) is None:
print_error('Id %s does not exist!' % args.id)
exit(1)
if experiments_dict[args.id]['status'] != 'STOPPED':
print_error('Only stopped experiments support the {0} command!'.format(mode))
exit(1)
experiment_id = args.id
print_normal('{0} experiment {1}...'.format(mode, experiment_id))
experiment_config = Config(experiment_id, experiments_dict[args.id]['logDir']).get_config()
experiments_config.update_experiment(args.id, 'port', args.port)
args.url_prefix = experiments_dict[args.id]['prefixUrl']
assert 'trainingService' in experiment_config or 'trainingServicePlatform' in experiment_config
try:
if 'trainingServicePlatform' in experiment_config:
experiment_config['logDir'] = experiments_dict[args.id]['logDir']
launch_experiment(args, experiment_config, mode, experiment_id, 1)
else:
experiment_config['experimentWorkingDirectory'] = experiments_dict[args.id]['logDir']
launch_experiment(args, experiment_config, mode, experiment_id, 2)
except Exception as exception:
restServerPid = Experiments().get_all_experiments().get(experiment_id, {}).get('pid')
if restServerPid:
kill_command(restServerPid)
print_error(exception)
exit(1) | [
"def",
"manage_stopped_experiment",
"(",
"args",
",",
"mode",
")",
":",
"update_experiment",
"(",
")",
"experiments_config",
"=",
"Experiments",
"(",
")",
"experiments_dict",
"=",
"experiments_config",
".",
"get_all_experiments",
"(",
")",
"experiment_id",
"=",
"None",
"#find the latest stopped experiment",
"if",
"not",
"args",
".",
"id",
":",
"print_error",
"(",
"'Please set experiment id! \\nYou could use \\'nnictl {0} id\\' to {0} a stopped experiment!\\n'",
"'You could use \\'nnictl experiment list --all\\' to show all experiments!'",
".",
"format",
"(",
"mode",
")",
")",
"exit",
"(",
"1",
")",
"else",
":",
"if",
"experiments_dict",
".",
"get",
"(",
"args",
".",
"id",
")",
"is",
"None",
":",
"print_error",
"(",
"'Id %s not exist!'",
"%",
"args",
".",
"id",
")",
"exit",
"(",
"1",
")",
"if",
"experiments_dict",
"[",
"args",
".",
"id",
"]",
"[",
"'status'",
"]",
"!=",
"'STOPPED'",
":",
"print_error",
"(",
"'Only stopped experiments can be {0}ed!'",
".",
"format",
"(",
"mode",
")",
")",
"exit",
"(",
"1",
")",
"experiment_id",
"=",
"args",
".",
"id",
"print_normal",
"(",
"'{0} experiment {1}...'",
".",
"format",
"(",
"mode",
",",
"experiment_id",
")",
")",
"experiment_config",
"=",
"Config",
"(",
"experiment_id",
",",
"experiments_dict",
"[",
"args",
".",
"id",
"]",
"[",
"'logDir'",
"]",
")",
".",
"get_config",
"(",
")",
"experiments_config",
".",
"update_experiment",
"(",
"args",
".",
"id",
",",
"'port'",
",",
"args",
".",
"port",
")",
"args",
".",
"url_prefix",
"=",
"experiments_dict",
"[",
"args",
".",
"id",
"]",
"[",
"'prefixUrl'",
"]",
"assert",
"'trainingService'",
"in",
"experiment_config",
"or",
"'trainingServicePlatform'",
"in",
"experiment_config",
"try",
":",
"if",
"'trainingServicePlatform'",
"in",
"experiment_config",
":",
"experiment_config",
"[",
"'logDir'",
"]",
"=",
"experiments_dict",
"[",
"args",
".",
"id",
"]",
"[",
"'logDir'",
"]",
"launch_experiment",
"(",
"args",
",",
"experiment_config",
",",
"mode",
",",
"experiment_id",
",",
"1",
")",
"else",
":",
"experiment_config",
"[",
"'experimentWorkingDirectory'",
"]",
"=",
"experiments_dict",
"[",
"args",
".",
"id",
"]",
"[",
"'logDir'",
"]",
"launch_experiment",
"(",
"args",
",",
"experiment_config",
",",
"mode",
",",
"experiment_id",
",",
"2",
")",
"except",
"Exception",
"as",
"exception",
":",
"restServerPid",
"=",
"Experiments",
"(",
")",
".",
"get_all_experiments",
"(",
")",
".",
"get",
"(",
"experiment_id",
",",
"{",
"}",
")",
".",
"get",
"(",
"'pid'",
")",
"if",
"restServerPid",
":",
"kill_command",
"(",
"restServerPid",
")",
"print_error",
"(",
"exception",
")",
"exit",
"(",
"1",
")"
] | [
527,
0
] | [
563,
15
] | python | en | ['it', 'en', 'en'] | True |
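manage_stopped_experiment above guards a view/resume request three ways: the id must be supplied, must exist in the experiment registry, and the experiment must be STOPPED. A minimal sketch of that guard over a plain dict registry (the ids and statuses below are illustrative, not taken from the record):

def can_manage(experiments_dict, experiment_id):
    """True only for a known experiment whose status is STOPPED."""
    if not experiment_id:
        return False
    record = experiments_dict.get(experiment_id)
    return record is not None and record.get('status') == 'STOPPED'

registry = {
    'abCD1234': {'status': 'STOPPED', 'logDir': '/tmp/nni/abCD1234'},
    'efGH5678': {'status': 'RUNNING', 'logDir': '/tmp/nni/efGH5678'},
}
print(can_manage(registry, 'abCD1234'))  # True: stopped experiment
print(can_manage(registry, 'efGH5678'))  # False: still running
print(can_manage(registry, 'zzZZ0000'))  # False: unknown id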
view_experiment | (args) | view a stopped experiment | view a stopped experiment | def view_experiment(args):
'''view a stopped experiment'''
manage_stopped_experiment(args, 'view') | [
"def",
"view_experiment",
"(",
"args",
")",
":",
"manage_stopped_experiment",
"(",
"args",
",",
"'view'",
")"
] | [
565,
0
] | [
567,
43
] | python | en | ['it', 'en', 'en'] | True |
resume_experiment | (args) | resume an experiment | resume an experiment | def resume_experiment(args):
'''resume an experiment'''
manage_stopped_experiment(args, 'resume') | [
"def",
"resume_experiment",
"(",
"args",
")",
":",
"manage_stopped_experiment",
"(",
"args",
",",
"'resume'",
")"
] | [
569,
0
] | [
571,
45
] | python | en | ['en', 'en', 'en'] | True |
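view_experiment and resume_experiment above are one-line wrappers over the same mode-parameterized helper, which keeps the two CLI verbs in lockstep. A sketch of the same delegation expressed with functools.partial; the helper body here is a stand-in, not the real implementation:

from functools import partial

def manage_stopped_experiment(experiment_id, mode):
    """Stand-in for the shared helper in the records above."""
    print('%s experiment %s...' % (mode, experiment_id))

view_experiment = partial(manage_stopped_experiment, mode='view')
resume_experiment = partial(manage_stopped_experiment, mode='resume')

view_experiment('abCD1234')    # view experiment abCD1234...
resume_experiment('abCD1234')  # resume experiment abCD1234...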
async_setup_entry | (
hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities
) | Set up the Minecraft Server sensor platform. | Set up the Minecraft Server sensor platform. | async def async_setup_entry(
hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities
) -> None:
"""Set up the Minecraft Server sensor platform."""
server = hass.data[DOMAIN][config_entry.unique_id]
# Create entities list.
entities = [
MinecraftServerVersionSensor(server),
MinecraftServerProtocolVersionSensor(server),
MinecraftServerLatencyTimeSensor(server),
MinecraftServerPlayersOnlineSensor(server),
MinecraftServerPlayersMaxSensor(server),
]
# Add sensor entities.
async_add_entities(entities, True) | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
":",
"HomeAssistantType",
",",
"config_entry",
":",
"ConfigEntry",
",",
"async_add_entities",
")",
"->",
"None",
":",
"server",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"config_entry",
".",
"unique_id",
"]",
"# Create entities list.",
"entities",
"=",
"[",
"MinecraftServerVersionSensor",
"(",
"server",
")",
",",
"MinecraftServerProtocolVersionSensor",
"(",
"server",
")",
",",
"MinecraftServerLatencyTimeSensor",
"(",
"server",
")",
",",
"MinecraftServerPlayersOnlineSensor",
"(",
"server",
")",
",",
"MinecraftServerPlayersMaxSensor",
"(",
"server",
")",
",",
"]",
"# Add sensor entities.",
"async_add_entities",
"(",
"entities",
",",
"True",
")"
] | [
28,
0
] | [
44,
38
] | python | en | ['en', 'lv', 'en'] | True |
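async_setup_entry above builds one sensor entity per server metric and registers the list with update-before-add enabled. A self-contained sketch of that registration flow, with plain stubs standing in for the Home Assistant objects (StubServer and the tuple of metric names are assumptions for illustration):

import asyncio

class StubServer:
    """Stand-in for the server wrapper keyed by the entry's unique_id."""
    def __init__(self, unique_id):
        self.unique_id = unique_id

async def async_setup_entry(domain_data, unique_id, async_add_entities):
    server = domain_data[unique_id]
    # One entity per metric, mirroring the five sensors in the record above.
    entities = [(server, name) for name in
                ('version', 'protocol_version', 'latency_time',
                 'players_online', 'players_max')]
    # The second argument asks the platform to update entities before adding.
    async_add_entities(entities, True)

def add_entities(entities, update_before_add):
    print(len(entities), 'entities, update_before_add =', update_before_add)

asyncio.run(async_setup_entry({'srv1': StubServer('srv1')}, 'srv1', add_entities))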
MinecraftServerSensorEntity.__init__ | (
self,
server: MinecraftServer,
type_name: str,
icon: str = None,
unit: str = None,
device_class: str = None,
) | Initialize sensor base entity. | Initialize sensor base entity. | def __init__(
self,
server: MinecraftServer,
type_name: str,
icon: str = None,
unit: str = None,
device_class: str = None,
) -> None:
"""Initialize sensor base entity."""
super().__init__(server, type_name, icon, device_class)
self._state = None
self._unit = unit | [
"def",
"__init__",
"(",
"self",
",",
"server",
":",
"MinecraftServer",
",",
"type_name",
":",
"str",
",",
"icon",
":",
"str",
"=",
"None",
",",
"unit",
":",
"str",
"=",
"None",
",",
"device_class",
":",
"str",
"=",
"None",
",",
")",
"->",
"None",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"server",
",",
"type_name",
",",
"icon",
",",
"device_class",
")",
"self",
".",
"_state",
"=",
"None",
"self",
".",
"_unit",
"=",
"unit"
] | [
50,
4
] | [
61,
25
] | python | es | ['es', 'zu', 'it'] | False |
MinecraftServerSensorEntity.available | (self) | Return sensor availability. | Return sensor availability. | def available(self) -> bool:
"""Return sensor availability."""
return self._server.online | [
"def",
"available",
"(",
"self",
")",
"->",
"bool",
":",
"return",
"self",
".",
"_server",
".",
"online"
] | [
64,
4
] | [
66,
34
] | python | en | ['fr', 'ga', 'en'] | False |
MinecraftServerSensorEntity.state | (self) | Return sensor state. | Return sensor state. | def state(self) -> Any:
"""Return sensor state."""
return self._state | [
"def",
"state",
"(",
"self",
")",
"->",
"Any",
":",
"return",
"self",
".",
"_state"
] | [
69,
4
] | [
71,
26
] | python | en | ['en', 'bs', 'en'] | True |
MinecraftServerSensorEntity.unit_of_measurement | (self) | Return sensor measurement unit. | Return sensor measurement unit. | def unit_of_measurement(self) -> str:
"""Return sensor measurement unit."""
return self._unit | [
"def",
"unit_of_measurement",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"_unit"
] | [
74,
4
] | [
76,
25
] | python | en | ['en', 'ca', 'en'] | True |
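The four records above (the base __init__ plus the available, state, and unit_of_measurement properties) form one pattern: construction stores the server handle, a None state, and a static unit, while read-only properties expose the cached values and delegate availability to the server's online flag. A condensed sketch under those assumptions; FakeServer is a stand-in:

class SensorBase:
    """Condensed form of the base sensor entity's stored state."""
    def __init__(self, server, unit=None):
        self._server = server
        self._state = None   # filled in later by an update call
        self._unit = unit

    @property
    def available(self):
        # Availability simply tracks the server's polled online flag.
        return self._server.online

    @property
    def state(self):
        return self._state

    @property
    def unit_of_measurement(self):
        return self._unit

class FakeServer:
    online = True

sensor = SensorBase(FakeServer(), unit='players')
print(sensor.available, sensor.state, sensor.unit_of_measurement)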
MinecraftServerVersionSensor.__init__ | (self, server: MinecraftServer) | Initialize version sensor. | Initialize version sensor. | def __init__(self, server: MinecraftServer) -> None:
"""Initialize version sensor."""
super().__init__(
server=server, type_name=NAME_VERSION, icon=ICON_VERSION, unit=UNIT_VERSION
) | [
"def",
"__init__",
"(",
"self",
",",
"server",
":",
"MinecraftServer",
")",
"->",
"None",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"server",
"=",
"server",
",",
"type_name",
"=",
"NAME_VERSION",
",",
"icon",
"=",
"ICON_VERSION",
",",
"unit",
"=",
"UNIT_VERSION",
")"
] | [
82,
4
] | [
86,
9
] | python | en | ['nl', 'ro', 'en'] | False |
MinecraftServerVersionSensor.async_update | (self) | Update version. | Update version. | async def async_update(self) -> None:
"""Update version."""
self._state = self._server.version | [
"async",
"def",
"async_update",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_state",
"=",
"self",
".",
"_server",
".",
"version"
] | [
88,
4
] | [
90,
42
] | python | de | ['nl', 'de', 'en'] | False |
MinecraftServerProtocolVersionSensor.__init__ | (self, server: MinecraftServer) | Initialize protocol version sensor. | Initialize protocol version sensor. | def __init__(self, server: MinecraftServer) -> None:
"""Initialize protocol version sensor."""
super().__init__(
server=server,
type_name=NAME_PROTOCOL_VERSION,
icon=ICON_PROTOCOL_VERSION,
unit=UNIT_PROTOCOL_VERSION,
) | [
"def",
"__init__",
"(",
"self",
",",
"server",
":",
"MinecraftServer",
")",
"->",
"None",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"server",
"=",
"server",
",",
"type_name",
"=",
"NAME_PROTOCOL_VERSION",
",",
"icon",
"=",
"ICON_PROTOCOL_VERSION",
",",
"unit",
"=",
"UNIT_PROTOCOL_VERSION",
",",
")"
] | [
96,
4
] | [
103,
9
] | python | en | ['en', 'ro', 'en'] | True |
MinecraftServerProtocolVersionSensor.async_update | (self) | Update protocol version. | Update protocol version. | async def async_update(self) -> None:
"""Update protocol version."""
self._state = self._server.protocol_version | [
"async",
"def",
"async_update",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_state",
"=",
"self",
".",
"_server",
".",
"protocol_version"
] | [
105,
4
] | [
107,
51
] | python | en | ['en', 'en', 'en'] | True |
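The version and protocol-version records above share one update shape: async_update copies a single attribute that the shared server object has already polled, so the entity itself performs no I/O. A minimal runnable sketch; FakeServer and its attribute values are illustrative, not taken from the records:

import asyncio

class FakeServer:
    """Stand-in holding attributes the real wrapper refreshes on poll."""
    version = '1.16.5'
    protocol_version = 754

class VersionSensor:
    def __init__(self, server):
        self._server = server
        self._state = None

    async def async_update(self):
        # Copy the already-polled value; no network call happens here.
        self._state = self._server.version

async def main():
    sensor = VersionSensor(FakeServer())
    await sensor.async_update()
    print(sensor._state)  # 1.16.5

asyncio.run(main())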