Dataset columns: identifier (string, 1-155 chars) | parameters (string, 2-6.09k chars) | docstring (string, 11-63.4k chars) | docstring_summary (string, 0-63.4k chars) | function (string, 29-99.8k chars) | function_tokens (sequence) | start_point (sequence) | end_point (sequence) | language (1 class: python) | docstring_language (string, 2-7 chars) | docstring_language_predictions (string, 18-23 chars) | is_langid_reliable (2 classes)

identifier: VenstarThermostat.set_hvac_mode | parameters: (self, hvac_mode) | docstring: Set new target operation mode.
function:
def set_hvac_mode(self, hvac_mode):
    """Set new target operation mode."""
    self._set_operation_mode(hvac_mode)
start_point: [324, 4] | end_point: [326, 43] | language: python | docstring_language: en | docstring_language_predictions: ['nl', 'en', 'en'] | is_langid_reliable: True

identifier: VenstarThermostat.set_humidity | parameters: (self, humidity) | docstring: Set new target humidity.
function:
def set_humidity(self, humidity):
    """Set new target humidity."""
    success = self._client.set_hum_setpoint(humidity)
    if not success:
        _LOGGER.error("Failed to change the target humidity level")
start_point: [328, 4] | end_point: [333, 71] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: VenstarThermostat.set_preset_mode | parameters: (self, preset_mode) | docstring: Set the hold mode.
function:
def set_preset_mode(self, preset_mode):
    """Set the hold mode."""
    if preset_mode == PRESET_AWAY:
        success = self._client.set_away(self._client.AWAY_AWAY)
    elif preset_mode == HOLD_MODE_TEMPERATURE:
        success = self._client.set_away(self._client.AWAY_HOME)
        success = success and self._client.set_schedule(0)
    elif preset_mode == PRESET_NONE:
        success = self._client.set_away(self._client.AWAY_HOME)
        success = success and self._client.set_schedule(1)
    else:
        _LOGGER.error("Unknown hold mode: %s", preset_mode)
        success = False
    if not success:
        _LOGGER.error("Failed to change the schedule/hold state")
start_point: [335, 4] | end_point: [350, 69] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

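The three branches above map Home Assistant preset names onto Venstar away/schedule calls. A hedged illustration of exercising them follows; `thermostat` is a placeholder for an initialized VenstarThermostat entity, and the constants are the ones the integration imports (PRESET_AWAY and PRESET_NONE from the climate platform, HOLD_MODE_TEMPERATURE from this module).

```python
# Illustrative only: `thermostat` is assumed to be a configured VenstarThermostat.
thermostat.set_preset_mode(PRESET_AWAY)            # away profile on the unit
thermostat.set_preset_mode(HOLD_MODE_TEMPERATURE)  # home profile, schedule off (temperature hold)
thermostat.set_preset_mode(PRESET_NONE)            # home profile, schedule back on
```
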
identifier: load_dsc_snow_output_annual | parameters: (catchment, output_dem, hydro_year_to_take, dsc_snow_output_folder, dsc_snow_dem_folder, run_opt, origin='bottomleft')
docstring: load output from dsc_snow model previously run from linux VM.
  :param catchment: string giving catchment area to run model on
  :param output_dem: string identifying the grid to run model on
  :param hydro_year_to_take: integer specifying the hydrological year to run model over. 2001 = 1/4/2000 to 31/3/2001
  :return: st_swe, st_melt, st_acc, out_dt. daily grids of SWE at day's end, total melt and accumulation over the previous day, and datetimes of output
function:
def load_dsc_snow_output_annual(catchment, output_dem, hydro_year_to_take, dsc_snow_output_folder, dsc_snow_dem_folder, run_opt, origin='bottomleft'):
"""
load output from dsc_snow model previously run from linux VM
:param catchment: string giving catchment area to run model on
:param output_dem: string identifying the grid to run model on
:param hydro_year_to_take: integer specifying the hydrological year to run model over. 2001 = 1/4/2000 to 31/3/2001
    :return: st_swe, st_melt, st_acc, out_dt. daily grids of SWE at day's end, total melt and accumulation over the previous day, and datetimes of output
"""
data_id = '{}_{}'.format(catchment, output_dem)
dsc_snow_output = nc.Dataset(dsc_snow_output_folder + '/{}_{}_{}.nc'.format(data_id, hydro_year_to_take, run_opt), 'r')
out_dt = nc.num2date(dsc_snow_output.variables['time'][:], dsc_snow_output.variables['time'].units)
st_swe = dsc_snow_output.variables['snow_water_equivalent'][:]
st_melt_total = dsc_snow_output.variables['ablation_total'][:]
st_acc_total = dsc_snow_output.variables['accumulation_total'][:]
if origin == 'topleft':
st_swe = np.flip(st_swe,axis=1)
st_melt_total = np.flip(st_melt_total, axis=1)
st_acc_total = np.flip(st_acc_total, axis=1)
# convert to daily sums
st_melt = np.concatenate((st_melt_total[:1, :], np.diff(st_melt_total, axis=0)))
st_acc = np.concatenate((st_melt_total[:1, :], np.diff(st_acc_total, axis=0)))
if origin == 'topleft':
topo_file = nc.Dataset(dsc_snow_dem_folder + '/{}_topo_no_ice_origintopleft.nc'.format(data_id), 'r')
mask = np.flipud(topo_file.variables['catchment'][:].astype('int'))
else:
topo_file = nc.Dataset(dsc_snow_dem_folder + '/{}_topo_no_ice.nc'.format(data_id), 'r')
mask = topo_file.variables['catchment'][:].astype('int')
mask = mask != 0 # convert to boolean
# mask out values outside of catchment
st_swe[:, mask == False] = np.nan
st_melt[:, mask == False] = np.nan
st_acc[:, mask == False] = np.nan
    return st_swe * 1e3, st_melt * 1e3, st_acc * 1e3, out_dt, mask
start_point: [20, 0] | end_point: [60, 66] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

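A hedged usage sketch for the loader above. The catchment name, grid identifier, folder paths and run option are placeholders invented for illustration, not values taken from the dataset.

```python
import numpy as np

# All argument values below are assumptions for the sketch.
st_swe, st_melt, st_acc, out_dt, mask = load_dsc_snow_output_annual(
    catchment="example_catchment",
    output_dem="nztm250m",
    hydro_year_to_take=2016,
    dsc_snow_output_folder="/data/dsc_snow/output",
    dsc_snow_dem_folder="/data/dsc_snow/dem",
    run_opt="run1",
    origin="topleft",
)
# SWE comes back in mm; catchment-average SWE for each output day:
daily_mean_swe = np.nanmean(st_swe[:, mask], axis=1)
```
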
identifier: load_subset_modis_annual | parameters: (catchment, output_dem, year_to_take, modis_folder, dem_folder, modis_dem, mask_folder, catchment_shp_folder)
docstring: load modis data from file and cut to catchment of interest.
  :param catchment: string giving catchment area to run model on
  :param output_dem: string identifying the grid to run model on
  :param year_to_take: integer specifying the hydrological year to run model over. 2001 = 1/4/2000 to 31/3/2001
  :return: trimmed_fsca, modis_dt, trimmed_mask. The data, datetimes and catchment mask
function:
def load_subset_modis_annual(catchment, output_dem, year_to_take, modis_folder, dem_folder, modis_dem, mask_folder, catchment_shp_folder):
"""
load modis data from file and cut to catchment of interest
:param catchment: string giving catchment area to run model on
:param output_dem: string identifying the grid to run model on
:param year_to_take: integer specifying the hydrological year to run model over. 2001 = 1/4/2000 to 31/3/2001
:return: trimmed_fsca, modis_dt, trimmed_mask. The data, datetimes and catchment mask
"""
# load a file
nc_file = nc.Dataset(modis_folder + '/DSC_MOD10A1_{}_v0_nosparse_interp001.nc'.format(year_to_take))
ndsi = nc_file.variables['NDSI_Snow_Cover_Cloudfree'][:] # .astype('float32') # nsdi in %
# trim to only the catchment desired
if mask_folder is not None:
mask, trimmed_mask = load_mask_modis(catchment, None, mask_folder, None, modis_dem)
else: # if no catchment specified, just mask to the valid data points.
mask = np.ones(ndsi.shape[1:])
trimmed_mask = mask
# trimmed_fsca = trim_data_bounds(mask, lat_array, lon_array, fsca[183].copy(), y_centres, x_centres)
trimmed_ndsi = trim_data_to_mask(ndsi, mask)
trimmed_ndsi = trimmed_ndsi.astype(np.float32, copy=False)
trimmed_fsca = -1 + 1.45 * trimmed_ndsi # convert to snow cover fraction in % (as per Modis collection 5)
trimmed_fsca[trimmed_ndsi > 100] = np.nan # set all points with inland water or ocean(237 or 239) to nan
trimmed_fsca[trimmed_fsca > 100] = 100 # limit fsca to 100%
trimmed_fsca[trimmed_fsca < 0] = 0 # limit fsca to 0
# read date and convert into hydrological year
modis_dt = nc.num2date(nc_file.variables['time'][:], nc_file.variables['time'].units)
# mask out values outside of catchment
trimmed_fsca[:, trimmed_mask == 0] = np.nan
    return trimmed_fsca, modis_dt, trimmed_mask
start_point: [63, 0] | end_point: [94, 47] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

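A similar hedged sketch for the MODIS loader; again, every folder location and the DEM name are illustrative placeholders rather than values from the dataset.

```python
# All paths and names below are assumptions for the sketch.
trimmed_fsca, modis_dt, trimmed_mask = load_subset_modis_annual(
    catchment="example_catchment",
    output_dem="nztm250m",
    year_to_take=2016,
    modis_folder="/data/modis",
    dem_folder="/data/dem",
    modis_dem="modis_nz_dem_250m",
    mask_folder="/data/masks",
    catchment_shp_folder="/data/shapefiles",
)
print(trimmed_fsca.shape, modis_dt[0], modis_dt[-1])  # (days, rows, cols) and the date range
```
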
identifier: CustomerTuner.generate_parameters | parameters: (self, parameter_id, **kwargs)
docstring: Returns a set of trial graph config, as a serializable object.
  parameter_id : int
function:
def generate_parameters(self, parameter_id, **kwargs):
"""Returns a set of trial graph config, as a serializable object.
parameter_id : int
"""
if len(self.population) <= 0:
        logger.debug("the length of the population is lower than or equal to zero.")
raise Exception('The population is empty')
pos = -1
for i in range(len(self.population)):
if self.population[i].result == None:
pos = i
break
if pos != -1:
indiv = copy.deepcopy(self.population[pos])
self.population.pop(pos)
temp = json.loads(graph_dumps(indiv.config))
else:
random.shuffle(self.population)
if self.population[0].result < self.population[1].result:
self.population[0] = self.population[1]
indiv = copy.deepcopy(self.population[0])
self.population.pop(1)
indiv.mutation()
graph = indiv.config
temp = json.loads(graph_dumps(graph))
logger.debug('generate_parameter return value is:')
logger.debug(temp)
    return temp
start_point: [81, 4] | end_point: [108, 19] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: CustomerTuner.receive_trial_result | parameters: (self, parameter_id, parameters, value, **kwargs)
docstring: Record an observation of the objective function.
  parameter_id : int
  parameters : dict of parameters
  value: final metrics of the trial, including reward
function:
def receive_trial_result(self, parameter_id, parameters, value, **kwargs):
'''
Record an observation of the objective function
parameter_id : int
parameters : dict of parameters
value: final metrics of the trial, including reward
'''
reward = extract_scalar_reward(value)
if self.optimize_mode is OptimizeMode.Minimize:
reward = -reward
logger.debug('receive trial result is:\n')
logger.debug(str(parameters))
logger.debug(str(reward))
indiv = Individual(graph_loads(parameters), result=reward)
self.population.append(indiv)
    return
start_point: [111, 4] | end_point: [128, 14] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

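Together, generate_parameters and receive_trial_result implement the tuner's ask/tell cycle: candidates are popped from (or bred out of) the population, evaluated externally, and pushed back with their reward. A minimal sketch of that cycle follows, assuming `tuner` is an initialized CustomerTuner and `evaluate` is a user-supplied scoring function; both names are assumptions, since NNI normally drives this loop itself.

```python
# Sketch only: `tuner` and `evaluate` are assumed to exist.
for trial_id in range(20):
    config = tuner.generate_parameters(trial_id)          # unevaluated or mutated individual
    reward = evaluate(config)                             # train/score the candidate graph
    tuner.receive_trial_result(trial_id, config, reward)  # reinsert it with its result
```
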
identifier: test_setup | parameters: (hass) | docstring: Test the general setup of the integration.
function:
async def test_setup(hass):
"""Test the general setup of the integration."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry(
"1234",
"Title 1",
15.5,
(38.0, -3.0),
locality="Locality 1",
attribution="Attribution 1",
time=datetime.datetime(2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc),
magnitude=5.7,
mmi=5,
depth=10.5,
quality="best",
)
mock_entry_2 = _generate_mock_feed_entry(
"2345", "Title 2", 20.5, (38.1, -3.1), magnitude=4.6
)
mock_entry_3 = _generate_mock_feed_entry(
"3456", "Title 3", 25.5, (38.2, -3.2), locality="Locality 3"
)
mock_entry_4 = _generate_mock_feed_entry("4567", "Title 4", 12.5, (38.3, -3.3))
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"aio_geojson_client.feed.GeoJsonFeed.update"
) as mock_feed_update:
mock_feed_update.return_value = "OK", [mock_entry_1, mock_entry_2, mock_entry_3]
assert await async_setup_component(hass, geonetnz_quakes.DOMAIN, CONFIG)
await hass.async_block_till_done()
# Artificially trigger update and collect events.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
all_states = hass.states.async_all()
# 3 geolocation and 1 sensor entities
assert len(all_states) == 4
entity_registry = await async_get_registry(hass)
assert len(entity_registry.entities) == 4
state = hass.states.get("geo_location.title_1")
assert state is not None
assert state.name == "Title 1"
assert state.attributes == {
ATTR_EXTERNAL_ID: "1234",
ATTR_LATITUDE: 38.0,
ATTR_LONGITUDE: -3.0,
ATTR_FRIENDLY_NAME: "Title 1",
ATTR_LOCALITY: "Locality 1",
ATTR_ATTRIBUTION: "Attribution 1",
ATTR_TIME: datetime.datetime(
2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc
),
ATTR_MAGNITUDE: 5.7,
ATTR_DEPTH: 10.5,
ATTR_MMI: 5,
ATTR_QUALITY: "best",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "geonetnz_quakes",
ATTR_ICON: "mdi:pulse",
}
assert float(state.state) == 15.5
state = hass.states.get("geo_location.title_2")
assert state is not None
assert state.name == "Title 2"
assert state.attributes == {
ATTR_EXTERNAL_ID: "2345",
ATTR_LATITUDE: 38.1,
ATTR_LONGITUDE: -3.1,
ATTR_FRIENDLY_NAME: "Title 2",
ATTR_MAGNITUDE: 4.6,
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "geonetnz_quakes",
ATTR_ICON: "mdi:pulse",
}
assert float(state.state) == 20.5
state = hass.states.get("geo_location.title_3")
assert state is not None
assert state.name == "Title 3"
assert state.attributes == {
ATTR_EXTERNAL_ID: "3456",
ATTR_LATITUDE: 38.2,
ATTR_LONGITUDE: -3.2,
ATTR_FRIENDLY_NAME: "Title 3",
ATTR_LOCALITY: "Locality 3",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "geonetnz_quakes",
ATTR_ICON: "mdi:pulse",
}
assert float(state.state) == 25.5
# Simulate an update - two existing, one new entry, one outdated entry
mock_feed_update.return_value = "OK", [mock_entry_1, mock_entry_4, mock_entry_3]
async_fire_time_changed(hass, utcnow + DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 4
# Simulate an update - empty data, but successful update,
# so no changes to entities.
mock_feed_update.return_value = "OK_NO_DATA", None
async_fire_time_changed(hass, utcnow + 2 * DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 4
# Simulate an update - empty data, removes all entities
mock_feed_update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + 3 * DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 1
        assert len(entity_registry.entities) == 1
start_point: [38, 0] | end_point: [157, 49] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

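The test relies on a `_generate_mock_feed_entry` helper that is not shown in this excerpt. A hedged sketch of what such a helper typically looks like in Home Assistant feed tests follows; the attribute names are inferred from how the assertions read them and this is not the project's actual implementation.

```python
from unittest.mock import MagicMock

def _generate_mock_feed_entry(external_id, title, distance_to_home, coordinates, **attrs):
    """Build a MagicMock mimicking a GeoNet NZ quakes feed entry (sketch, not the real helper)."""
    entry = MagicMock()
    entry.external_id = external_id
    entry.title = title
    entry.distance_to_home = distance_to_home
    entry.coordinates = coordinates
    for name, value in attrs.items():  # locality, attribution, time, magnitude, mmi, depth, quality
        setattr(entry, name, value)
    return entry
```
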
identifier: test_setup_imperial | parameters: (hass) | docstring: Test the setup of the integration using imperial unit system.
function:
async def test_setup_imperial(hass):
"""Test the setup of the integration using imperial unit system."""
hass.config.units = IMPERIAL_SYSTEM
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 15.5, (38.0, -3.0))
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"aio_geojson_client.feed.GeoJsonFeed.update"
) as mock_feed_update, patch(
"aio_geojson_client.feed.GeoJsonFeed.last_timestamp", create=True
):
mock_feed_update.return_value = "OK", [mock_entry_1]
assert await async_setup_component(hass, geonetnz_quakes.DOMAIN, CONFIG)
await hass.async_block_till_done()
# Artificially trigger update and collect events.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 2
# Test conversion of 200 miles to kilometers.
feeds = hass.data[DOMAIN][FEED]
assert feeds is not None
assert len(feeds) == 1
manager = list(feeds.values())[0]
# Ensure that the filter value in km is correctly set.
assert manager._feed_manager._feed._filter_radius == 321.8688
state = hass.states.get("geo_location.title_1")
assert state is not None
assert state.name == "Title 1"
assert state.attributes == {
ATTR_EXTERNAL_ID: "1234",
ATTR_LATITUDE: 38.0,
ATTR_LONGITUDE: -3.0,
ATTR_FRIENDLY_NAME: "Title 1",
ATTR_UNIT_OF_MEASUREMENT: "mi",
ATTR_SOURCE: "geonetnz_quakes",
ATTR_ICON: "mdi:pulse",
}
# 15.5km (as defined in mock entry) has been converted to 9.6mi.
        assert float(state.state) == 9.6
start_point: [160, 0] | end_point: [204, 40] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

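The two numbers asserted above come from straight mile/kilometre conversions, which can be checked independently:

```python
MI_TO_KM = 1.609344
print(200 * MI_TO_KM)             # 321.8688 km, the configured 200 mi radius filter
print(round(15.5 / MI_TO_KM, 1))  # 9.6 mi, the 15.5 km mock distance as displayed
```
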
identifier: register_oauth2_implementations | parameters: (hass: HomeAssistant, client_id: str, client_secret: str) | docstring: Register Toon OAuth2 implementations.
function:
def register_oauth2_implementations(
    hass: HomeAssistant, client_id: str, client_secret: str
) -> None:
"""Register Toon OAuth2 implementations."""
config_flow.ToonFlowHandler.async_register_implementation(
hass,
ToonLocalOAuth2Implementation(
hass,
client_id=client_id,
client_secret=client_secret,
name="Eneco Toon",
tenant_id="eneco",
issuer="identity.toon.eu",
),
)
config_flow.ToonFlowHandler.async_register_implementation(
hass,
ToonLocalOAuth2Implementation(
hass,
client_id=client_id,
client_secret=client_secret,
name="Engie Electrabel Boxx",
tenant_id="electrabel",
),
)
config_flow.ToonFlowHandler.async_register_implementation(
hass,
ToonLocalOAuth2Implementation(
hass,
client_id=client_id,
client_secret=client_secret,
name="Viesgo",
tenant_id="viesgo",
),
    )
start_point: [10, 0] | end_point: [44, 5] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

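A hedged sketch of how the registration helper above might be invoked from the integration's `async_setup`, with client credentials read from YAML configuration. The config keys and structure shown are assumptions made for illustration, not taken from the integration.

```python
# Sketch only: "toon", "client_id" and "client_secret" keys are assumed here.
async def async_setup(hass, config):
    conf = config.get("toon")
    if conf is None:
        return True
    register_oauth2_implementations(
        hass, client_id=conf["client_id"], client_secret=conf["client_secret"]
    )
    return True
```
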
identifier: ToonLocalOAuth2Implementation.__init__ | parameters: (self, hass: HomeAssistant, client_id: str, client_secret: str, name: str, tenant_id: str, issuer: Optional[str] = None) | docstring: Local Toon Oauth Implementation.
function:
def __init__(
    self,
    hass: HomeAssistant,
    client_id: str,
    client_secret: str,
    name: str,
    tenant_id: str,
    issuer: Optional[str] = None,
):
"""Local Toon Oauth Implementation."""
self._name = name
self.tenant_id = tenant_id
self.issuer = issuer
super().__init__(
hass=hass,
domain=tenant_id,
client_id=client_id,
client_secret=client_secret,
authorize_url="https://api.toon.eu/authorize",
token_url="https://api.toon.eu/token",
    )
start_point: [50, 4] | end_point: [71, 9] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: ToonLocalOAuth2Implementation.name | parameters: (self) | docstring: Name of the implementation.
function:
def name(self) -> str:
    """Name of the implementation."""
    return f"{self._name} via Configuration.yaml"
start_point: [74, 4] | end_point: [76, 53] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: ToonLocalOAuth2Implementation.extra_authorize_data | parameters: (self) | docstring: Extra data that needs to be appended to the authorize url.
function:
def extra_authorize_data(self) -> dict:
    """Extra data that needs to be appended to the authorize url."""
    data = {"tenant_id": self.tenant_id}
    if self.issuer is not None:
        data["issuer"] = self.issuer
    return data
start_point: [79, 4] | end_point: [86, 19] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: ToonLocalOAuth2Implementation.async_resolve_external_data | parameters: (self, external_data: Any) | docstring: Initialize local Toon auth implementation.
function:
async def async_resolve_external_data(self, external_data: Any) -> dict:
    """Initialize local Toon auth implementation."""
    data = {
        "grant_type": "authorization_code",
        "code": external_data,
        "redirect_uri": self.redirect_uri,
        "tenant_id": self.tenant_id,
    }
    if self.issuer is not None:
        data["issuer"] = self.issuer
    return await self._token_request(data)
start_point: [88, 4] | end_point: [100, 46] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: ToonLocalOAuth2Implementation._async_refresh_token | parameters: (self, token: dict) | docstring: Refresh tokens.
function:
async def _async_refresh_token(self, token: dict) -> dict:
    """Refresh tokens."""
    data = {
        "grant_type": "refresh_token",
        "client_id": self.client_id,
        "refresh_token": token["refresh_token"],
        "tenant_id": self.tenant_id,
    }
    new_token = await self._token_request(data)
    return {**token, **new_token}
start_point: [102, 4] | end_point: [112, 37] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'de', 'en'] | is_langid_reliable: False

identifier: ToonLocalOAuth2Implementation._token_request | parameters: (self, data: dict) | docstring: Make a token request.
function:
async def _token_request(self, data: dict) -> dict:
    """Make a token request."""
    session = async_get_clientsession(self.hass)
    headers = {}
    data["client_id"] = self.client_id
    data["tenant_id"] = self.tenant_id
    if self.client_secret is not None:
        data["client_secret"] = self.client_secret
    if self.issuer is not None:
        data["issuer"] = self.issuer
        headers["issuer"] = self.issuer
    resp = await session.post(self.token_url, data=data, headers=headers)
    resp.raise_for_status()
    resp_json = cast(dict, await resp.json())
    # The Toon API returns "expires_in" as a string for some tenants.
    # This is not according to OAuth specifications.
    resp_json["expires_in"] = float(resp_json["expires_in"])
    return resp_json
start_point: [114, 4] | end_point: [135, 24] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'de', 'en'] | is_langid_reliable: True

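A small sketch of the refresh path the two methods above implement: `_async_refresh_token` posts a refresh_token grant through `_token_request` and merges the response over the stored token. `impl` is assumed to be a configured ToonLocalOAuth2Implementation and the token values are placeholders.

```python
async def refresh_demo(impl):
    # Placeholder token; in practice this comes from Home Assistant's config entry storage.
    old_token = {"access_token": "abc", "refresh_token": "xyz", "expires_in": 3600.0}
    new_token = await impl._async_refresh_token(old_token)
    # Existing keys are kept unless the server re-issued them, and "expires_in"
    # is normalized to a float by _token_request().
    return new_token
```
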
identifier: async_setup_platform | parameters: (hass, config, async_add_entities, discovery_info=None) | docstring: Set up Sensibo devices.
function:
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up Sensibo devices."""
client = pysensibo.SensiboClient(
config[CONF_API_KEY], session=async_get_clientsession(hass), timeout=TIMEOUT
)
devices = []
try:
for dev in await client.async_get_devices(_INITIAL_FETCH_FIELDS):
if config[CONF_ID] == ALL or dev["id"] in config[CONF_ID]:
devices.append(
SensiboClimate(client, dev, hass.config.units.temperature_unit)
)
except (
aiohttp.client_exceptions.ClientConnectorError,
asyncio.TimeoutError,
pysensibo.SensiboError,
) as err:
_LOGGER.exception("Failed to connect to Sensibo servers")
raise PlatformNotReady from err
if not devices:
return
async_add_entities(devices)
async def async_assume_state(service):
"""Set state according to external service call.."""
entity_ids = service.data.get(ATTR_ENTITY_ID)
if entity_ids:
target_climate = [
device for device in devices if device.entity_id in entity_ids
]
else:
target_climate = devices
update_tasks = []
for climate in target_climate:
await climate.async_assume_state(service.data.get(ATTR_STATE))
update_tasks.append(climate.async_update_ha_state(True))
if update_tasks:
await asyncio.wait(update_tasks)
hass.services.async_register(
SENSIBO_DOMAIN,
SERVICE_ASSUME_STATE,
async_assume_state,
schema=ASSUME_STATE_SCHEMA,
    )
start_point: [86, 0] | end_point: [134, 5] | language: python | docstring_language: en | docstring_language_predictions: ['eo', 'ht', 'en'] | is_langid_reliable: False

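A hedged sketch of driving the platform setup directly, for example from a test harness; `hass` and `async_add_entities` are assumed to be supplied by the harness, the API key is a placeholder, and the config keys mirror the CONF_API_KEY / CONF_ID lookups used above.

```python
async def run_setup(hass, async_add_entities):
    # Placeholder credentials; "id" may list specific pod ids instead of ["all"].
    config = {"api_key": "YOUR_SENSIBO_API_KEY", "id": ["all"]}
    await async_setup_platform(hass, config, async_add_entities)
```
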
identifier: SensiboClimate.__init__ | parameters: (self, client, data, units)
docstring: Build SensiboClimate.
  client: aiohttp session.
  data: initially-fetched data.
function:
def __init__(self, client, data, units):
    """Build SensiboClimate.
    client: aiohttp session.
    data: initially-fetched data.
    """
    self._client = client
    self._id = data["id"]
    self._external_state = None
    self._units = units
    self._available = False
    self._do_update(data)
start_point: [140, 4] | end_point: [151, 29] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'ro', 'it'] | is_langid_reliable: False

identifier: SensiboClimate.supported_features | parameters: (self) | docstring: Return the list of supported features.
function:
def supported_features(self):
    """Return the list of supported features."""
    return self._supported_features
start_point: [154, 4] | end_point: [156, 39] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.state | parameters: (self) | docstring: Return the current state.
function:
def state(self):
    """Return the current state."""
    return self._external_state or super().state
start_point: [188, 4] | end_point: [190, 52] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.device_state_attributes | parameters: (self) | docstring: Return the state attributes.
function:
def device_state_attributes(self):
    """Return the state attributes."""
    return {"battery": self.current_battery}
start_point: [193, 4] | end_point: [195, 48] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.temperature_unit | parameters: (self) | docstring: Return the unit of measurement which this thermostat uses.
function:
def temperature_unit(self):
    """Return the unit of measurement which this thermostat uses."""
    return self._temperature_unit
start_point: [198, 4] | end_point: [200, 37] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.available | parameters: (self) | docstring: Return True if entity is available.
function:
def available(self):
    """Return True if entity is available."""
    return self._available
start_point: [203, 4] | end_point: [205, 30] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.target_temperature | parameters: (self) | docstring: Return the temperature we try to reach.
function:
def target_temperature(self):
    """Return the temperature we try to reach."""
    return self._ac_states.get("targetTemperature")
start_point: [208, 4] | end_point: [210, 55] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.target_temperature_step | parameters: (self) | docstring: Return the supported step of target temperature.
function:
def target_temperature_step(self):
    """Return the supported step of target temperature."""
    if self.temperature_unit == self.hass.config.units.temperature_unit:
        # We are working in same units as the a/c unit. Use whole degrees
        # like the API supports.
        return 1
    # Unit conversion is going on. No point to stick to specific steps.
    return None
start_point: [213, 4] | end_point: [220, 19] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.hvac_mode | parameters: (self) | docstring: Return current operation ie. heat, cool, idle.
function:
def hvac_mode(self):
    """Return current operation ie. heat, cool, idle."""
    if not self._ac_states["on"]:
        return HVAC_MODE_OFF
    return SENSIBO_TO_HA.get(self._ac_states["mode"])
start_point: [223, 4] | end_point: [227, 57] | language: python | docstring_language: en | docstring_language_predictions: ['nl', 'en', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.current_humidity | parameters: (self) | docstring: Return the current humidity.
function:
def current_humidity(self):
    """Return the current humidity."""
    return self._measurements["humidity"]
start_point: [230, 4] | end_point: [232, 45] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.current_battery | parameters: (self) | docstring: Return the current battery voltage.
function:
def current_battery(self):
    """Return the current battery voltage."""
    return self._measurements.get("batteryVoltage")
start_point: [235, 4] | end_point: [237, 55] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.current_temperature | parameters: (self) | docstring: Return the current temperature.
function:
def current_temperature(self):
    """Return the current temperature."""
    # This field is not affected by temperatureUnit.
    # It is always in C
    return convert_temperature(
        self._measurements["temperature"], TEMP_CELSIUS, self.temperature_unit
    )
start_point: [240, 4] | end_point: [246, 9] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'la', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.hvac_modes | parameters: (self) | docstring: List of available operation modes.
function:
def hvac_modes(self):
    """List of available operation modes."""
    return self._operations
start_point: [249, 4] | end_point: [251, 31] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.fan_mode | parameters: (self) | docstring: Return the fan setting.
function:
def fan_mode(self):
    """Return the fan setting."""
    return self._ac_states.get("fanLevel")
start_point: [254, 4] | end_point: [256, 46] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'fy', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.fan_modes | parameters: (self) | docstring: List of available fan modes.
function:
def fan_modes(self):
    """List of available fan modes."""
    return self._current_capabilities.get("fanLevels")
start_point: [259, 4] | end_point: [261, 58] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.swing_mode | parameters: (self) | docstring: Return the fan setting.
function:
def swing_mode(self):
    """Return the fan setting."""
    return self._ac_states.get("swing")
start_point: [264, 4] | end_point: [266, 43] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'fy', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.swing_modes | parameters: (self) | docstring: List of available swing modes.
function:
def swing_modes(self):
    """List of available swing modes."""
    return self._current_capabilities.get("swing")
start_point: [269, 4] | end_point: [271, 54] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.name | parameters: (self) | docstring: Return the name of the entity.
function:
def name(self):
    """Return the name of the entity."""
    return self._name
start_point: [274, 4] | end_point: [276, 25] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.min_temp | parameters: (self) | docstring: Return the minimum temperature.
function:
def min_temp(self):
    """Return the minimum temperature."""
    return (
        self._temperatures_list[0] if self._temperatures_list else super().min_temp
    )
start_point: [279, 4] | end_point: [283, 9] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'la', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.max_temp | parameters: (self) | docstring: Return the maximum temperature.
function:
def max_temp(self):
    """Return the maximum temperature."""
    return (
        self._temperatures_list[-1] if self._temperatures_list else super().max_temp
    )
start_point: [286, 4] | end_point: [290, 9] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'la', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.unique_id | parameters: (self) | docstring: Return unique ID based on Sensibo ID.
function:
def unique_id(self):
    """Return unique ID based on Sensibo ID."""
    return self._id
start_point: [293, 4] | end_point: [295, 23] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'et', 'en'] | is_langid_reliable: True

identifier: SensiboClimate.async_set_temperature | parameters: (self, **kwargs) | docstring: Set new target temperature.
function:
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return
temperature = int(temperature)
if temperature not in self._temperatures_list:
# Requested temperature is not supported.
if temperature == self.target_temperature:
return
index = self._temperatures_list.index(self.target_temperature)
if (
temperature > self.target_temperature
and index < len(self._temperatures_list) - 1
):
temperature = self._temperatures_list[index + 1]
elif temperature < self.target_temperature and index > 0:
temperature = self._temperatures_list[index - 1]
else:
return
with async_timeout.timeout(TIMEOUT):
await self._client.async_set_ac_state_property(
self._id, "targetTemperature", temperature, self._ac_states
) | [
"async",
"def",
"async_set_temperature",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"temperature",
"=",
"kwargs",
".",
"get",
"(",
"ATTR_TEMPERATURE",
")",
"if",
"temperature",
"is",
"None",
":",
"return",
"temperature",
"=",
"int",
"(",
"temperature",
")",
"if",
"temperature",
"not",
"in",
"self",
".",
"_temperatures_list",
":",
"# Requested temperature is not supported.",
"if",
"temperature",
"==",
"self",
".",
"target_temperature",
":",
"return",
"index",
"=",
"self",
".",
"_temperatures_list",
".",
"index",
"(",
"self",
".",
"target_temperature",
")",
"if",
"(",
"temperature",
">",
"self",
".",
"target_temperature",
"and",
"index",
"<",
"len",
"(",
"self",
".",
"_temperatures_list",
")",
"-",
"1",
")",
":",
"temperature",
"=",
"self",
".",
"_temperatures_list",
"[",
"index",
"+",
"1",
"]",
"elif",
"temperature",
"<",
"self",
".",
"target_temperature",
"and",
"index",
">",
"0",
":",
"temperature",
"=",
"self",
".",
"_temperatures_list",
"[",
"index",
"-",
"1",
"]",
"else",
":",
"return",
"with",
"async_timeout",
".",
"timeout",
"(",
"TIMEOUT",
")",
":",
"await",
"self",
".",
"_client",
".",
"async_set_ac_state_property",
"(",
"self",
".",
"_id",
",",
"\"targetTemperature\"",
",",
"temperature",
",",
"self",
".",
"_ac_states",
")"
] | [
297,
4
] | [
321,
13
] | python | en | ['en', 'ca', 'en'] | True |
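
Note: the set-temperature handler in the row above snaps an unsupported request to the next supported value, moving only one step through the device's temperature list per call. A minimal standalone sketch of that snapping behaviour; the temperature list and example values are illustrative, not taken from any real device:

def snap_temperature(requested, current, supported):
    """Return the next supported setpoint in the direction of the request, or None."""
    requested = int(requested)
    if requested in supported:
        return requested
    if requested == current:
        return None
    index = supported.index(current)  # assumes the current setpoint is a supported value
    if requested > current and index < len(supported) - 1:
        return supported[index + 1]
    if requested < current and index > 0:
        return supported[index - 1]
    return None

# Example: asking for 27 while set to 22 moves one step up, to 23.
print(snap_temperature(27, 22, [16, 17, 18, 19, 20, 21, 22, 23, 24]))
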
SensiboClimate.async_set_fan_mode | (self, fan_mode) | Set new target fan mode. | Set new target fan mode. | async def async_set_fan_mode(self, fan_mode):
"""Set new target fan mode."""
with async_timeout.timeout(TIMEOUT):
await self._client.async_set_ac_state_property(
self._id, "fanLevel", fan_mode, self._ac_states
) | [
"async",
"def",
"async_set_fan_mode",
"(",
"self",
",",
"fan_mode",
")",
":",
"with",
"async_timeout",
".",
"timeout",
"(",
"TIMEOUT",
")",
":",
"await",
"self",
".",
"_client",
".",
"async_set_ac_state_property",
"(",
"self",
".",
"_id",
",",
"\"fanLevel\"",
",",
"fan_mode",
",",
"self",
".",
"_ac_states",
")"
] | [
323,
4
] | [
328,
13
] | python | en | ['sv', 'fy', 'en'] | False |
SensiboClimate.async_set_hvac_mode | (self, hvac_mode) | Set new target operation mode. | Set new target operation mode. | async def async_set_hvac_mode(self, hvac_mode):
"""Set new target operation mode."""
if hvac_mode == HVAC_MODE_OFF:
with async_timeout.timeout(TIMEOUT):
await self._client.async_set_ac_state_property(
self._id, "on", False, self._ac_states
)
return
# Turn on if not currently on.
if not self._ac_states["on"]:
with async_timeout.timeout(TIMEOUT):
await self._client.async_set_ac_state_property(
self._id, "on", True, self._ac_states
)
with async_timeout.timeout(TIMEOUT):
await self._client.async_set_ac_state_property(
self._id, "mode", HA_TO_SENSIBO[hvac_mode], self._ac_states
) | [
"async",
"def",
"async_set_hvac_mode",
"(",
"self",
",",
"hvac_mode",
")",
":",
"if",
"hvac_mode",
"==",
"HVAC_MODE_OFF",
":",
"with",
"async_timeout",
".",
"timeout",
"(",
"TIMEOUT",
")",
":",
"await",
"self",
".",
"_client",
".",
"async_set_ac_state_property",
"(",
"self",
".",
"_id",
",",
"\"on\"",
",",
"False",
",",
"self",
".",
"_ac_states",
")",
"return",
"# Turn on if not currently on.",
"if",
"not",
"self",
".",
"_ac_states",
"[",
"\"on\"",
"]",
":",
"with",
"async_timeout",
".",
"timeout",
"(",
"TIMEOUT",
")",
":",
"await",
"self",
".",
"_client",
".",
"async_set_ac_state_property",
"(",
"self",
".",
"_id",
",",
"\"on\"",
",",
"True",
",",
"self",
".",
"_ac_states",
")",
"with",
"async_timeout",
".",
"timeout",
"(",
"TIMEOUT",
")",
":",
"await",
"self",
".",
"_client",
".",
"async_set_ac_state_property",
"(",
"self",
".",
"_id",
",",
"\"mode\"",
",",
"HA_TO_SENSIBO",
"[",
"hvac_mode",
"]",
",",
"self",
".",
"_ac_states",
")"
] | [
330,
4
] | [
349,
13
] | python | en | ['nl', 'en', 'en'] | True |
SensiboClimate.async_set_swing_mode | (self, swing_mode) | Set new target swing operation. | Set new target swing operation. | async def async_set_swing_mode(self, swing_mode):
"""Set new target swing operation."""
with async_timeout.timeout(TIMEOUT):
await self._client.async_set_ac_state_property(
self._id, "swing", swing_mode, self._ac_states
) | [
"async",
"def",
"async_set_swing_mode",
"(",
"self",
",",
"swing_mode",
")",
":",
"with",
"async_timeout",
".",
"timeout",
"(",
"TIMEOUT",
")",
":",
"await",
"self",
".",
"_client",
".",
"async_set_ac_state_property",
"(",
"self",
".",
"_id",
",",
"\"swing\"",
",",
"swing_mode",
",",
"self",
".",
"_ac_states",
")"
] | [
351,
4
] | [
356,
13
] | python | en | ['en', 'en', 'en'] | True |
SensiboClimate.async_turn_on | (self) | Turn Sensibo unit on. | Turn Sensibo unit on. | async def async_turn_on(self):
"""Turn Sensibo unit on."""
with async_timeout.timeout(TIMEOUT):
await self._client.async_set_ac_state_property(
self._id, "on", True, self._ac_states
) | [
"async",
"def",
"async_turn_on",
"(",
"self",
")",
":",
"with",
"async_timeout",
".",
"timeout",
"(",
"TIMEOUT",
")",
":",
"await",
"self",
".",
"_client",
".",
"async_set_ac_state_property",
"(",
"self",
".",
"_id",
",",
"\"on\"",
",",
"True",
",",
"self",
".",
"_ac_states",
")"
] | [
358,
4
] | [
363,
13
] | python | en | ['es', 'sq', 'en'] | False |
SensiboClimate.async_turn_off | (self) | Turn Sensibo unit on. | Turn Sensibo unit on. | async def async_turn_off(self):
"""Turn Sensibo unit on."""
with async_timeout.timeout(TIMEOUT):
await self._client.async_set_ac_state_property(
self._id, "on", False, self._ac_states
) | [
"async",
"def",
"async_turn_off",
"(",
"self",
")",
":",
"with",
"async_timeout",
".",
"timeout",
"(",
"TIMEOUT",
")",
":",
"await",
"self",
".",
"_client",
".",
"async_set_ac_state_property",
"(",
"self",
".",
"_id",
",",
"\"on\"",
",",
"False",
",",
"self",
".",
"_ac_states",
")"
] | [
365,
4
] | [
370,
13
] | python | en | ['es', 'sq', 'en'] | False |
SensiboClimate.async_assume_state | (self, state) | Set external state. | Set external state. | async def async_assume_state(self, state):
"""Set external state."""
change_needed = (state != HVAC_MODE_OFF and not self._ac_states["on"]) or (
state == HVAC_MODE_OFF and self._ac_states["on"]
)
if change_needed:
with async_timeout.timeout(TIMEOUT):
await self._client.async_set_ac_state_property(
self._id,
"on",
state != HVAC_MODE_OFF, # value
self._ac_states,
True, # assumed_state
)
if state in [STATE_ON, HVAC_MODE_OFF]:
self._external_state = None
else:
self._external_state = state | [
"async",
"def",
"async_assume_state",
"(",
"self",
",",
"state",
")",
":",
"change_needed",
"=",
"(",
"state",
"!=",
"HVAC_MODE_OFF",
"and",
"not",
"self",
".",
"_ac_states",
"[",
"\"on\"",
"]",
")",
"or",
"(",
"state",
"==",
"HVAC_MODE_OFF",
"and",
"self",
".",
"_ac_states",
"[",
"\"on\"",
"]",
")",
"if",
"change_needed",
":",
"with",
"async_timeout",
".",
"timeout",
"(",
"TIMEOUT",
")",
":",
"await",
"self",
".",
"_client",
".",
"async_set_ac_state_property",
"(",
"self",
".",
"_id",
",",
"\"on\"",
",",
"state",
"!=",
"HVAC_MODE_OFF",
",",
"# value",
"self",
".",
"_ac_states",
",",
"True",
",",
"# assumed_state",
")",
"if",
"state",
"in",
"[",
"STATE_ON",
",",
"HVAC_MODE_OFF",
"]",
":",
"self",
".",
"_external_state",
"=",
"None",
"else",
":",
"self",
".",
"_external_state",
"=",
"state"
] | [
372,
4
] | [
391,
40
] | python | da | ['da', 'lb', 'en'] | False |
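
Note: the change_needed expression in async_assume_state above is effectively an exclusive-or between the desired power state and the unit's current on/off flag. A small equivalent restatement of that condition (a sketch, not the dataset code):

def needs_power_change(desired_mode: str, currently_on: bool) -> bool:
    # A change is needed exactly when the desired on-ness disagrees with reality.
    desired_on = desired_mode != "off"
    return desired_on != currently_on

assert needs_power_change("cool", currently_on=False)      # must switch on
assert not needs_power_change("off", currently_on=False)   # already off
assert needs_power_change("off", currently_on=True)        # must switch off
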
SensiboClimate.async_update | (self) | Retrieve latest state. | Retrieve latest state. | async def async_update(self):
"""Retrieve latest state."""
try:
with async_timeout.timeout(TIMEOUT):
data = await self._client.async_get_device(self._id, _FETCH_FIELDS)
self._do_update(data)
except (aiohttp.client_exceptions.ClientError, pysensibo.SensiboError):
_LOGGER.warning("Failed to connect to Sensibo servers")
self._available = False | [
"async",
"def",
"async_update",
"(",
"self",
")",
":",
"try",
":",
"with",
"async_timeout",
".",
"timeout",
"(",
"TIMEOUT",
")",
":",
"data",
"=",
"await",
"self",
".",
"_client",
".",
"async_get_device",
"(",
"self",
".",
"_id",
",",
"_FETCH_FIELDS",
")",
"self",
".",
"_do_update",
"(",
"data",
")",
"except",
"(",
"aiohttp",
".",
"client_exceptions",
".",
"ClientError",
",",
"pysensibo",
".",
"SensiboError",
")",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Failed to connect to Sensibo servers\"",
")",
"self",
".",
"_available",
"=",
"False"
] | [
393,
4
] | [
401,
35
] | python | en | ['es', 'sk', 'en'] | False |
SensorManager.__init__ | (self, bridge) | Initialize the sensor manager. | Initialize the sensor manager. | def __init__(self, bridge):
"""Initialize the sensor manager."""
self.bridge = bridge
self._component_add_entities = {}
self.current = {}
self.current_events = {}
self._enabled_platforms = ("binary_sensor", "sensor")
self.coordinator = DataUpdateCoordinator(
bridge.hass,
_LOGGER,
name="sensor",
update_method=self.async_update_data,
update_interval=self.SCAN_INTERVAL,
request_refresh_debouncer=debounce.Debouncer(
bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True
),
) | [
"def",
"__init__",
"(",
"self",
",",
"bridge",
")",
":",
"self",
".",
"bridge",
"=",
"bridge",
"self",
".",
"_component_add_entities",
"=",
"{",
"}",
"self",
".",
"current",
"=",
"{",
"}",
"self",
".",
"current_events",
"=",
"{",
"}",
"self",
".",
"_enabled_platforms",
"=",
"(",
"\"binary_sensor\"",
",",
"\"sensor\"",
")",
"self",
".",
"coordinator",
"=",
"DataUpdateCoordinator",
"(",
"bridge",
".",
"hass",
",",
"_LOGGER",
",",
"name",
"=",
"\"sensor\"",
",",
"update_method",
"=",
"self",
".",
"async_update_data",
",",
"update_interval",
"=",
"self",
".",
"SCAN_INTERVAL",
",",
"request_refresh_debouncer",
"=",
"debounce",
".",
"Debouncer",
"(",
"bridge",
".",
"hass",
",",
"_LOGGER",
",",
"cooldown",
"=",
"REQUEST_REFRESH_DELAY",
",",
"immediate",
"=",
"True",
")",
",",
")"
] | [
37,
4
] | [
54,
9
] | python | en | ['en', 'en', 'en'] | True |
SensorManager.async_update_data | (self) | Update sensor data. | Update sensor data. | async def async_update_data(self):
"""Update sensor data."""
try:
with async_timeout.timeout(4):
return await self.bridge.async_request_call(
self.bridge.api.sensors.update
)
except Unauthorized as err:
await self.bridge.handle_unauthorized_error()
raise UpdateFailed("Unauthorized") from err
except AiohueException as err:
raise UpdateFailed(f"Hue error: {err}") from err | [
"async",
"def",
"async_update_data",
"(",
"self",
")",
":",
"try",
":",
"with",
"async_timeout",
".",
"timeout",
"(",
"4",
")",
":",
"return",
"await",
"self",
".",
"bridge",
".",
"async_request_call",
"(",
"self",
".",
"bridge",
".",
"api",
".",
"sensors",
".",
"update",
")",
"except",
"Unauthorized",
"as",
"err",
":",
"await",
"self",
".",
"bridge",
".",
"handle_unauthorized_error",
"(",
")",
"raise",
"UpdateFailed",
"(",
"\"Unauthorized\"",
")",
"from",
"err",
"except",
"AiohueException",
"as",
"err",
":",
"raise",
"UpdateFailed",
"(",
"f\"Hue error: {err}\"",
")",
"from",
"err"
] | [
56,
4
] | [
67,
60
] | python | en | ['ro', 'no', 'en'] | False |
SensorManager.async_register_component | (self, platform, async_add_entities) | Register async_add_entities methods for components. | Register async_add_entities methods for components. | async def async_register_component(self, platform, async_add_entities):
"""Register async_add_entities methods for components."""
self._component_add_entities[platform] = async_add_entities
if len(self._component_add_entities) < len(self._enabled_platforms):
_LOGGER.debug("Aborting start with %s, waiting for the rest", platform)
return
# We have all components available, start the updating.
self.bridge.reset_jobs.append(
self.coordinator.async_add_listener(self.async_update_items)
)
await self.coordinator.async_refresh() | [
"async",
"def",
"async_register_component",
"(",
"self",
",",
"platform",
",",
"async_add_entities",
")",
":",
"self",
".",
"_component_add_entities",
"[",
"platform",
"]",
"=",
"async_add_entities",
"if",
"len",
"(",
"self",
".",
"_component_add_entities",
")",
"<",
"len",
"(",
"self",
".",
"_enabled_platforms",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Aborting start with %s, waiting for the rest\"",
",",
"platform",
")",
"return",
"# We have all components available, start the updating.",
"self",
".",
"bridge",
".",
"reset_jobs",
".",
"append",
"(",
"self",
".",
"coordinator",
".",
"async_add_listener",
"(",
"self",
".",
"async_update_items",
")",
")",
"await",
"self",
".",
"coordinator",
".",
"async_refresh",
"(",
")"
] | [
69,
4
] | [
81,
46
] | python | en | ['da', 'en', 'en'] | True |
SensorManager.async_update_items | (self) | Update sensors from the bridge. | Update sensors from the bridge. | def async_update_items(self):
"""Update sensors from the bridge."""
api = self.bridge.api.sensors
if len(self._component_add_entities) < len(self._enabled_platforms):
return
to_add = {}
primary_sensor_devices = {}
current = self.current
# Physical Hue motion sensors present as three sensors in the API: a
# presence sensor, a temperature sensor, and a light level sensor. Of
# these, only the presence sensor is assigned the user-friendly name
# that the user has given to the device. Each of these sensors is
# linked by a common device_id, which is the first twenty-three
# characters of the unique id (then followed by a hyphen and an ID
# specific to the individual sensor).
#
# To set up neat values, and assign the sensor entities to the same
# device, we first, iterate over all the sensors and find the Hue
# presence sensors, then iterate over all the remaining sensors -
# finding the remaining ones that may or may not be related to the
# presence sensors.
for item_id in api:
if api[item_id].type != TYPE_ZLL_PRESENCE:
continue
primary_sensor_devices[_device_id(api[item_id])] = api[item_id]
# Iterate again now we have all the presence sensors, and add the
# related sensors with nice names where appropriate.
for item_id in api:
uniqueid = api[item_id].uniqueid
if current.get(uniqueid, self.current_events.get(uniqueid)) is not None:
continue
sensor_type = api[item_id].type
# Check for event generator devices
event_config = EVENT_CONFIG_MAP.get(sensor_type)
if event_config is not None:
base_name = api[item_id].name
name = event_config["name_format"].format(base_name)
new_event = event_config["class"](api[item_id], name, self.bridge)
self.bridge.hass.async_create_task(
new_event.async_update_device_registry()
)
self.current_events[uniqueid] = new_event
sensor_config = SENSOR_CONFIG_MAP.get(sensor_type)
if sensor_config is None:
continue
base_name = api[item_id].name
primary_sensor = primary_sensor_devices.get(_device_id(api[item_id]))
if primary_sensor is not None:
base_name = primary_sensor.name
name = sensor_config["name_format"].format(base_name)
current[uniqueid] = sensor_config["class"](
api[item_id], name, self.bridge, primary_sensor=primary_sensor
)
to_add.setdefault(sensor_config["platform"], []).append(current[uniqueid])
self.bridge.hass.async_create_task(
remove_devices(
self.bridge,
[value.uniqueid for value in api.values()],
current,
)
)
for platform in to_add:
self._component_add_entities[platform](to_add[platform]) | [
"def",
"async_update_items",
"(",
"self",
")",
":",
"api",
"=",
"self",
".",
"bridge",
".",
"api",
".",
"sensors",
"if",
"len",
"(",
"self",
".",
"_component_add_entities",
")",
"<",
"len",
"(",
"self",
".",
"_enabled_platforms",
")",
":",
"return",
"to_add",
"=",
"{",
"}",
"primary_sensor_devices",
"=",
"{",
"}",
"current",
"=",
"self",
".",
"current",
"# Physical Hue motion sensors present as three sensors in the API: a",
"# presence sensor, a temperature sensor, and a light level sensor. Of",
"# these, only the presence sensor is assigned the user-friendly name",
"# that the user has given to the device. Each of these sensors is",
"# linked by a common device_id, which is the first twenty-three",
"# characters of the unique id (then followed by a hyphen and an ID",
"# specific to the individual sensor).",
"#",
"# To set up neat values, and assign the sensor entities to the same",
"# device, we first, iterate over all the sensors and find the Hue",
"# presence sensors, then iterate over all the remaining sensors -",
"# finding the remaining ones that may or may not be related to the",
"# presence sensors.",
"for",
"item_id",
"in",
"api",
":",
"if",
"api",
"[",
"item_id",
"]",
".",
"type",
"!=",
"TYPE_ZLL_PRESENCE",
":",
"continue",
"primary_sensor_devices",
"[",
"_device_id",
"(",
"api",
"[",
"item_id",
"]",
")",
"]",
"=",
"api",
"[",
"item_id",
"]",
"# Iterate again now we have all the presence sensors, and add the",
"# related sensors with nice names where appropriate.",
"for",
"item_id",
"in",
"api",
":",
"uniqueid",
"=",
"api",
"[",
"item_id",
"]",
".",
"uniqueid",
"if",
"current",
".",
"get",
"(",
"uniqueid",
",",
"self",
".",
"current_events",
".",
"get",
"(",
"uniqueid",
")",
")",
"is",
"not",
"None",
":",
"continue",
"sensor_type",
"=",
"api",
"[",
"item_id",
"]",
".",
"type",
"# Check for event generator devices",
"event_config",
"=",
"EVENT_CONFIG_MAP",
".",
"get",
"(",
"sensor_type",
")",
"if",
"event_config",
"is",
"not",
"None",
":",
"base_name",
"=",
"api",
"[",
"item_id",
"]",
".",
"name",
"name",
"=",
"event_config",
"[",
"\"name_format\"",
"]",
".",
"format",
"(",
"base_name",
")",
"new_event",
"=",
"event_config",
"[",
"\"class\"",
"]",
"(",
"api",
"[",
"item_id",
"]",
",",
"name",
",",
"self",
".",
"bridge",
")",
"self",
".",
"bridge",
".",
"hass",
".",
"async_create_task",
"(",
"new_event",
".",
"async_update_device_registry",
"(",
")",
")",
"self",
".",
"current_events",
"[",
"uniqueid",
"]",
"=",
"new_event",
"sensor_config",
"=",
"SENSOR_CONFIG_MAP",
".",
"get",
"(",
"sensor_type",
")",
"if",
"sensor_config",
"is",
"None",
":",
"continue",
"base_name",
"=",
"api",
"[",
"item_id",
"]",
".",
"name",
"primary_sensor",
"=",
"primary_sensor_devices",
".",
"get",
"(",
"_device_id",
"(",
"api",
"[",
"item_id",
"]",
")",
")",
"if",
"primary_sensor",
"is",
"not",
"None",
":",
"base_name",
"=",
"primary_sensor",
".",
"name",
"name",
"=",
"sensor_config",
"[",
"\"name_format\"",
"]",
".",
"format",
"(",
"base_name",
")",
"current",
"[",
"uniqueid",
"]",
"=",
"sensor_config",
"[",
"\"class\"",
"]",
"(",
"api",
"[",
"item_id",
"]",
",",
"name",
",",
"self",
".",
"bridge",
",",
"primary_sensor",
"=",
"primary_sensor",
")",
"to_add",
".",
"setdefault",
"(",
"sensor_config",
"[",
"\"platform\"",
"]",
",",
"[",
"]",
")",
".",
"append",
"(",
"current",
"[",
"uniqueid",
"]",
")",
"self",
".",
"bridge",
".",
"hass",
".",
"async_create_task",
"(",
"remove_devices",
"(",
"self",
".",
"bridge",
",",
"[",
"value",
".",
"uniqueid",
"for",
"value",
"in",
"api",
".",
"values",
"(",
")",
"]",
",",
"current",
",",
")",
")",
"for",
"platform",
"in",
"to_add",
":",
"self",
".",
"_component_add_entities",
"[",
"platform",
"]",
"(",
"to_add",
"[",
"platform",
"]",
")"
] | [
84,
4
] | [
159,
68
] | python | en | ['en', 'sn', 'en'] | True |
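
Note: the long comment inside async_update_items above relies on the convention that the first twenty-three characters of a Hue sensor's unique id identify the physical device. A tiny illustration of that grouping; the sample unique ids below are invented for the example and their suffixes are only meant to look plausible:

def device_id(uniqueid: str) -> str:
    # Everything before the per-sensor suffix (the first 23 characters) names the device.
    return uniqueid[:23]

presence = "00:17:88:01:02:03:04:05-02-0406"     # presence sensor (made-up id)
light_level = "00:17:88:01:02:03:04:05-02-0400"  # light level sensor (made-up id)

# Both logical sensors map to the same physical motion sensor.
assert device_id(presence) == device_id(light_level)
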
GenericHueSensor.available | (self) | Return if sensor is available. | Return if sensor is available. | def available(self):
"""Return if sensor is available."""
return self.bridge.sensor_manager.coordinator.last_update_success and (
self.bridge.allow_unreachable
# remotes like Hue Tap (ZGPSwitchSensor) have no _reachability_
or self.sensor.config.get("reachable", True)
) | [
"def",
"available",
"(",
"self",
")",
":",
"return",
"self",
".",
"bridge",
".",
"sensor_manager",
".",
"coordinator",
".",
"last_update_success",
"and",
"(",
"self",
".",
"bridge",
".",
"allow_unreachable",
"# remotes like Hue Tap (ZGPSwitchSensor) have no _reachability_",
"or",
"self",
".",
"sensor",
".",
"config",
".",
"get",
"(",
"\"reachable\"",
",",
"True",
")",
")"
] | [
171,
4
] | [
177,
9
] | python | en | ['en', 'en', 'en'] | True |
GenericHueSensor.async_added_to_hass | (self) | When entity is added to hass. | When entity is added to hass. | async def async_added_to_hass(self):
"""When entity is added to hass."""
self.async_on_remove(
self.bridge.sensor_manager.coordinator.async_add_listener(
self.async_write_ha_state
)
) | [
"async",
"def",
"async_added_to_hass",
"(",
"self",
")",
":",
"self",
".",
"async_on_remove",
"(",
"self",
".",
"bridge",
".",
"sensor_manager",
".",
"coordinator",
".",
"async_add_listener",
"(",
"self",
".",
"async_write_ha_state",
")",
")"
] | [
179,
4
] | [
185,
9
] | python | en | ['en', 'en', 'en'] | True |
GenericHueSensor.async_update | (self) | Update the entity.
Only used by the generic entity update service.
| Update the entity. | async def async_update(self):
"""Update the entity.
Only used by the generic entity update service.
"""
await self.bridge.sensor_manager.coordinator.async_request_refresh() | [
"async",
"def",
"async_update",
"(",
"self",
")",
":",
"await",
"self",
".",
"bridge",
".",
"sensor_manager",
".",
"coordinator",
".",
"async_request_refresh",
"(",
")"
] | [
187,
4
] | [
192,
76
] | python | en | ['en', 'en', 'en'] | True |
GenericZLLSensor.device_state_attributes | (self) | Return the device state attributes. | Return the device state attributes. | def device_state_attributes(self):
"""Return the device state attributes."""
return {"battery_level": self.sensor.battery} | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"return",
"{",
"\"battery_level\"",
":",
"self",
".",
"sensor",
".",
"battery",
"}"
] | [
199,
4
] | [
201,
53
] | python | en | ['en', 'en', 'en'] | True |
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up the Wink platform. | Set up the Wink platform. | def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Wink platform."""
for switch in pywink.get_switches():
_id = switch.object_id() + switch.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkToggleDevice(switch, hass)])
for switch in pywink.get_powerstrips():
_id = switch.object_id() + switch.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkToggleDevice(switch, hass)])
for sprinkler in pywink.get_sprinklers():
_id = sprinkler.object_id() + sprinkler.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkToggleDevice(sprinkler, hass)])
for switch in pywink.get_binary_switch_groups():
_id = switch.object_id() + switch.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkToggleDevice(switch, hass)]) | [
"def",
"setup_platform",
"(",
"hass",
",",
"config",
",",
"add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"for",
"switch",
"in",
"pywink",
".",
"get_switches",
"(",
")",
":",
"_id",
"=",
"switch",
".",
"object_id",
"(",
")",
"+",
"switch",
".",
"name",
"(",
")",
"if",
"_id",
"not",
"in",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"\"unique_ids\"",
"]",
":",
"add_entities",
"(",
"[",
"WinkToggleDevice",
"(",
"switch",
",",
"hass",
")",
"]",
")",
"for",
"switch",
"in",
"pywink",
".",
"get_powerstrips",
"(",
")",
":",
"_id",
"=",
"switch",
".",
"object_id",
"(",
")",
"+",
"switch",
".",
"name",
"(",
")",
"if",
"_id",
"not",
"in",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"\"unique_ids\"",
"]",
":",
"add_entities",
"(",
"[",
"WinkToggleDevice",
"(",
"switch",
",",
"hass",
")",
"]",
")",
"for",
"sprinkler",
"in",
"pywink",
".",
"get_sprinklers",
"(",
")",
":",
"_id",
"=",
"sprinkler",
".",
"object_id",
"(",
")",
"+",
"sprinkler",
".",
"name",
"(",
")",
"if",
"_id",
"not",
"in",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"\"unique_ids\"",
"]",
":",
"add_entities",
"(",
"[",
"WinkToggleDevice",
"(",
"sprinkler",
",",
"hass",
")",
"]",
")",
"for",
"switch",
"in",
"pywink",
".",
"get_binary_switch_groups",
"(",
")",
":",
"_id",
"=",
"switch",
".",
"object_id",
"(",
")",
"+",
"switch",
".",
"name",
"(",
")",
"if",
"_id",
"not",
"in",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"\"unique_ids\"",
"]",
":",
"add_entities",
"(",
"[",
"WinkToggleDevice",
"(",
"switch",
",",
"hass",
")",
"]",
")"
] | [
8,
0
] | [
26,
58
] | python | en | ['en', 'da', 'en'] | True |
WinkToggleDevice.async_added_to_hass | (self) | Call when entity is added to hass. | Call when entity is added to hass. | async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self.hass.data[DOMAIN]["entities"]["switch"].append(self) | [
"async",
"def",
"async_added_to_hass",
"(",
"self",
")",
":",
"self",
".",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"\"entities\"",
"]",
"[",
"\"switch\"",
"]",
".",
"append",
"(",
"self",
")"
] | [
32,
4
] | [
34,
65
] | python | en | ['en', 'en', 'en'] | True |
WinkToggleDevice.is_on | (self) | Return true if device is on. | Return true if device is on. | def is_on(self):
"""Return true if device is on."""
return self.wink.state() | [
"def",
"is_on",
"(",
"self",
")",
":",
"return",
"self",
".",
"wink",
".",
"state",
"(",
")"
] | [
37,
4
] | [
39,
32
] | python | en | ['en', 'fy', 'en'] | True |
WinkToggleDevice.turn_on | (self, **kwargs) | Turn the device on. | Turn the device on. | def turn_on(self, **kwargs):
"""Turn the device on."""
self.wink.set_state(True) | [
"def",
"turn_on",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"wink",
".",
"set_state",
"(",
"True",
")"
] | [
41,
4
] | [
43,
33
] | python | en | ['en', 'en', 'en'] | True |
WinkToggleDevice.turn_off | (self, **kwargs) | Turn the device off. | Turn the device off. | def turn_off(self, **kwargs):
"""Turn the device off."""
self.wink.set_state(False) | [
"def",
"turn_off",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"wink",
".",
"set_state",
"(",
"False",
")"
] | [
45,
4
] | [
47,
34
] | python | en | ['en', 'en', 'en'] | True |
WinkToggleDevice.device_state_attributes | (self) | Return the state attributes. | Return the state attributes. | def device_state_attributes(self):
"""Return the state attributes."""
attributes = super().device_state_attributes
try:
event = self.wink.last_event()
if event is not None:
attributes["last_event"] = event
except AttributeError:
pass
return attributes | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"attributes",
"=",
"super",
"(",
")",
".",
"device_state_attributes",
"try",
":",
"event",
"=",
"self",
".",
"wink",
".",
"last_event",
"(",
")",
"if",
"event",
"is",
"not",
"None",
":",
"attributes",
"[",
"\"last_event\"",
"]",
"=",
"event",
"except",
"AttributeError",
":",
"pass",
"return",
"attributes"
] | [
50,
4
] | [
59,
25
] | python | en | ['en', 'en', 'en'] | True |
community_post | () | Topic JSON with a codeblock marked as auto syntax. | Topic JSON with a codeblock marked as auto syntax. | def community_post():
"""Topic JSON with a codeblock marked as auto syntax."""
return load_fixture("blueprint/community_post.json") | [
"def",
"community_post",
"(",
")",
":",
"return",
"load_fixture",
"(",
"\"blueprint/community_post.json\"",
")"
] | [
13,
0
] | [
15,
56
] | python | en | ['en', 'en', 'en'] | True |
test_get_community_post_import_url | () | Test variations of generating import forum url. | Test variations of generating import forum url. | def test_get_community_post_import_url():
"""Test variations of generating import forum url."""
assert (
importer._get_community_post_import_url(
"https://community.home-assistant.io/t/test-topic/123"
)
== "https://community.home-assistant.io/t/test-topic/123.json"
)
assert (
importer._get_community_post_import_url(
"https://community.home-assistant.io/t/test-topic/123/2"
)
== "https://community.home-assistant.io/t/test-topic/123.json"
) | [
"def",
"test_get_community_post_import_url",
"(",
")",
":",
"assert",
"(",
"importer",
".",
"_get_community_post_import_url",
"(",
"\"https://community.home-assistant.io/t/test-topic/123\"",
")",
"==",
"\"https://community.home-assistant.io/t/test-topic/123.json\"",
")",
"assert",
"(",
"importer",
".",
"_get_community_post_import_url",
"(",
"\"https://community.home-assistant.io/t/test-topic/123/2\"",
")",
"==",
"\"https://community.home-assistant.io/t/test-topic/123.json\"",
")"
] | [
18,
0
] | [
32,
5
] | python | en | ['en', 'la', 'en'] | True |
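
Note: the two assertions above pin down the behaviour of importer._get_community_post_import_url: any post URL under a forum topic collapses to the topic's .json endpoint. One way such a transformation could be written; this is only a sketch consistent with the asserted behaviour, not the actual importer implementation:

import re

def community_post_import_url(url: str) -> str:
    # Keep "https://.../t/<slug>/<topic-id>" and drop any trailing post number.
    match = re.match(r"(https://community\.home-assistant\.io/t/[^/]+/\d+)", url)
    if match is None:
        raise ValueError(f"Not a community topic URL: {url}")
    return f"{match.group(1)}.json"

assert (
    community_post_import_url("https://community.home-assistant.io/t/test-topic/123/2")
    == "https://community.home-assistant.io/t/test-topic/123.json"
)
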
test_get_github_import_url | () | Test getting github import url. | Test getting github import url. | def test_get_github_import_url():
"""Test getting github import url."""
assert (
importer._get_github_import_url(
"https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/motion_light.yaml"
)
== "https://raw.githubusercontent.com/balloob/home-assistant-config/main/blueprints/automation/motion_light.yaml"
)
assert (
importer._get_github_import_url(
"https://raw.githubusercontent.com/balloob/home-assistant-config/main/blueprints/automation/motion_light.yaml"
)
== "https://raw.githubusercontent.com/balloob/home-assistant-config/main/blueprints/automation/motion_light.yaml"
) | [
"def",
"test_get_github_import_url",
"(",
")",
":",
"assert",
"(",
"importer",
".",
"_get_github_import_url",
"(",
"\"https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/motion_light.yaml\"",
")",
"==",
"\"https://raw.githubusercontent.com/balloob/home-assistant-config/main/blueprints/automation/motion_light.yaml\"",
")",
"assert",
"(",
"importer",
".",
"_get_github_import_url",
"(",
"\"https://raw.githubusercontent.com/balloob/home-assistant-config/main/blueprints/automation/motion_light.yaml\"",
")",
"==",
"\"https://raw.githubusercontent.com/balloob/home-assistant-config/main/blueprints/automation/motion_light.yaml\"",
")"
] | [
35,
0
] | [
49,
5
] | python | en | ['nl', 'en', 'en'] | True |
test_extract_blueprint_from_community_topic | (community_post) | Test extracting blueprint. | Test extracting blueprint. | def test_extract_blueprint_from_community_topic(community_post):
"""Test extracting blueprint."""
imported_blueprint = importer._extract_blueprint_from_community_topic(
"http://example.com", json.loads(community_post)
)
assert imported_blueprint is not None
assert imported_blueprint.url == "http://example.com"
assert imported_blueprint.blueprint.domain == "automation"
assert imported_blueprint.blueprint.placeholders == {
"service_to_call",
"trigger_event",
} | [
"def",
"test_extract_blueprint_from_community_topic",
"(",
"community_post",
")",
":",
"imported_blueprint",
"=",
"importer",
".",
"_extract_blueprint_from_community_topic",
"(",
"\"http://example.com\"",
",",
"json",
".",
"loads",
"(",
"community_post",
")",
")",
"assert",
"imported_blueprint",
"is",
"not",
"None",
"assert",
"imported_blueprint",
".",
"url",
"==",
"\"http://example.com\"",
"assert",
"imported_blueprint",
".",
"blueprint",
".",
"domain",
"==",
"\"automation\"",
"assert",
"imported_blueprint",
".",
"blueprint",
".",
"placeholders",
"==",
"{",
"\"service_to_call\"",
",",
"\"trigger_event\"",
",",
"}"
] | [
52,
0
] | [
63,
5
] | python | en | ['fr', 'sr', 'en'] | False |
test_extract_blueprint_from_community_topic_invalid_yaml | () | Test extracting blueprint with invalid YAML. | Test extracting blueprint with invalid YAML. | def test_extract_blueprint_from_community_topic_invalid_yaml():
"""Test extracting blueprint with invalid YAML."""
with pytest.raises(HomeAssistantError):
importer._extract_blueprint_from_community_topic(
"http://example.com",
{
"post_stream": {
"posts": [
{"cooked": '<code class="lang-yaml">invalid: yaml: 2</code>'}
]
}
},
) | [
"def",
"test_extract_blueprint_from_community_topic_invalid_yaml",
"(",
")",
":",
"with",
"pytest",
".",
"raises",
"(",
"HomeAssistantError",
")",
":",
"importer",
".",
"_extract_blueprint_from_community_topic",
"(",
"\"http://example.com\"",
",",
"{",
"\"post_stream\"",
":",
"{",
"\"posts\"",
":",
"[",
"{",
"\"cooked\"",
":",
"'<code class=\"lang-yaml\">invalid: yaml: 2</code>'",
"}",
"]",
"}",
"}",
",",
")"
] | [
66,
0
] | [
78,
9
] | python | en | ['en', 'en', 'en'] | True |
test__extract_blueprint_from_community_topic_wrong_lang | () | Test extracting blueprint with invalid YAML. | Test extracting blueprint with invalid YAML. | def test__extract_blueprint_from_community_topic_wrong_lang():
"""Test extracting blueprint with invalid YAML."""
assert (
importer._extract_blueprint_from_community_topic(
"http://example.com",
{
"post_stream": {
"posts": [
{"cooked": '<code class="lang-php">invalid yaml + 2</code>'}
]
}
},
)
is None
) | [
"def",
"test__extract_blueprint_from_community_topic_wrong_lang",
"(",
")",
":",
"assert",
"(",
"importer",
".",
"_extract_blueprint_from_community_topic",
"(",
"\"http://example.com\"",
",",
"{",
"\"post_stream\"",
":",
"{",
"\"posts\"",
":",
"[",
"{",
"\"cooked\"",
":",
"'<code class=\"lang-php\">invalid yaml + 2</code>'",
"}",
"]",
"}",
"}",
",",
")",
"is",
"None",
")"
] | [
81,
0
] | [
95,
5
] | python | en | ['en', 'en', 'en'] | True |
test_fetch_blueprint_from_community_url | (hass, aioclient_mock, community_post) | Test fetching blueprint from url. | Test fetching blueprint from url. | async def test_fetch_blueprint_from_community_url(hass, aioclient_mock, community_post):
"""Test fetching blueprint from url."""
aioclient_mock.get(
"https://community.home-assistant.io/t/test-topic/123.json", text=community_post
)
imported_blueprint = await importer.fetch_blueprint_from_url(
hass, "https://community.home-assistant.io/t/test-topic/123/2"
)
assert isinstance(imported_blueprint, importer.ImportedBlueprint)
assert imported_blueprint.blueprint.domain == "automation"
assert imported_blueprint.blueprint.placeholders == {
"service_to_call",
"trigger_event",
} | [
"async",
"def",
"test_fetch_blueprint_from_community_url",
"(",
"hass",
",",
"aioclient_mock",
",",
"community_post",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"https://community.home-assistant.io/t/test-topic/123.json\"",
",",
"text",
"=",
"community_post",
")",
"imported_blueprint",
"=",
"await",
"importer",
".",
"fetch_blueprint_from_url",
"(",
"hass",
",",
"\"https://community.home-assistant.io/t/test-topic/123/2\"",
")",
"assert",
"isinstance",
"(",
"imported_blueprint",
",",
"importer",
".",
"ImportedBlueprint",
")",
"assert",
"imported_blueprint",
".",
"blueprint",
".",
"domain",
"==",
"\"automation\"",
"assert",
"imported_blueprint",
".",
"blueprint",
".",
"placeholders",
"==",
"{",
"\"service_to_call\"",
",",
"\"trigger_event\"",
",",
"}"
] | [
98,
0
] | [
111,
5
] | python | en | ['en', 'en', 'en'] | True |
test_fetch_blueprint_from_github_url | (hass, aioclient_mock, url) | Test fetching blueprint from url. | Test fetching blueprint from url. | async def test_fetch_blueprint_from_github_url(hass, aioclient_mock, url):
"""Test fetching blueprint from url."""
aioclient_mock.get(
"https://raw.githubusercontent.com/balloob/home-assistant-config/main/blueprints/automation/motion_light.yaml",
text=Path(
hass.config.path("blueprints/automation/test_event_service.yaml")
).read_text(),
)
imported_blueprint = await importer.fetch_blueprint_from_url(hass, url)
assert isinstance(imported_blueprint, importer.ImportedBlueprint)
assert imported_blueprint.blueprint.domain == "automation"
assert imported_blueprint.blueprint.placeholders == {
"service_to_call",
"trigger_event",
} | [
"async",
"def",
"test_fetch_blueprint_from_github_url",
"(",
"hass",
",",
"aioclient_mock",
",",
"url",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"https://raw.githubusercontent.com/balloob/home-assistant-config/main/blueprints/automation/motion_light.yaml\"",
",",
"text",
"=",
"Path",
"(",
"hass",
".",
"config",
".",
"path",
"(",
"\"blueprints/automation/test_event_service.yaml\"",
")",
")",
".",
"read_text",
"(",
")",
",",
")",
"imported_blueprint",
"=",
"await",
"importer",
".",
"fetch_blueprint_from_url",
"(",
"hass",
",",
"url",
")",
"assert",
"isinstance",
"(",
"imported_blueprint",
",",
"importer",
".",
"ImportedBlueprint",
")",
"assert",
"imported_blueprint",
".",
"blueprint",
".",
"domain",
"==",
"\"automation\"",
"assert",
"imported_blueprint",
".",
"blueprint",
".",
"placeholders",
"==",
"{",
"\"service_to_call\"",
",",
"\"trigger_event\"",
",",
"}"
] | [
121,
0
] | [
136,
5
] | python | en | ['en', 'en', 'en'] | True |
async_setup | (hass: HomeAssistantType, config: ConfigType) | Set up the mobile app component. | Set up the mobile app component. | async def async_setup(hass: HomeAssistantType, config: ConfigType):
"""Set up the mobile app component."""
store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
app_config = await store.async_load()
if app_config is None:
app_config = {
DATA_BINARY_SENSOR: {},
DATA_CONFIG_ENTRIES: {},
DATA_DELETED_IDS: [],
DATA_SENSOR: {},
}
hass.data[DOMAIN] = {
DATA_BINARY_SENSOR: app_config.get(DATA_BINARY_SENSOR, {}),
DATA_CONFIG_ENTRIES: {},
DATA_DELETED_IDS: app_config.get(DATA_DELETED_IDS, []),
DATA_DEVICES: {},
DATA_SENSOR: app_config.get(DATA_SENSOR, {}),
DATA_STORE: store,
}
hass.http.register_view(RegistrationsView())
for deleted_id in hass.data[DOMAIN][DATA_DELETED_IDS]:
try:
webhook_register(
hass, DOMAIN, "Deleted Webhook", deleted_id, handle_webhook
)
except ValueError:
pass
hass.async_create_task(
discovery.async_load_platform(hass, "notify", DOMAIN, {}, config)
)
return True | [
"async",
"def",
"async_setup",
"(",
"hass",
":",
"HomeAssistantType",
",",
"config",
":",
"ConfigType",
")",
":",
"store",
"=",
"hass",
".",
"helpers",
".",
"storage",
".",
"Store",
"(",
"STORAGE_VERSION",
",",
"STORAGE_KEY",
")",
"app_config",
"=",
"await",
"store",
".",
"async_load",
"(",
")",
"if",
"app_config",
"is",
"None",
":",
"app_config",
"=",
"{",
"DATA_BINARY_SENSOR",
":",
"{",
"}",
",",
"DATA_CONFIG_ENTRIES",
":",
"{",
"}",
",",
"DATA_DELETED_IDS",
":",
"[",
"]",
",",
"DATA_SENSOR",
":",
"{",
"}",
",",
"}",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"=",
"{",
"DATA_BINARY_SENSOR",
":",
"app_config",
".",
"get",
"(",
"DATA_BINARY_SENSOR",
",",
"{",
"}",
")",
",",
"DATA_CONFIG_ENTRIES",
":",
"{",
"}",
",",
"DATA_DELETED_IDS",
":",
"app_config",
".",
"get",
"(",
"DATA_DELETED_IDS",
",",
"[",
"]",
")",
",",
"DATA_DEVICES",
":",
"{",
"}",
",",
"DATA_SENSOR",
":",
"app_config",
".",
"get",
"(",
"DATA_SENSOR",
",",
"{",
"}",
")",
",",
"DATA_STORE",
":",
"store",
",",
"}",
"hass",
".",
"http",
".",
"register_view",
"(",
"RegistrationsView",
"(",
")",
")",
"for",
"deleted_id",
"in",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"DATA_DELETED_IDS",
"]",
":",
"try",
":",
"webhook_register",
"(",
"hass",
",",
"DOMAIN",
",",
"\"Deleted Webhook\"",
",",
"deleted_id",
",",
"handle_webhook",
")",
"except",
"ValueError",
":",
"pass",
"hass",
".",
"async_create_task",
"(",
"discovery",
".",
"async_load_platform",
"(",
"hass",
",",
"\"notify\"",
",",
"DOMAIN",
",",
"{",
"}",
",",
"config",
")",
")",
"return",
"True"
] | [
36,
0
] | [
71,
15
] | python | en | ['en', 'en', 'en'] | True |
async_setup_entry | (hass, entry) | Set up a mobile_app entry. | Set up a mobile_app entry. | async def async_setup_entry(hass, entry):
"""Set up a mobile_app entry."""
registration = entry.data
webhook_id = registration[CONF_WEBHOOK_ID]
hass.data[DOMAIN][DATA_CONFIG_ENTRIES][webhook_id] = entry
device_registry = await dr.async_get_registry(hass)
device = device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, registration[ATTR_DEVICE_ID])},
manufacturer=registration[ATTR_MANUFACTURER],
model=registration[ATTR_MODEL],
name=registration[ATTR_DEVICE_NAME],
sw_version=registration[ATTR_OS_VERSION],
)
hass.data[DOMAIN][DATA_DEVICES][webhook_id] = device
registration_name = f"Mobile App: {registration[ATTR_DEVICE_NAME]}"
webhook_register(hass, DOMAIN, registration_name, webhook_id, handle_webhook)
for domain in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, domain)
)
await hass_notify.async_reload(hass, DOMAIN)
return True | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
",",
"entry",
")",
":",
"registration",
"=",
"entry",
".",
"data",
"webhook_id",
"=",
"registration",
"[",
"CONF_WEBHOOK_ID",
"]",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"DATA_CONFIG_ENTRIES",
"]",
"[",
"webhook_id",
"]",
"=",
"entry",
"device_registry",
"=",
"await",
"dr",
".",
"async_get_registry",
"(",
"hass",
")",
"device",
"=",
"device_registry",
".",
"async_get_or_create",
"(",
"config_entry_id",
"=",
"entry",
".",
"entry_id",
",",
"identifiers",
"=",
"{",
"(",
"DOMAIN",
",",
"registration",
"[",
"ATTR_DEVICE_ID",
"]",
")",
"}",
",",
"manufacturer",
"=",
"registration",
"[",
"ATTR_MANUFACTURER",
"]",
",",
"model",
"=",
"registration",
"[",
"ATTR_MODEL",
"]",
",",
"name",
"=",
"registration",
"[",
"ATTR_DEVICE_NAME",
"]",
",",
"sw_version",
"=",
"registration",
"[",
"ATTR_OS_VERSION",
"]",
",",
")",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"DATA_DEVICES",
"]",
"[",
"webhook_id",
"]",
"=",
"device",
"registration_name",
"=",
"f\"Mobile App: {registration[ATTR_DEVICE_NAME]}\"",
"webhook_register",
"(",
"hass",
",",
"DOMAIN",
",",
"registration_name",
",",
"webhook_id",
",",
"handle_webhook",
")",
"for",
"domain",
"in",
"PLATFORMS",
":",
"hass",
".",
"async_create_task",
"(",
"hass",
".",
"config_entries",
".",
"async_forward_entry_setup",
"(",
"entry",
",",
"domain",
")",
")",
"await",
"hass_notify",
".",
"async_reload",
"(",
"hass",
",",
"DOMAIN",
")",
"return",
"True"
] | [
74,
0
] | [
105,
15
] | python | en | ['en', 'da', 'en'] | True |
async_unload_entry | (hass, entry) | Unload a mobile app entry. | Unload a mobile app entry. | async def async_unload_entry(hass, entry):
"""Unload a mobile app entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if not unload_ok:
return False
webhook_id = entry.data[CONF_WEBHOOK_ID]
webhook_unregister(hass, webhook_id)
del hass.data[DOMAIN][DATA_CONFIG_ENTRIES][webhook_id]
await hass_notify.async_reload(hass, DOMAIN)
return True | [
"async",
"def",
"async_unload_entry",
"(",
"hass",
",",
"entry",
")",
":",
"unload_ok",
"=",
"all",
"(",
"await",
"asyncio",
".",
"gather",
"(",
"*",
"[",
"hass",
".",
"config_entries",
".",
"async_forward_entry_unload",
"(",
"entry",
",",
"component",
")",
"for",
"component",
"in",
"PLATFORMS",
"]",
")",
")",
"if",
"not",
"unload_ok",
":",
"return",
"False",
"webhook_id",
"=",
"entry",
".",
"data",
"[",
"CONF_WEBHOOK_ID",
"]",
"webhook_unregister",
"(",
"hass",
",",
"webhook_id",
")",
"del",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"DATA_CONFIG_ENTRIES",
"]",
"[",
"webhook_id",
"]",
"await",
"hass_notify",
".",
"async_reload",
"(",
"hass",
",",
"DOMAIN",
")",
"return",
"True"
] | [
108,
0
] | [
127,
15
] | python | en | ['en', 'fr', 'en'] | True |
async_remove_entry | (hass, entry) | Cleanup when entry is removed. | Cleanup when entry is removed. | async def async_remove_entry(hass, entry):
"""Cleanup when entry is removed."""
hass.data[DOMAIN][DATA_DELETED_IDS].append(entry.data[CONF_WEBHOOK_ID])
store = hass.data[DOMAIN][DATA_STORE]
await store.async_save(savable_state(hass))
if CONF_CLOUDHOOK_URL in entry.data:
try:
await cloud.async_delete_cloudhook(hass, entry.data[CONF_WEBHOOK_ID])
except cloud.CloudNotAvailable:
pass | [
"async",
"def",
"async_remove_entry",
"(",
"hass",
",",
"entry",
")",
":",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"DATA_DELETED_IDS",
"]",
".",
"append",
"(",
"entry",
".",
"data",
"[",
"CONF_WEBHOOK_ID",
"]",
")",
"store",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"DATA_STORE",
"]",
"await",
"store",
".",
"async_save",
"(",
"savable_state",
"(",
"hass",
")",
")",
"if",
"CONF_CLOUDHOOK_URL",
"in",
"entry",
".",
"data",
":",
"try",
":",
"await",
"cloud",
".",
"async_delete_cloudhook",
"(",
"hass",
",",
"entry",
".",
"data",
"[",
"CONF_WEBHOOK_ID",
"]",
")",
"except",
"cloud",
".",
"CloudNotAvailable",
":",
"pass"
] | [
130,
0
] | [
140,
16
] | python | en | ['en', 'en', 'en'] | True |
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up the Neurio sensor. | Set up the Neurio sensor. | def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Neurio sensor."""
api_key = config.get(CONF_API_KEY)
api_secret = config.get(CONF_API_SECRET)
sensor_id = config.get(CONF_SENSOR_ID)
data = NeurioData(api_key, api_secret, sensor_id)
@Throttle(MIN_TIME_BETWEEN_DAILY_UPDATES)
def update_daily():
"""Update the daily power usage."""
data.get_daily_usage()
@Throttle(MIN_TIME_BETWEEN_ACTIVE_UPDATES)
def update_active():
"""Update the active power usage."""
data.get_active_power()
update_daily()
update_active()
# Active power sensor
add_entities([NeurioEnergy(data, ACTIVE_NAME, ACTIVE_TYPE, update_active)])
# Daily power sensor
add_entities([NeurioEnergy(data, DAILY_NAME, DAILY_TYPE, update_daily)]) | [
"def",
"setup_platform",
"(",
"hass",
",",
"config",
",",
"add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"api_key",
"=",
"config",
".",
"get",
"(",
"CONF_API_KEY",
")",
"api_secret",
"=",
"config",
".",
"get",
"(",
"CONF_API_SECRET",
")",
"sensor_id",
"=",
"config",
".",
"get",
"(",
"CONF_SENSOR_ID",
")",
"data",
"=",
"NeurioData",
"(",
"api_key",
",",
"api_secret",
",",
"sensor_id",
")",
"@",
"Throttle",
"(",
"MIN_TIME_BETWEEN_DAILY_UPDATES",
")",
"def",
"update_daily",
"(",
")",
":",
"\"\"\"Update the daily power usage.\"\"\"",
"data",
".",
"get_daily_usage",
"(",
")",
"@",
"Throttle",
"(",
"MIN_TIME_BETWEEN_ACTIVE_UPDATES",
")",
"def",
"update_active",
"(",
")",
":",
"\"\"\"Update the active power usage.\"\"\"",
"data",
".",
"get_active_power",
"(",
")",
"update_daily",
"(",
")",
"update_active",
"(",
")",
"# Active power sensor",
"add_entities",
"(",
"[",
"NeurioEnergy",
"(",
"data",
",",
"ACTIVE_NAME",
",",
"ACTIVE_TYPE",
",",
"update_active",
")",
"]",
")",
"# Daily power sensor",
"add_entities",
"(",
"[",
"NeurioEnergy",
"(",
"data",
",",
"DAILY_NAME",
",",
"DAILY_TYPE",
",",
"update_daily",
")",
"]",
")"
] | [
40,
0
] | [
64,
76
] | python | en | ['en', 'pt', 'en'] | True |
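
Note: setup_platform above wraps both refresh paths in Throttle-decorated closures so the two sensors can share one rate-limited data object. A simplified stand-in for that decorator (homeassistant.util.Throttle itself is more featureful); this sketch only shows the skip-if-called-too-soon idea:

import time
from functools import wraps

def throttle(min_interval_s: float):
    """Skip calls that arrive sooner than min_interval_s after the last accepted call."""
    def decorator(func):
        last_call = float("-inf")

        @wraps(func)
        def wrapper(*args, **kwargs):
            nonlocal last_call
            now = time.monotonic()
            if now - last_call < min_interval_s:
                return None
            last_call = now
            return func(*args, **kwargs)

        return wrapper
    return decorator

@throttle(150)  # in the spirit of MIN_TIME_BETWEEN_DAILY_UPDATES (exact value is an assumption)
def update_daily():
    print("fetching daily usage")
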
NeurioData.__init__ | (self, api_key, api_secret, sensor_id) | Initialize the data. | Initialize the data. | def __init__(self, api_key, api_secret, sensor_id):
"""Initialize the data."""
self.api_key = api_key
self.api_secret = api_secret
self.sensor_id = sensor_id
self._daily_usage = None
self._active_power = None
self._state = None
neurio_tp = neurio.TokenProvider(key=api_key, secret=api_secret)
self.neurio_client = neurio.Client(token_provider=neurio_tp) | [
"def",
"__init__",
"(",
"self",
",",
"api_key",
",",
"api_secret",
",",
"sensor_id",
")",
":",
"self",
".",
"api_key",
"=",
"api_key",
"self",
".",
"api_secret",
"=",
"api_secret",
"self",
".",
"sensor_id",
"=",
"sensor_id",
"self",
".",
"_daily_usage",
"=",
"None",
"self",
".",
"_active_power",
"=",
"None",
"self",
".",
"_state",
"=",
"None",
"neurio_tp",
"=",
"neurio",
".",
"TokenProvider",
"(",
"key",
"=",
"api_key",
",",
"secret",
"=",
"api_secret",
")",
"self",
".",
"neurio_client",
"=",
"neurio",
".",
"Client",
"(",
"token_provider",
"=",
"neurio_tp",
")"
] | [
70,
4
] | [
82,
68
] | python | en | ['en', 'en', 'en'] | True |
NeurioData.daily_usage | (self) | Return latest daily usage value. | Return latest daily usage value. | def daily_usage(self):
"""Return latest daily usage value."""
return self._daily_usage | [
"def",
"daily_usage",
"(",
"self",
")",
":",
"return",
"self",
".",
"_daily_usage"
] | [
85,
4
] | [
87,
32
] | python | en | ['en', 'en', 'en'] | True |
NeurioData.active_power | (self) | Return latest active power value. | Return latest active power value. | def active_power(self):
"""Return latest active power value."""
return self._active_power | [
"def",
"active_power",
"(",
"self",
")",
":",
"return",
"self",
".",
"_active_power"
] | [
90,
4
] | [
92,
33
] | python | en | ['en', 'en', 'en'] | True |
NeurioData.get_active_power | (self) | Return current power value. | Return current power value. | def get_active_power(self):
"""Return current power value."""
try:
sample = self.neurio_client.get_samples_live_last(self.sensor_id)
self._active_power = sample["consumptionPower"]
except (requests.exceptions.RequestException, ValueError, KeyError):
_LOGGER.warning("Could not update current power usage")
return None | [
"def",
"get_active_power",
"(",
"self",
")",
":",
"try",
":",
"sample",
"=",
"self",
".",
"neurio_client",
".",
"get_samples_live_last",
"(",
"self",
".",
"sensor_id",
")",
"self",
".",
"_active_power",
"=",
"sample",
"[",
"\"consumptionPower\"",
"]",
"except",
"(",
"requests",
".",
"exceptions",
".",
"RequestException",
",",
"ValueError",
",",
"KeyError",
")",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Could not update current power usage\"",
")",
"return",
"None"
] | [
94,
4
] | [
101,
23
] | python | en | ['en', 'la', 'en'] | True |
NeurioData.get_daily_usage | (self) | Return current daily power usage. | Return current daily power usage. | def get_daily_usage(self):
"""Return current daily power usage."""
kwh = 0
start_time = dt_util.start_of_local_day().astimezone(dt_util.UTC).isoformat()
end_time = dt_util.utcnow().isoformat()
_LOGGER.debug("Start: %s, End: %s", start_time, end_time)
try:
history = self.neurio_client.get_samples_stats(
self.sensor_id, start_time, "days", end_time
)
except (requests.exceptions.RequestException, ValueError, KeyError):
_LOGGER.warning("Could not update daily power usage")
return None
for result in history:
kwh += result["consumptionEnergy"] / 3600000
self._daily_usage = round(kwh, 2) | [
"def",
"get_daily_usage",
"(",
"self",
")",
":",
"kwh",
"=",
"0",
"start_time",
"=",
"dt_util",
".",
"start_of_local_day",
"(",
")",
".",
"astimezone",
"(",
"dt_util",
".",
"UTC",
")",
".",
"isoformat",
"(",
")",
"end_time",
"=",
"dt_util",
".",
"utcnow",
"(",
")",
".",
"isoformat",
"(",
")",
"_LOGGER",
".",
"debug",
"(",
"\"Start: %s, End: %s\"",
",",
"start_time",
",",
"end_time",
")",
"try",
":",
"history",
"=",
"self",
".",
"neurio_client",
".",
"get_samples_stats",
"(",
"self",
".",
"sensor_id",
",",
"start_time",
",",
"\"days\"",
",",
"end_time",
")",
"except",
"(",
"requests",
".",
"exceptions",
".",
"RequestException",
",",
"ValueError",
",",
"KeyError",
")",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Could not update daily power usage\"",
")",
"return",
"None",
"for",
"result",
"in",
"history",
":",
"kwh",
"+=",
"result",
"[",
"\"consumptionEnergy\"",
"]",
"/",
"3600000",
"self",
".",
"_daily_usage",
"=",
"round",
"(",
"kwh",
",",
"2",
")"
] | [
103,
4
] | [
122,
41
] | python | en | ['en', 'en', 'en'] | True |
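
Note: the division by 3600000 in get_daily_usage above implies that the API's consumptionEnergy field is reported in joules (watt-seconds), since 1 kWh = 1000 W * 3600 s = 3,600,000 J. A quick check of that conversion with an invented sample value:

samples = [{"consumptionEnergy": 5_400_000}]  # 5.4 MJ, i.e. 1.5 kWh (made-up figure)

kwh = sum(sample["consumptionEnergy"] for sample in samples) / 3_600_000
assert round(kwh, 2) == 1.5
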
NeurioEnergy.__init__ | (self, data, name, sensor_type, update_call) | Initialize the sensor. | Initialize the sensor. | def __init__(self, data, name, sensor_type, update_call):
"""Initialize the sensor."""
self._name = name
self._data = data
self._sensor_type = sensor_type
self.update_sensor = update_call
self._state = None
if sensor_type == ACTIVE_TYPE:
self._unit_of_measurement = POWER_WATT
elif sensor_type == DAILY_TYPE:
self._unit_of_measurement = ENERGY_KILO_WATT_HOUR | [
"def",
"__init__",
"(",
"self",
",",
"data",
",",
"name",
",",
"sensor_type",
",",
"update_call",
")",
":",
"self",
".",
"_name",
"=",
"name",
"self",
".",
"_data",
"=",
"data",
"self",
".",
"_sensor_type",
"=",
"sensor_type",
"self",
".",
"update_sensor",
"=",
"update_call",
"self",
".",
"_state",
"=",
"None",
"if",
"sensor_type",
"==",
"ACTIVE_TYPE",
":",
"self",
".",
"_unit_of_measurement",
"=",
"POWER_WATT",
"elif",
"sensor_type",
"==",
"DAILY_TYPE",
":",
"self",
".",
"_unit_of_measurement",
"=",
"ENERGY_KILO_WATT_HOUR"
] | [
128,
4
] | [
139,
61
] | python | en | ['en', 'en', 'en'] | True |
NeurioEnergy.name | (self) | Return the name of the sensor. | Return the name of the sensor. | def name(self):
"""Return the name of the sensor."""
return self._name | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_name"
] | [
142,
4
] | [
144,
25
] | python | en | ['en', 'mi', 'en'] | True |
NeurioEnergy.state | (self) | Return the state of the sensor. | Return the state of the sensor. | def state(self):
"""Return the state of the sensor."""
return self._state | [
"def",
"state",
"(",
"self",
")",
":",
"return",
"self",
".",
"_state"
] | [
147,
4
] | [
149,
26
] | python | en | ['en', 'en', 'en'] | True |
NeurioEnergy.unit_of_measurement | (self) | Return the unit of measurement of this entity, if any. | Return the unit of measurement of this entity, if any. | def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement | [
"def",
"unit_of_measurement",
"(",
"self",
")",
":",
"return",
"self",
".",
"_unit_of_measurement"
] | [
152,
4
] | [
154,
40
] | python | en | ['en', 'en', 'en'] | True |
NeurioEnergy.icon | (self) | Icon to use in the frontend, if any. | Icon to use in the frontend, if any. | def icon(self):
"""Icon to use in the frontend, if any."""
return ICON | [
"def",
"icon",
"(",
"self",
")",
":",
"return",
"ICON"
] | [
157,
4
] | [
159,
19
] | python | en | ['en', 'en', 'en'] | True |
NeurioEnergy.update | (self) | Get the latest data, update state. | Get the latest data, update state. | def update(self):
"""Get the latest data, update state."""
self.update_sensor()
if self._sensor_type == ACTIVE_TYPE:
self._state = self._data.active_power
elif self._sensor_type == DAILY_TYPE:
self._state = self._data.daily_usage | [
"def",
"update",
"(",
"self",
")",
":",
"self",
".",
"update_sensor",
"(",
")",
"if",
"self",
".",
"_sensor_type",
"==",
"ACTIVE_TYPE",
":",
"self",
".",
"_state",
"=",
"self",
".",
"_data",
".",
"active_power",
"elif",
"self",
".",
"_sensor_type",
"==",
"DAILY_TYPE",
":",
"self",
".",
"_state",
"=",
"self",
".",
"_data",
".",
"daily_usage"
] | [
161,
4
] | [
168,
48
] | python | en | ['en', 'en', 'en'] | True |
test_service_setup | (hass) | Verify service setup works. | Verify service setup works. | async def test_service_setup(hass):
"""Verify service setup works."""
assert DECONZ_SERVICES not in hass.data
with patch(
"homeassistant.core.ServiceRegistry.async_register", return_value=Mock(True)
) as async_register:
await async_setup_services(hass)
assert hass.data[DECONZ_SERVICES] is True
assert async_register.call_count == 3 | [
"async",
"def",
"test_service_setup",
"(",
"hass",
")",
":",
"assert",
"DECONZ_SERVICES",
"not",
"in",
"hass",
".",
"data",
"with",
"patch",
"(",
"\"homeassistant.core.ServiceRegistry.async_register\"",
",",
"return_value",
"=",
"Mock",
"(",
"True",
")",
")",
"as",
"async_register",
":",
"await",
"async_setup_services",
"(",
"hass",
")",
"assert",
"hass",
".",
"data",
"[",
"DECONZ_SERVICES",
"]",
"is",
"True",
"assert",
"async_register",
".",
"call_count",
"==",
"3"
] | [
75,
0
] | [
83,
45
] | python | en | ['en', 'cs', 'en'] | True |
test_service_setup_already_registered | (hass) | Make sure that services are only registered once. | Make sure that services are only registered once. | async def test_service_setup_already_registered(hass):
"""Make sure that services are only registered once."""
hass.data[DECONZ_SERVICES] = True
with patch(
"homeassistant.core.ServiceRegistry.async_register", return_value=Mock(True)
) as async_register:
await async_setup_services(hass)
async_register.assert_not_called() | [
"async",
"def",
"test_service_setup_already_registered",
"(",
"hass",
")",
":",
"hass",
".",
"data",
"[",
"DECONZ_SERVICES",
"]",
"=",
"True",
"with",
"patch",
"(",
"\"homeassistant.core.ServiceRegistry.async_register\"",
",",
"return_value",
"=",
"Mock",
"(",
"True",
")",
")",
"as",
"async_register",
":",
"await",
"async_setup_services",
"(",
"hass",
")",
"async_register",
".",
"assert_not_called",
"(",
")"
] | [
86,
0
] | [
93,
42
] | python | en | ['en', 'en', 'en'] | True |
test_service_unload | (hass) | Verify service unload works. | Verify service unload works. | async def test_service_unload(hass):
"""Verify service unload works."""
hass.data[DECONZ_SERVICES] = True
with patch(
"homeassistant.core.ServiceRegistry.async_remove", return_value=Mock(True)
) as async_remove:
await async_unload_services(hass)
assert hass.data[DECONZ_SERVICES] is False
assert async_remove.call_count == 3 | [
"async",
"def",
"test_service_unload",
"(",
"hass",
")",
":",
"hass",
".",
"data",
"[",
"DECONZ_SERVICES",
"]",
"=",
"True",
"with",
"patch",
"(",
"\"homeassistant.core.ServiceRegistry.async_remove\"",
",",
"return_value",
"=",
"Mock",
"(",
"True",
")",
")",
"as",
"async_remove",
":",
"await",
"async_unload_services",
"(",
"hass",
")",
"assert",
"hass",
".",
"data",
"[",
"DECONZ_SERVICES",
"]",
"is",
"False",
"assert",
"async_remove",
".",
"call_count",
"==",
"3"
] | [
96,
0
] | [
104,
43
] | python | en | ['en', 'fr', 'en'] | True |
test_service_unload_not_registered | (hass) | Make sure that services can only be unloaded once. | Make sure that services can only be unloaded once. | async def test_service_unload_not_registered(hass):
"""Make sure that services can only be unloaded once."""
with patch(
"homeassistant.core.ServiceRegistry.async_remove", return_value=Mock(True)
) as async_remove:
await async_unload_services(hass)
assert DECONZ_SERVICES not in hass.data
async_remove.assert_not_called() | [
"async",
"def",
"test_service_unload_not_registered",
"(",
"hass",
")",
":",
"with",
"patch",
"(",
"\"homeassistant.core.ServiceRegistry.async_remove\"",
",",
"return_value",
"=",
"Mock",
"(",
"True",
")",
")",
"as",
"async_remove",
":",
"await",
"async_unload_services",
"(",
"hass",
")",
"assert",
"DECONZ_SERVICES",
"not",
"in",
"hass",
".",
"data",
"async_remove",
".",
"assert_not_called",
"(",
")"
] | [
107,
0
] | [
114,
40
] | python | en | ['en', 'en', 'en'] | True |
test_configure_service_with_field | (hass) | Test that service invokes pydeconz with the correct path and data. | Test that service invokes pydeconz with the correct path and data. | async def test_configure_service_with_field(hass):
"""Test that service invokes pydeconz with the correct path and data."""
await setup_deconz_integration(hass)
data = {
SERVICE_FIELD: "/light/2",
CONF_BRIDGE_ID: BRIDGEID,
SERVICE_DATA: {"on": True, "attr1": 10, "attr2": 20},
}
with patch("pydeconz.DeconzSession.request", return_value=Mock(True)) as put_state:
await hass.services.async_call(
DECONZ_DOMAIN, SERVICE_CONFIGURE_DEVICE, service_data=data
)
await hass.async_block_till_done()
put_state.assert_called_with(
"put", "/light/2", json={"on": True, "attr1": 10, "attr2": 20}
) | [
"async",
"def",
"test_configure_service_with_field",
"(",
"hass",
")",
":",
"await",
"setup_deconz_integration",
"(",
"hass",
")",
"data",
"=",
"{",
"SERVICE_FIELD",
":",
"\"/light/2\"",
",",
"CONF_BRIDGE_ID",
":",
"BRIDGEID",
",",
"SERVICE_DATA",
":",
"{",
"\"on\"",
":",
"True",
",",
"\"attr1\"",
":",
"10",
",",
"\"attr2\"",
":",
"20",
"}",
",",
"}",
"with",
"patch",
"(",
"\"pydeconz.DeconzSession.request\"",
",",
"return_value",
"=",
"Mock",
"(",
"True",
")",
")",
"as",
"put_state",
":",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"DECONZ_DOMAIN",
",",
"SERVICE_CONFIGURE_DEVICE",
",",
"service_data",
"=",
"data",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"put_state",
".",
"assert_called_with",
"(",
"\"put\"",
",",
"\"/light/2\"",
",",
"json",
"=",
"{",
"\"on\"",
":",
"True",
",",
"\"attr1\"",
":",
"10",
",",
"\"attr2\"",
":",
"20",
"}",
")"
] | [
117,
0
] | [
134,
9
] | python | en | ['en', 'en', 'en'] | True |
test_configure_service_with_entity | (hass) | Test that service invokes pydeconz with the correct path and data. | Test that service invokes pydeconz with the correct path and data. | async def test_configure_service_with_entity(hass):
"""Test that service invokes pydeconz with the correct path and data."""
config_entry = await setup_deconz_integration(hass)
gateway = get_gateway_from_config_entry(hass, config_entry)
gateway.deconz_ids["light.test"] = "/light/1"
data = {
SERVICE_ENTITY: "light.test",
SERVICE_DATA: {"on": True, "attr1": 10, "attr2": 20},
}
with patch("pydeconz.DeconzSession.request", return_value=Mock(True)) as put_state:
await hass.services.async_call(
DECONZ_DOMAIN, SERVICE_CONFIGURE_DEVICE, service_data=data
)
await hass.async_block_till_done()
put_state.assert_called_with(
"put", "/light/1", json={"on": True, "attr1": 10, "attr2": 20}
) | [
"async",
"def",
"test_configure_service_with_entity",
"(",
"hass",
")",
":",
"config_entry",
"=",
"await",
"setup_deconz_integration",
"(",
"hass",
")",
"gateway",
"=",
"get_gateway_from_config_entry",
"(",
"hass",
",",
"config_entry",
")",
"gateway",
".",
"deconz_ids",
"[",
"\"light.test\"",
"]",
"=",
"\"/light/1\"",
"data",
"=",
"{",
"SERVICE_ENTITY",
":",
"\"light.test\"",
",",
"SERVICE_DATA",
":",
"{",
"\"on\"",
":",
"True",
",",
"\"attr1\"",
":",
"10",
",",
"\"attr2\"",
":",
"20",
"}",
",",
"}",
"with",
"patch",
"(",
"\"pydeconz.DeconzSession.request\"",
",",
"return_value",
"=",
"Mock",
"(",
"True",
")",
")",
"as",
"put_state",
":",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"DECONZ_DOMAIN",
",",
"SERVICE_CONFIGURE_DEVICE",
",",
"service_data",
"=",
"data",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"put_state",
".",
"assert_called_with",
"(",
"\"put\"",
",",
"\"/light/1\"",
",",
"json",
"=",
"{",
"\"on\"",
":",
"True",
",",
"\"attr1\"",
":",
"10",
",",
"\"attr2\"",
":",
"20",
"}",
")"
] | [
137,
0
] | [
155,
9
] | python | en | ['en', 'en', 'en'] | True |
test_configure_service_with_entity_and_field | (hass) | Test that service invokes pydeconz with the correct path and data. | Test that service invokes pydeconz with the correct path and data. | async def test_configure_service_with_entity_and_field(hass):
"""Test that service invokes pydeconz with the correct path and data."""
config_entry = await setup_deconz_integration(hass)
gateway = get_gateway_from_config_entry(hass, config_entry)
gateway.deconz_ids["light.test"] = "/light/1"
data = {
SERVICE_ENTITY: "light.test",
SERVICE_FIELD: "/state",
SERVICE_DATA: {"on": True, "attr1": 10, "attr2": 20},
}
with patch("pydeconz.DeconzSession.request", return_value=Mock(True)) as put_state:
await hass.services.async_call(
DECONZ_DOMAIN, SERVICE_CONFIGURE_DEVICE, service_data=data
)
await hass.async_block_till_done()
put_state.assert_called_with(
"put", "/light/1/state", json={"on": True, "attr1": 10, "attr2": 20}
) | [
"async",
"def",
"test_configure_service_with_entity_and_field",
"(",
"hass",
")",
":",
"config_entry",
"=",
"await",
"setup_deconz_integration",
"(",
"hass",
")",
"gateway",
"=",
"get_gateway_from_config_entry",
"(",
"hass",
",",
"config_entry",
")",
"gateway",
".",
"deconz_ids",
"[",
"\"light.test\"",
"]",
"=",
"\"/light/1\"",
"data",
"=",
"{",
"SERVICE_ENTITY",
":",
"\"light.test\"",
",",
"SERVICE_FIELD",
":",
"\"/state\"",
",",
"SERVICE_DATA",
":",
"{",
"\"on\"",
":",
"True",
",",
"\"attr1\"",
":",
"10",
",",
"\"attr2\"",
":",
"20",
"}",
",",
"}",
"with",
"patch",
"(",
"\"pydeconz.DeconzSession.request\"",
",",
"return_value",
"=",
"Mock",
"(",
"True",
")",
")",
"as",
"put_state",
":",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"DECONZ_DOMAIN",
",",
"SERVICE_CONFIGURE_DEVICE",
",",
"service_data",
"=",
"data",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"put_state",
".",
"assert_called_with",
"(",
"\"put\"",
",",
"\"/light/1/state\"",
",",
"json",
"=",
"{",
"\"on\"",
":",
"True",
",",
"\"attr1\"",
":",
"10",
",",
"\"attr2\"",
":",
"20",
"}",
")"
] | [
158,
0
] | [
177,
9
] | python | en | ['en', 'en', 'en'] | True |
test_configure_service_with_faulty_field | (hass) | Test that service invokes pydeconz with the correct path and data. | Test that service invokes pydeconz with the correct path and data. | async def test_configure_service_with_faulty_field(hass):
"""Test that service invokes pydeconz with the correct path and data."""
await setup_deconz_integration(hass)
data = {SERVICE_FIELD: "light/2", SERVICE_DATA: {}}
with pytest.raises(vol.Invalid):
await hass.services.async_call(
DECONZ_DOMAIN, SERVICE_CONFIGURE_DEVICE, service_data=data
)
await hass.async_block_till_done() | [
"async",
"def",
"test_configure_service_with_faulty_field",
"(",
"hass",
")",
":",
"await",
"setup_deconz_integration",
"(",
"hass",
")",
"data",
"=",
"{",
"SERVICE_FIELD",
":",
"\"light/2\"",
",",
"SERVICE_DATA",
":",
"{",
"}",
"}",
"with",
"pytest",
".",
"raises",
"(",
"vol",
".",
"Invalid",
")",
":",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"DECONZ_DOMAIN",
",",
"SERVICE_CONFIGURE_DEVICE",
",",
"service_data",
"=",
"data",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")"
] | [
180,
0
] | [
190,
42
] | python | en | ['en', 'en', 'en'] | True |
test_configure_service_with_faulty_entity | (hass) | Test that service invokes pydeconz with the correct path and data. | Test that service invokes pydeconz with the correct path and data. | async def test_configure_service_with_faulty_entity(hass):
"""Test that service invokes pydeconz with the correct path and data."""
await setup_deconz_integration(hass)
data = {
SERVICE_ENTITY: "light.nonexisting",
SERVICE_DATA: {},
}
with patch("pydeconz.DeconzSession.request", return_value=Mock(True)) as put_state:
await hass.services.async_call(
DECONZ_DOMAIN, SERVICE_CONFIGURE_DEVICE, service_data=data
)
await hass.async_block_till_done()
put_state.assert_not_called() | [
"async",
"def",
"test_configure_service_with_faulty_entity",
"(",
"hass",
")",
":",
"await",
"setup_deconz_integration",
"(",
"hass",
")",
"data",
"=",
"{",
"SERVICE_ENTITY",
":",
"\"light.nonexisting\"",
",",
"SERVICE_DATA",
":",
"{",
"}",
",",
"}",
"with",
"patch",
"(",
"\"pydeconz.DeconzSession.request\"",
",",
"return_value",
"=",
"Mock",
"(",
"True",
")",
")",
"as",
"put_state",
":",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"DECONZ_DOMAIN",
",",
"SERVICE_CONFIGURE_DEVICE",
",",
"service_data",
"=",
"data",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"put_state",
".",
"assert_not_called",
"(",
")"
] | [
193,
0
] | [
207,
37
] | python | en | ['en', 'en', 'en'] | True |
test_service_refresh_devices | (hass) | Test that service can refresh devices. | Test that service can refresh devices. | async def test_service_refresh_devices(hass):
"""Test that service can refresh devices."""
config_entry = await setup_deconz_integration(hass)
gateway = get_gateway_from_config_entry(hass, config_entry)
data = {CONF_BRIDGE_ID: BRIDGEID}
with patch(
"pydeconz.DeconzSession.request",
return_value={"groups": GROUP, "lights": LIGHT, "sensors": SENSOR},
):
await hass.services.async_call(
DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH, service_data=data
)
await hass.async_block_till_done()
assert gateway.deconz_ids == {
"light.group_1_name": "/groups/1",
"light.light_1_name": "/lights/1",
"scene.group_1_name_scene_1": "/groups/1/scenes/1",
"sensor.sensor_1_name": "/sensors/1",
} | [
"async",
"def",
"test_service_refresh_devices",
"(",
"hass",
")",
":",
"config_entry",
"=",
"await",
"setup_deconz_integration",
"(",
"hass",
")",
"gateway",
"=",
"get_gateway_from_config_entry",
"(",
"hass",
",",
"config_entry",
")",
"data",
"=",
"{",
"CONF_BRIDGE_ID",
":",
"BRIDGEID",
"}",
"with",
"patch",
"(",
"\"pydeconz.DeconzSession.request\"",
",",
"return_value",
"=",
"{",
"\"groups\"",
":",
"GROUP",
",",
"\"lights\"",
":",
"LIGHT",
",",
"\"sensors\"",
":",
"SENSOR",
"}",
",",
")",
":",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"DECONZ_DOMAIN",
",",
"SERVICE_DEVICE_REFRESH",
",",
"service_data",
"=",
"data",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"gateway",
".",
"deconz_ids",
"==",
"{",
"\"light.group_1_name\"",
":",
"\"/groups/1\"",
",",
"\"light.light_1_name\"",
":",
"\"/lights/1\"",
",",
"\"scene.group_1_name_scene_1\"",
":",
"\"/groups/1/scenes/1\"",
",",
"\"sensor.sensor_1_name\"",
":",
"\"/sensors/1\"",
",",
"}"
] | [
210,
0
] | [
231,
5
] | python | en | ['en', 'en', 'en'] | True |
test_remove_orphaned_entries_service | (hass) | Test that the service works and also doesn't remove more than expected. | Test that the service works and also doesn't remove more than expected. | async def test_remove_orphaned_entries_service(hass):
"""Test that the service works and also doesn't remove more than expected."""
data = deepcopy(DECONZ_WEB_REQUEST)
data["lights"] = deepcopy(LIGHT)
data["sensors"] = deepcopy(SWITCH)
config_entry = await setup_deconz_integration(hass, get_state_response=data)
data = {CONF_BRIDGE_ID: BRIDGEID}
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id, identifiers={("mac", "123")}
)
assert (
len(
[
entry
for entry in device_registry.devices.values()
if config_entry.entry_id in entry.config_entries
]
)
== 5 # Host, gateway, light, switch and orphan
)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
entity_registry.async_get_or_create(
SENSOR_DOMAIN,
DECONZ_DOMAIN,
"12345",
suggested_object_id="Orphaned sensor",
config_entry=config_entry,
device_id=device.id,
)
assert (
len(async_entries_for_config_entry(entity_registry, config_entry.entry_id))
== 3 # Light, switch battery and orphan
)
await hass.services.async_call(
DECONZ_DOMAIN,
SERVICE_REMOVE_ORPHANED_ENTRIES,
service_data=data,
)
await hass.async_block_till_done()
assert (
len(
[
entry
for entry in device_registry.devices.values()
if config_entry.entry_id in entry.config_entries
]
)
== 4 # Host, gateway, light and switch
)
assert (
len(async_entries_for_config_entry(entity_registry, config_entry.entry_id))
== 2 # Light and switch battery
) | [
"async",
"def",
"test_remove_orphaned_entries_service",
"(",
"hass",
")",
":",
"data",
"=",
"deepcopy",
"(",
"DECONZ_WEB_REQUEST",
")",
"data",
"[",
"\"lights\"",
"]",
"=",
"deepcopy",
"(",
"LIGHT",
")",
"data",
"[",
"\"sensors\"",
"]",
"=",
"deepcopy",
"(",
"SWITCH",
")",
"config_entry",
"=",
"await",
"setup_deconz_integration",
"(",
"hass",
",",
"get_state_response",
"=",
"data",
")",
"data",
"=",
"{",
"CONF_BRIDGE_ID",
":",
"BRIDGEID",
"}",
"device_registry",
"=",
"await",
"hass",
".",
"helpers",
".",
"device_registry",
".",
"async_get_registry",
"(",
")",
"device",
"=",
"device_registry",
".",
"async_get_or_create",
"(",
"config_entry_id",
"=",
"config_entry",
".",
"entry_id",
",",
"identifiers",
"=",
"{",
"(",
"\"mac\"",
",",
"\"123\"",
")",
"}",
")",
"assert",
"(",
"len",
"(",
"[",
"entry",
"for",
"entry",
"in",
"device_registry",
".",
"devices",
".",
"values",
"(",
")",
"if",
"config_entry",
".",
"entry_id",
"in",
"entry",
".",
"config_entries",
"]",
")",
"==",
"5",
"# Host, gateway, light, switch and orphan",
")",
"entity_registry",
"=",
"await",
"hass",
".",
"helpers",
".",
"entity_registry",
".",
"async_get_registry",
"(",
")",
"entity_registry",
".",
"async_get_or_create",
"(",
"SENSOR_DOMAIN",
",",
"DECONZ_DOMAIN",
",",
"\"12345\"",
",",
"suggested_object_id",
"=",
"\"Orphaned sensor\"",
",",
"config_entry",
"=",
"config_entry",
",",
"device_id",
"=",
"device",
".",
"id",
",",
")",
"assert",
"(",
"len",
"(",
"async_entries_for_config_entry",
"(",
"entity_registry",
",",
"config_entry",
".",
"entry_id",
")",
")",
"==",
"3",
"# Light, switch battery and orphan",
")",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"DECONZ_DOMAIN",
",",
"SERVICE_REMOVE_ORPHANED_ENTRIES",
",",
"service_data",
"=",
"data",
",",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"(",
"len",
"(",
"[",
"entry",
"for",
"entry",
"in",
"device_registry",
".",
"devices",
".",
"values",
"(",
")",
"if",
"config_entry",
".",
"entry_id",
"in",
"entry",
".",
"config_entries",
"]",
")",
"==",
"4",
"# Host, gateway, light and switch",
")",
"assert",
"(",
"len",
"(",
"async_entries_for_config_entry",
"(",
"entity_registry",
",",
"config_entry",
".",
"entry_id",
")",
")",
"==",
"2",
"# Light and switch battery",
")"
] | [
234,
0
] | [
295,
5
] | python | en | ['en', 'en', 'en'] | True |
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up Fritz!Box call monitor sensor platform. | Set up Fritz!Box call monitor sensor platform. | def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Fritz!Box call monitor sensor platform."""
name = config[CONF_NAME]
host = config[CONF_HOST]
# Try to resolve a hostname; if it is already an IP, it will be returned as-is
try:
host = socket.gethostbyname(host)
except OSError:
_LOGGER.error("Could not resolve hostname %s", host)
return
port = config[CONF_PORT]
username = config[CONF_USERNAME]
password = config.get(CONF_PASSWORD)
phonebook_id = config[CONF_PHONEBOOK]
prefixes = config[CONF_PREFIXES]
try:
phonebook = FritzBoxPhonebook(
host=host,
port=port,
username=username,
password=password,
phonebook_id=phonebook_id,
prefixes=prefixes,
)
except: # noqa: E722 pylint: disable=bare-except
phonebook = None
_LOGGER.warning("Phonebook with ID %s not found on Fritz!Box", phonebook_id)
sensor = FritzBoxCallSensor(name=name, phonebook=phonebook)
add_entities([sensor])
monitor = FritzBoxCallMonitor(host=host, port=port, sensor=sensor)
monitor.connect()
def _stop_listener(_event):
monitor.stopped.set()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _stop_listener)
return monitor.sock is not None | [
"def",
"setup_platform",
"(",
"hass",
",",
"config",
",",
"add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"name",
"=",
"config",
"[",
"CONF_NAME",
"]",
"host",
"=",
"config",
"[",
"CONF_HOST",
"]",
"# Try to resolve a hostname; if it is already an IP, it will be returned as-is",
"try",
":",
"host",
"=",
"socket",
".",
"gethostbyname",
"(",
"host",
")",
"except",
"OSError",
":",
"_LOGGER",
".",
"error",
"(",
"\"Could not resolve hostname %s\"",
",",
"host",
")",
"return",
"port",
"=",
"config",
"[",
"CONF_PORT",
"]",
"username",
"=",
"config",
"[",
"CONF_USERNAME",
"]",
"password",
"=",
"config",
".",
"get",
"(",
"CONF_PASSWORD",
")",
"phonebook_id",
"=",
"config",
"[",
"CONF_PHONEBOOK",
"]",
"prefixes",
"=",
"config",
"[",
"CONF_PREFIXES",
"]",
"try",
":",
"phonebook",
"=",
"FritzBoxPhonebook",
"(",
"host",
"=",
"host",
",",
"port",
"=",
"port",
",",
"username",
"=",
"username",
",",
"password",
"=",
"password",
",",
"phonebook_id",
"=",
"phonebook_id",
",",
"prefixes",
"=",
"prefixes",
",",
")",
"except",
":",
"# noqa: E722 pylint: disable=bare-except",
"phonebook",
"=",
"None",
"_LOGGER",
".",
"warning",
"(",
"\"Phonebook with ID %s not found on Fritz!Box\"",
",",
"phonebook_id",
")",
"sensor",
"=",
"FritzBoxCallSensor",
"(",
"name",
"=",
"name",
",",
"phonebook",
"=",
"phonebook",
")",
"add_entities",
"(",
"[",
"sensor",
"]",
")",
"monitor",
"=",
"FritzBoxCallMonitor",
"(",
"host",
"=",
"host",
",",
"port",
"=",
"port",
",",
"sensor",
"=",
"sensor",
")",
"monitor",
".",
"connect",
"(",
")",
"def",
"_stop_listener",
"(",
"_event",
")",
":",
"monitor",
".",
"stopped",
".",
"set",
"(",
")",
"hass",
".",
"bus",
".",
"listen_once",
"(",
"EVENT_HOMEASSISTANT_STOP",
",",
"_stop_listener",
")",
"return",
"monitor",
".",
"sock",
"is",
"not",
"None"
] | [
60,
0
] | [
101,
35
] | python | en | ['en', 'fil', 'en'] | True |
FritzBoxCallSensor.__init__ | (self, name, phonebook) | Initialize the sensor. | Initialize the sensor. | def __init__(self, name, phonebook):
"""Initialize the sensor."""
self._state = VALUE_DEFAULT
self._attributes = {}
self._name = name
self.phonebook = phonebook | [
"def",
"__init__",
"(",
"self",
",",
"name",
",",
"phonebook",
")",
":",
"self",
".",
"_state",
"=",
"VALUE_DEFAULT",
"self",
".",
"_attributes",
"=",
"{",
"}",
"self",
".",
"_name",
"=",
"name",
"self",
".",
"phonebook",
"=",
"phonebook"
] | [
107,
4
] | [
112,
34
] | python | en | ['en', 'en', 'en'] | True |
FritzBoxCallSensor.set_state | (self, state) | Set the state. | Set the state. | def set_state(self, state):
"""Set the state."""
self._state = state | [
"def",
"set_state",
"(",
"self",
",",
"state",
")",
":",
"self",
".",
"_state",
"=",
"state"
] | [
114,
4
] | [
116,
27
] | python | en | ['en', 'en', 'en'] | True |
FritzBoxCallSensor.set_attributes | (self, attributes) | Set the state attributes. | Set the state attributes. | def set_attributes(self, attributes):
"""Set the state attributes."""
self._attributes = attributes | [
"def",
"set_attributes",
"(",
"self",
",",
"attributes",
")",
":",
"self",
".",
"_attributes",
"=",
"attributes"
] | [
118,
4
] | [
120,
37
] | python | en | ['en', 'en', 'en'] | True |
FritzBoxCallSensor.should_poll | (self) | Only poll to update phonebook, if defined. | Only poll to update phonebook, if defined. | def should_poll(self):
"""Only poll to update phonebook, if defined."""
return self.phonebook is not None | [
"def",
"should_poll",
"(",
"self",
")",
":",
"return",
"self",
".",
"phonebook",
"is",
"not",
"None"
] | [
123,
4
] | [
125,
41
] | python | en | ['en', 'en', 'en'] | True |