Dataset columns (name, type, value statistics):

identifier                      string   lengths 1 to 155
parameters                      string   lengths 2 to 6.09k
docstring                       string   lengths 11 to 63.4k
docstring_summary               string   lengths 0 to 63.4k
function                        string   lengths 29 to 99.8k
function_tokens                 sequence
start_point                     sequence
end_point                       sequence
language                        string   1 class
docstring_language              string   lengths 2 to 7
docstring_language_predictions  string   lengths 18 to 23
is_langid_reliable              string   2 classes
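The columns above are enough to reconstruct each function together with its source location and language-identification metadata. A minimal sketch of how records with this schema could be inspected with the `datasets` library follows; the repository path "user/code-docstrings" is a hypothetical placeholder, not the actual dataset name.

```python
# Minimal sketch, assuming the data is published as a Hugging Face dataset.
# "user/code-docstrings" is a hypothetical placeholder path.
from datasets import load_dataset

ds = load_dataset("user/code-docstrings", split="train")

row = ds[0]
print(row["identifier"], row["parameters"])   # e.g. "AtenSwitch.device_class" "(self)"
print(row["docstring_summary"])               # one-line summary of the docstring
print(row["function"])                        # full source text of the function
print(row["start_point"], row["end_point"])   # (line, column) span in the original file
print(row["docstring_language"], row["is_langid_reliable"])
```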
AtenSwitch.device_class
(self)
Return the class of this device, from component DEVICE_CLASSES.
Return the class of this device, from component DEVICE_CLASSES.
def device_class(self) -> str:
    """Return the class of this device, from component DEVICE_CLASSES."""
    return DEVICE_CLASS_OUTLET
[ "def", "device_class", "(", "self", ")", "->", "str", ":", "return", "DEVICE_CLASS_OUTLET" ]
[ 89, 4 ]
[ 91, 34 ]
python
en
['en', 'en', 'en']
True
AtenSwitch.is_on
(self)
Return True if entity is on.
Return True if entity is on.
def is_on(self) -> bool:
    """Return True if entity is on."""
    return self._enabled
[ "def", "is_on", "(", "self", ")", "->", "bool", ":", "return", "self", ".", "_enabled" ]
[ 94, 4 ]
[ 96, 28 ]
python
en
['en', 'cy', 'en']
True
AtenSwitch.current_power_w
(self)
Return the current power usage in W.
Return the current power usage in W.
def current_power_w(self) -> float:
    """Return the current power usage in W."""
    return self._outlet_power
[ "def", "current_power_w", "(", "self", ")", "->", "float", ":", "return", "self", ".", "_outlet_power" ]
[ 99, 4 ]
[ 101, 33 ]
python
en
['en', 'en', 'en']
True
AtenSwitch.async_turn_on
(self, **kwargs)
Turn the switch on.
Turn the switch on.
async def async_turn_on(self, **kwargs):
    """Turn the switch on."""
    await self._device.setOutletStatus(self._outlet, "on")
    self._enabled = True
[ "async", "def", "async_turn_on", "(", "self", ",", "*", "*", "kwargs", ")", ":", "await", "self", ".", "_device", ".", "setOutletStatus", "(", "self", ".", "_outlet", ",", "\"on\"", ")", "self", ".", "_enabled", "=", "True" ]
[ 103, 4 ]
[ 106, 28 ]
python
en
['en', 'en', 'en']
True
AtenSwitch.async_turn_off
(self, **kwargs)
Turn the switch off.
Turn the switch off.
async def async_turn_off(self, **kwargs):
    """Turn the switch off."""
    await self._device.setOutletStatus(self._outlet, "off")
    self._enabled = False
[ "async", "def", "async_turn_off", "(", "self", ",", "*", "*", "kwargs", ")", ":", "await", "self", ".", "_device", ".", "setOutletStatus", "(", "self", ".", "_outlet", ",", "\"off\"", ")", "self", ".", "_enabled", "=", "False" ]
[ 108, 4 ]
[ 111, 29 ]
python
en
['en', 'en', 'en']
True
AtenSwitch.async_update
(self)
Process update from entity.
Process update from entity.
async def async_update(self):
    """Process update from entity."""
    status = await self._device.displayOutletStatus(self._outlet)
    if status == "on":
        self._enabled = True
        self._outlet_power = await self._device.outletPower(self._outlet)
    elif status == "off":
        self._enabled = False
        self._outlet_power = 0.0
[ "async", "def", "async_update", "(", "self", ")", ":", "status", "=", "await", "self", ".", "_device", ".", "displayOutletStatus", "(", "self", ".", "_outlet", ")", "if", "status", "==", "\"on\"", ":", "self", ".", "_enabled", "=", "True", "self", ".", "_outlet_power", "=", "await", "self", ".", "_device", ".", "outletPower", "(", "self", ".", "_outlet", ")", "elif", "status", "==", "\"off\"", ":", "self", ".", "_enabled", "=", "False", "self", ".", "_outlet_power", "=", "0.0" ]
[ 113, 4 ]
[ 121, 36 ]
python
en
['en', 'en', 'en']
True
DomainConfigFlow.__init__
(self)
Set up flow instance.
Set up flow instance.
def __init__(self):
    """Set up flow instance."""
    self.addon_config = None
    self.network_key = None
    self.usb_path = None
    self.use_addon = False
    # If we install the add-on we should uninstall it on entry remove.
    self.integration_created_addon = False
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "addon_config", "=", "None", "self", ".", "network_key", "=", "None", "self", ".", "usb_path", "=", "None", "self", ".", "use_addon", "=", "False", "# If we install the add-on we should uninstall it on entry remove.", "self", ".", "integration_created_addon", "=", "False" ]
[ 30, 4 ]
[ 37, 46 ]
python
en
['en', 'da', 'en']
True
DomainConfigFlow.async_step_user
(self, user_input=None)
Handle the initial step.
Handle the initial step.
async def async_step_user(self, user_input=None):
    """Handle the initial step."""
    if self._async_current_entries():
        return self.async_abort(reason="single_instance_allowed")

    # Currently all flow results need the MQTT integration.
    # This will change when we have the direct MQTT client connection.
    # When that is implemented, move this check to _async_use_mqtt_integration.
    if "mqtt" not in self.hass.config.components:
        return self.async_abort(reason="mqtt_required")

    if not self.hass.components.hassio.is_hassio():
        return self._async_use_mqtt_integration()

    return await self.async_step_on_supervisor()
[ "async", "def", "async_step_user", "(", "self", ",", "user_input", "=", "None", ")", ":", "if", "self", ".", "_async_current_entries", "(", ")", ":", "return", "self", ".", "async_abort", "(", "reason", "=", "\"single_instance_allowed\"", ")", "# Currently all flow results need the MQTT integration.", "# This will change when we have the direct MQTT client connection.", "# When that is implemented, move this check to _async_use_mqtt_integration.", "if", "\"mqtt\"", "not", "in", "self", ".", "hass", ".", "config", ".", "components", ":", "return", "self", ".", "async_abort", "(", "reason", "=", "\"mqtt_required\"", ")", "if", "not", "self", ".", "hass", ".", "components", ".", "hassio", ".", "is_hassio", "(", ")", ":", "return", "self", ".", "_async_use_mqtt_integration", "(", ")", "return", "await", "self", ".", "async_step_on_supervisor", "(", ")" ]
[ 39, 4 ]
[ 53, 52 ]
python
en
['en', 'en', 'en']
True
DomainConfigFlow._async_create_entry_from_vars
(self)
Return a config entry for the flow.
Return a config entry for the flow.
def _async_create_entry_from_vars(self):
    """Return a config entry for the flow."""
    return self.async_create_entry(
        title=TITLE,
        data={
            CONF_USB_PATH: self.usb_path,
            CONF_NETWORK_KEY: self.network_key,
            CONF_USE_ADDON: self.use_addon,
            CONF_INTEGRATION_CREATED_ADDON: self.integration_created_addon,
        },
    )
[ "def", "_async_create_entry_from_vars", "(", "self", ")", ":", "return", "self", ".", "async_create_entry", "(", "title", "=", "TITLE", ",", "data", "=", "{", "CONF_USB_PATH", ":", "self", ".", "usb_path", ",", "CONF_NETWORK_KEY", ":", "self", ".", "network_key", ",", "CONF_USE_ADDON", ":", "self", ".", "use_addon", ",", "CONF_INTEGRATION_CREATED_ADDON", ":", "self", ".", "integration_created_addon", ",", "}", ",", ")" ]
[ 55, 4 ]
[ 65, 9 ]
python
en
['en', 'en', 'en']
True
DomainConfigFlow._async_use_mqtt_integration
(self)
Handle logic when using the MQTT integration. This is the entry point for the logic that is needed when this integration will depend on the MQTT integration.
Handle logic when using the MQTT integration.
def _async_use_mqtt_integration(self):
    """Handle logic when using the MQTT integration.

    This is the entry point for the logic that is needed
    when this integration will depend on the MQTT integration.
    """
    return self._async_create_entry_from_vars()
[ "def", "_async_use_mqtt_integration", "(", "self", ")", ":", "return", "self", ".", "_async_create_entry_from_vars", "(", ")" ]
[ 68, 4 ]
[ 74, 51 ]
python
en
['en', 'en', 'en']
True
DomainConfigFlow.async_step_on_supervisor
(self, user_input=None)
Handle logic when on Supervisor host.
Handle logic when on Supervisor host.
async def async_step_on_supervisor(self, user_input=None):
    """Handle logic when on Supervisor host."""
    if user_input is None:
        return self.async_show_form(
            step_id="on_supervisor", data_schema=ON_SUPERVISOR_SCHEMA
        )
    if not user_input[CONF_USE_ADDON]:
        return self._async_create_entry_from_vars()

    self.use_addon = True

    if await self._async_is_addon_running():
        return self._async_create_entry_from_vars()

    if await self._async_is_addon_installed():
        return await self.async_step_start_addon()

    return await self.async_step_install_addon()
[ "async", "def", "async_step_on_supervisor", "(", "self", ",", "user_input", "=", "None", ")", ":", "if", "user_input", "is", "None", ":", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"on_supervisor\"", ",", "data_schema", "=", "ON_SUPERVISOR_SCHEMA", ")", "if", "not", "user_input", "[", "CONF_USE_ADDON", "]", ":", "return", "self", ".", "_async_create_entry_from_vars", "(", ")", "self", ".", "use_addon", "=", "True", "if", "await", "self", ".", "_async_is_addon_running", "(", ")", ":", "return", "self", ".", "_async_create_entry_from_vars", "(", ")", "if", "await", "self", ".", "_async_is_addon_installed", "(", ")", ":", "return", "await", "self", ".", "async_step_start_addon", "(", ")", "return", "await", "self", ".", "async_step_install_addon", "(", ")" ]
[ 76, 4 ]
[ 93, 52 ]
python
en
['en', 'no', 'en']
True
DomainConfigFlow.async_step_install_addon
(self)
Install OpenZWave add-on.
Install OpenZWave add-on.
async def async_step_install_addon(self):
    """Install OpenZWave add-on."""
    try:
        await self.hass.components.hassio.async_install_addon("core_zwave")
    except self.hass.components.hassio.HassioAPIError as err:
        _LOGGER.error("Failed to install OpenZWave add-on: %s", err)
        return self.async_abort(reason="addon_install_failed")

    self.integration_created_addon = True

    return await self.async_step_start_addon()
[ "async", "def", "async_step_install_addon", "(", "self", ")", ":", "try", ":", "await", "self", ".", "hass", ".", "components", ".", "hassio", ".", "async_install_addon", "(", "\"core_zwave\"", ")", "except", "self", ".", "hass", ".", "components", ".", "hassio", ".", "HassioAPIError", "as", "err", ":", "_LOGGER", ".", "error", "(", "\"Failed to install OpenZWave add-on: %s\"", ",", "err", ")", "return", "self", ".", "async_abort", "(", "reason", "=", "\"addon_install_failed\"", ")", "self", ".", "integration_created_addon", "=", "True", "return", "await", "self", ".", "async_step_start_addon", "(", ")" ]
[ 95, 4 ]
[ 104, 50 ]
python
en
['en', 'en', 'en']
True
DomainConfigFlow.async_step_start_addon
(self, user_input=None)
Ask for config and start OpenZWave add-on.
Ask for config and start OpenZWave add-on.
async def async_step_start_addon(self, user_input=None):
    """Ask for config and start OpenZWave add-on."""
    if self.addon_config is None:
        self.addon_config = await self._async_get_addon_config()

    errors = {}

    if user_input is not None:
        self.network_key = user_input[CONF_NETWORK_KEY]
        self.usb_path = user_input[CONF_USB_PATH]

        new_addon_config = {CONF_ADDON_DEVICE: self.usb_path}
        if self.network_key:
            new_addon_config[CONF_ADDON_NETWORK_KEY] = self.network_key

        if new_addon_config != self.addon_config:
            await self._async_set_addon_config(new_addon_config)

        try:
            await self.hass.components.hassio.async_start_addon("core_zwave")
        except self.hass.components.hassio.HassioAPIError as err:
            _LOGGER.error("Failed to start OpenZWave add-on: %s", err)
            errors["base"] = "addon_start_failed"
        else:
            return self._async_create_entry_from_vars()

    self.usb_path = self.addon_config.get(CONF_ADDON_DEVICE, "")
    self.network_key = self.addon_config.get(CONF_ADDON_NETWORK_KEY, "")

    data_schema = vol.Schema(
        {
            vol.Required(CONF_USB_PATH, default=self.usb_path): str,
            vol.Optional(CONF_NETWORK_KEY, default=self.network_key): str,
        }
    )

    return self.async_show_form(
        step_id="start_addon", data_schema=data_schema, errors=errors
    )
[ "async", "def", "async_step_start_addon", "(", "self", ",", "user_input", "=", "None", ")", ":", "if", "self", ".", "addon_config", "is", "None", ":", "self", ".", "addon_config", "=", "await", "self", ".", "_async_get_addon_config", "(", ")", "errors", "=", "{", "}", "if", "user_input", "is", "not", "None", ":", "self", ".", "network_key", "=", "user_input", "[", "CONF_NETWORK_KEY", "]", "self", ".", "usb_path", "=", "user_input", "[", "CONF_USB_PATH", "]", "new_addon_config", "=", "{", "CONF_ADDON_DEVICE", ":", "self", ".", "usb_path", "}", "if", "self", ".", "network_key", ":", "new_addon_config", "[", "CONF_ADDON_NETWORK_KEY", "]", "=", "self", ".", "network_key", "if", "new_addon_config", "!=", "self", ".", "addon_config", ":", "await", "self", ".", "_async_set_addon_config", "(", "new_addon_config", ")", "try", ":", "await", "self", ".", "hass", ".", "components", ".", "hassio", ".", "async_start_addon", "(", "\"core_zwave\"", ")", "except", "self", ".", "hass", ".", "components", ".", "hassio", ".", "HassioAPIError", "as", "err", ":", "_LOGGER", ".", "error", "(", "\"Failed to start OpenZWave add-on: %s\"", ",", "err", ")", "errors", "[", "\"base\"", "]", "=", "\"addon_start_failed\"", "else", ":", "return", "self", ".", "_async_create_entry_from_vars", "(", ")", "self", ".", "usb_path", "=", "self", ".", "addon_config", ".", "get", "(", "CONF_ADDON_DEVICE", ",", "\"\"", ")", "self", ".", "network_key", "=", "self", ".", "addon_config", ".", "get", "(", "CONF_ADDON_NETWORK_KEY", ",", "\"\"", ")", "data_schema", "=", "vol", ".", "Schema", "(", "{", "vol", ".", "Required", "(", "CONF_USB_PATH", ",", "default", "=", "self", ".", "usb_path", ")", ":", "str", ",", "vol", ".", "Optional", "(", "CONF_NETWORK_KEY", ",", "default", "=", "self", ".", "network_key", ")", ":", "str", ",", "}", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"start_addon\"", ",", "data_schema", "=", "data_schema", ",", "errors", "=", "errors", ")" ]
[ 106, 4 ]
[ 144, 9 ]
python
en
['en', 'en', 'en']
True
DomainConfigFlow._async_get_addon_info
(self)
Return and cache OpenZWave add-on info.
Return and cache OpenZWave add-on info.
async def _async_get_addon_info(self):
    """Return and cache OpenZWave add-on info."""
    try:
        addon_info = await self.hass.components.hassio.async_get_addon_info(
            "core_zwave"
        )
    except self.hass.components.hassio.HassioAPIError as err:
        _LOGGER.error("Failed to get OpenZWave add-on info: %s", err)
        raise AbortFlow("addon_info_failed") from err

    return addon_info
[ "async", "def", "_async_get_addon_info", "(", "self", ")", ":", "try", ":", "addon_info", "=", "await", "self", ".", "hass", ".", "components", ".", "hassio", ".", "async_get_addon_info", "(", "\"core_zwave\"", ")", "except", "self", ".", "hass", ".", "components", ".", "hassio", ".", "HassioAPIError", "as", "err", ":", "_LOGGER", ".", "error", "(", "\"Failed to get OpenZWave add-on info: %s\"", ",", "err", ")", "raise", "AbortFlow", "(", "\"addon_info_failed\"", ")", "from", "err", "return", "addon_info" ]
[ 146, 4 ]
[ 156, 25 ]
python
en
['en', 'en', 'en']
True
DomainConfigFlow._async_is_addon_running
(self)
Return True if OpenZWave add-on is running.
Return True if OpenZWave add-on is running.
async def _async_is_addon_running(self):
    """Return True if OpenZWave add-on is running."""
    addon_info = await self._async_get_addon_info()
    return addon_info["state"] == "started"
[ "async", "def", "_async_is_addon_running", "(", "self", ")", ":", "addon_info", "=", "await", "self", ".", "_async_get_addon_info", "(", ")", "return", "addon_info", "[", "\"state\"", "]", "==", "\"started\"" ]
[ 158, 4 ]
[ 161, 47 ]
python
en
['en', 'de', 'en']
True
DomainConfigFlow._async_is_addon_installed
(self)
Return True if OpenZWave add-on is installed.
Return True if OpenZWave add-on is installed.
async def _async_is_addon_installed(self):
    """Return True if OpenZWave add-on is installed."""
    addon_info = await self._async_get_addon_info()
    return addon_info["version"] is not None
[ "async", "def", "_async_is_addon_installed", "(", "self", ")", ":", "addon_info", "=", "await", "self", ".", "_async_get_addon_info", "(", ")", "return", "addon_info", "[", "\"version\"", "]", "is", "not", "None" ]
[ 163, 4 ]
[ 166, 48 ]
python
en
['en', 'en', 'en']
True
DomainConfigFlow._async_get_addon_config
(self)
Get OpenZWave add-on config.
Get OpenZWave add-on config.
async def _async_get_addon_config(self):
    """Get OpenZWave add-on config."""
    addon_info = await self._async_get_addon_info()
    return addon_info["options"]
[ "async", "def", "_async_get_addon_config", "(", "self", ")", ":", "addon_info", "=", "await", "self", ".", "_async_get_addon_info", "(", ")", "return", "addon_info", "[", "\"options\"", "]" ]
[ 168, 4 ]
[ 171, 36 ]
python
en
['en', 'en', 'en']
True
DomainConfigFlow._async_set_addon_config
(self, config)
Set OpenZWave add-on config.
Set OpenZWave add-on config.
async def _async_set_addon_config(self, config):
    """Set OpenZWave add-on config."""
    options = {"options": config}
    try:
        await self.hass.components.hassio.async_set_addon_options(
            "core_zwave", options
        )
    except self.hass.components.hassio.HassioAPIError as err:
        _LOGGER.error("Failed to set OpenZWave add-on config: %s", err)
        raise AbortFlow("addon_set_config_failed") from err
[ "async", "def", "_async_set_addon_config", "(", "self", ",", "config", ")", ":", "options", "=", "{", "\"options\"", ":", "config", "}", "try", ":", "await", "self", ".", "hass", ".", "components", ".", "hassio", ".", "async_set_addon_options", "(", "\"core_zwave\"", ",", "options", ")", "except", "self", ".", "hass", ".", "components", ".", "hassio", ".", "HassioAPIError", "as", "err", ":", "_LOGGER", ".", "error", "(", "\"Failed to set OpenZWave add-on config: %s\"", ",", "err", ")", "raise", "AbortFlow", "(", "\"addon_set_config_failed\"", ")", "from", "err" ]
[ 173, 4 ]
[ 182, 63 ]
python
en
['en', 'en', 'en']
True
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the Etherscan.io sensors.
Set up the Etherscan.io sensors.
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Etherscan.io sensors."""
    address = config.get(CONF_ADDRESS)
    name = config.get(CONF_NAME)
    token = config.get(CONF_TOKEN)
    token_address = config.get(CONF_TOKEN_ADDRESS)

    if token:
        token = token.upper()
        if not name:
            name = "%s Balance" % token
    if not name:
        name = "ETH Balance"

    add_entities([EtherscanSensor(name, address, token, token_address)], True)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "address", "=", "config", ".", "get", "(", "CONF_ADDRESS", ")", "name", "=", "config", ".", "get", "(", "CONF_NAME", ")", "token", "=", "config", ".", "get", "(", "CONF_TOKEN", ")", "token_address", "=", "config", ".", "get", "(", "CONF_TOKEN_ADDRESS", ")", "if", "token", ":", "token", "=", "token", ".", "upper", "(", ")", "if", "not", "name", ":", "name", "=", "\"%s Balance\"", "%", "token", "if", "not", "name", ":", "name", "=", "\"ETH Balance\"", "add_entities", "(", "[", "EtherscanSensor", "(", "name", ",", "address", ",", "token", ",", "token_address", ")", "]", ",", "True", ")" ]
[ 27, 0 ]
[ 41, 78 ]
python
en
['en', 'en', 'en']
True
EtherscanSensor.__init__
(self, name, address, token, token_address)
Initialize the sensor.
Initialize the sensor.
def __init__(self, name, address, token, token_address):
    """Initialize the sensor."""
    self._name = name
    self._address = address
    self._token_address = token_address
    self._token = token
    self._state = None
    self._unit_of_measurement = self._token or "ETH"
[ "def", "__init__", "(", "self", ",", "name", ",", "address", ",", "token", ",", "token_address", ")", ":", "self", ".", "_name", "=", "name", "self", ".", "_address", "=", "address", "self", ".", "_token_address", "=", "token_address", "self", ".", "_token", "=", "token", "self", ".", "_state", "=", "None", "self", ".", "_unit_of_measurement", "=", "self", ".", "_token", "or", "\"ETH\"" ]
[ 47, 4 ]
[ 54, 56 ]
python
en
['en', 'en', 'en']
True
EtherscanSensor.name
(self)
Return the name of the sensor.
Return the name of the sensor.
def name(self):
    """Return the name of the sensor."""
    return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 57, 4 ]
[ 59, 25 ]
python
en
['en', 'mi', 'en']
True
EtherscanSensor.state
(self)
Return the state of the sensor.
Return the state of the sensor.
def state(self):
    """Return the state of the sensor."""
    return self._state
[ "def", "state", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 62, 4 ]
[ 64, 26 ]
python
en
['en', 'en', 'en']
True
EtherscanSensor.unit_of_measurement
(self)
Return the unit of measurement this sensor expresses itself in.
Return the unit of measurement this sensor expresses itself in.
def unit_of_measurement(self):
    """Return the unit of measurement this sensor expresses itself in."""
    return self._unit_of_measurement
[ "def", "unit_of_measurement", "(", "self", ")", ":", "return", "self", ".", "_unit_of_measurement" ]
[ 67, 4 ]
[ 69, 40 ]
python
en
['en', 'en', 'en']
True
EtherscanSensor.device_state_attributes
(self)
Return the state attributes of the sensor.
Return the state attributes of the sensor.
def device_state_attributes(self):
    """Return the state attributes of the sensor."""
    return {ATTR_ATTRIBUTION: ATTRIBUTION}
[ "def", "device_state_attributes", "(", "self", ")", ":", "return", "{", "ATTR_ATTRIBUTION", ":", "ATTRIBUTION", "}" ]
[ 72, 4 ]
[ 74, 46 ]
python
en
['en', 'en', 'en']
True
EtherscanSensor.update
(self)
Get the latest state of the sensor.
Get the latest state of the sensor.
def update(self):
    """Get the latest state of the sensor."""
    if self._token_address:
        self._state = get_balance(self._address, self._token_address)
    elif self._token:
        self._state = get_balance(self._address, self._token)
    else:
        self._state = get_balance(self._address)
[ "def", "update", "(", "self", ")", ":", "if", "self", ".", "_token_address", ":", "self", ".", "_state", "=", "get_balance", "(", "self", ".", "_address", ",", "self", ".", "_token_address", ")", "elif", "self", ".", "_token", ":", "self", ".", "_state", "=", "get_balance", "(", "self", ".", "_address", ",", "self", ".", "_token", ")", "else", ":", "self", ".", "_state", "=", "get_balance", "(", "self", ".", "_address", ")" ]
[ 76, 4 ]
[ 84, 52 ]
python
en
['en', 'en', 'en']
True
shift_tokens_right
(input_ids: torch.Tensor, pad_token_id: int, decoder_start_token_id: int)
Shift input ids one token to the right.
Shift input ids one token to the right.
def shift_tokens_right(input_ids: torch.Tensor, pad_token_id: int, decoder_start_token_id: int):
    """
    Shift input ids one token to the right.
    """
    shifted_input_ids = input_ids.new_zeros(input_ids.shape)
    shifted_input_ids[:, 1:] = input_ids[:, :-1].clone()
    shifted_input_ids[:, 0] = decoder_start_token_id

    assert pad_token_id is not None, "self.model.config.pad_token_id has to be defined."
    # replace possible -100 values in labels by `pad_token_id`
    shifted_input_ids.masked_fill_(shifted_input_ids == -100, pad_token_id)

    return shifted_input_ids
[ "def", "shift_tokens_right", "(", "input_ids", ":", "torch", ".", "Tensor", ",", "pad_token_id", ":", "int", ",", "decoder_start_token_id", ":", "int", ")", ":", "shifted_input_ids", "=", "input_ids", ".", "new_zeros", "(", "input_ids", ".", "shape", ")", "shifted_input_ids", "[", ":", ",", "1", ":", "]", "=", "input_ids", "[", ":", ",", ":", "-", "1", "]", ".", "clone", "(", ")", "shifted_input_ids", "[", ":", ",", "0", "]", "=", "decoder_start_token_id", "assert", "pad_token_id", "is", "not", "None", ",", "\"self.model.config.pad_token_id has to be defined.\"", "# replace possible -100 values in labels by `pad_token_id`", "shifted_input_ids", ".", "masked_fill_", "(", "shifted_input_ids", "==", "-", "100", ",", "pad_token_id", ")", "return", "shifted_input_ids" ]
[ 57, 0 ]
[ 69, 28 ]
python
en
['en', 'error', 'th']
False
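A small usage illustration (my own example, not part of the dataset) of the record above: labels are shifted one position to the right, the decoder start token is prepended, and any -100 entries are replaced by the pad token id.

```python
import torch

# assumes shift_tokens_right from the record above is in scope
labels = torch.tensor([[5, 6, -100]])
print(shift_tokens_right(labels, pad_token_id=1, decoder_start_token_id=2))
# tensor([[2, 5, 6]])

labels = torch.tensor([[5, -100, -100]])
print(shift_tokens_right(labels, pad_token_id=1, decoder_start_token_id=2))
# tensor([[2, 5, 1]])  -- the shifted -100 is replaced by the pad id
```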
_make_causal_mask
(input_ids_shape: torch.Size, dtype: torch.dtype, past_key_values_length: int = 0)
Make causal mask used for bi-directional self-attention.
Make causal mask used for bi-directional self-attention.
def _make_causal_mask(input_ids_shape: torch.Size, dtype: torch.dtype, past_key_values_length: int = 0):
    """
    Make causal mask used for bi-directional self-attention.
    """
    bsz, tgt_len = input_ids_shape
    mask = torch.full((tgt_len, tgt_len), float("-inf"))
    mask_cond = torch.arange(mask.size(-1))
    mask.masked_fill_(mask_cond < (mask_cond + 1).view(mask.size(-1), 1), 0)
    mask = mask.to(dtype)

    if past_key_values_length > 0:
        mask = torch.cat([torch.zeros(tgt_len, past_key_values_length, dtype=dtype), mask], dim=-1)
    return mask[None, None, :, :].expand(bsz, 1, tgt_len, tgt_len + past_key_values_length)
[ "def", "_make_causal_mask", "(", "input_ids_shape", ":", "torch", ".", "Size", ",", "dtype", ":", "torch", ".", "dtype", ",", "past_key_values_length", ":", "int", "=", "0", ")", ":", "bsz", ",", "tgt_len", "=", "input_ids_shape", "mask", "=", "torch", ".", "full", "(", "(", "tgt_len", ",", "tgt_len", ")", ",", "float", "(", "\"-inf\"", ")", ")", "mask_cond", "=", "torch", ".", "arange", "(", "mask", ".", "size", "(", "-", "1", ")", ")", "mask", ".", "masked_fill_", "(", "mask_cond", "<", "(", "mask_cond", "+", "1", ")", ".", "view", "(", "mask", ".", "size", "(", "-", "1", ")", ",", "1", ")", ",", "0", ")", "mask", "=", "mask", ".", "to", "(", "dtype", ")", "if", "past_key_values_length", ">", "0", ":", "mask", "=", "torch", ".", "cat", "(", "[", "torch", ".", "zeros", "(", "tgt_len", ",", "past_key_values_length", ",", "dtype", "=", "dtype", ")", ",", "mask", "]", ",", "dim", "=", "-", "1", ")", "return", "mask", "[", "None", ",", "None", ",", ":", ",", ":", "]", ".", "expand", "(", "bsz", ",", "1", ",", "tgt_len", ",", "tgt_len", "+", "past_key_values_length", ")" ]
[ 73, 0 ]
[ 85, 91 ]
python
en
['en', 'error', 'th']
False
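An illustration (not from the dataset) of the mask produced by the record above for a target length of 3 and no cached past: each position may attend only to itself and to earlier positions.

```python
import torch

# assumes _make_causal_mask from the record above is in scope
mask = _make_causal_mask(torch.Size([1, 3]), dtype=torch.float32)
print(mask[0, 0])
# tensor([[0., -inf, -inf],
#         [0., 0., -inf],
#         [0., 0., 0.]])
```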
_expand_mask
(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None)
Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None):
    """
    Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
    """
    bsz, src_len = mask.size()
    tgt_len = tgt_len if tgt_len is not None else src_len

    expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype)

    inverted_mask = 1.0 - expanded_mask

    return inverted_mask.masked_fill(inverted_mask.bool(), torch.finfo(dtype).min)
[ "def", "_expand_mask", "(", "mask", ":", "torch", ".", "Tensor", ",", "dtype", ":", "torch", ".", "dtype", ",", "tgt_len", ":", "Optional", "[", "int", "]", "=", "None", ")", ":", "bsz", ",", "src_len", "=", "mask", ".", "size", "(", ")", "tgt_len", "=", "tgt_len", "if", "tgt_len", "is", "not", "None", "else", "src_len", "expanded_mask", "=", "mask", "[", ":", ",", "None", ",", "None", ",", ":", "]", ".", "expand", "(", "bsz", ",", "1", ",", "tgt_len", ",", "src_len", ")", ".", "to", "(", "dtype", ")", "inverted_mask", "=", "1.0", "-", "expanded_mask", "return", "inverted_mask", ".", "masked_fill", "(", "inverted_mask", ".", "bool", "(", ")", ",", "torch", ".", "finfo", "(", "dtype", ")", ".", "min", ")" ]
[ 89, 0 ]
[ 100, 82 ]
python
en
['en', 'error', 'th']
False
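An illustration (again my own example) of the expansion above: a [batch, seq_len] padding mask with one padded position becomes an additive mask whose padded column carries the most negative representable value.

```python
import torch

# assumes _expand_mask from the record above is in scope
padding_mask = torch.tensor([[1, 1, 0]])  # last position is padding
print(_expand_mask(padding_mask, dtype=torch.float32)[0, 0])
# every row is [0, 0, -3.4028e+38]: the padded column gets float32's most negative value
```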
Speech2TextSinusoidalPositionalEmbedding.get_embedding
(num_embeddings: int, embedding_dim: int, padding_idx: Optional[int] = None)
Build sinusoidal embeddings. This matches the implementation in tensor2tensor, but differs slightly from the description in Section 3.5 of "Attention Is All You Need".
Build sinusoidal embeddings. This matches the implementation in tensor2tensor, but differs slightly from the description in Section 3.5 of "Attention Is All You Need".
def get_embedding(num_embeddings: int, embedding_dim: int, padding_idx: Optional[int] = None):
    """
    Build sinusoidal embeddings. This matches the implementation in tensor2tensor, but differs slightly from the
    description in Section 3.5 of "Attention Is All You Need".
    """
    half_dim = embedding_dim // 2
    emb = math.log(10000) / (half_dim - 1)
    emb = torch.exp(torch.arange(half_dim, dtype=torch.float) * -emb)
    emb = torch.arange(num_embeddings, dtype=torch.float).unsqueeze(1) * emb.unsqueeze(0)
    emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1).view(num_embeddings, -1)
    if embedding_dim % 2 == 1:
        # zero pad
        emb = torch.cat([emb, torch.zeros(num_embeddings, 1)], dim=1)
    if padding_idx is not None:
        emb[padding_idx, :] = 0
    return emb
[ "def", "get_embedding", "(", "num_embeddings", ":", "int", ",", "embedding_dim", ":", "int", ",", "padding_idx", ":", "Optional", "[", "int", "]", "=", "None", ")", ":", "half_dim", "=", "embedding_dim", "//", "2", "emb", "=", "math", ".", "log", "(", "10000", ")", "/", "(", "half_dim", "-", "1", ")", "emb", "=", "torch", ".", "exp", "(", "torch", ".", "arange", "(", "half_dim", ",", "dtype", "=", "torch", ".", "float", ")", "*", "-", "emb", ")", "emb", "=", "torch", ".", "arange", "(", "num_embeddings", ",", "dtype", "=", "torch", ".", "float", ")", ".", "unsqueeze", "(", "1", ")", "*", "emb", ".", "unsqueeze", "(", "0", ")", "emb", "=", "torch", ".", "cat", "(", "[", "torch", ".", "sin", "(", "emb", ")", ",", "torch", ".", "cos", "(", "emb", ")", "]", ",", "dim", "=", "1", ")", ".", "view", "(", "num_embeddings", ",", "-", "1", ")", "if", "embedding_dim", "%", "2", "==", "1", ":", "# zero pad", "emb", "=", "torch", ".", "cat", "(", "[", "emb", ",", "torch", ".", "zeros", "(", "num_embeddings", ",", "1", ")", "]", ",", "dim", "=", "1", ")", "if", "padding_idx", "is", "not", "None", ":", "emb", "[", "padding_idx", ",", ":", "]", "=", "0", "return", "emb" ]
[ 159, 4 ]
[ 174, 18 ]
python
en
['en', 'error', 'th']
False
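Written out as math (my reading of the code above, not a formula quoted from the dataset), the embedding for position pos with even dimension d uses d/2 frequencies, with all sine components followed by all cosine components rather than interleaved:

```latex
% Sinusoidal embedding as implemented above (sin half followed by cos half)
\omega_i = \exp\!\left(-\,i\,\frac{\ln 10000}{d/2 - 1}\right), \quad i = 0, \dots, \tfrac{d}{2} - 1
\qquad
\mathrm{PE}(pos,\, i) = \sin(pos\,\omega_i), \qquad
\mathrm{PE}(pos,\, \tfrac{d}{2} + i) = \cos(pos\,\omega_i)
```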
Speech2TextSinusoidalPositionalEmbedding.create_position_ids_from_input_ids
( self, input_ids: torch.Tensor, padding_idx: int, past_key_values_length: Optional[int] = 0 )
Replace non-padding symbols with their position numbers. Position numbers begin at padding_idx+1. Padding symbols are ignored. This is modified from fairseq's `utils.make_positions`. Args: x: torch.Tensor x: Returns: torch.Tensor
Replace non-padding symbols with their position numbers. Position numbers begin at padding_idx+1. Padding symbols are ignored. This is modified from fairseq's `utils.make_positions`.
def create_position_ids_from_input_ids(
    self, input_ids: torch.Tensor, padding_idx: int, past_key_values_length: Optional[int] = 0
):
    """
    Replace non-padding symbols with their position numbers. Position numbers begin at padding_idx+1. Padding
    symbols are ignored. This is modified from fairseq's `utils.make_positions`.

    Args:
        x: torch.Tensor x:
    Returns: torch.Tensor
    """
    # The series of casts and type-conversions here are carefully balanced to both work with ONNX export and XLA.
    mask = input_ids.ne(padding_idx).int()
    incremental_indices = (torch.cumsum(mask, dim=1).type_as(mask) + past_key_values_length) * mask
    return incremental_indices.long() + padding_idx
[ "def", "create_position_ids_from_input_ids", "(", "self", ",", "input_ids", ":", "torch", ".", "Tensor", ",", "padding_idx", ":", "int", ",", "past_key_values_length", ":", "Optional", "[", "int", "]", "=", "0", ")", ":", "# The series of casts and type-conversions here are carefully balanced to both work with ONNX export and XLA.", "mask", "=", "input_ids", ".", "ne", "(", "padding_idx", ")", ".", "int", "(", ")", "incremental_indices", "=", "(", "torch", ".", "cumsum", "(", "mask", ",", "dim", "=", "1", ")", ".", "type_as", "(", "mask", ")", "+", "past_key_values_length", ")", "*", "mask", "return", "incremental_indices", ".", "long", "(", ")", "+", "padding_idx" ]
[ 191, 4 ]
[ 205, 55 ]
python
en
['en', 'error', 'th']
False
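A quick illustration (my own numbers) of the position-id scheme above with padding_idx = 1 and no cached past: non-padding tokens receive increasing positions starting at padding_idx + 1, while padding positions stay at padding_idx.

```python
import torch

padding_idx = 1
input_ids = torch.tensor([[7, 8, 9, 1, 1]])   # the trailing 1s are padding

mask = input_ids.ne(padding_idx).int()                        # [[1, 1, 1, 0, 0]]
incremental = (torch.cumsum(mask, dim=1).type_as(mask) + 0) * mask
print(incremental.long() + padding_idx)
# tensor([[2, 3, 4, 1, 1]])
```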
Speech2TextAttention.forward
( self, hidden_states: torch.Tensor, key_value_states: Optional[torch.Tensor] = None, past_key_value: Optional[Tuple[torch.Tensor]] = None, attention_mask: Optional[torch.Tensor] = None, layer_head_mask: Optional[torch.Tensor] = None, output_attentions: bool = False, )
Input shape: Batch x Time x Channel
Input shape: Batch x Time x Channel
def forward(
    self,
    hidden_states: torch.Tensor,
    key_value_states: Optional[torch.Tensor] = None,
    past_key_value: Optional[Tuple[torch.Tensor]] = None,
    attention_mask: Optional[torch.Tensor] = None,
    layer_head_mask: Optional[torch.Tensor] = None,
    output_attentions: bool = False,
) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
    """Input shape: Batch x Time x Channel"""

    # if key_value_states are provided this layer is used as a cross-attention layer
    # for the decoder
    is_cross_attention = key_value_states is not None
    bsz, tgt_len, embed_dim = hidden_states.size()

    # get query proj
    query_states = self.q_proj(hidden_states) * self.scaling
    # get key, value proj
    if is_cross_attention and past_key_value is not None:
        # reuse k,v, cross_attentions
        key_states = past_key_value[0]
        value_states = past_key_value[1]
    elif is_cross_attention:
        # cross_attentions
        key_states = self._shape(self.k_proj(key_value_states), -1, bsz)
        value_states = self._shape(self.v_proj(key_value_states), -1, bsz)
    elif past_key_value is not None:
        # reuse k, v, self_attention
        key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
        value_states = self._shape(self.v_proj(hidden_states), -1, bsz)
        key_states = torch.cat([past_key_value[0], key_states], dim=2)
        value_states = torch.cat([past_key_value[1], value_states], dim=2)
    else:
        # self_attention
        key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
        value_states = self._shape(self.v_proj(hidden_states), -1, bsz)

    if self.is_decoder:
        # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states.
        # Further calls to cross_attention layer can then reuse all cross-attention
        # key/value_states (first "if" case)
        # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of
        # all previous decoder key/value_states. Further calls to uni-directional self-attention
        # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case)
        # if encoder bi-directional self-attention `past_key_value` is always `None`
        past_key_value = (key_states, value_states)

    proj_shape = (bsz * self.num_heads, -1, self.head_dim)
    query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape)
    key_states = key_states.view(*proj_shape)
    value_states = value_states.view(*proj_shape)

    src_len = key_states.size(1)
    attn_weights = torch.bmm(query_states, key_states.transpose(1, 2))

    assert attn_weights.size() == (
        bsz * self.num_heads,
        tgt_len,
        src_len,
    ), f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {attn_weights.size()}"

    if attention_mask is not None:
        assert attention_mask.size() == (
            bsz,
            1,
            tgt_len,
            src_len,
        ), f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}"
        attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask
        attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)

    attn_weights = F.softmax(attn_weights, dim=-1)

    if layer_head_mask is not None:
        assert layer_head_mask.size() == (
            self.num_heads,
        ), f"Head mask for a single layer should be of size {(self.num_heads,)}, but is {layer_head_mask.size()}"
        attn_weights = layer_head_mask.view(1, -1, 1, 1) * attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
        attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)

    if output_attentions:
        # this operation is a bit awkward, but it's required to
        # make sure that attn_weights keeps its gradient.
        # In order to do so, attn_weights have to be reshaped
        # twice and have to be reused in the following
        attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
        attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len)
    else:
        attn_weights_reshaped = None

    attn_probs = F.dropout(attn_weights, p=self.dropout, training=self.training)

    attn_output = torch.bmm(attn_probs, value_states)

    assert attn_output.size() == (
        bsz * self.num_heads,
        tgt_len,
        self.head_dim,
    ), f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {attn_output.size()}"

    attn_output = (
        attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim)
        .transpose(1, 2)
        .reshape(bsz, tgt_len, embed_dim)
    )

    attn_output = self.out_proj(attn_output)

    return attn_output, attn_weights_reshaped, past_key_value
[ "def", "forward", "(", "self", ",", "hidden_states", ":", "torch", ".", "Tensor", ",", "key_value_states", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "past_key_value", ":", "Optional", "[", "Tuple", "[", "torch", ".", "Tensor", "]", "]", "=", "None", ",", "attention_mask", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "layer_head_mask", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "output_attentions", ":", "bool", "=", "False", ",", ")", "->", "Tuple", "[", "torch", ".", "Tensor", ",", "Optional", "[", "torch", ".", "Tensor", "]", ",", "Optional", "[", "Tuple", "[", "torch", ".", "Tensor", "]", "]", "]", ":", "# if key_value_states are provided this layer is used as a cross-attention layer", "# for the decoder", "is_cross_attention", "=", "key_value_states", "is", "not", "None", "bsz", ",", "tgt_len", ",", "embed_dim", "=", "hidden_states", ".", "size", "(", ")", "# get query proj", "query_states", "=", "self", ".", "q_proj", "(", "hidden_states", ")", "*", "self", ".", "scaling", "# get key, value proj", "if", "is_cross_attention", "and", "past_key_value", "is", "not", "None", ":", "# reuse k,v, cross_attentions", "key_states", "=", "past_key_value", "[", "0", "]", "value_states", "=", "past_key_value", "[", "1", "]", "elif", "is_cross_attention", ":", "# cross_attentions", "key_states", "=", "self", ".", "_shape", "(", "self", ".", "k_proj", "(", "key_value_states", ")", ",", "-", "1", ",", "bsz", ")", "value_states", "=", "self", ".", "_shape", "(", "self", ".", "v_proj", "(", "key_value_states", ")", ",", "-", "1", ",", "bsz", ")", "elif", "past_key_value", "is", "not", "None", ":", "# reuse k, v, self_attention", "key_states", "=", "self", ".", "_shape", "(", "self", ".", "k_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "value_states", "=", "self", ".", "_shape", "(", "self", ".", "v_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "key_states", "=", "torch", ".", "cat", "(", "[", "past_key_value", "[", "0", "]", ",", "key_states", "]", ",", "dim", "=", "2", ")", "value_states", "=", "torch", ".", "cat", "(", "[", "past_key_value", "[", "1", "]", ",", "value_states", "]", ",", "dim", "=", "2", ")", "else", ":", "# self_attention", "key_states", "=", "self", ".", "_shape", "(", "self", ".", "k_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "value_states", "=", "self", ".", "_shape", "(", "self", ".", "v_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "if", "self", ".", "is_decoder", ":", "# if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states.", "# Further calls to cross_attention layer can then reuse all cross-attention", "# key/value_states (first \"if\" case)", "# if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of", "# all previous decoder key/value_states. 
Further calls to uni-directional self-attention", "# can concat previous decoder key/value_states to current projected key/value_states (third \"elif\" case)", "# if encoder bi-directional self-attention `past_key_value` is always `None`", "past_key_value", "=", "(", "key_states", ",", "value_states", ")", "proj_shape", "=", "(", "bsz", "*", "self", ".", "num_heads", ",", "-", "1", ",", "self", ".", "head_dim", ")", "query_states", "=", "self", ".", "_shape", "(", "query_states", ",", "tgt_len", ",", "bsz", ")", ".", "view", "(", "*", "proj_shape", ")", "key_states", "=", "key_states", ".", "view", "(", "*", "proj_shape", ")", "value_states", "=", "value_states", ".", "view", "(", "*", "proj_shape", ")", "src_len", "=", "key_states", ".", "size", "(", "1", ")", "attn_weights", "=", "torch", ".", "bmm", "(", "query_states", ",", "key_states", ".", "transpose", "(", "1", ",", "2", ")", ")", "assert", "attn_weights", ".", "size", "(", ")", "==", "(", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ",", ")", ",", "f\"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {attn_weights.size()}\"", "if", "attention_mask", "is", "not", "None", ":", "assert", "attention_mask", ".", "size", "(", ")", "==", "(", "bsz", ",", "1", ",", "tgt_len", ",", "src_len", ",", ")", ",", "f\"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}\"", "attn_weights", "=", "attn_weights", ".", "view", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", "+", "attention_mask", "attn_weights", "=", "attn_weights", ".", "view", "(", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", "attn_weights", "=", "F", ".", "softmax", "(", "attn_weights", ",", "dim", "=", "-", "1", ")", "if", "layer_head_mask", "is", "not", "None", ":", "assert", "layer_head_mask", ".", "size", "(", ")", "==", "(", "self", ".", "num_heads", ",", ")", ",", "f\"Head mask for a single layer should be of size {(self.num_heads,)}, but is {layer_head_mask.size()}\"", "attn_weights", "=", "layer_head_mask", ".", "view", "(", "1", ",", "-", "1", ",", "1", ",", "1", ")", "*", "attn_weights", ".", "view", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", "attn_weights", "=", "attn_weights", ".", "view", "(", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", "if", "output_attentions", ":", "# this operation is a bit akward, but it's required to", "# make sure that attn_weights keeps its gradient.", "# In order to do so, attn_weights have to reshaped", "# twice and have to be reused in the following", "attn_weights_reshaped", "=", "attn_weights", ".", "view", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", "attn_weights", "=", "attn_weights_reshaped", ".", "view", "(", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", "else", ":", "attn_weights_reshaped", "=", "None", "attn_probs", "=", "F", ".", "dropout", "(", "attn_weights", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "attn_output", "=", "torch", ".", "bmm", "(", "attn_probs", ",", "value_states", ")", "assert", "attn_output", ".", "size", "(", ")", "==", "(", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "self", ".", "head_dim", ",", ")", ",", "f\"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {attn_output.size()}\"", "attn_output", "=", "(", 
"attn_output", ".", "view", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "self", ".", "head_dim", ")", ".", "transpose", "(", "1", ",", "2", ")", ".", "reshape", "(", "bsz", ",", "tgt_len", ",", "embed_dim", ")", ")", "attn_output", "=", "self", ".", "out_proj", "(", "attn_output", ")", "return", "attn_output", ",", "attn_weights_reshaped", ",", "past_key_value" ]
[ 239, 4 ]
[ 348, 65 ]
python
en
['en', 'pl', 'en']
True
Speech2TextEncoderLayer.forward
( self, hidden_states: torch.Tensor, attention_mask: torch.Tensor, layer_head_mask: torch.Tensor, output_attentions: bool = False, )
Args: hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape :obj:`(seq_len, batch, embed_dim)` attention_mask (:obj:`torch.FloatTensor`): attention mask of size :obj:`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size :obj:`(config.encoder_attention_heads,)`. output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail.
Args: hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape :obj:`(seq_len, batch, embed_dim)` attention_mask (:obj:`torch.FloatTensor`): attention mask of size :obj:`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size :obj:`(config.encoder_attention_heads,)`. output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail.
def forward(
    self,
    hidden_states: torch.Tensor,
    attention_mask: torch.Tensor,
    layer_head_mask: torch.Tensor,
    output_attentions: bool = False,
):
    """
    Args:
        hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape :obj:`(seq_len, batch, embed_dim)`
        attention_mask (:obj:`torch.FloatTensor`): attention mask of size
            :obj:`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
        layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size
            :obj:`(config.encoder_attention_heads,)`.
        output_attentions (:obj:`bool`, `optional`):
            Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
            returned tensors for more detail.
    """
    residual = hidden_states
    hidden_states = self.self_attn_layer_norm(hidden_states)
    hidden_states, attn_weights, _ = self.self_attn(
        hidden_states=hidden_states,
        attention_mask=attention_mask,
        layer_head_mask=layer_head_mask,
        output_attentions=output_attentions,
    )
    hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
    hidden_states = residual + hidden_states

    residual = hidden_states
    hidden_states = self.final_layer_norm(hidden_states)
    hidden_states = self.activation_fn(self.fc1(hidden_states))
    hidden_states = F.dropout(hidden_states, p=self.activation_dropout, training=self.training)
    hidden_states = self.fc2(hidden_states)
    hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
    hidden_states = residual + hidden_states

    if hidden_states.dtype == torch.float16 and (
        torch.isinf(hidden_states).any() or torch.isnan(hidden_states).any()
    ):
        clamp_value = torch.finfo(hidden_states.dtype).max - 1000
        hidden_states = torch.clamp(hidden_states, min=-clamp_value, max=clamp_value)

    outputs = (hidden_states,)

    if output_attentions:
        outputs += (attn_weights,)

    return outputs
[ "def", "forward", "(", "self", ",", "hidden_states", ":", "torch", ".", "Tensor", ",", "attention_mask", ":", "torch", ".", "Tensor", ",", "layer_head_mask", ":", "torch", ".", "Tensor", ",", "output_attentions", ":", "bool", "=", "False", ",", ")", ":", "residual", "=", "hidden_states", "hidden_states", "=", "self", ".", "self_attn_layer_norm", "(", "hidden_states", ")", "hidden_states", ",", "attn_weights", ",", "_", "=", "self", ".", "self_attn", "(", "hidden_states", "=", "hidden_states", ",", "attention_mask", "=", "attention_mask", ",", "layer_head_mask", "=", "layer_head_mask", ",", "output_attentions", "=", "output_attentions", ",", ")", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "residual", "=", "hidden_states", "hidden_states", "=", "self", ".", "final_layer_norm", "(", "hidden_states", ")", "hidden_states", "=", "self", ".", "activation_fn", "(", "self", ".", "fc1", "(", "hidden_states", ")", ")", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "activation_dropout", ",", "training", "=", "self", ".", "training", ")", "hidden_states", "=", "self", ".", "fc2", "(", "hidden_states", ")", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "if", "hidden_states", ".", "dtype", "==", "torch", ".", "float16", "and", "(", "torch", ".", "isinf", "(", "hidden_states", ")", ".", "any", "(", ")", "or", "torch", ".", "isnan", "(", "hidden_states", ")", ".", "any", "(", ")", ")", ":", "clamp_value", "=", "torch", ".", "finfo", "(", "hidden_states", ".", "dtype", ")", ".", "max", "-", "1000", "hidden_states", "=", "torch", ".", "clamp", "(", "hidden_states", ",", "min", "=", "-", "clamp_value", ",", "max", "=", "clamp_value", ")", "outputs", "=", "(", "hidden_states", ",", ")", "if", "output_attentions", ":", "outputs", "+=", "(", "attn_weights", ",", ")", "return", "outputs" ]
[ 368, 4 ]
[ 416, 22 ]
python
en
['en', 'error', 'th']
False
Speech2TextDecoderLayer.forward
( self, hidden_states: torch.Tensor, attention_mask: Optional[torch.Tensor] = None, encoder_hidden_states: Optional[torch.Tensor] = None, encoder_attention_mask: Optional[torch.Tensor] = None, layer_head_mask: Optional[torch.Tensor] = None, encoder_layer_head_mask: Optional[torch.Tensor] = None, past_key_value: Optional[Tuple[torch.Tensor]] = None, output_attentions: Optional[bool] = False, use_cache: Optional[bool] = True, )
Args: hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape :obj:`(seq_len, batch, embed_dim)` attention_mask (:obj:`torch.FloatTensor`): attention mask of size :obj:`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. encoder_hidden_states (:obj:`torch.FloatTensor`): cross attention input to the layer of shape :obj:`(seq_len, batch, embed_dim)` encoder_attention_mask (:obj:`torch.FloatTensor`): encoder attention mask of size :obj:`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size :obj:`(config.encoder_attention_heads,)`. encoder_layer_head_mask (:obj:`torch.FloatTensor`): mask for encoder attention heads in a given layer of size :obj:`(config.encoder_attention_heads,)`. past_key_value (:obj:`Tuple(torch.FloatTensor)`): cached past key and value projection states output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail.
Args: hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape :obj:`(seq_len, batch, embed_dim)` attention_mask (:obj:`torch.FloatTensor`): attention mask of size :obj:`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. encoder_hidden_states (:obj:`torch.FloatTensor`): cross attention input to the layer of shape :obj:`(seq_len, batch, embed_dim)` encoder_attention_mask (:obj:`torch.FloatTensor`): encoder attention mask of size :obj:`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size :obj:`(config.encoder_attention_heads,)`. encoder_layer_head_mask (:obj:`torch.FloatTensor`): mask for encoder attention heads in a given layer of size :obj:`(config.encoder_attention_heads,)`. past_key_value (:obj:`Tuple(torch.FloatTensor)`): cached past key and value projection states output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail.
def forward(
    self,
    hidden_states: torch.Tensor,
    attention_mask: Optional[torch.Tensor] = None,
    encoder_hidden_states: Optional[torch.Tensor] = None,
    encoder_attention_mask: Optional[torch.Tensor] = None,
    layer_head_mask: Optional[torch.Tensor] = None,
    encoder_layer_head_mask: Optional[torch.Tensor] = None,
    past_key_value: Optional[Tuple[torch.Tensor]] = None,
    output_attentions: Optional[bool] = False,
    use_cache: Optional[bool] = True,
):
    """
    Args:
        hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape :obj:`(seq_len, batch, embed_dim)`
        attention_mask (:obj:`torch.FloatTensor`): attention mask of size
            :obj:`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
        encoder_hidden_states (:obj:`torch.FloatTensor`): cross attention input to the layer of shape
            :obj:`(seq_len, batch, embed_dim)`
        encoder_attention_mask (:obj:`torch.FloatTensor`): encoder attention mask of size
            :obj:`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
        layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size
            :obj:`(config.encoder_attention_heads,)`.
        encoder_layer_head_mask (:obj:`torch.FloatTensor`): mask for encoder attention heads in a given layer of
            size :obj:`(config.encoder_attention_heads,)`.
        past_key_value (:obj:`Tuple(torch.FloatTensor)`): cached past key and value projection states
        output_attentions (:obj:`bool`, `optional`):
            Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
            returned tensors for more detail.
    """
    residual = hidden_states
    hidden_states = self.self_attn_layer_norm(hidden_states)

    # Self Attention
    # decoder uni-directional self-attention cached key/values tuple is at positions 1,2
    self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None
    # add present self-attn cache to positions 1,2 of present_key_value tuple
    hidden_states, self_attn_weights, present_key_value = self.self_attn(
        hidden_states=hidden_states,
        past_key_value=self_attn_past_key_value,
        attention_mask=attention_mask,
        layer_head_mask=layer_head_mask,
        output_attentions=output_attentions,
    )
    hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
    hidden_states = residual + hidden_states

    # Cross-Attention Block
    cross_attn_present_key_value = None
    cross_attn_weights = None
    if encoder_hidden_states is not None:
        residual = hidden_states
        hidden_states = self.encoder_attn_layer_norm(hidden_states)

        # cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple
        cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None
        hidden_states, cross_attn_weights, cross_attn_present_key_value = self.encoder_attn(
            hidden_states=hidden_states,
            key_value_states=encoder_hidden_states,
            attention_mask=encoder_attention_mask,
            layer_head_mask=layer_head_mask,
            past_key_value=cross_attn_past_key_value,
            output_attentions=output_attentions,
        )
        hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
        hidden_states = residual + hidden_states

        # add cross-attn to positions 3,4 of present_key_value tuple
        present_key_value = present_key_value + cross_attn_present_key_value

    # Fully Connected
    residual = hidden_states
    hidden_states = self.final_layer_norm(hidden_states)
    hidden_states = self.activation_fn(self.fc1(hidden_states))
    hidden_states = F.dropout(hidden_states, p=self.activation_dropout, training=self.training)
    hidden_states = self.fc2(hidden_states)
    hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
    hidden_states = residual + hidden_states

    outputs = (hidden_states,)

    if output_attentions:
        outputs += (self_attn_weights, cross_attn_weights)

    if use_cache:
        outputs += (present_key_value,)

    return outputs
[ "def", "forward", "(", "self", ",", "hidden_states", ":", "torch", ".", "Tensor", ",", "attention_mask", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "encoder_hidden_states", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "encoder_attention_mask", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "layer_head_mask", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "encoder_layer_head_mask", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "past_key_value", ":", "Optional", "[", "Tuple", "[", "torch", ".", "Tensor", "]", "]", "=", "None", ",", "output_attentions", ":", "Optional", "[", "bool", "]", "=", "False", ",", "use_cache", ":", "Optional", "[", "bool", "]", "=", "True", ",", ")", ":", "residual", "=", "hidden_states", "hidden_states", "=", "self", ".", "self_attn_layer_norm", "(", "hidden_states", ")", "# Self Attention", "# decoder uni-directional self-attention cached key/values tuple is at positions 1,2", "self_attn_past_key_value", "=", "past_key_value", "[", ":", "2", "]", "if", "past_key_value", "is", "not", "None", "else", "None", "# add present self-attn cache to positions 1,2 of present_key_value tuple", "hidden_states", ",", "self_attn_weights", ",", "present_key_value", "=", "self", ".", "self_attn", "(", "hidden_states", "=", "hidden_states", ",", "past_key_value", "=", "self_attn_past_key_value", ",", "attention_mask", "=", "attention_mask", ",", "layer_head_mask", "=", "layer_head_mask", ",", "output_attentions", "=", "output_attentions", ",", ")", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "# Cross-Attention Block", "cross_attn_present_key_value", "=", "None", "cross_attn_weights", "=", "None", "if", "encoder_hidden_states", "is", "not", "None", ":", "residual", "=", "hidden_states", "hidden_states", "=", "self", ".", "encoder_attn_layer_norm", "(", "hidden_states", ")", "# cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple", "cross_attn_past_key_value", "=", "past_key_value", "[", "-", "2", ":", "]", "if", "past_key_value", "is", "not", "None", "else", "None", "hidden_states", ",", "cross_attn_weights", ",", "cross_attn_present_key_value", "=", "self", ".", "encoder_attn", "(", "hidden_states", "=", "hidden_states", ",", "key_value_states", "=", "encoder_hidden_states", ",", "attention_mask", "=", "encoder_attention_mask", ",", "layer_head_mask", "=", "layer_head_mask", ",", "past_key_value", "=", "cross_attn_past_key_value", ",", "output_attentions", "=", "output_attentions", ",", ")", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "# add cross-attn to positions 3,4 of present_key_value tuple", "present_key_value", "=", "present_key_value", "+", "cross_attn_present_key_value", "# Fully Connected", "residual", "=", "hidden_states", "hidden_states", "=", "self", ".", "final_layer_norm", "(", "hidden_states", ")", "hidden_states", "=", "self", ".", "activation_fn", "(", "self", ".", "fc1", "(", "hidden_states", ")", ")", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "activation_dropout", ",", "training", "=", "self", ".", "training", ")", "hidden_states", "=", 
"self", ".", "fc2", "(", "hidden_states", ")", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "outputs", "=", "(", "hidden_states", ",", ")", "if", "output_attentions", ":", "outputs", "+=", "(", "self_attn_weights", ",", "cross_attn_weights", ")", "if", "use_cache", ":", "outputs", "+=", "(", "present_key_value", ",", ")", "return", "outputs" ]
[ 446, 4 ]
[ 532, 22 ]
python
en
['en', 'error', 'th']
False
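The per-layer cache handling in the decoder-layer forward above packs self-attention and cross-attention key/value states into a single 4-tuple per layer. A minimal stand-alone sketch of that slicing, using dummy tensors whose shapes are purely illustrative:

import torch

# (self_attn_key, self_attn_value, cross_attn_key, cross_attn_value)
past_key_value = tuple(torch.zeros(1, 4, 7, 16) for _ in range(4))
self_attn_past_key_value = past_key_value[:2]    # positions 1,2: decoder self-attention cache
cross_attn_past_key_value = past_key_value[-2:]  # positions 3,4: cross-attention cache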
Speech2TextPreTrainedModel._get_subsampled_output_lengths
(self, input_lengths: torch.LongTensor)
Computes the output length of the convolutional layers
Computes the output length of the convolutional layers
def _get_subsampled_output_lengths(self, input_lengths: torch.LongTensor): """ Computes the output length of the convolutional layers """ for i in range(self.config.num_conv_layers): input_lengths = (input_lengths - 1) // 2 + 1 return input_lengths
[ "def", "_get_subsampled_output_lengths", "(", "self", ",", "input_lengths", ":", "torch", ".", "LongTensor", ")", ":", "for", "i", "in", "range", "(", "self", ".", "config", ".", "num_conv_layers", ")", ":", "input_lengths", "=", "(", "input_lengths", "-", "1", ")", "//", "2", "+", "1", "return", "input_lengths" ]
[ 550, 4 ]
[ 558, 28 ]
python
en
['en', 'error', 'th']
False
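The length formula above halves the sequence length once per convolutional layer (stride 2, rounded up). A small stand-alone sketch, assuming the default of two conv layers:

def subsampled_length(input_length, num_conv_layers=2):
    # mirrors (input_lengths - 1) // 2 + 1 applied once per conv layer
    for _ in range(num_conv_layers):
        input_length = (input_length - 1) // 2 + 1
    return input_length

print(subsampled_length(100))  # 25
print(subsampled_length(31))   # 8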
Speech2TextEncoder.forward
( self, input_features, attention_mask=None, head_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, )
r""" Args: input_features (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length, feature_size)`): Float values of fbank features extracted from the raw speech waveform. Raw speech waveform can be obtained by loading a ``.flac`` or ``.wav`` audio file into an array of type :obj:`List[float]` or a :obj:`numpy.ndarray`, *e.g.* via the soundfile library (``pip install soundfile``). To prepare the array into :obj:`input_features`, the :class:`~transformers.Speech2TextTokenizer` should be used for extracting the fbank features, padding and conversion into a tensor of type :obj:`torch.FloatTensor`. See :meth:`~transformers.Speech2TextTokenizer.__call__` attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): Mask to avoid performing convolution and attention on padding token indices. Mask values selected in ``[0, 1]``: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. `What are attention masks? <../glossary.html#attention-mask>`__ head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`): Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: - 1 indicates the head is **not masked**, - 0 indicates the heas is **masked**. output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail. output_hidden_states (:obj:`bool`, `optional`): Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for more detail. return_dict (:obj:`bool`, `optional`): Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
r""" Args: input_features (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length, feature_size)`): Float values of fbank features extracted from the raw speech waveform. Raw speech waveform can be obtained by loading a ``.flac`` or ``.wav`` audio file into an array of type :obj:`List[float]` or a :obj:`numpy.ndarray`, *e.g.* via the soundfile library (``pip install soundfile``). To prepare the array into :obj:`input_features`, the :class:`~transformers.Speech2TextTokenizer` should be used for extracting the fbank features, padding and conversion into a tensor of type :obj:`torch.FloatTensor`. See :meth:`~transformers.Speech2TextTokenizer.__call__` attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): Mask to avoid performing convolution and attention on padding token indices. Mask values selected in ``[0, 1]``:
def forward( self, input_features, attention_mask=None, head_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r""" Args: input_features (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length, feature_size)`): Float values of fbank features extracted from the raw speech waveform. Raw speech waveform can be obtained by loading a ``.flac`` or ``.wav`` audio file into an array of type :obj:`List[float]` or a :obj:`numpy.ndarray`, *e.g.* via the soundfile library (``pip install soundfile``). To prepare the array into :obj:`input_features`, the :class:`~transformers.Speech2TextTokenizer` should be used for extracting the fbank features, padding and conversion into a tensor of type :obj:`torch.FloatTensor`. See :meth:`~transformers.Speech2TextTokenizer.__call__` attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): Mask to avoid performing convolution and attention on padding token indices. Mask values selected in ``[0, 1]``: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. `What are attention masks? <../glossary.html#attention-mask>`__ head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`): Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: - 1 indicates the head is **not masked**, - 0 indicates the heas is **masked**. output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail. output_hidden_states (:obj:`bool`, `optional`): Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for more detail. return_dict (:obj:`bool`, `optional`): Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple. """ output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict if attention_mask is not None: attention_mask = self._get_subsampled_encoder_attn_mask(attention_mask) inputs_embeds = self.conv(input_features) inputs_embeds = self.embed_scale * inputs_embeds if attention_mask is None: padding_mask = torch.zeros_like(inputs_embeds, dtype=torch.long) else: padding_mask = attention_mask.ne(1).long() embed_pos = self.embed_positions(padding_mask) hidden_states = inputs_embeds + embed_pos hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training) # expand attention_mask if attention_mask is not None: # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] attention_mask = _expand_mask(attention_mask, inputs_embeds.dtype) encoder_states = () if output_hidden_states else None all_attentions = () if output_attentions else None # check if head_mask has a correct number of layers specified if desired if head_mask is not None: assert head_mask.size()[0] == ( len(self.layers) ), f"The head_mask should be specified for {len(self.layers)} layers, but it is for {head_mask.size()[0]}." 
for idx, encoder_layer in enumerate(self.layers): if output_hidden_states: encoder_states = encoder_states + (hidden_states,) # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) dropout_probability = random.uniform(0, 1) if self.training and (dropout_probability < self.layerdrop): # skip the layer layer_outputs = (None, None) else: if getattr(self.config, "gradient_checkpointing", False) and self.training: def create_custom_forward(module): def custom_forward(*inputs): return module(*inputs, output_attentions) return custom_forward layer_outputs = torch.utils.checkpoint.checkpoint( create_custom_forward(encoder_layer), hidden_states, attention_mask, (head_mask[idx] if head_mask is not None else None), ) else: layer_outputs = encoder_layer( hidden_states, attention_mask, layer_head_mask=(head_mask[idx] if head_mask is not None else None), output_attentions=output_attentions, ) hidden_states = layer_outputs[0] if output_attentions: all_attentions = all_attentions + (layer_outputs[1],) hidden_states = self.layer_norm(hidden_states) if output_hidden_states: encoder_states = encoder_states + (hidden_states,) if not return_dict: return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None) return BaseModelOutput( last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions )
[ "def", "forward", "(", "self", ",", "input_features", ",", "attention_mask", "=", "None", ",", "head_mask", "=", "None", ",", "output_attentions", "=", "None", ",", "output_hidden_states", "=", "None", ",", "return_dict", "=", "None", ",", ")", ":", "output_attentions", "=", "output_attentions", "if", "output_attentions", "is", "not", "None", "else", "self", ".", "config", ".", "output_attentions", "output_hidden_states", "=", "(", "output_hidden_states", "if", "output_hidden_states", "is", "not", "None", "else", "self", ".", "config", ".", "output_hidden_states", ")", "return_dict", "=", "return_dict", "if", "return_dict", "is", "not", "None", "else", "self", ".", "config", ".", "use_return_dict", "if", "attention_mask", "is", "not", "None", ":", "attention_mask", "=", "self", ".", "_get_subsampled_encoder_attn_mask", "(", "attention_mask", ")", "inputs_embeds", "=", "self", ".", "conv", "(", "input_features", ")", "inputs_embeds", "=", "self", ".", "embed_scale", "*", "inputs_embeds", "if", "attention_mask", "is", "None", ":", "padding_mask", "=", "torch", ".", "zeros_like", "(", "inputs_embeds", ",", "dtype", "=", "torch", ".", "long", ")", "else", ":", "padding_mask", "=", "attention_mask", ".", "ne", "(", "1", ")", ".", "long", "(", ")", "embed_pos", "=", "self", ".", "embed_positions", "(", "padding_mask", ")", "hidden_states", "=", "inputs_embeds", "+", "embed_pos", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "# expand attention_mask", "if", "attention_mask", "is", "not", "None", ":", "# [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]", "attention_mask", "=", "_expand_mask", "(", "attention_mask", ",", "inputs_embeds", ".", "dtype", ")", "encoder_states", "=", "(", ")", "if", "output_hidden_states", "else", "None", "all_attentions", "=", "(", ")", "if", "output_attentions", "else", "None", "# check if head_mask has a correct number of layers specified if desired", "if", "head_mask", "is", "not", "None", ":", "assert", "head_mask", ".", "size", "(", ")", "[", "0", "]", "==", "(", "len", "(", "self", ".", "layers", ")", ")", ",", "f\"The head_mask should be specified for {len(self.layers)} layers, but it is for {head_mask.size()[0]}.\"", "for", "idx", ",", "encoder_layer", "in", "enumerate", "(", "self", ".", "layers", ")", ":", "if", "output_hidden_states", ":", "encoder_states", "=", "encoder_states", "+", "(", "hidden_states", ",", ")", "# add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)", "dropout_probability", "=", "random", ".", "uniform", "(", "0", ",", "1", ")", "if", "self", ".", "training", "and", "(", "dropout_probability", "<", "self", ".", "layerdrop", ")", ":", "# skip the layer", "layer_outputs", "=", "(", "None", ",", "None", ")", "else", ":", "if", "getattr", "(", "self", ".", "config", ",", "\"gradient_checkpointing\"", ",", "False", ")", "and", "self", ".", "training", ":", "def", "create_custom_forward", "(", "module", ")", ":", "def", "custom_forward", "(", "*", "inputs", ")", ":", "return", "module", "(", "*", "inputs", ",", "output_attentions", ")", "return", "custom_forward", "layer_outputs", "=", "torch", ".", "utils", ".", "checkpoint", ".", "checkpoint", "(", "create_custom_forward", "(", "encoder_layer", ")", ",", "hidden_states", ",", "attention_mask", ",", "(", "head_mask", "[", "idx", "]", "if", "head_mask", "is", "not", "None", "else", "None", ")", ",", ")", "else", ":", "layer_outputs", "=", "encoder_layer", "(", 
"hidden_states", ",", "attention_mask", ",", "layer_head_mask", "=", "(", "head_mask", "[", "idx", "]", "if", "head_mask", "is", "not", "None", "else", "None", ")", ",", "output_attentions", "=", "output_attentions", ",", ")", "hidden_states", "=", "layer_outputs", "[", "0", "]", "if", "output_attentions", ":", "all_attentions", "=", "all_attentions", "+", "(", "layer_outputs", "[", "1", "]", ",", ")", "hidden_states", "=", "self", ".", "layer_norm", "(", "hidden_states", ")", "if", "output_hidden_states", ":", "encoder_states", "=", "encoder_states", "+", "(", "hidden_states", ",", ")", "if", "not", "return_dict", ":", "return", "tuple", "(", "v", "for", "v", "in", "[", "hidden_states", ",", "encoder_states", ",", "all_attentions", "]", "if", "v", "is", "not", "None", ")", "return", "BaseModelOutput", "(", "last_hidden_state", "=", "hidden_states", ",", "hidden_states", "=", "encoder_states", ",", "attentions", "=", "all_attentions", ")" ]
[ 699, 4 ]
[ 818, 9 ]
python
cy
['en', 'cy', 'hi']
False
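When gradient checkpointing is enabled, the encoder loop above wraps each layer in a closure before handing it to torch.utils.checkpoint so that activations are recomputed during backward. A minimal, generic sketch of that pattern; the nn.Linear module is a stand-in, not the real encoder layer:

import torch
import torch.nn as nn
import torch.utils.checkpoint

layer = nn.Linear(8, 8)

def create_custom_forward(module):
    def custom_forward(*inputs):
        return module(*inputs)
    return custom_forward

x = torch.randn(2, 8, requires_grad=True)
# activations inside the checkpointed call are recomputed during backward
y = torch.utils.checkpoint.checkpoint(create_custom_forward(layer), x)
y.sum().backward()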
Speech2TextDecoder.forward
( self, input_ids=None, attention_mask=None, encoder_hidden_states=None, encoder_attention_mask=None, head_mask=None, encoder_head_mask=None, past_key_values=None, inputs_embeds=None, use_cache=None, output_attentions=None, output_hidden_states=None, return_dict=None, )
r""" Args: input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using :class:`~transformers.Speech2TextTokenizer`. See :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for details. `What are input IDs? <../glossary.html#input-ids>`__ attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. `What are attention masks? <../glossary.html#attention-mask>`__ encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, encoder_sequence_length, hidden_size)`, `optional`): Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. encoder_attention_mask (:obj:`torch.LongTensor` of shape :obj:`(batch_size, encoder_sequence_length)`, `optional`): Mask to avoid performing cross-attention on padding tokens indices of encoder input_ids. Mask values selected in ``[0, 1]``: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. `What are attention masks? <../glossary.html#attention-mask>`__ head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`): Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: - 1 indicates the head is **not masked**, - 0 indicates the heas is **masked**. encoder_head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`): Mask to nullify selected heads of the attention modules in encoder to avoid performing cross-attention on hidden heads. Mask values selected in ``[0, 1]``: - 1 indicates the head is **not masked**, - 0 indicates the heas is **masked**. past_key_values (:obj:`Tuple[Tuple[torch.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up decoding. If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids` (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)` instead of all :obj:`decoder_input_ids`` of shape :obj:`(batch_size, sequence_length)`. inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert :obj:`input_ids` indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail. output_hidden_states (:obj:`bool`, `optional`): Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for more detail. return_dict (:obj:`bool`, `optional`): Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
r""" Args: input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it.
def forward( self, input_ids=None, attention_mask=None, encoder_hidden_states=None, encoder_attention_mask=None, head_mask=None, encoder_head_mask=None, past_key_values=None, inputs_embeds=None, use_cache=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r""" Args: input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using :class:`~transformers.Speech2TextTokenizer`. See :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for details. `What are input IDs? <../glossary.html#input-ids>`__ attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. `What are attention masks? <../glossary.html#attention-mask>`__ encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, encoder_sequence_length, hidden_size)`, `optional`): Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. encoder_attention_mask (:obj:`torch.LongTensor` of shape :obj:`(batch_size, encoder_sequence_length)`, `optional`): Mask to avoid performing cross-attention on padding tokens indices of encoder input_ids. Mask values selected in ``[0, 1]``: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. `What are attention masks? <../glossary.html#attention-mask>`__ head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`): Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: - 1 indicates the head is **not masked**, - 0 indicates the heas is **masked**. encoder_head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`): Mask to nullify selected heads of the attention modules in encoder to avoid performing cross-attention on hidden heads. Mask values selected in ``[0, 1]``: - 1 indicates the head is **not masked**, - 0 indicates the heas is **masked**. past_key_values (:obj:`Tuple[Tuple[torch.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up decoding. If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids` (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)` instead of all :obj:`decoder_input_ids`` of shape :obj:`(batch_size, sequence_length)`. inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert :obj:`input_ids` indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail. 
output_hidden_states (:obj:`bool`, `optional`): Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for more detail. return_dict (:obj:`bool`, `optional`): Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple. """ output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) use_cache = use_cache if use_cache is not None else self.config.use_cache return_dict = return_dict if return_dict is not None else self.config.use_return_dict # retrieve input_ids and inputs_embeds if input_ids is not None and inputs_embeds is not None: raise ValueError("You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time") elif input_ids is not None: input_shape = input_ids.size() input_ids = input_ids.view(-1, input_shape[-1]) elif inputs_embeds is not None: input_shape = inputs_embeds.size()[:-1] else: raise ValueError("You have to specify either decoder_input_ids or decoder_inputs_embeds") # past_key_values_length past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0 if inputs_embeds is None: inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale attention_mask = self._prepare_decoder_attention_mask( attention_mask, input_shape, inputs_embeds, past_key_values_length ) # expand encoder attention mask if encoder_hidden_states is not None and encoder_attention_mask is not None: encoder_attention_mask = self._get_subsampled_encoder_attn_mask(encoder_attention_mask) # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] encoder_attention_mask = _expand_mask(encoder_attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1]) # embed positions positions = self.embed_positions(input_ids, past_key_values_length=past_key_values_length) hidden_states = inputs_embeds + positions hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training) # decoder layers all_hidden_states = () if output_hidden_states else None all_self_attns = () if output_attentions else None all_cross_attentions = () if (output_attentions and encoder_hidden_states is not None) else None next_decoder_cache = () if use_cache else None # check if head_mask has a correct number of layers specified if desired if head_mask is not None: assert head_mask.size()[0] == ( len(self.layers) ), f"The head_mask should be specified for {len(self.layers)} layers, but it is for {head_mask.size()[0]}." for idx, decoder_layer in enumerate(self.layers): # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) if output_hidden_states: all_hidden_states += (hidden_states,) dropout_probability = random.uniform(0, 1) if self.training and (dropout_probability < self.layerdrop): continue past_key_value = past_key_values[idx] if past_key_values is not None else None if getattr(self.config, "gradient_checkpointing", False) and self.training: if use_cache: logger.warn( "`use_cache = True` is incompatible with `config.gradient_checkpointing = True`. Setting `use_cache = False`..." 
) use_cache = False def create_custom_forward(module): def custom_forward(*inputs): # None for past_key_value return module(*inputs, output_attentions, use_cache) return custom_forward layer_outputs = torch.utils.checkpoint.checkpoint( create_custom_forward(decoder_layer), hidden_states, attention_mask, encoder_hidden_states, encoder_attention_mask, head_mask[idx] if head_mask is not None else None, encoder_head_mask[idx] if encoder_head_mask is not None else None, None, ) else: layer_outputs = decoder_layer( hidden_states, attention_mask=attention_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_attention_mask, layer_head_mask=(head_mask[idx] if head_mask is not None else None), encoder_layer_head_mask=(encoder_head_mask[idx] if encoder_head_mask is not None else None), past_key_value=past_key_value, output_attentions=output_attentions, use_cache=use_cache, ) hidden_states = layer_outputs[0] if use_cache: next_decoder_cache += (layer_outputs[3 if output_attentions else 1],) if output_attentions: all_self_attns += (layer_outputs[1],) if encoder_hidden_states is not None: all_cross_attentions += (layer_outputs[2],) hidden_states = self.layer_norm(hidden_states) # add hidden states from the last decoder layer if output_hidden_states: all_hidden_states += (hidden_states,) next_cache = next_decoder_cache if use_cache else None if not return_dict: return tuple( v for v in [hidden_states, next_cache, all_hidden_states, all_self_attns, all_cross_attentions] if v is not None ) return BaseModelOutputWithPastAndCrossAttentions( last_hidden_state=hidden_states, past_key_values=next_cache, hidden_states=all_hidden_states, attentions=all_self_attns, cross_attentions=all_cross_attentions, )
[ "def", "forward", "(", "self", ",", "input_ids", "=", "None", ",", "attention_mask", "=", "None", ",", "encoder_hidden_states", "=", "None", ",", "encoder_attention_mask", "=", "None", ",", "head_mask", "=", "None", ",", "encoder_head_mask", "=", "None", ",", "past_key_values", "=", "None", ",", "inputs_embeds", "=", "None", ",", "use_cache", "=", "None", ",", "output_attentions", "=", "None", ",", "output_hidden_states", "=", "None", ",", "return_dict", "=", "None", ",", ")", ":", "output_attentions", "=", "output_attentions", "if", "output_attentions", "is", "not", "None", "else", "self", ".", "config", ".", "output_attentions", "output_hidden_states", "=", "(", "output_hidden_states", "if", "output_hidden_states", "is", "not", "None", "else", "self", ".", "config", ".", "output_hidden_states", ")", "use_cache", "=", "use_cache", "if", "use_cache", "is", "not", "None", "else", "self", ".", "config", ".", "use_cache", "return_dict", "=", "return_dict", "if", "return_dict", "is", "not", "None", "else", "self", ".", "config", ".", "use_return_dict", "# retrieve input_ids and inputs_embeds", "if", "input_ids", "is", "not", "None", "and", "inputs_embeds", "is", "not", "None", ":", "raise", "ValueError", "(", "\"You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time\"", ")", "elif", "input_ids", "is", "not", "None", ":", "input_shape", "=", "input_ids", ".", "size", "(", ")", "input_ids", "=", "input_ids", ".", "view", "(", "-", "1", ",", "input_shape", "[", "-", "1", "]", ")", "elif", "inputs_embeds", "is", "not", "None", ":", "input_shape", "=", "inputs_embeds", ".", "size", "(", ")", "[", ":", "-", "1", "]", "else", ":", "raise", "ValueError", "(", "\"You have to specify either decoder_input_ids or decoder_inputs_embeds\"", ")", "# past_key_values_length", "past_key_values_length", "=", "past_key_values", "[", "0", "]", "[", "0", "]", ".", "shape", "[", "2", "]", "if", "past_key_values", "is", "not", "None", "else", "0", "if", "inputs_embeds", "is", "None", ":", "inputs_embeds", "=", "self", ".", "embed_tokens", "(", "input_ids", ")", "*", "self", ".", "embed_scale", "attention_mask", "=", "self", ".", "_prepare_decoder_attention_mask", "(", "attention_mask", ",", "input_shape", ",", "inputs_embeds", ",", "past_key_values_length", ")", "# expand encoder attention mask", "if", "encoder_hidden_states", "is", "not", "None", "and", "encoder_attention_mask", "is", "not", "None", ":", "encoder_attention_mask", "=", "self", ".", "_get_subsampled_encoder_attn_mask", "(", "encoder_attention_mask", ")", "# [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]", "encoder_attention_mask", "=", "_expand_mask", "(", "encoder_attention_mask", ",", "inputs_embeds", ".", "dtype", ",", "tgt_len", "=", "input_shape", "[", "-", "1", "]", ")", "# embed positions", "positions", "=", "self", ".", "embed_positions", "(", "input_ids", ",", "past_key_values_length", "=", "past_key_values_length", ")", "hidden_states", "=", "inputs_embeds", "+", "positions", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "# decoder layers", "all_hidden_states", "=", "(", ")", "if", "output_hidden_states", "else", "None", "all_self_attns", "=", "(", ")", "if", "output_attentions", "else", "None", "all_cross_attentions", "=", "(", ")", "if", "(", "output_attentions", "and", "encoder_hidden_states", "is", "not", "None", ")", "else", "None", "next_decoder_cache", "=", "(", ")", "if", "use_cache", 
"else", "None", "# check if head_mask has a correct number of layers specified if desired", "if", "head_mask", "is", "not", "None", ":", "assert", "head_mask", ".", "size", "(", ")", "[", "0", "]", "==", "(", "len", "(", "self", ".", "layers", ")", ")", ",", "f\"The head_mask should be specified for {len(self.layers)} layers, but it is for {head_mask.size()[0]}.\"", "for", "idx", ",", "decoder_layer", "in", "enumerate", "(", "self", ".", "layers", ")", ":", "# add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)", "if", "output_hidden_states", ":", "all_hidden_states", "+=", "(", "hidden_states", ",", ")", "dropout_probability", "=", "random", ".", "uniform", "(", "0", ",", "1", ")", "if", "self", ".", "training", "and", "(", "dropout_probability", "<", "self", ".", "layerdrop", ")", ":", "continue", "past_key_value", "=", "past_key_values", "[", "idx", "]", "if", "past_key_values", "is", "not", "None", "else", "None", "if", "getattr", "(", "self", ".", "config", ",", "\"gradient_checkpointing\"", ",", "False", ")", "and", "self", ".", "training", ":", "if", "use_cache", ":", "logger", ".", "warn", "(", "\"`use_cache = True` is incompatible with `config.gradient_checkpointing = True`. Setting `use_cache = False`...\"", ")", "use_cache", "=", "False", "def", "create_custom_forward", "(", "module", ")", ":", "def", "custom_forward", "(", "*", "inputs", ")", ":", "# None for past_key_value", "return", "module", "(", "*", "inputs", ",", "output_attentions", ",", "use_cache", ")", "return", "custom_forward", "layer_outputs", "=", "torch", ".", "utils", ".", "checkpoint", ".", "checkpoint", "(", "create_custom_forward", "(", "decoder_layer", ")", ",", "hidden_states", ",", "attention_mask", ",", "encoder_hidden_states", ",", "encoder_attention_mask", ",", "head_mask", "[", "idx", "]", "if", "head_mask", "is", "not", "None", "else", "None", ",", "encoder_head_mask", "[", "idx", "]", "if", "encoder_head_mask", "is", "not", "None", "else", "None", ",", "None", ",", ")", "else", ":", "layer_outputs", "=", "decoder_layer", "(", "hidden_states", ",", "attention_mask", "=", "attention_mask", ",", "encoder_hidden_states", "=", "encoder_hidden_states", ",", "encoder_attention_mask", "=", "encoder_attention_mask", ",", "layer_head_mask", "=", "(", "head_mask", "[", "idx", "]", "if", "head_mask", "is", "not", "None", "else", "None", ")", ",", "encoder_layer_head_mask", "=", "(", "encoder_head_mask", "[", "idx", "]", "if", "encoder_head_mask", "is", "not", "None", "else", "None", ")", ",", "past_key_value", "=", "past_key_value", ",", "output_attentions", "=", "output_attentions", ",", "use_cache", "=", "use_cache", ",", ")", "hidden_states", "=", "layer_outputs", "[", "0", "]", "if", "use_cache", ":", "next_decoder_cache", "+=", "(", "layer_outputs", "[", "3", "if", "output_attentions", "else", "1", "]", ",", ")", "if", "output_attentions", ":", "all_self_attns", "+=", "(", "layer_outputs", "[", "1", "]", ",", ")", "if", "encoder_hidden_states", "is", "not", "None", ":", "all_cross_attentions", "+=", "(", "layer_outputs", "[", "2", "]", ",", ")", "hidden_states", "=", "self", ".", "layer_norm", "(", "hidden_states", ")", "# add hidden states from the last decoder layer", "if", "output_hidden_states", ":", "all_hidden_states", "+=", "(", "hidden_states", ",", ")", "next_cache", "=", "next_decoder_cache", "if", "use_cache", "else", "None", "if", "not", "return_dict", ":", "return", "tuple", "(", "v", "for", "v", "in", "[", "hidden_states", ",", "next_cache", ",", "all_hidden_states", ",", 
"all_self_attns", ",", "all_cross_attentions", "]", "if", "v", "is", "not", "None", ")", "return", "BaseModelOutputWithPastAndCrossAttentions", "(", "last_hidden_state", "=", "hidden_states", ",", "past_key_values", "=", "next_cache", ",", "hidden_states", "=", "all_hidden_states", ",", "attentions", "=", "all_self_attns", ",", "cross_attentions", "=", "all_cross_attentions", ",", ")" ]
[ 874, 4 ]
[ 1081, 9 ]
python
cy
['en', 'cy', 'hi']
False
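During incremental decoding, the decoder forward above reads the number of already-generated steps from the cached key tensor of the first layer. A tiny illustration with dummy tensors (all shapes are made up):

import torch

# one decoder layer; cache = (self_k, self_v, cross_k, cross_v); 7 steps decoded so far
past_key_values = (tuple(torch.zeros(1, 4, 7, 16) for _ in range(4)),)
past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0
print(past_key_values_length)  # 7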
PretrainedConfig.use_return_dict
(self)
:obj:`bool`: Whether or not to return :class:`~transformers.file_utils.ModelOutput` instead of tuples.
:obj:`bool`: Whether or not to return :class:`~transformers.file_utils.ModelOutput` instead of tuples.
def use_return_dict(self) -> bool: """ :obj:`bool`: Whether or not to return :class:`~transformers.file_utils.ModelOutput` instead of tuples. """ # If torchscript is set, force `return_dict=False` to avoid jit errors return self.return_dict and not self.torchscript
[ "def", "use_return_dict", "(", "self", ")", "->", "bool", ":", "# If torchscript is set, force `return_dict=False` to avoid jit errors", "return", "self", ".", "return_dict", "and", "not", "self", ".", "torchscript" ]
[ 281, 4 ]
[ 286, 56 ]
python
en
['en', 'error', 'th']
False
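A short usage sketch of the property above; a plain PretrainedConfig is enough to see the torchscript override:

from transformers import PretrainedConfig

config = PretrainedConfig(return_dict=True, torchscript=True)
print(config.use_return_dict)  # False: torchscript forces tuple outputs

config = PretrainedConfig(return_dict=True)
print(config.use_return_dict)  # True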
PretrainedConfig.num_labels
(self)
:obj:`int`: The number of labels for classification models.
:obj:`int`: The number of labels for classification models.
def num_labels(self) -> int: """ :obj:`int`: The number of labels for classification models. """ return len(self.id2label)
[ "def", "num_labels", "(", "self", ")", "->", "int", ":", "return", "len", "(", "self", ".", "id2label", ")" ]
[ 289, 4 ]
[ 293, 33 ]
python
en
['en', 'error', 'th']
False
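num_labels is derived from id2label, so passing it at construction builds default label maps. A quick sketch (label names shown are the library's generated defaults at the time of writing):

from transformers import PretrainedConfig

config = PretrainedConfig(num_labels=3)
print(config.num_labels)  # 3
print(config.id2label)    # {0: 'LABEL_0', 1: 'LABEL_1', 2: 'LABEL_2'}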
PretrainedConfig.save_pretrained
(self, save_directory: Union[str, os.PathLike])
Save a configuration object to the directory ``save_directory``, so that it can be re-loaded using the :func:`~transformers.PretrainedConfig.from_pretrained` class method. Args: save_directory (:obj:`str` or :obj:`os.PathLike`): Directory where the configuration JSON file will be saved (will be created if it does not exist).
Save a configuration object to the directory ``save_directory``, so that it can be re-loaded using the :func:`~transformers.PretrainedConfig.from_pretrained` class method.
def save_pretrained(self, save_directory: Union[str, os.PathLike]): """ Save a configuration object to the directory ``save_directory``, so that it can be re-loaded using the :func:`~transformers.PretrainedConfig.from_pretrained` class method. Args: save_directory (:obj:`str` or :obj:`os.PathLike`): Directory where the configuration JSON file will be saved (will be created if it does not exist). """ if os.path.isfile(save_directory): raise AssertionError("Provided path ({}) should be a directory, not a file".format(save_directory)) os.makedirs(save_directory, exist_ok=True) # If we save using the predefined names, we can load using `from_pretrained` output_config_file = os.path.join(save_directory, CONFIG_NAME) self.to_json_file(output_config_file, use_diff=True) logger.info(f"Configuration saved in {output_config_file}")
[ "def", "save_pretrained", "(", "self", ",", "save_directory", ":", "Union", "[", "str", ",", "os", ".", "PathLike", "]", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "save_directory", ")", ":", "raise", "AssertionError", "(", "\"Provided path ({}) should be a directory, not a file\"", ".", "format", "(", "save_directory", ")", ")", "os", ".", "makedirs", "(", "save_directory", ",", "exist_ok", "=", "True", ")", "# If we save using the predefined names, we can load using `from_pretrained`", "output_config_file", "=", "os", ".", "path", ".", "join", "(", "save_directory", ",", "CONFIG_NAME", ")", "self", ".", "to_json_file", "(", "output_config_file", ",", "use_diff", "=", "True", ")", "logger", ".", "info", "(", "f\"Configuration saved in {output_config_file}\"", ")" ]
[ 301, 4 ]
[ 317, 67 ]
python
en
['en', 'error', 'th']
False
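A minimal round-trip sketch; BertConfig is used only as a convenient concrete subclass, and the directory name is arbitrary:

from transformers import BertConfig

config = BertConfig(hidden_size=120, num_attention_heads=12)
config.save_pretrained("./my_model_directory")           # writes ./my_model_directory/config.json
reloaded = BertConfig.from_pretrained("./my_model_directory")
assert reloaded.hidden_size == 120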
PretrainedConfig.from_pretrained
(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs)
r""" Instantiate a :class:`~transformers.PretrainedConfig` (or a derived class) from a pretrained model configuration. Args: pretrained_model_name_or_path (:obj:`str` or :obj:`os.PathLike`): This can be either: - a string, the `model id` of a pretrained model configuration hosted inside a model repo on huggingface.co. Valid model ids can be located at the root-level, like ``bert-base-uncased``, or namespaced under a user or organization name, like ``dbmdz/bert-base-german-cased``. - a path to a `directory` containing a configuration file saved using the :func:`~transformers.PretrainedConfig.save_pretrained` method, e.g., ``./my_model_directory/``. - a path or url to a saved configuration JSON `file`, e.g., ``./my_model_directory/configuration.json``. cache_dir (:obj:`str` or :obj:`os.PathLike`, `optional`): Path to a directory in which a downloaded pretrained model configuration should be cached if the standard cache should not be used. force_download (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether or not to force to (re-)download the configuration files and override the cached versions if they exist. resume_download (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether or not to delete incompletely received file. Attempts to resume the download if such a file exists. proxies (:obj:`Dict[str, str]`, `optional`): A dictionary of proxy servers to use by protocol or endpoint, e.g., :obj:`{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. use_auth_token (:obj:`str` or `bool`, `optional`): The token to use as HTTP bearer authorization for remote files. If :obj:`True`, will use the token generated when running :obj:`transformers-cli login` (stored in :obj:`~/.huggingface`). revision(:obj:`str`, `optional`, defaults to :obj:`"main"`): The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so ``revision`` can be any identifier allowed by git. return_unused_kwargs (:obj:`bool`, `optional`, defaults to :obj:`False`): If :obj:`False`, then this function returns just the final configuration object. If :obj:`True`, then this functions returns a :obj:`Tuple(config, unused_kwargs)` where `unused_kwargs` is a dictionary consisting of the key/value pairs whose keys are not configuration attributes: i.e., the part of ``kwargs`` which has not been used to update ``config`` and is otherwise ignored. kwargs (:obj:`Dict[str, Any]`, `optional`): The values in kwargs of any keys which are configuration attributes will be used to override the loaded values. Behavior concerning key/value pairs whose keys are *not* configuration attributes is controlled by the ``return_unused_kwargs`` keyword parameter. .. note:: Passing :obj:`use_auth_token=True` is required when you want to use a private model. Returns: :class:`PretrainedConfig`: The configuration object instantiated from this pretrained model. Examples:: # We can't instantiate directly the base class `PretrainedConfig` so let's show the examples on a # derived class: BertConfig config = BertConfig.from_pretrained('bert-base-uncased') # Download configuration from huggingface.co and cache. config = BertConfig.from_pretrained('./test/saved_model/') # E.g. 
config (or model) was saved using `save_pretrained('./test/saved_model/')` config = BertConfig.from_pretrained('./test/saved_model/my_configuration.json') config = BertConfig.from_pretrained('bert-base-uncased', output_attentions=True, foo=False) assert config.output_attentions == True config, unused_kwargs = BertConfig.from_pretrained('bert-base-uncased', output_attentions=True, foo=False, return_unused_kwargs=True) assert config.output_attentions == True assert unused_kwargs == {'foo': False}
r""" Instantiate a :class:`~transformers.PretrainedConfig` (or a derived class) from a pretrained model configuration.
def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs) -> "PretrainedConfig": r""" Instantiate a :class:`~transformers.PretrainedConfig` (or a derived class) from a pretrained model configuration. Args: pretrained_model_name_or_path (:obj:`str` or :obj:`os.PathLike`): This can be either: - a string, the `model id` of a pretrained model configuration hosted inside a model repo on huggingface.co. Valid model ids can be located at the root-level, like ``bert-base-uncased``, or namespaced under a user or organization name, like ``dbmdz/bert-base-german-cased``. - a path to a `directory` containing a configuration file saved using the :func:`~transformers.PretrainedConfig.save_pretrained` method, e.g., ``./my_model_directory/``. - a path or url to a saved configuration JSON `file`, e.g., ``./my_model_directory/configuration.json``. cache_dir (:obj:`str` or :obj:`os.PathLike`, `optional`): Path to a directory in which a downloaded pretrained model configuration should be cached if the standard cache should not be used. force_download (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether or not to force to (re-)download the configuration files and override the cached versions if they exist. resume_download (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether or not to delete incompletely received file. Attempts to resume the download if such a file exists. proxies (:obj:`Dict[str, str]`, `optional`): A dictionary of proxy servers to use by protocol or endpoint, e.g., :obj:`{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. use_auth_token (:obj:`str` or `bool`, `optional`): The token to use as HTTP bearer authorization for remote files. If :obj:`True`, will use the token generated when running :obj:`transformers-cli login` (stored in :obj:`~/.huggingface`). revision(:obj:`str`, `optional`, defaults to :obj:`"main"`): The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so ``revision`` can be any identifier allowed by git. return_unused_kwargs (:obj:`bool`, `optional`, defaults to :obj:`False`): If :obj:`False`, then this function returns just the final configuration object. If :obj:`True`, then this functions returns a :obj:`Tuple(config, unused_kwargs)` where `unused_kwargs` is a dictionary consisting of the key/value pairs whose keys are not configuration attributes: i.e., the part of ``kwargs`` which has not been used to update ``config`` and is otherwise ignored. kwargs (:obj:`Dict[str, Any]`, `optional`): The values in kwargs of any keys which are configuration attributes will be used to override the loaded values. Behavior concerning key/value pairs whose keys are *not* configuration attributes is controlled by the ``return_unused_kwargs`` keyword parameter. .. note:: Passing :obj:`use_auth_token=True` is required when you want to use a private model. Returns: :class:`PretrainedConfig`: The configuration object instantiated from this pretrained model. Examples:: # We can't instantiate directly the base class `PretrainedConfig` so let's show the examples on a # derived class: BertConfig config = BertConfig.from_pretrained('bert-base-uncased') # Download configuration from huggingface.co and cache. config = BertConfig.from_pretrained('./test/saved_model/') # E.g. 
config (or model) was saved using `save_pretrained('./test/saved_model/')` config = BertConfig.from_pretrained('./test/saved_model/my_configuration.json') config = BertConfig.from_pretrained('bert-base-uncased', output_attentions=True, foo=False) assert config.output_attentions == True config, unused_kwargs = BertConfig.from_pretrained('bert-base-uncased', output_attentions=True, foo=False, return_unused_kwargs=True) assert config.output_attentions == True assert unused_kwargs == {'foo': False} """ config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs) if config_dict.get("model_type", False) and hasattr(cls, "model_type"): assert ( config_dict["model_type"] == cls.model_type ), f"You tried to initiate a model of type '{cls.model_type}' with a pretrained model of type '{config_dict['model_type']}'" return cls.from_dict(config_dict, **kwargs)
[ "def", "from_pretrained", "(", "cls", ",", "pretrained_model_name_or_path", ":", "Union", "[", "str", ",", "os", ".", "PathLike", "]", ",", "*", "*", "kwargs", ")", "->", "\"PretrainedConfig\"", ":", "config_dict", ",", "kwargs", "=", "cls", ".", "get_config_dict", "(", "pretrained_model_name_or_path", ",", "*", "*", "kwargs", ")", "if", "config_dict", ".", "get", "(", "\"model_type\"", ",", "False", ")", "and", "hasattr", "(", "cls", ",", "\"model_type\"", ")", ":", "assert", "(", "config_dict", "[", "\"model_type\"", "]", "==", "cls", ".", "model_type", ")", ",", "f\"You tried to initiate a model of type '{cls.model_type}' with a pretrained model of type '{config_dict['model_type']}'\"", "return", "cls", ".", "from_dict", "(", "config_dict", ",", "*", "*", "kwargs", ")" ]
[ 320, 4 ]
[ 395, 51 ]
python
cy
['en', 'cy', 'hi']
False
PretrainedConfig.get_config_dict
( cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs )
From a ``pretrained_model_name_or_path``, resolve to a dictionary of parameters, to be used for instantiating a :class:`~transformers.PretrainedConfig` using ``from_dict``. Parameters: pretrained_model_name_or_path (:obj:`str` or :obj:`os.PathLike`): The identifier of the pre-trained checkpoint from which we want the dictionary of parameters. Returns: :obj:`Tuple[Dict, Dict]`: The dictionary(ies) that will be used to instantiate the configuration object.
From a ``pretrained_model_name_or_path``, resolve to a dictionary of parameters, to be used for instantiating a :class:`~transformers.PretrainedConfig` using ``from_dict``.
def get_config_dict( cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs ) -> Tuple[Dict[str, Any], Dict[str, Any]]: """ From a ``pretrained_model_name_or_path``, resolve to a dictionary of parameters, to be used for instantiating a :class:`~transformers.PretrainedConfig` using ``from_dict``. Parameters: pretrained_model_name_or_path (:obj:`str` or :obj:`os.PathLike`): The identifier of the pre-trained checkpoint from which we want the dictionary of parameters. Returns: :obj:`Tuple[Dict, Dict]`: The dictionary(ies) that will be used to instantiate the configuration object. """ cache_dir = kwargs.pop("cache_dir", None) force_download = kwargs.pop("force_download", False) resume_download = kwargs.pop("resume_download", False) proxies = kwargs.pop("proxies", None) use_auth_token = kwargs.pop("use_auth_token", None) local_files_only = kwargs.pop("local_files_only", False) revision = kwargs.pop("revision", None) if is_offline_mode() and not local_files_only: logger.info("Offline mode: forcing local_files_only=True") local_files_only = True pretrained_model_name_or_path = str(pretrained_model_name_or_path) if os.path.isdir(pretrained_model_name_or_path): config_file = os.path.join(pretrained_model_name_or_path, CONFIG_NAME) elif os.path.isfile(pretrained_model_name_or_path) or is_remote_url(pretrained_model_name_or_path): config_file = pretrained_model_name_or_path else: config_file = hf_bucket_url( pretrained_model_name_or_path, filename=CONFIG_NAME, revision=revision, mirror=None ) try: # Load from URL or cache if already cached resolved_config_file = cached_path( config_file, cache_dir=cache_dir, force_download=force_download, proxies=proxies, resume_download=resume_download, local_files_only=local_files_only, use_auth_token=use_auth_token, ) # Load config dict config_dict = cls._dict_from_json_file(resolved_config_file) except EnvironmentError as err: logger.error(err) msg = ( f"Can't load config for '{pretrained_model_name_or_path}'. Make sure that:\n\n" f"- '{pretrained_model_name_or_path}' is a correct model identifier listed on 'https://huggingface.co/models'\n\n" f"- or '{pretrained_model_name_or_path}' is the correct path to a directory containing a {CONFIG_NAME} file\n\n" ) raise EnvironmentError(msg) except json.JSONDecodeError: msg = ( "Couldn't reach server at '{}' to download configuration file or " "configuration file is not a valid JSON file. " "Please check network or file content here: {}.".format(config_file, resolved_config_file) ) raise EnvironmentError(msg) if resolved_config_file == config_file: logger.info("loading configuration file {}".format(config_file)) else: logger.info("loading configuration file {} from cache at {}".format(config_file, resolved_config_file)) return config_dict, kwargs
[ "def", "get_config_dict", "(", "cls", ",", "pretrained_model_name_or_path", ":", "Union", "[", "str", ",", "os", ".", "PathLike", "]", ",", "*", "*", "kwargs", ")", "->", "Tuple", "[", "Dict", "[", "str", ",", "Any", "]", ",", "Dict", "[", "str", ",", "Any", "]", "]", ":", "cache_dir", "=", "kwargs", ".", "pop", "(", "\"cache_dir\"", ",", "None", ")", "force_download", "=", "kwargs", ".", "pop", "(", "\"force_download\"", ",", "False", ")", "resume_download", "=", "kwargs", ".", "pop", "(", "\"resume_download\"", ",", "False", ")", "proxies", "=", "kwargs", ".", "pop", "(", "\"proxies\"", ",", "None", ")", "use_auth_token", "=", "kwargs", ".", "pop", "(", "\"use_auth_token\"", ",", "None", ")", "local_files_only", "=", "kwargs", ".", "pop", "(", "\"local_files_only\"", ",", "False", ")", "revision", "=", "kwargs", ".", "pop", "(", "\"revision\"", ",", "None", ")", "if", "is_offline_mode", "(", ")", "and", "not", "local_files_only", ":", "logger", ".", "info", "(", "\"Offline mode: forcing local_files_only=True\"", ")", "local_files_only", "=", "True", "pretrained_model_name_or_path", "=", "str", "(", "pretrained_model_name_or_path", ")", "if", "os", ".", "path", ".", "isdir", "(", "pretrained_model_name_or_path", ")", ":", "config_file", "=", "os", ".", "path", ".", "join", "(", "pretrained_model_name_or_path", ",", "CONFIG_NAME", ")", "elif", "os", ".", "path", ".", "isfile", "(", "pretrained_model_name_or_path", ")", "or", "is_remote_url", "(", "pretrained_model_name_or_path", ")", ":", "config_file", "=", "pretrained_model_name_or_path", "else", ":", "config_file", "=", "hf_bucket_url", "(", "pretrained_model_name_or_path", ",", "filename", "=", "CONFIG_NAME", ",", "revision", "=", "revision", ",", "mirror", "=", "None", ")", "try", ":", "# Load from URL or cache if already cached", "resolved_config_file", "=", "cached_path", "(", "config_file", ",", "cache_dir", "=", "cache_dir", ",", "force_download", "=", "force_download", ",", "proxies", "=", "proxies", ",", "resume_download", "=", "resume_download", ",", "local_files_only", "=", "local_files_only", ",", "use_auth_token", "=", "use_auth_token", ",", ")", "# Load config dict", "config_dict", "=", "cls", ".", "_dict_from_json_file", "(", "resolved_config_file", ")", "except", "EnvironmentError", "as", "err", ":", "logger", ".", "error", "(", "err", ")", "msg", "=", "(", "f\"Can't load config for '{pretrained_model_name_or_path}'. Make sure that:\\n\\n\"", "f\"- '{pretrained_model_name_or_path}' is a correct model identifier listed on 'https://huggingface.co/models'\\n\\n\"", "f\"- or '{pretrained_model_name_or_path}' is the correct path to a directory containing a {CONFIG_NAME} file\\n\\n\"", ")", "raise", "EnvironmentError", "(", "msg", ")", "except", "json", ".", "JSONDecodeError", ":", "msg", "=", "(", "\"Couldn't reach server at '{}' to download configuration file or \"", "\"configuration file is not a valid JSON file. \"", "\"Please check network or file content here: {}.\"", ".", "format", "(", "config_file", ",", "resolved_config_file", ")", ")", "raise", "EnvironmentError", "(", "msg", ")", "if", "resolved_config_file", "==", "config_file", ":", "logger", ".", "info", "(", "\"loading configuration file {}\"", ".", "format", "(", "config_file", ")", ")", "else", ":", "logger", ".", "info", "(", "\"loading configuration file {} from cache at {}\"", ".", "format", "(", "config_file", ",", "resolved_config_file", ")", ")", "return", "config_dict", ",", "kwargs" ]
[ 398, 4 ]
[ 473, 34 ]
python
en
['en', 'error', 'th']
False
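get_config_dict resolves an identifier to a raw parameter dict before any config object is built. A sketch, assuming network access to huggingface.co for the hub lookup:

from transformers import PretrainedConfig

config_dict, unused_kwargs = PretrainedConfig.get_config_dict("bert-base-uncased")
print(config_dict["model_type"])         # 'bert'
print(config_dict["num_hidden_layers"])  # 12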
PretrainedConfig.from_dict
(cls, config_dict: Dict[str, Any], **kwargs)
Instantiates a :class:`~transformers.PretrainedConfig` from a Python dictionary of parameters. Args: config_dict (:obj:`Dict[str, Any]`): Dictionary that will be used to instantiate the configuration object. Such a dictionary can be retrieved from a pretrained checkpoint by leveraging the :func:`~transformers.PretrainedConfig.get_config_dict` method. kwargs (:obj:`Dict[str, Any]`): Additional parameters from which to initialize the configuration object. Returns: :class:`PretrainedConfig`: The configuration object instantiated from those parameters.
Instantiates a :class:`~transformers.PretrainedConfig` from a Python dictionary of parameters.
def from_dict(cls, config_dict: Dict[str, Any], **kwargs) -> "PretrainedConfig": """ Instantiates a :class:`~transformers.PretrainedConfig` from a Python dictionary of parameters. Args: config_dict (:obj:`Dict[str, Any]`): Dictionary that will be used to instantiate the configuration object. Such a dictionary can be retrieved from a pretrained checkpoint by leveraging the :func:`~transformers.PretrainedConfig.get_config_dict` method. kwargs (:obj:`Dict[str, Any]`): Additional parameters from which to initialize the configuration object. Returns: :class:`PretrainedConfig`: The configuration object instantiated from those parameters. """ return_unused_kwargs = kwargs.pop("return_unused_kwargs", False) config = cls(**config_dict) if hasattr(config, "pruned_heads"): config.pruned_heads = dict((int(key), value) for key, value in config.pruned_heads.items()) # Update config with kwargs if needed to_remove = [] for key, value in kwargs.items(): if hasattr(config, key): setattr(config, key, value) to_remove.append(key) for key in to_remove: kwargs.pop(key, None) logger.info("Model config %s", str(config)) if return_unused_kwargs: return config, kwargs else: return config
[ "def", "from_dict", "(", "cls", ",", "config_dict", ":", "Dict", "[", "str", ",", "Any", "]", ",", "*", "*", "kwargs", ")", "->", "\"PretrainedConfig\"", ":", "return_unused_kwargs", "=", "kwargs", ".", "pop", "(", "\"return_unused_kwargs\"", ",", "False", ")", "config", "=", "cls", "(", "*", "*", "config_dict", ")", "if", "hasattr", "(", "config", ",", "\"pruned_heads\"", ")", ":", "config", ".", "pruned_heads", "=", "dict", "(", "(", "int", "(", "key", ")", ",", "value", ")", "for", "key", ",", "value", "in", "config", ".", "pruned_heads", ".", "items", "(", ")", ")", "# Update config with kwargs if needed", "to_remove", "=", "[", "]", "for", "key", ",", "value", "in", "kwargs", ".", "items", "(", ")", ":", "if", "hasattr", "(", "config", ",", "key", ")", ":", "setattr", "(", "config", ",", "key", ",", "value", ")", "to_remove", ".", "append", "(", "key", ")", "for", "key", "in", "to_remove", ":", "kwargs", ".", "pop", "(", "key", ",", "None", ")", "logger", ".", "info", "(", "\"Model config %s\"", ",", "str", "(", "config", ")", ")", "if", "return_unused_kwargs", ":", "return", "config", ",", "kwargs", "else", ":", "return", "config" ]
[ 476, 4 ]
[ 511, 25 ]
python
en
['en', 'error', 'th']
False
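from_dict applies kwargs that match config attributes and can hand back the leftovers. A sketch with a hand-built dict; the `foo` key is deliberately made up to show the unused-kwargs path:

from transformers import BertConfig

config, unused = BertConfig.from_dict(
    {"hidden_size": 120, "num_attention_heads": 12},
    output_attentions=True,      # known attribute: applied to the config
    foo="bar",                   # unknown key: returned in `unused`
    return_unused_kwargs=True,
)
print(config.hidden_size, config.output_attentions)  # 120 True
print(unused)                                        # {'foo': 'bar'}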
PretrainedConfig.from_json_file
(cls, json_file: Union[str, os.PathLike])
Instantiates a :class:`~transformers.PretrainedConfig` from the path to a JSON file of parameters. Args: json_file (:obj:`str` or :obj:`os.PathLike`): Path to the JSON file containing the parameters. Returns: :class:`PretrainedConfig`: The configuration object instantiated from that JSON file.
Instantiates a :class:`~transformers.PretrainedConfig` from the path to a JSON file of parameters.
def from_json_file(cls, json_file: Union[str, os.PathLike]) -> "PretrainedConfig": """ Instantiates a :class:`~transformers.PretrainedConfig` from the path to a JSON file of parameters. Args: json_file (:obj:`str` or :obj:`os.PathLike`): Path to the JSON file containing the parameters. Returns: :class:`PretrainedConfig`: The configuration object instantiated from that JSON file. """ config_dict = cls._dict_from_json_file(json_file) return cls(**config_dict)
[ "def", "from_json_file", "(", "cls", ",", "json_file", ":", "Union", "[", "str", ",", "os", ".", "PathLike", "]", ")", "->", "\"PretrainedConfig\"", ":", "config_dict", "=", "cls", ".", "_dict_from_json_file", "(", "json_file", ")", "return", "cls", "(", "*", "*", "config_dict", ")" ]
[ 514, 4 ]
[ 527, 33 ]
python
en
['en', 'error', 'th']
False
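A small round-trip sketch for from_json_file, assuming only write access to a temporary directory; output_attentions is used just because it exists on every PretrainedConfig.

import json
import tempfile

from transformers import PretrainedConfig

# Write a parameter file to disk, then rebuild a config object from it.
with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as handle:
    json.dump({"output_attentions": True}, handle)
    json_path = handle.name

config = PretrainedConfig.from_json_file(json_path)
assert config.output_attentions is True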
PretrainedConfig.to_diff_dict
(self)
Removes all attributes from config which correspond to the default config attributes for better readability and serializes to a Python dictionary. Returns: :obj:`Dict[str, Any]`: Dictionary of all the attributes that make up this configuration instance.
Removes all attributes from config which correspond to the default config attributes for better readability and serializes to a Python dictionary.
def to_diff_dict(self) -> Dict[str, Any]: """ Removes all attributes from config which correspond to the default config attributes for better readability and serializes to a Python dictionary. Returns: :obj:`Dict[str, Any]`: Dictionary of all the attributes that make up this configuration instance, """ config_dict = self.to_dict() # get the default config dict default_config_dict = PretrainedConfig().to_dict() # get class specific config dict class_config_dict = self.__class__().to_dict() if not self.is_composition else {} serializable_config_dict = {} # only serialize values that differ from the default config for key, value in config_dict.items(): if ( key not in default_config_dict or key == "transformers_version" or value != default_config_dict[key] or (key in class_config_dict and value != class_config_dict[key]) ): serializable_config_dict[key] = value return serializable_config_dict
[ "def", "to_diff_dict", "(", "self", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "config_dict", "=", "self", ".", "to_dict", "(", ")", "# get the default config dict", "default_config_dict", "=", "PretrainedConfig", "(", ")", ".", "to_dict", "(", ")", "# get class specific config dict", "class_config_dict", "=", "self", ".", "__class__", "(", ")", ".", "to_dict", "(", ")", "if", "not", "self", ".", "is_composition", "else", "{", "}", "serializable_config_dict", "=", "{", "}", "# only serialize values that differ from the default config", "for", "key", ",", "value", "in", "config_dict", ".", "items", "(", ")", ":", "if", "(", "key", "not", "in", "default_config_dict", "or", "key", "==", "\"transformers_version\"", "or", "value", "!=", "default_config_dict", "[", "key", "]", "or", "(", "key", "in", "class_config_dict", "and", "value", "!=", "class_config_dict", "[", "key", "]", ")", ")", ":", "serializable_config_dict", "[", "key", "]", "=", "value", "return", "serializable_config_dict" ]
[ 541, 4 ]
[ 569, 39 ]
python
en
['en', 'error', 'th']
False
PretrainedConfig.to_dict
(self)
Serializes this instance to a Python dictionary. Returns: :obj:`Dict[str, Any]`: Dictionary of all the attributes that make up this configuration instance.
Serializes this instance to a Python dictionary.
def to_dict(self) -> Dict[str, Any]: """ Serializes this instance to a Python dictionary. Returns: :obj:`Dict[str, Any]`: Dictionary of all the attributes that make up this configuration instance. """ output = copy.deepcopy(self.__dict__) if hasattr(self.__class__, "model_type"): output["model_type"] = self.__class__.model_type # Transformers version when serializing the model output["transformers_version"] = __version__ return output
[ "def", "to_dict", "(", "self", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "output", "=", "copy", ".", "deepcopy", "(", "self", ".", "__dict__", ")", "if", "hasattr", "(", "self", ".", "__class__", ",", "\"model_type\"", ")", ":", "output", "[", "\"model_type\"", "]", "=", "self", ".", "__class__", ".", "model_type", "# Transformers version when serializing the model", "output", "[", "\"transformers_version\"", "]", "=", "__version__", "return", "output" ]
[ 571, 4 ]
[ 585, 21 ]
python
en
['en', 'error', 'th']
False
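A sketch contrasting to_diff_dict and to_dict from the two rows above; the attribute names are assumptions chosen only because they are stock PretrainedConfig fields.

from transformers import PretrainedConfig

config = PretrainedConfig(output_attentions=True)

full = config.to_dict()        # deep copy of __dict__ plus model_type and transformers_version
diff = config.to_diff_dict()   # only keys whose values differ from a fresh default config

assert full["output_hidden_states"] is False   # defaults are still present in the full dump
assert diff["output_attentions"] is True       # changed value survives the diff
assert "output_hidden_states" not in diff      # unchanged default is dropped
assert "transformers_version" in diff          # always serialized, per to_diff_dict above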
PretrainedConfig.to_json_string
(self, use_diff: bool = True)
Serializes this instance to a JSON string. Args: use_diff (:obj:`bool`, `optional`, defaults to :obj:`True`): If set to ``True``, only the difference between the config instance and the default ``PretrainedConfig()`` is serialized to JSON string. Returns: :obj:`str`: String containing all the attributes that make up this configuration instance in JSON format.
Serializes this instance to a JSON string.
def to_json_string(self, use_diff: bool = True) -> str: """ Serializes this instance to a JSON string. Args: use_diff (:obj:`bool`, `optional`, defaults to :obj:`True`): If set to ``True``, only the difference between the config instance and the default ``PretrainedConfig()`` is serialized to JSON string. Returns: :obj:`str`: String containing all the attributes that make up this configuration instance in JSON format. """ if use_diff is True: config_dict = self.to_diff_dict() else: config_dict = self.to_dict() return json.dumps(config_dict, indent=2, sort_keys=True) + "\n"
[ "def", "to_json_string", "(", "self", ",", "use_diff", ":", "bool", "=", "True", ")", "->", "str", ":", "if", "use_diff", "is", "True", ":", "config_dict", "=", "self", ".", "to_diff_dict", "(", ")", "else", ":", "config_dict", "=", "self", ".", "to_dict", "(", ")", "return", "json", ".", "dumps", "(", "config_dict", ",", "indent", "=", "2", ",", "sort_keys", "=", "True", ")", "+", "\"\\n\"" ]
[ 587, 4 ]
[ 603, 71 ]
python
en
['en', 'error', 'th']
False
PretrainedConfig.to_json_file
(self, json_file_path: Union[str, os.PathLike], use_diff: bool = True)
Save this instance to a JSON file. Args: json_file_path (:obj:`str` or :obj:`os.PathLike`): Path to the JSON file in which this configuration instance's parameters will be saved. use_diff (:obj:`bool`, `optional`, defaults to :obj:`True`): If set to ``True``, only the difference between the config instance and the default ``PretrainedConfig()`` is serialized to JSON file.
Save this instance to a JSON file.
def to_json_file(self, json_file_path: Union[str, os.PathLike], use_diff: bool = True): """ Save this instance to a JSON file. Args: json_file_path (:obj:`str` or :obj:`os.PathLike`): Path to the JSON file in which this configuration instance's parameters will be saved. use_diff (:obj:`bool`, `optional`, defaults to :obj:`True`): If set to ``True``, only the difference between the config instance and the default ``PretrainedConfig()`` is serialized to JSON file. """ with open(json_file_path, "w", encoding="utf-8") as writer: writer.write(self.to_json_string(use_diff=use_diff))
[ "def", "to_json_file", "(", "self", ",", "json_file_path", ":", "Union", "[", "str", ",", "os", ".", "PathLike", "]", ",", "use_diff", ":", "bool", "=", "True", ")", ":", "with", "open", "(", "json_file_path", ",", "\"w\"", ",", "encoding", "=", "\"utf-8\"", ")", "as", "writer", ":", "writer", ".", "write", "(", "self", ".", "to_json_string", "(", "use_diff", "=", "use_diff", ")", ")" ]
[ 605, 4 ]
[ 617, 64 ]
python
en
['en', 'error', 'th']
False
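A sketch tying together the two JSON helpers above; the temporary output path is an assumption made only to keep the snippet runnable.

import os
import tempfile

from transformers import PretrainedConfig

config = PretrainedConfig(output_attentions=True)

diff_json = config.to_json_string()                # use_diff=True: only non-default values
full_json = config.to_json_string(use_diff=False)  # every attribute
assert len(full_json) > len(diff_json)

out_path = os.path.join(tempfile.gettempdir(), "config_sketch.json")
config.to_json_file(out_path)                      # writes the diff-style JSON by default
with open(out_path, encoding="utf-8") as handle:
    assert handle.read() == diff_json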
PretrainedConfig.update
(self, config_dict: Dict[str, Any])
Updates attributes of this class with attributes from ``config_dict``. Args: config_dict (:obj:`Dict[str, Any]`): Dictionary of attributes that shall be updated for this class.
Updates attributes of this class with attributes from ``config_dict``.
def update(self, config_dict: Dict[str, Any]): """ Updates attributes of this class with attributes from ``config_dict``. Args: config_dict (:obj:`Dict[str, Any]`): Dictionary of attributes that shall be updated for this class. """ for key, value in config_dict.items(): setattr(self, key, value)
[ "def", "update", "(", "self", ",", "config_dict", ":", "Dict", "[", "str", ",", "Any", "]", ")", ":", "for", "key", ",", "value", "in", "config_dict", ".", "items", "(", ")", ":", "setattr", "(", "self", ",", "key", ",", "value", ")" ]
[ 619, 4 ]
[ 627, 37 ]
python
en
['en', 'error', 'th']
False
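A short sketch of update: since it is a plain setattr loop, unknown keys simply become new attributes; my_custom_flag is a made-up name used only to show that.

from transformers import PretrainedConfig

config = PretrainedConfig()
config.update({"output_attentions": True, "my_custom_flag": 123})

assert config.output_attentions is True
assert config.my_custom_flag == 123   # no validation: any key is attached as an attribute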
async_setup_platform
(hass, config, async_add_entities, discovery_info=None)
Set up a Foscam IP Camera.
Set up a Foscam IP Camera.
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up a Foscam IP Camera.""" async def async_handle_ptz(service): """Handle PTZ service call.""" movement = service.data[ATTR_MOVEMENT] travel_time = service.data[ATTR_TRAVELTIME] entity_ids = await async_extract_entity_ids(hass, service) if not entity_ids: return _LOGGER.debug("Moving '%s' camera(s): %s", movement, entity_ids) all_cameras = hass.data[FOSCAM_DATA][FOSCAM_ENTITIES] target_cameras = [ camera for camera in all_cameras if camera.entity_id in entity_ids ] for camera in target_cameras: await camera.async_perform_ptz(movement, travel_time) hass.services.async_register( FOSCAM_DOMAIN, SERVICE_PTZ, async_handle_ptz, schema=SERVICE_PTZ_SCHEMA ) camera = FoscamCamera( config[CONF_IP], config[CONF_PORT], config[CONF_USERNAME], config[CONF_PASSWORD], verbose=False, ) rtsp_port = config.get(CONF_RTSP_PORT) if not rtsp_port: ret, response = await hass.async_add_executor_job(camera.get_port_info) if ret == 0: rtsp_port = response.get("rtspPort") or response.get("mediaPort") ret, response = await hass.async_add_executor_job(camera.get_motion_detect_config) motion_status = False if ret != 0 and response == 1: motion_status = True async_add_entities( [ HassFoscamCamera( camera, config[CONF_NAME], config[CONF_USERNAME], config[CONF_PASSWORD], rtsp_port, motion_status, ) ] )
[ "async", "def", "async_setup_platform", "(", "hass", ",", "config", ",", "async_add_entities", ",", "discovery_info", "=", "None", ")", ":", "async", "def", "async_handle_ptz", "(", "service", ")", ":", "\"\"\"Handle PTZ service call.\"\"\"", "movement", "=", "service", ".", "data", "[", "ATTR_MOVEMENT", "]", "travel_time", "=", "service", ".", "data", "[", "ATTR_TRAVELTIME", "]", "entity_ids", "=", "await", "async_extract_entity_ids", "(", "hass", ",", "service", ")", "if", "not", "entity_ids", ":", "return", "_LOGGER", ".", "debug", "(", "\"Moving '%s' camera(s): %s\"", ",", "movement", ",", "entity_ids", ")", "all_cameras", "=", "hass", ".", "data", "[", "FOSCAM_DATA", "]", "[", "FOSCAM_ENTITIES", "]", "target_cameras", "=", "[", "camera", "for", "camera", "in", "all_cameras", "if", "camera", ".", "entity_id", "in", "entity_ids", "]", "for", "camera", "in", "target_cameras", ":", "await", "camera", ".", "async_perform_ptz", "(", "movement", ",", "travel_time", ")", "hass", ".", "services", ".", "async_register", "(", "FOSCAM_DOMAIN", ",", "SERVICE_PTZ", ",", "async_handle_ptz", ",", "schema", "=", "SERVICE_PTZ_SCHEMA", ")", "camera", "=", "FoscamCamera", "(", "config", "[", "CONF_IP", "]", ",", "config", "[", "CONF_PORT", "]", ",", "config", "[", "CONF_USERNAME", "]", ",", "config", "[", "CONF_PASSWORD", "]", ",", "verbose", "=", "False", ",", ")", "rtsp_port", "=", "config", ".", "get", "(", "CONF_RTSP_PORT", ")", "if", "not", "rtsp_port", ":", "ret", ",", "response", "=", "await", "hass", ".", "async_add_executor_job", "(", "camera", ".", "get_port_info", ")", "if", "ret", "==", "0", ":", "rtsp_port", "=", "response", ".", "get", "(", "\"rtspPort\"", ")", "or", "response", ".", "get", "(", "\"mediaPort\"", ")", "ret", ",", "response", "=", "await", "hass", ".", "async_add_executor_job", "(", "camera", ".", "get_motion_detect_config", ")", "motion_status", "=", "False", "if", "ret", "!=", "0", "and", "response", "==", "1", ":", "motion_status", "=", "True", "async_add_entities", "(", "[", "HassFoscamCamera", "(", "camera", ",", "config", "[", "CONF_NAME", "]", ",", "config", "[", "CONF_USERNAME", "]", ",", "config", "[", "CONF_PASSWORD", "]", ",", "rtsp_port", ",", "motion_status", ",", ")", "]", ")" ]
[ 90, 0 ]
[ 148, 5 ]
python
en
['en', 'pt', 'en']
True
HassFoscamCamera.__init__
(self, camera, name, username, password, rtsp_port, motion_status)
Initialize a Foscam camera.
Initialize a Foscam camera.
def __init__(self, camera, name, username, password, rtsp_port, motion_status): """Initialize a Foscam camera.""" super().__init__() self._foscam_session = camera self._name = name self._username = username self._password = password self._rtsp_port = rtsp_port self._motion_status = motion_status
[ "def", "__init__", "(", "self", ",", "camera", ",", "name", ",", "username", ",", "password", ",", "rtsp_port", ",", "motion_status", ")", ":", "super", "(", ")", ".", "__init__", "(", ")", "self", ".", "_foscam_session", "=", "camera", "self", ".", "_name", "=", "name", "self", ".", "_username", "=", "username", "self", ".", "_password", "=", "password", "self", ".", "_rtsp_port", "=", "rtsp_port", "self", ".", "_motion_status", "=", "motion_status" ]
[ 154, 4 ]
[ 163, 43 ]
python
en
['en', 'pt', 'it']
False
HassFoscamCamera.async_added_to_hass
(self)
Handle entity addition to hass.
Handle entity addition to hass.
async def async_added_to_hass(self): """Handle entity addition to hass.""" entities = self.hass.data.setdefault(FOSCAM_DATA, {}).setdefault( FOSCAM_ENTITIES, [] ) entities.append(self)
[ "async", "def", "async_added_to_hass", "(", "self", ")", ":", "entities", "=", "self", ".", "hass", ".", "data", ".", "setdefault", "(", "FOSCAM_DATA", ",", "{", "}", ")", ".", "setdefault", "(", "FOSCAM_ENTITIES", ",", "[", "]", ")", "entities", ".", "append", "(", "self", ")" ]
[ 165, 4 ]
[ 170, 29 ]
python
en
['en', 'en', 'en']
True
HassFoscamCamera.camera_image
(self)
Return a still image response from the camera.
Return a still image response from the camera.
def camera_image(self): """Return a still image response from the camera.""" # Send the request to snap a picture and return raw jpg data # Handle exception if host is not reachable or url failed result, response = self._foscam_session.snap_picture_2() if result != 0: return None return response
[ "def", "camera_image", "(", "self", ")", ":", "# Send the request to snap a picture and return raw jpg data", "# Handle exception if host is not reachable or url failed", "result", ",", "response", "=", "self", ".", "_foscam_session", ".", "snap_picture_2", "(", ")", "if", "result", "!=", "0", ":", "return", "None", "return", "response" ]
[ 172, 4 ]
[ 180, 23 ]
python
en
['en', 'en', 'en']
True
HassFoscamCamera.supported_features
(self)
Return supported features.
Return supported features.
def supported_features(self): """Return supported features.""" if self._rtsp_port: return SUPPORT_STREAM return 0
[ "def", "supported_features", "(", "self", ")", ":", "if", "self", ".", "_rtsp_port", ":", "return", "SUPPORT_STREAM", "return", "0" ]
[ 183, 4 ]
[ 187, 16 ]
python
en
['en', 'en', 'en']
True
HassFoscamCamera.stream_source
(self)
Return the stream source.
Return the stream source.
async def stream_source(self): """Return the stream source.""" if self._rtsp_port: return f"rtsp://{self._username}:{self._password}@{self._foscam_session.host}:{self._rtsp_port}/videoMain" return None
[ "async", "def", "stream_source", "(", "self", ")", ":", "if", "self", ".", "_rtsp_port", ":", "return", "f\"rtsp://{self._username}:{self._password}@{self._foscam_session.host}:{self._rtsp_port}/videoMain\"", "return", "None" ]
[ 189, 4 ]
[ 193, 19 ]
python
en
['en', 'ig', 'en']
True
HassFoscamCamera.motion_detection_enabled
(self)
Camera Motion Detection Status.
Camera Motion Detection Status.
def motion_detection_enabled(self): """Camera Motion Detection Status.""" return self._motion_status
[ "def", "motion_detection_enabled", "(", "self", ")", ":", "return", "self", ".", "_motion_status" ]
[ 196, 4 ]
[ 198, 34 ]
python
en
['sv', 'ja', 'en']
False
HassFoscamCamera.enable_motion_detection
(self)
Enable motion detection in camera.
Enable motion detection in camera.
def enable_motion_detection(self): """Enable motion detection in camera.""" try: ret = self._foscam_session.enable_motion_detection() if ret != 0: return self._motion_status = True except TypeError: _LOGGER.debug("Communication problem")
[ "def", "enable_motion_detection", "(", "self", ")", ":", "try", ":", "ret", "=", "self", ".", "_foscam_session", ".", "enable_motion_detection", "(", ")", "if", "ret", "!=", "0", ":", "return", "self", ".", "_motion_status", "=", "True", "except", "TypeError", ":", "_LOGGER", ".", "debug", "(", "\"Communication problem\"", ")" ]
[ 200, 4 ]
[ 210, 50 ]
python
en
['it', 'en', 'en']
True
HassFoscamCamera.disable_motion_detection
(self)
Disable motion detection.
Disable motion detection.
def disable_motion_detection(self): """Disable motion detection.""" try: ret = self._foscam_session.disable_motion_detection() if ret != 0: return self._motion_status = False except TypeError: _LOGGER.debug("Communication problem")
[ "def", "disable_motion_detection", "(", "self", ")", ":", "try", ":", "ret", "=", "self", ".", "_foscam_session", ".", "disable_motion_detection", "(", ")", "if", "ret", "!=", "0", ":", "return", "self", ".", "_motion_status", "=", "False", "except", "TypeError", ":", "_LOGGER", ".", "debug", "(", "\"Communication problem\"", ")" ]
[ 212, 4 ]
[ 222, 50 ]
python
en
['fr', 'en', 'en']
True
HassFoscamCamera.async_perform_ptz
(self, movement, travel_time)
Perform a PTZ action on the camera.
Perform a PTZ action on the camera.
async def async_perform_ptz(self, movement, travel_time): """Perform a PTZ action on the camera.""" _LOGGER.debug("PTZ action '%s' on %s", movement, self._name) movement_function = getattr(self._foscam_session, MOVEMENT_ATTRS[movement]) ret, _ = await self.hass.async_add_executor_job(movement_function) if ret != 0: _LOGGER.error("Error moving %s '%s': %s", movement, self._name, ret) return await asyncio.sleep(travel_time) ret, _ = await self.hass.async_add_executor_job( self._foscam_session.ptz_stop_run ) if ret != 0: _LOGGER.error("Error stopping movement on '%s': %s", self._name, ret) return
[ "async", "def", "async_perform_ptz", "(", "self", ",", "movement", ",", "travel_time", ")", ":", "_LOGGER", ".", "debug", "(", "\"PTZ action '%s' on %s\"", ",", "movement", ",", "self", ".", "_name", ")", "movement_function", "=", "getattr", "(", "self", ".", "_foscam_session", ",", "MOVEMENT_ATTRS", "[", "movement", "]", ")", "ret", ",", "_", "=", "await", "self", ".", "hass", ".", "async_add_executor_job", "(", "movement_function", ")", "if", "ret", "!=", "0", ":", "_LOGGER", ".", "error", "(", "\"Error moving %s '%s': %s\"", ",", "movement", ",", "self", ".", "_name", ",", "ret", ")", "return", "await", "asyncio", ".", "sleep", "(", "travel_time", ")", "ret", ",", "_", "=", "await", "self", ".", "hass", ".", "async_add_executor_job", "(", "self", ".", "_foscam_session", ".", "ptz_stop_run", ")", "if", "ret", "!=", "0", ":", "_LOGGER", ".", "error", "(", "\"Error stopping movement on '%s': %s\"", ",", "self", ".", "_name", ",", "ret", ")", "return" ]
[ 224, 4 ]
[ 244, 18 ]
python
en
['en', 'en', 'en']
True
HassFoscamCamera.name
(self)
Return the name of this camera.
Return the name of this camera.
def name(self): """Return the name of this camera.""" return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 247, 4 ]
[ 249, 25 ]
python
en
['en', 'en', 'en']
True
mock_session_response
(*args, **kwargs)
Mock data generation for session response.
Mock data generation for session response.
def mock_session_response(*args, **kwargs): """Mock data generation for session response.""" class MockSessionResponse: def __init__(self, text, status_code): self.text = text self.status_code = status_code # Username: foo # Password: bar if args[0].headers["Authorization"] != "Basic Zm9vOmJhcg==": return MockSessionResponse(None, 401) if "gimmie_bad_data" in args[0].body: return MockSessionResponse("This shouldn't (wldev = be here.;", 200) if "gimmie_good_data" in args[0].body: return MockSessionResponse( "wldev = [ ['eth1','F4:F5:D8:AA:AA:AA'," "-42,5500,1000,7043,0],['eth1','58:EF:68:00:00:00'," "-42,5500,1000,7043,0]];\n" "dhcpd_lease = [ ['chromecast','172.10.10.5','F4:F5:D8:AA:AA:AA'," "'0 days, 16:17:08'],['wemo','172.10.10.6','58:EF:68:00:00:00'," "'0 days, 12:09:08']];", 200, ) return MockSessionResponse(None, 200)
[ "def", "mock_session_response", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "class", "MockSessionResponse", ":", "def", "__init__", "(", "self", ",", "text", ",", "status_code", ")", ":", "self", ".", "text", "=", "text", "self", ".", "status_code", "=", "status_code", "# Username: foo", "# Password: bar", "if", "args", "[", "0", "]", ".", "headers", "[", "\"Authorization\"", "]", "!=", "\"Basic Zm9vOmJhcg==\"", ":", "return", "MockSessionResponse", "(", "None", ",", "401", ")", "if", "\"gimmie_bad_data\"", "in", "args", "[", "0", "]", ".", "body", ":", "return", "MockSessionResponse", "(", "\"This shouldn't (wldev = be here.;\"", ",", "200", ")", "if", "\"gimmie_good_data\"", "in", "args", "[", "0", "]", ".", "body", ":", "return", "MockSessionResponse", "(", "\"wldev = [ ['eth1','F4:F5:D8:AA:AA:AA',\"", "\"-42,5500,1000,7043,0],['eth1','58:EF:68:00:00:00',\"", "\"-42,5500,1000,7043,0]];\\n\"", "\"dhcpd_lease = [ ['chromecast','172.10.10.5','F4:F5:D8:AA:AA:AA',\"", "\"'0 days, 16:17:08'],['wemo','172.10.10.6','58:EF:68:00:00:00',\"", "\"'0 days, 12:09:08']];\"", ",", "200", ",", ")", "return", "MockSessionResponse", "(", "None", ",", "200", ")" ]
[ 21, 0 ]
[ 46, 41 ]
python
en
['en', 'no', 'en']
True
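A worked check of the hard-coded Authorization values used by this mock and asserted again in the Tomato tests below; only the standard library is needed.

import base64

# HTTP basic auth encodes "username:password" in base64.
assert base64.b64encode(b"foo:bar").decode() == "Zm9vOmJhcg=="   # the pair accepted by the mock
assert base64.b64encode(b"bar:foo").decode() == "YmFyOmZvbw=="   # the pair expected in later tests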
mock_exception_logger
()
Mock pyunifi.
Mock pyunifi.
def mock_exception_logger(): """Mock pyunifi.""" with mock.patch( "homeassistant.components.tomato.device_tracker._LOGGER.exception" ) as mock_exception_logger: yield mock_exception_logger
[ "def", "mock_exception_logger", "(", ")", ":", "with", "mock", ".", "patch", "(", "\"homeassistant.components.tomato.device_tracker._LOGGER.exception\"", ")", "as", "mock_exception_logger", ":", "yield", "mock_exception_logger" ]
[ 50, 0 ]
[ 55, 35 ]
python
en
['en', 'tr', 'it']
False
mock_session_send
()
Mock requests.Session().send.
Mock requests.Session().send.
def mock_session_send(): """Mock requests.Session().send.""" with mock.patch("requests.Session.send") as mock_session_send: yield mock_session_send
[ "def", "mock_session_send", "(", ")", ":", "with", "mock", ".", "patch", "(", "\"requests.Session.send\"", ")", "as", "mock_session_send", ":", "yield", "mock_session_send" ]
[ 59, 0 ]
[ 62, 31 ]
python
en
['en', 'da', 'en']
False
test_config_missing_optional_params
(hass, mock_session_send)
Test the setup without optional parameters.
Test the setup without optional parameters.
def test_config_missing_optional_params(hass, mock_session_send): """Test the setup without optional parameters.""" config = { DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "password", tomato.CONF_HTTP_ID: "1234567890", } ) } result = tomato.get_scanner(hass, config) assert result.req.url == "http://tomato-router:80/update.cgi" assert result.req.headers == { "Content-Length": "32", "Content-Type": "application/x-www-form-urlencoded", "Authorization": "Basic Zm9vOnBhc3N3b3Jk", } assert "_http_id=1234567890" in result.req.body assert "exec=devlist" in result.req.body
[ "def", "test_config_missing_optional_params", "(", "hass", ",", "mock_session_send", ")", ":", "config", "=", "{", "DOMAIN", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_USERNAME", ":", "\"foo\"", ",", "CONF_PASSWORD", ":", "\"password\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"1234567890\"", ",", "}", ")", "}", "result", "=", "tomato", ".", "get_scanner", "(", "hass", ",", "config", ")", "assert", "result", ".", "req", ".", "url", "==", "\"http://tomato-router:80/update.cgi\"", "assert", "result", ".", "req", ".", "headers", "==", "{", "\"Content-Length\"", ":", "\"32\"", ",", "\"Content-Type\"", ":", "\"application/x-www-form-urlencoded\"", ",", "\"Authorization\"", ":", "\"Basic Zm9vOnBhc3N3b3Jk\"", ",", "}", "assert", "\"_http_id=1234567890\"", "in", "result", ".", "req", ".", "body", "assert", "\"exec=devlist\"", "in", "result", ".", "req", ".", "body" ]
[ 65, 0 ]
[ 86, 44 ]
python
en
['en', 'en', 'en']
True
test_config_default_nonssl_port
(hass, mock_session_send)
Test the setup without a default port set without ssl enabled.
Test the setup without a default port set without ssl enabled.
def test_config_default_nonssl_port(hass, mock_session_send): """Test the setup without a default port set without ssl enabled.""" config = { DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "password", tomato.CONF_HTTP_ID: "1234567890", } ) } result = tomato.get_scanner(hass, config) assert result.req.url == "http://tomato-router:80/update.cgi"
[ "def", "test_config_default_nonssl_port", "(", "hass", ",", "mock_session_send", ")", ":", "config", "=", "{", "DOMAIN", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_USERNAME", ":", "\"foo\"", ",", "CONF_PASSWORD", ":", "\"password\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"1234567890\"", ",", "}", ")", "}", "result", "=", "tomato", ".", "get_scanner", "(", "hass", ",", "config", ")", "assert", "result", ".", "req", ".", "url", "==", "\"http://tomato-router:80/update.cgi\"" ]
[ 91, 0 ]
[ 105, 65 ]
python
en
['en', 'en', 'en']
True
test_config_default_ssl_port
(hass, mock_session_send)
Test the setup without a default port set with ssl enabled.
Test the setup without a default port set with ssl enabled.
def test_config_default_ssl_port(hass, mock_session_send): """Test the setup without a default port set with ssl enabled.""" config = { DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_SSL: True, CONF_USERNAME: "foo", CONF_PASSWORD: "password", tomato.CONF_HTTP_ID: "1234567890", } ) } result = tomato.get_scanner(hass, config) assert result.req.url == "https://tomato-router:443/update.cgi"
[ "def", "test_config_default_ssl_port", "(", "hass", ",", "mock_session_send", ")", ":", "config", "=", "{", "DOMAIN", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_SSL", ":", "True", ",", "CONF_USERNAME", ":", "\"foo\"", ",", "CONF_PASSWORD", ":", "\"password\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"1234567890\"", ",", "}", ")", "}", "result", "=", "tomato", ".", "get_scanner", "(", "hass", ",", "config", ")", "assert", "result", ".", "req", ".", "url", "==", "\"https://tomato-router:443/update.cgi\"" ]
[ 110, 0 ]
[ 125, 67 ]
python
en
['en', 'en', 'en']
True
test_config_verify_ssl_but_no_ssl_enabled
(hass, mock_session_send)
Test the setup with a string with ssl_verify but ssl not enabled.
Test the setup with a string with ssl_verify but ssl not enabled.
def test_config_verify_ssl_but_no_ssl_enabled(hass, mock_session_send): """Test the setup with a string with ssl_verify but ssl not enabled.""" config = { DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: False, CONF_VERIFY_SSL: "/test/tomato.crt", CONF_USERNAME: "foo", CONF_PASSWORD: "password", tomato.CONF_HTTP_ID: "1234567890", } ) } result = tomato.get_scanner(hass, config) assert result.req.url == "http://tomato-router:1234/update.cgi" assert result.req.headers == { "Content-Length": "32", "Content-Type": "application/x-www-form-urlencoded", "Authorization": "Basic Zm9vOnBhc3N3b3Jk", } assert "_http_id=1234567890" in result.req.body assert "exec=devlist" in result.req.body assert mock_session_send.call_count == 1 assert mock_session_send.mock_calls[0] == mock.call(result.req, timeout=3)
[ "def", "test_config_verify_ssl_but_no_ssl_enabled", "(", "hass", ",", "mock_session_send", ")", ":", "config", "=", "{", "DOMAIN", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_PORT", ":", "1234", ",", "CONF_SSL", ":", "False", ",", "CONF_VERIFY_SSL", ":", "\"/test/tomato.crt\"", ",", "CONF_USERNAME", ":", "\"foo\"", ",", "CONF_PASSWORD", ":", "\"password\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"1234567890\"", ",", "}", ")", "}", "result", "=", "tomato", ".", "get_scanner", "(", "hass", ",", "config", ")", "assert", "result", ".", "req", ".", "url", "==", "\"http://tomato-router:1234/update.cgi\"", "assert", "result", ".", "req", ".", "headers", "==", "{", "\"Content-Length\"", ":", "\"32\"", ",", "\"Content-Type\"", ":", "\"application/x-www-form-urlencoded\"", ",", "\"Authorization\"", ":", "\"Basic Zm9vOnBhc3N3b3Jk\"", ",", "}", "assert", "\"_http_id=1234567890\"", "in", "result", ".", "req", ".", "body", "assert", "\"exec=devlist\"", "in", "result", ".", "req", ".", "body", "assert", "mock_session_send", ".", "call_count", "==", "1", "assert", "mock_session_send", ".", "mock_calls", "[", "0", "]", "==", "mock", ".", "call", "(", "result", ".", "req", ",", "timeout", "=", "3", ")" ]
[ 130, 0 ]
[ 156, 78 ]
python
en
['en', 'en', 'en']
True
test_config_valid_verify_ssl_path
(hass, mock_session_send)
Test the setup with a string for ssl_verify, representing the absolute path to a CA certificate bundle.
Test the setup with a string for ssl_verify.
def test_config_valid_verify_ssl_path(hass, mock_session_send): """Test the setup with a string for ssl_verify. Representing the absolute path to a CA certificate bundle. """ config = { DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, CONF_VERIFY_SSL: "/test/tomato.crt", CONF_USERNAME: "bar", CONF_PASSWORD: "foo", tomato.CONF_HTTP_ID: "0987654321", } ) } result = tomato.get_scanner(hass, config) assert result.req.url == "https://tomato-router:1234/update.cgi" assert result.req.headers == { "Content-Length": "32", "Content-Type": "application/x-www-form-urlencoded", "Authorization": "Basic YmFyOmZvbw==", } assert "_http_id=0987654321" in result.req.body assert "exec=devlist" in result.req.body assert mock_session_send.call_count == 1 assert mock_session_send.mock_calls[0] == mock.call( result.req, timeout=3, verify="/test/tomato.crt" )
[ "def", "test_config_valid_verify_ssl_path", "(", "hass", ",", "mock_session_send", ")", ":", "config", "=", "{", "DOMAIN", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_PORT", ":", "1234", ",", "CONF_SSL", ":", "True", ",", "CONF_VERIFY_SSL", ":", "\"/test/tomato.crt\"", ",", "CONF_USERNAME", ":", "\"bar\"", ",", "CONF_PASSWORD", ":", "\"foo\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"0987654321\"", ",", "}", ")", "}", "result", "=", "tomato", ".", "get_scanner", "(", "hass", ",", "config", ")", "assert", "result", ".", "req", ".", "url", "==", "\"https://tomato-router:1234/update.cgi\"", "assert", "result", ".", "req", ".", "headers", "==", "{", "\"Content-Length\"", ":", "\"32\"", ",", "\"Content-Type\"", ":", "\"application/x-www-form-urlencoded\"", ",", "\"Authorization\"", ":", "\"Basic YmFyOmZvbw==\"", ",", "}", "assert", "\"_http_id=0987654321\"", "in", "result", ".", "req", ".", "body", "assert", "\"exec=devlist\"", "in", "result", ".", "req", ".", "body", "assert", "mock_session_send", ".", "call_count", "==", "1", "assert", "mock_session_send", ".", "mock_calls", "[", "0", "]", "==", "mock", ".", "call", "(", "result", ".", "req", ",", "timeout", "=", "3", ",", "verify", "=", "\"/test/tomato.crt\"", ")" ]
[ 161, 0 ]
[ 192, 5 ]
python
en
['en', 'pt', 'en']
True
test_config_valid_verify_ssl_bool
(hass, mock_session_send)
Test the setup with a bool for ssl_verify.
Test the setup with a bool for ssl_verify.
def test_config_valid_verify_ssl_bool(hass, mock_session_send): """Test the setup with a bool for ssl_verify.""" config = { DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, CONF_VERIFY_SSL: "False", CONF_USERNAME: "bar", CONF_PASSWORD: "foo", tomato.CONF_HTTP_ID: "0987654321", } ) } result = tomato.get_scanner(hass, config) assert result.req.url == "https://tomato-router:1234/update.cgi" assert result.req.headers == { "Content-Length": "32", "Content-Type": "application/x-www-form-urlencoded", "Authorization": "Basic YmFyOmZvbw==", } assert "_http_id=0987654321" in result.req.body assert "exec=devlist" in result.req.body assert mock_session_send.call_count == 1 assert mock_session_send.mock_calls[0] == mock.call( result.req, timeout=3, verify=False )
[ "def", "test_config_valid_verify_ssl_bool", "(", "hass", ",", "mock_session_send", ")", ":", "config", "=", "{", "DOMAIN", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_PORT", ":", "1234", ",", "CONF_SSL", ":", "True", ",", "CONF_VERIFY_SSL", ":", "\"False\"", ",", "CONF_USERNAME", ":", "\"bar\"", ",", "CONF_PASSWORD", ":", "\"foo\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"0987654321\"", ",", "}", ")", "}", "result", "=", "tomato", ".", "get_scanner", "(", "hass", ",", "config", ")", "assert", "result", ".", "req", ".", "url", "==", "\"https://tomato-router:1234/update.cgi\"", "assert", "result", ".", "req", ".", "headers", "==", "{", "\"Content-Length\"", ":", "\"32\"", ",", "\"Content-Type\"", ":", "\"application/x-www-form-urlencoded\"", ",", "\"Authorization\"", ":", "\"Basic YmFyOmZvbw==\"", ",", "}", "assert", "\"_http_id=0987654321\"", "in", "result", ".", "req", ".", "body", "assert", "\"exec=devlist\"", "in", "result", ".", "req", ".", "body", "assert", "mock_session_send", ".", "call_count", "==", "1", "assert", "mock_session_send", ".", "mock_calls", "[", "0", "]", "==", "mock", ".", "call", "(", "result", ".", "req", ",", "timeout", "=", "3", ",", "verify", "=", "False", ")" ]
[ 195, 0 ]
[ 223, 5 ]
python
en
['en', 'en', 'en']
True
test_config_errors
()
Test for configuration errors.
Test for configuration errors.
def test_config_errors(): """Test for configuration errors.""" with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, # No Host, CONF_PORT: 1234, CONF_SSL: True, CONF_VERIFY_SSL: "False", CONF_USERNAME: "bar", CONF_PASSWORD: "foo", tomato.CONF_HTTP_ID: "0987654321", } ) with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: -123456789, # Bad Port CONF_SSL: True, CONF_VERIFY_SSL: "False", CONF_USERNAME: "bar", CONF_PASSWORD: "foo", tomato.CONF_HTTP_ID: "0987654321", } ) with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, CONF_VERIFY_SSL: "False", # No Username CONF_PASSWORD: "foo", tomato.CONF_HTTP_ID: "0987654321", } ) with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, CONF_VERIFY_SSL: "False", CONF_USERNAME: "bar", # No Password tomato.CONF_HTTP_ID: "0987654321", } ) with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, CONF_VERIFY_SSL: "False", CONF_USERNAME: "bar", CONF_PASSWORD: "foo", # No HTTP_ID } )
[ "def", "test_config_errors", "(", ")", ":", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "# No Host,", "CONF_PORT", ":", "1234", ",", "CONF_SSL", ":", "True", ",", "CONF_VERIFY_SSL", ":", "\"False\"", ",", "CONF_USERNAME", ":", "\"bar\"", ",", "CONF_PASSWORD", ":", "\"foo\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"0987654321\"", ",", "}", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_PORT", ":", "-", "123456789", ",", "# Bad Port", "CONF_SSL", ":", "True", ",", "CONF_VERIFY_SSL", ":", "\"False\"", ",", "CONF_USERNAME", ":", "\"bar\"", ",", "CONF_PASSWORD", ":", "\"foo\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"0987654321\"", ",", "}", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_PORT", ":", "1234", ",", "CONF_SSL", ":", "True", ",", "CONF_VERIFY_SSL", ":", "\"False\"", ",", "# No Username", "CONF_PASSWORD", ":", "\"foo\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"0987654321\"", ",", "}", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_PORT", ":", "1234", ",", "CONF_SSL", ":", "True", ",", "CONF_VERIFY_SSL", ":", "\"False\"", ",", "CONF_USERNAME", ":", "\"bar\"", ",", "# No Password", "tomato", ".", "CONF_HTTP_ID", ":", "\"0987654321\"", ",", "}", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_PORT", ":", "1234", ",", "CONF_SSL", ":", "True", ",", "CONF_VERIFY_SSL", ":", "\"False\"", ",", "CONF_USERNAME", ":", "\"bar\"", ",", "CONF_PASSWORD", ":", "\"foo\"", ",", "# No HTTP_ID", "}", ")" ]
[ 226, 0 ]
[ 292, 9 ]
python
en
['da', 'en', 'en']
True
test_config_bad_credentials
(hass, mock_exception_logger)
Test the setup with bad credentials.
Test the setup with bad credentials.
def test_config_bad_credentials(hass, mock_exception_logger): """Test the setup with bad credentials.""" config = { DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "i_am", CONF_PASSWORD: "an_imposter", tomato.CONF_HTTP_ID: "1234", } ) } tomato.get_scanner(hass, config) assert mock_exception_logger.call_count == 1 assert mock_exception_logger.mock_calls[0] == mock.call( "Failed to authenticate, please check your username and password" )
[ "def", "test_config_bad_credentials", "(", "hass", ",", "mock_exception_logger", ")", ":", "config", "=", "{", "DOMAIN", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_USERNAME", ":", "\"i_am\"", ",", "CONF_PASSWORD", ":", "\"an_imposter\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"1234\"", ",", "}", ")", "}", "tomato", ".", "get_scanner", "(", "hass", ",", "config", ")", "assert", "mock_exception_logger", ".", "call_count", "==", "1", "assert", "mock_exception_logger", ".", "mock_calls", "[", "0", "]", "==", "mock", ".", "call", "(", "\"Failed to authenticate, please check your username and password\"", ")" ]
[ 296, 0 ]
[ 315, 5 ]
python
en
['en', 'en', 'en']
True
test_bad_response
(hass, mock_exception_logger)
Test the setup with bad response from router.
Test the setup with bad response from router.
def test_bad_response(hass, mock_exception_logger): """Test the setup with bad response from router.""" config = { DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", tomato.CONF_HTTP_ID: "gimmie_bad_data", } ) } tomato.get_scanner(hass, config) assert mock_exception_logger.call_count == 1 assert mock_exception_logger.mock_calls[0] == mock.call( "Failed to parse response from router" )
[ "def", "test_bad_response", "(", "hass", ",", "mock_exception_logger", ")", ":", "config", "=", "{", "DOMAIN", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_USERNAME", ":", "\"foo\"", ",", "CONF_PASSWORD", ":", "\"bar\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"gimmie_bad_data\"", ",", "}", ")", "}", "tomato", ".", "get_scanner", "(", "hass", ",", "config", ")", "assert", "mock_exception_logger", ".", "call_count", "==", "1", "assert", "mock_exception_logger", ".", "mock_calls", "[", "0", "]", "==", "mock", ".", "call", "(", "\"Failed to parse response from router\"", ")" ]
[ 319, 0 ]
[ 338, 5 ]
python
en
['en', 'en', 'en']
True
test_scan_devices
(hass, mock_exception_logger)
Test scanning for new devices.
Test scanning for new devices.
def test_scan_devices(hass, mock_exception_logger): """Test scanning for new devices.""" config = { DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", tomato.CONF_HTTP_ID: "gimmie_good_data", } ) } scanner = tomato.get_scanner(hass, config) assert scanner.scan_devices() == ["F4:F5:D8:AA:AA:AA", "58:EF:68:00:00:00"]
[ "def", "test_scan_devices", "(", "hass", ",", "mock_exception_logger", ")", ":", "config", "=", "{", "DOMAIN", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_USERNAME", ":", "\"foo\"", ",", "CONF_PASSWORD", ":", "\"bar\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"gimmie_good_data\"", ",", "}", ")", "}", "scanner", "=", "tomato", ".", "get_scanner", "(", "hass", ",", "config", ")", "assert", "scanner", ".", "scan_devices", "(", ")", "==", "[", "\"F4:F5:D8:AA:AA:AA\"", ",", "\"58:EF:68:00:00:00\"", "]" ]
[ 342, 0 ]
[ 357, 79 ]
python
en
['en', 'en', 'en']
True
test_bad_connection
(hass, mock_exception_logger)
Test the router with a connection error.
Test the router with a connection error.
def test_bad_connection(hass, mock_exception_logger): """Test the router with a connection error.""" config = { DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", tomato.CONF_HTTP_ID: "gimmie_good_data", } ) } with requests_mock.Mocker() as adapter: adapter.register_uri( "POST", "http://tomato-router:80/update.cgi", exc=requests.exceptions.ConnectionError, ), tomato.get_scanner(hass, config) assert mock_exception_logger.call_count == 1 assert mock_exception_logger.mock_calls[0] == mock.call( "Failed to connect to the router or invalid http_id supplied" )
[ "def", "test_bad_connection", "(", "hass", ",", "mock_exception_logger", ")", ":", "config", "=", "{", "DOMAIN", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_USERNAME", ":", "\"foo\"", ",", "CONF_PASSWORD", ":", "\"bar\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"gimmie_good_data\"", ",", "}", ")", "}", "with", "requests_mock", ".", "Mocker", "(", ")", "as", "adapter", ":", "adapter", ".", "register_uri", "(", "\"POST\"", ",", "\"http://tomato-router:80/update.cgi\"", ",", "exc", "=", "requests", ".", "exceptions", ".", "ConnectionError", ",", ")", ",", "tomato", ".", "get_scanner", "(", "hass", ",", "config", ")", "assert", "mock_exception_logger", ".", "call_count", "==", "1", "assert", "mock_exception_logger", ".", "mock_calls", "[", "0", "]", "==", "mock", ".", "call", "(", "\"Failed to connect to the router or invalid http_id supplied\"", ")" ]
[ 361, 0 ]
[ 385, 5 ]
python
en
['en', 'en', 'en']
True
test_router_timeout
(hass, mock_exception_logger)
Test the router with a timeout error.
Test the router with a timeout error.
def test_router_timeout(hass, mock_exception_logger): """Test the router with a timeout error.""" config = { DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", tomato.CONF_HTTP_ID: "gimmie_good_data", } ) } with requests_mock.Mocker() as adapter: adapter.register_uri( "POST", "http://tomato-router:80/update.cgi", exc=requests.exceptions.Timeout, ), tomato.get_scanner(hass, config) assert mock_exception_logger.call_count == 1 assert mock_exception_logger.mock_calls[0] == mock.call( "Connection to the router timed out" )
[ "def", "test_router_timeout", "(", "hass", ",", "mock_exception_logger", ")", ":", "config", "=", "{", "DOMAIN", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_USERNAME", ":", "\"foo\"", ",", "CONF_PASSWORD", ":", "\"bar\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"gimmie_good_data\"", ",", "}", ")", "}", "with", "requests_mock", ".", "Mocker", "(", ")", "as", "adapter", ":", "adapter", ".", "register_uri", "(", "\"POST\"", ",", "\"http://tomato-router:80/update.cgi\"", ",", "exc", "=", "requests", ".", "exceptions", ".", "Timeout", ",", ")", ",", "tomato", ".", "get_scanner", "(", "hass", ",", "config", ")", "assert", "mock_exception_logger", ".", "call_count", "==", "1", "assert", "mock_exception_logger", ".", "mock_calls", "[", "0", "]", "==", "mock", ".", "call", "(", "\"Connection to the router timed out\"", ")" ]
[ 389, 0 ]
[ 413, 5 ]
python
en
['en', 'en', 'en']
True
test_get_device_name
(hass, mock_exception_logger)
Test getting device names.
Test getting device names.
def test_get_device_name(hass, mock_exception_logger): """Test getting device names.""" config = { DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", tomato.CONF_HTTP_ID: "gimmie_good_data", } ) } scanner = tomato.get_scanner(hass, config) assert scanner.get_device_name("F4:F5:D8:AA:AA:AA") == "chromecast" assert scanner.get_device_name("58:EF:68:00:00:00") == "wemo" assert scanner.get_device_name("AA:BB:CC:00:00:00") is None
[ "def", "test_get_device_name", "(", "hass", ",", "mock_exception_logger", ")", ":", "config", "=", "{", "DOMAIN", ":", "tomato", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "tomato", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"tomato-router\"", ",", "CONF_USERNAME", ":", "\"foo\"", ",", "CONF_PASSWORD", ":", "\"bar\"", ",", "tomato", ".", "CONF_HTTP_ID", ":", "\"gimmie_good_data\"", ",", "}", ")", "}", "scanner", "=", "tomato", ".", "get_scanner", "(", "hass", ",", "config", ")", "assert", "scanner", ".", "get_device_name", "(", "\"F4:F5:D8:AA:AA:AA\"", ")", "==", "\"chromecast\"", "assert", "scanner", ".", "get_device_name", "(", "\"58:EF:68:00:00:00\"", ")", "==", "\"wemo\"", "assert", "scanner", ".", "get_device_name", "(", "\"AA:BB:CC:00:00:00\"", ")", "is", "None" ]
[ 417, 0 ]
[ 434, 63 ]
python
en
['de', 'en', 'en']
True
host_port
(data)
Return a list with host and port.
Return a list with host and port.
def host_port(data): """Return a list with host and port.""" return (data[CONF_HOST], data[CONF_PORT])
[ "def", "host_port", "(", "data", ")", ":", "return", "(", "data", "[", "CONF_HOST", "]", ",", "data", "[", "CONF_PORT", "]", ")" ]
[ 26, 0 ]
[ 28, 45 ]
python
en
['en', 'en', 'en']
True
create_schema
(previous_input=None)
Create a schema with given values as default.
Create a schema with given values as default.
def create_schema(previous_input=None): """Create a schema with given values as default.""" if previous_input is not None: host, port = host_port(previous_input) else: host = DEFAULT_HOST port = DEFAULT_PORT return vol.Schema( { vol.Required(CONF_HOST, default=host): str, vol.Required(CONF_PORT, default=port): int, } )
[ "def", "create_schema", "(", "previous_input", "=", "None", ")", ":", "if", "previous_input", "is", "not", "None", ":", "host", ",", "port", "=", "host_port", "(", "previous_input", ")", "else", ":", "host", "=", "DEFAULT_HOST", "port", "=", "DEFAULT_PORT", "return", "vol", ".", "Schema", "(", "{", "vol", ".", "Required", "(", "CONF_HOST", ",", "default", "=", "host", ")", ":", "str", ",", "vol", ".", "Required", "(", "CONF_PORT", ",", "default", "=", "port", ")", ":", "int", ",", "}", ")" ]
[ 31, 0 ]
[ 44, 5 ]
python
en
['en', 'en', 'en']
True
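A behavioural sketch of the voluptuous schema built by create_schema; the literal key names and default values stand in for CONF_HOST/CONF_PORT and DEFAULT_HOST/DEFAULT_PORT and are assumptions, not values taken from this record.

import voluptuous as vol

schema = vol.Schema(
    {
        vol.Required("host", default="192.168.0.2"): str,
        vol.Required("port", default=80): int,
    }
)

# Required fields with defaults are filled in when missing, so an empty dict still validates.
assert schema({}) == {"host": "192.168.0.2", "port": 80}
assert schema({"host": "10.0.0.5"}) == {"host": "10.0.0.5", "port": 80}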
BleBoxConfigFlow.__init__
(self)
Initialize the BleBox config flow.
Initialize the BleBox config flow.
def __init__(self): """Initialize the BleBox config flow.""" self.device_config = {}
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "device_config", "=", "{", "}" ]
[ 60, 4 ]
[ 62, 31 ]
python
en
['en', 'en', 'en']
True
BleBoxConfigFlow.handle_step_exception
( self, step, exception, schema, host, port, message_id, log_fn )
Handle step exceptions.
Handle step exceptions.
def handle_step_exception( self, step, exception, schema, host, port, message_id, log_fn ): """Handle step exceptions.""" log_fn("%s at %s:%d (%s)", LOG_MSG[message_id], host, port, exception) return self.async_show_form( step_id="user", data_schema=schema, errors={"base": message_id}, description_placeholders={"address": f"{host}:{port}"}, )
[ "def", "handle_step_exception", "(", "self", ",", "step", ",", "exception", ",", "schema", ",", "host", ",", "port", ",", "message_id", ",", "log_fn", ")", ":", "log_fn", "(", "\"%s at %s:%d (%s)\"", ",", "LOG_MSG", "[", "message_id", "]", ",", "host", ",", "port", ",", "exception", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"user\"", ",", "data_schema", "=", "schema", ",", "errors", "=", "{", "\"base\"", ":", "message_id", "}", ",", "description_placeholders", "=", "{", "\"address\"", ":", "f\"{host}:{port}\"", "}", ",", ")" ]
[ 64, 4 ]
[ 76, 9 ]
python
en
['en', 'en', 'en']
True
BleBoxConfigFlow.async_step_user
(self, user_input=None)
Handle initial user-triggered config step.
Handle initial user-triggered config step.
async def async_step_user(self, user_input=None): """Handle initial user-triggered config step.""" hass = self.hass schema = create_schema(user_input) if user_input is None: return self.async_show_form( step_id="user", data_schema=schema, errors={}, description_placeholders={}, ) addr = host_port(user_input) for entry in hass.config_entries.async_entries(DOMAIN): if addr == host_port(entry.data): host, port = addr return self.async_abort( reason=ADDRESS_ALREADY_CONFIGURED, description_placeholders={"address": f"{host}:{port}"}, ) websession = async_get_clientsession(hass) api_host = ApiHost(*addr, DEFAULT_SETUP_TIMEOUT, websession, hass.loop, _LOGGER) try: product = await Products.async_from_host(api_host) except UnsupportedBoxVersion as ex: return self.handle_step_exception( "user", ex, schema, *addr, UNSUPPORTED_VERSION, _LOGGER.debug ) except Error as ex: return self.handle_step_exception( "user", ex, schema, *addr, CANNOT_CONNECT, _LOGGER.warning ) except RuntimeError as ex: return self.handle_step_exception( "user", ex, schema, *addr, UNKNOWN, _LOGGER.error ) # Check if configured but IP changed since await self.async_set_unique_id(product.unique_id) self._abort_if_unique_id_configured() return self.async_create_entry(title=product.name, data=user_input)
[ "async", "def", "async_step_user", "(", "self", ",", "user_input", "=", "None", ")", ":", "hass", "=", "self", ".", "hass", "schema", "=", "create_schema", "(", "user_input", ")", "if", "user_input", "is", "None", ":", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"user\"", ",", "data_schema", "=", "schema", ",", "errors", "=", "{", "}", ",", "description_placeholders", "=", "{", "}", ",", ")", "addr", "=", "host_port", "(", "user_input", ")", "for", "entry", "in", "hass", ".", "config_entries", ".", "async_entries", "(", "DOMAIN", ")", ":", "if", "addr", "==", "host_port", "(", "entry", ".", "data", ")", ":", "host", ",", "port", "=", "addr", "return", "self", ".", "async_abort", "(", "reason", "=", "ADDRESS_ALREADY_CONFIGURED", ",", "description_placeholders", "=", "{", "\"address\"", ":", "f\"{host}:{port}\"", "}", ",", ")", "websession", "=", "async_get_clientsession", "(", "hass", ")", "api_host", "=", "ApiHost", "(", "*", "addr", ",", "DEFAULT_SETUP_TIMEOUT", ",", "websession", ",", "hass", ".", "loop", ",", "_LOGGER", ")", "try", ":", "product", "=", "await", "Products", ".", "async_from_host", "(", "api_host", ")", "except", "UnsupportedBoxVersion", "as", "ex", ":", "return", "self", ".", "handle_step_exception", "(", "\"user\"", ",", "ex", ",", "schema", ",", "*", "addr", ",", "UNSUPPORTED_VERSION", ",", "_LOGGER", ".", "debug", ")", "except", "Error", "as", "ex", ":", "return", "self", ".", "handle_step_exception", "(", "\"user\"", ",", "ex", ",", "schema", ",", "*", "addr", ",", "CANNOT_CONNECT", ",", "_LOGGER", ".", "warning", ")", "except", "RuntimeError", "as", "ex", ":", "return", "self", ".", "handle_step_exception", "(", "\"user\"", ",", "ex", ",", "schema", ",", "*", "addr", ",", "UNKNOWN", ",", "_LOGGER", ".", "error", ")", "# Check if configured but IP changed since", "await", "self", ".", "async_set_unique_id", "(", "product", ".", "unique_id", ")", "self", ".", "_abort_if_unique_id_configured", "(", ")", "return", "self", ".", "async_create_entry", "(", "title", "=", "product", ".", "name", ",", "data", "=", "user_input", ")" ]
[ 78, 4 ]
[ 127, 75 ]
python
en
['da', 'en', 'en']
True
test_jewish_calendar_min_config
(hass)
Test minimum jewish calendar configuration.
Test minimum jewish calendar configuration.
async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None
[ "async", "def", "test_jewish_calendar_min_config", "(", "hass", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "jewish_calendar", ".", "DOMAIN", ",", "{", "\"jewish_calendar\"", ":", "{", "}", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.jewish_calendar_date\"", ")", "is", "not", "None" ]
[ 19, 0 ]
[ 25, 69 ]
python
de
['de', 'la', 'en']
False
test_jewish_calendar_hebrew
(hass)
Test jewish calendar sensor with language set to hebrew.
Test jewish calendar sensor with language set to hebrew.
async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None
[ "async", "def", "test_jewish_calendar_hebrew", "(", "hass", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "jewish_calendar", ".", "DOMAIN", ",", "{", "\"jewish_calendar\"", ":", "{", "\"language\"", ":", "\"hebrew\"", "}", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.jewish_calendar_date\"", ")", "is", "not", "None" ]
[ 28, 0 ]
[ 34, 69 ]
python
en
['en', 'su', 'en']
True
test_jewish_calendar_sensor
( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, )
Test Jewish calendar sensor output.
Test Jewish calendar sensor output.
async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1
[ "async", "def", "test_jewish_calendar_sensor", "(", "hass", ",", "legacy_patchable_time", ",", "now", ",", "tzname", ",", "latitude", ",", "longitude", ",", "language", ",", "sensor", ",", "diaspora", ",", "result", ",", ")", ":", "time_zone", "=", "dt_util", ".", "get_time_zone", "(", "tzname", ")", "test_time", "=", "time_zone", ".", "localize", "(", "now", ")", "hass", ".", "config", ".", "time_zone", "=", "time_zone", "hass", ".", "config", ".", "latitude", "=", "latitude", "hass", ".", "config", ".", "longitude", "=", "longitude", "with", "alter_time", "(", "test_time", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "jewish_calendar", ".", "DOMAIN", ",", "{", "\"jewish_calendar\"", ":", "{", "\"name\"", ":", "\"test\"", ",", "\"language\"", ":", "language", ",", "\"diaspora\"", ":", "diaspora", ",", "}", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "future", "=", "dt_util", ".", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "30", ")", "async_fire_time_changed", "(", "hass", ",", "future", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "result", "=", "(", "dt_util", ".", "as_utc", "(", "time_zone", ".", "localize", "(", "result", ")", ")", "if", "isinstance", "(", "result", ",", "dt", ")", "else", "result", ")", "sensor_object", "=", "hass", ".", "states", ".", "get", "(", "f\"sensor.test_{sensor}\"", ")", "assert", "sensor_object", ".", "state", "==", "str", "(", "result", ")", "if", "sensor", "==", "\"holiday\"", ":", "assert", "sensor_object", ".", "attributes", ".", "get", "(", "\"id\"", ")", "==", "\"rosh_hashana_i\"", "assert", "sensor_object", ".", "attributes", ".", "get", "(", "\"type\"", ")", "==", "\"YOM_TOV\"", "assert", "sensor_object", ".", "attributes", ".", "get", "(", "\"type_id\"", ")", "==", "1" ]
[ 150, 0 ]
[ 198, 59 ]
python
en
['en', 'su', 'en']
True
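The sensor tests in these records all follow the same time-freezing pattern: freeze the clock while the integration is set up, then fire a time-changed event so the entities recompute their state. A minimal sketch of that pattern is below; it assumes the Home Assistant test harness (`hass` fixture, `async_fire_time_changed` from tests.common) and an `alter_time` context manager that is defined elsewhere in this test module and not shown in these records.

# Sketch only: `alter_time` must already be in scope (it is a helper from the
# same test module, not a Home Assistant API).
from datetime import timedelta

import homeassistant.util.dt as dt_util
from homeassistant.setup import async_setup_component

from tests.common import async_fire_time_changed


async def setup_with_frozen_clock(hass, test_time, domain, conf):
    """Set up `domain` with the clock frozen at `test_time`, then tick it forward."""
    with alter_time(test_time):
        # Integration setup happens while "now" is pinned to test_time.
        assert await async_setup_component(hass, domain, conf)
        await hass.async_block_till_done()

        # Advance 30 seconds so time-driven sensors refresh their state.
        future = dt_util.utcnow() + timedelta(seconds=30)
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()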
test_shabbat_times_sensor
( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, )
Test sensor output for upcoming shabbat/yomtov times.
Test sensor output for upcoming shabbat/yomtov times.
async def test_shabbat_times_sensor(
    hass,
    legacy_patchable_time,
    language,
    now,
    candle_lighting,
    havdalah,
    diaspora,
    tzname,
    latitude,
    longitude,
    result,
):
    """Test sensor output for upcoming shabbat/yomtov times."""
    time_zone = dt_util.get_time_zone(tzname)
    test_time = time_zone.localize(now)

    hass.config.time_zone = time_zone
    hass.config.latitude = latitude
    hass.config.longitude = longitude

    registry = await hass.helpers.entity_registry.async_get_registry()

    with alter_time(test_time):
        assert await async_setup_component(
            hass,
            jewish_calendar.DOMAIN,
            {
                "jewish_calendar": {
                    "name": "test",
                    "language": language,
                    "diaspora": diaspora,
                    "candle_lighting_minutes_before_sunset": candle_lighting,
                    "havdalah_minutes_after_sunset": havdalah,
                }
            },
        )
        await hass.async_block_till_done()

        future = dt_util.utcnow() + timedelta(seconds=30)
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()

    for sensor_type, result_value in result.items():
        if not sensor_type.startswith(language):
            print(f"Not checking {sensor_type} for {language}")
            continue

        sensor_type = sensor_type.replace(f"{language}_", "")

        result_value = (
            dt_util.as_utc(result_value)
            if isinstance(result_value, dt)
            else result_value
        )

        assert hass.states.get(f"sensor.test_{sensor_type}").state == str(
            result_value
        ), f"Value for {sensor_type}"

        entity = registry.async_get(f"sensor.test_{sensor_type}")

        target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion")
        target_uid = "_".join(
            map(
                str,
                [
                    latitude,
                    longitude,
                    time_zone,
                    HDATE_DEFAULT_ALTITUDE,
                    diaspora,
                    language,
                    candle_lighting,
                    havdalah,
                    target_sensor_type,
                ],
            )
        )
        assert entity.unique_id == target_uid
[ "async", "def", "test_shabbat_times_sensor", "(", "hass", ",", "legacy_patchable_time", ",", "language", ",", "now", ",", "candle_lighting", ",", "havdalah", ",", "diaspora", ",", "tzname", ",", "latitude", ",", "longitude", ",", "result", ",", ")", ":", "time_zone", "=", "dt_util", ".", "get_time_zone", "(", "tzname", ")", "test_time", "=", "time_zone", ".", "localize", "(", "now", ")", "hass", ".", "config", ".", "time_zone", "=", "time_zone", "hass", ".", "config", ".", "latitude", "=", "latitude", "hass", ".", "config", ".", "longitude", "=", "longitude", "registry", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "with", "alter_time", "(", "test_time", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "jewish_calendar", ".", "DOMAIN", ",", "{", "\"jewish_calendar\"", ":", "{", "\"name\"", ":", "\"test\"", ",", "\"language\"", ":", "language", ",", "\"diaspora\"", ":", "diaspora", ",", "\"candle_lighting_minutes_before_sunset\"", ":", "candle_lighting", ",", "\"havdalah_minutes_after_sunset\"", ":", "havdalah", ",", "}", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "future", "=", "dt_util", ".", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "30", ")", "async_fire_time_changed", "(", "hass", ",", "future", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "for", "sensor_type", ",", "result_value", "in", "result", ".", "items", "(", ")", ":", "if", "not", "sensor_type", ".", "startswith", "(", "language", ")", ":", "print", "(", "f\"Not checking {sensor_type} for {language}\"", ")", "continue", "sensor_type", "=", "sensor_type", ".", "replace", "(", "f\"{language}_\"", ",", "\"\"", ")", "result_value", "=", "(", "dt_util", ".", "as_utc", "(", "result_value", ")", "if", "isinstance", "(", "result_value", ",", "dt", ")", "else", "result_value", ")", "assert", "hass", ".", "states", ".", "get", "(", "f\"sensor.test_{sensor_type}\"", ")", ".", "state", "==", "str", "(", "result_value", ")", ",", "f\"Value for {sensor_type}\"", "entity", "=", "registry", ".", "async_get", "(", "f\"sensor.test_{sensor_type}\"", ")", "target_sensor_type", "=", "sensor_type", ".", "replace", "(", "\"parshat_hashavua\"", ",", "\"weekly_portion\"", ")", "target_uid", "=", "\"_\"", ".", "join", "(", "map", "(", "str", ",", "[", "latitude", ",", "longitude", ",", "time_zone", ",", "HDATE_DEFAULT_ALTITUDE", ",", "diaspora", ",", "language", ",", "candle_lighting", ",", "havdalah", ",", "target_sensor_type", ",", "]", ",", ")", ")", "assert", "entity", ".", "unique_id", "==", "target_uid" ]
[ 492, 0 ]
[ 570, 45 ]
python
en
['en', 'en', 'en']
True
test_omer_sensor
(hass, legacy_patchable_time, test_time, result)
Test Omer Count sensor output.
Test Omer Count sensor output.
async def test_omer_sensor(hass, legacy_patchable_time, test_time, result):
    """Test Omer Count sensor output."""
    test_time = hass.config.time_zone.localize(test_time)

    with alter_time(test_time):
        assert await async_setup_component(
            hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}}
        )
        await hass.async_block_till_done()

        future = dt_util.utcnow() + timedelta(seconds=30)
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()

    assert hass.states.get("sensor.test_day_of_the_omer").state == result
[ "async", "def", "test_omer_sensor", "(", "hass", ",", "legacy_patchable_time", ",", "test_time", ",", "result", ")", ":", "test_time", "=", "hass", ".", "config", ".", "time_zone", ".", "localize", "(", "test_time", ")", "with", "alter_time", "(", "test_time", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "jewish_calendar", ".", "DOMAIN", ",", "{", "\"jewish_calendar\"", ":", "{", "\"name\"", ":", "\"test\"", "}", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "future", "=", "dt_util", ".", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "30", ")", "async_fire_time_changed", "(", "hass", ",", "future", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.test_day_of_the_omer\"", ")", ".", "state", "==", "result" ]
[ 592, 0 ]
[ 606, 73 ]
python
da
['sv', 'da', 'en']
False
test_dafyomi_sensor
(hass, legacy_patchable_time, test_time, result)
Test Daf Yomi sensor output.
Test Daf Yomi sensor output.
async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result):
    """Test Daf Yomi sensor output."""
    test_time = hass.config.time_zone.localize(test_time)

    with alter_time(test_time):
        assert await async_setup_component(
            hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}}
        )
        await hass.async_block_till_done()

        future = dt_util.utcnow() + timedelta(seconds=30)
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()

    assert hass.states.get("sensor.test_daf_yomi").state == result
[ "async", "def", "test_dafyomi_sensor", "(", "hass", ",", "legacy_patchable_time", ",", "test_time", ",", "result", ")", ":", "test_time", "=", "hass", ".", "config", ".", "time_zone", ".", "localize", "(", "test_time", ")", "with", "alter_time", "(", "test_time", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "jewish_calendar", ".", "DOMAIN", ",", "{", "\"jewish_calendar\"", ":", "{", "\"name\"", ":", "\"test\"", "}", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "future", "=", "dt_util", ".", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "30", ")", "async_fire_time_changed", "(", "hass", ",", "future", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.test_daf_yomi\"", ")", ".", "state", "==", "result" ]
[ 626, 0 ]
[ 640, 66 ]
python
da
['da', 'is', 'en']
False
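Taken together, the four jewish_calendar records above exercise the configuration keys these tests pass to the integration: name, language, diaspora, candle_lighting_minutes_before_sunset, and havdalah_minutes_after_sunset. A condensed setup sketch using those same keys follows; the concrete values are illustrative, not taken from the test parametrization.

# Illustrative values only; option names come from the test configs above.
from homeassistant.components import jewish_calendar
from homeassistant.setup import async_setup_component


async def setup_jewish_calendar(hass):
    """Set up the jewish_calendar integration with the options the tests cover."""
    assert await async_setup_component(
        hass,
        jewish_calendar.DOMAIN,
        {
            "jewish_calendar": {
                "name": "test",
                "language": "hebrew",  # the tests also use "english"
                "diaspora": True,
                "candle_lighting_minutes_before_sunset": 18,  # illustrative value
                "havdalah_minutes_after_sunset": 50,  # illustrative value
            }
        },
    )
    await hass.async_block_till_done()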
test_reloadable
(hass)
Test that we can reload.
Test that we can reload.
async def test_reloadable(hass):
    """Test that we can reload."""
    hass.states.async_set("sensor.test_sensor", "mytest")

    await async_setup_component(
        hass,
        "sensor",
        {
            "sensor": {
                "platform": DOMAIN,
                "sensors": {
                    "state": {
                        "value_template": "{{ states.sensor.test_sensor.state }}"
                    },
                },
            }
        },
    )
    await hass.async_block_till_done()

    await hass.async_start()
    await hass.async_block_till_done()

    assert hass.states.get("sensor.state").state == "mytest"
    assert len(hass.states.async_all()) == 2

    yaml_path = path.join(
        _get_fixtures_base_path(),
        "fixtures",
        "template/sensor_configuration.yaml",
    )

    with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_RELOAD,
            {},
            blocking=True,
        )
        await hass.async_block_till_done()

    assert len(hass.states.async_all()) == 3

    assert hass.states.get("sensor.state") is None
    assert hass.states.get("sensor.watching_tv_in_master_bedroom").state == "off"
    assert float(hass.states.get("sensor.combined_sensor_energy_usage").state) == 0
[ "async", "def", "test_reloadable", "(", "hass", ")", ":", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test_sensor\"", ",", "\"mytest\"", ")", "await", "async_setup_component", "(", "hass", ",", "\"sensor\"", ",", "{", "\"sensor\"", ":", "{", "\"platform\"", ":", "DOMAIN", ",", "\"sensors\"", ":", "{", "\"state\"", ":", "{", "\"value_template\"", ":", "\"{{ states.sensor.test_sensor.state }}\"", "}", ",", "}", ",", "}", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_start", "(", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.state\"", ")", ".", "state", "==", "\"mytest\"", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "2", "yaml_path", "=", "path", ".", "join", "(", "_get_fixtures_base_path", "(", ")", ",", "\"fixtures\"", ",", "\"template/sensor_configuration.yaml\"", ",", ")", "with", "patch", ".", "object", "(", "config", ",", "\"YAML_CONFIG_FILE\"", ",", "yaml_path", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_RELOAD", ",", "{", "}", ",", "blocking", "=", "True", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "3", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.state\"", ")", "is", "None", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.watching_tv_in_master_bedroom\"", ")", ".", "state", "==", "\"off\"", "assert", "float", "(", "hass", ".", "states", ".", "get", "(", "\"sensor.combined_sensor_energy_usage\"", ")", ".", "state", ")", "==", "0" ]
[ 13, 0 ]
[ 57, 83 ]
python
en
['en', 'en', 'en']
True
test_reloadable_can_remove
(hass)
Test that we can reload and remove all template sensors.
Test that we can reload and remove all template sensors.
async def test_reloadable_can_remove(hass):
    """Test that we can reload and remove all template sensors."""
    hass.states.async_set("sensor.test_sensor", "mytest")

    await async_setup_component(
        hass,
        "sensor",
        {
            "sensor": {
                "platform": DOMAIN,
                "sensors": {
                    "state": {
                        "value_template": "{{ states.sensor.test_sensor.state }}"
                    },
                },
            }
        },
    )
    await hass.async_block_till_done()

    await hass.async_start()
    await hass.async_block_till_done()

    assert hass.states.get("sensor.state").state == "mytest"
    assert len(hass.states.async_all()) == 2

    yaml_path = path.join(
        _get_fixtures_base_path(),
        "fixtures",
        "template/empty_configuration.yaml",
    )

    with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_RELOAD,
            {},
            blocking=True,
        )
        await hass.async_block_till_done()

    assert len(hass.states.async_all()) == 1
[ "async", "def", "test_reloadable_can_remove", "(", "hass", ")", ":", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test_sensor\"", ",", "\"mytest\"", ")", "await", "async_setup_component", "(", "hass", ",", "\"sensor\"", ",", "{", "\"sensor\"", ":", "{", "\"platform\"", ":", "DOMAIN", ",", "\"sensors\"", ":", "{", "\"state\"", ":", "{", "\"value_template\"", ":", "\"{{ states.sensor.test_sensor.state }}\"", "}", ",", "}", ",", "}", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_start", "(", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.state\"", ")", ".", "state", "==", "\"mytest\"", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "2", "yaml_path", "=", "path", ".", "join", "(", "_get_fixtures_base_path", "(", ")", ",", "\"fixtures\"", ",", "\"template/empty_configuration.yaml\"", ",", ")", "with", "patch", ".", "object", "(", "config", ",", "\"YAML_CONFIG_FILE\"", ",", "yaml_path", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_RELOAD", ",", "{", "}", ",", "blocking", "=", "True", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "1" ]
[ 60, 0 ]
[ 100, 44 ]
python
en
['en', 'en', 'en']
True
test_reloadable_stops_on_invalid_config
(hass)
Test we stop the reload if configuration.yaml is completely broken.
Test we stop the reload if configuration.yaml is completely broken.
async def test_reloadable_stops_on_invalid_config(hass):
    """Test we stop the reload if configuration.yaml is completely broken."""
    hass.states.async_set("sensor.test_sensor", "mytest")

    await async_setup_component(
        hass,
        "sensor",
        {
            "sensor": {
                "platform": DOMAIN,
                "sensors": {
                    "state": {
                        "value_template": "{{ states.sensor.test_sensor.state }}"
                    },
                },
            }
        },
    )
    await hass.async_block_till_done()

    await hass.async_start()
    await hass.async_block_till_done()

    assert hass.states.get("sensor.state").state == "mytest"
    assert len(hass.states.async_all()) == 2

    yaml_path = path.join(
        _get_fixtures_base_path(),
        "fixtures",
        "template/configuration.yaml.corrupt",
    )

    with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_RELOAD,
            {},
            blocking=True,
        )
        await hass.async_block_till_done()

    assert hass.states.get("sensor.state").state == "mytest"
    assert len(hass.states.async_all()) == 2
[ "async", "def", "test_reloadable_stops_on_invalid_config", "(", "hass", ")", ":", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test_sensor\"", ",", "\"mytest\"", ")", "await", "async_setup_component", "(", "hass", ",", "\"sensor\"", ",", "{", "\"sensor\"", ":", "{", "\"platform\"", ":", "DOMAIN", ",", "\"sensors\"", ":", "{", "\"state\"", ":", "{", "\"value_template\"", ":", "\"{{ states.sensor.test_sensor.state }}\"", "}", ",", "}", ",", "}", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_start", "(", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.state\"", ")", ".", "state", "==", "\"mytest\"", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "2", "yaml_path", "=", "path", ".", "join", "(", "_get_fixtures_base_path", "(", ")", ",", "\"fixtures\"", ",", "\"template/configuration.yaml.corrupt\"", ",", ")", "with", "patch", ".", "object", "(", "config", ",", "\"YAML_CONFIG_FILE\"", ",", "yaml_path", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_RELOAD", ",", "{", "}", ",", "blocking", "=", "True", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.state\"", ")", ".", "state", "==", "\"mytest\"", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "2" ]
[ 103, 0 ]
[ 145, 44 ]
python
en
['en', 'en', 'en']
True
test_reloadable_handles_partial_valid_config
(hass)
Test we can still setup valid sensors when configuration.yaml has a broken entry.
Test we can still setup valid sensors when configuration.yaml has a broken entry.
async def test_reloadable_handles_partial_valid_config(hass):
    """Test we can still setup valid sensors when configuration.yaml has a broken entry."""
    hass.states.async_set("sensor.test_sensor", "mytest")

    await async_setup_component(
        hass,
        "sensor",
        {
            "sensor": {
                "platform": DOMAIN,
                "sensors": {
                    "state": {
                        "value_template": "{{ states.sensor.test_sensor.state }}"
                    },
                },
            }
        },
    )
    await hass.async_block_till_done()

    await hass.async_start()
    await hass.async_block_till_done()

    assert hass.states.get("sensor.state").state == "mytest"
    assert len(hass.states.async_all()) == 2

    yaml_path = path.join(
        _get_fixtures_base_path(),
        "fixtures",
        "template/broken_configuration.yaml",
    )

    with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_RELOAD,
            {},
            blocking=True,
        )
        await hass.async_block_till_done()

    assert len(hass.states.async_all()) == 3

    assert hass.states.get("sensor.state") is None
    assert hass.states.get("sensor.watching_tv_in_master_bedroom").state == "off"
    assert float(hass.states.get("sensor.combined_sensor_energy_usage").state) == 0
[ "async", "def", "test_reloadable_handles_partial_valid_config", "(", "hass", ")", ":", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test_sensor\"", ",", "\"mytest\"", ")", "await", "async_setup_component", "(", "hass", ",", "\"sensor\"", ",", "{", "\"sensor\"", ":", "{", "\"platform\"", ":", "DOMAIN", ",", "\"sensors\"", ":", "{", "\"state\"", ":", "{", "\"value_template\"", ":", "\"{{ states.sensor.test_sensor.state }}\"", "}", ",", "}", ",", "}", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_start", "(", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.state\"", ")", ".", "state", "==", "\"mytest\"", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "2", "yaml_path", "=", "path", ".", "join", "(", "_get_fixtures_base_path", "(", ")", ",", "\"fixtures\"", ",", "\"template/broken_configuration.yaml\"", ",", ")", "with", "patch", ".", "object", "(", "config", ",", "\"YAML_CONFIG_FILE\"", ",", "yaml_path", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_RELOAD", ",", "{", "}", ",", "blocking", "=", "True", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "3", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.state\"", ")", "is", "None", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.watching_tv_in_master_bedroom\"", ")", ".", "state", "==", "\"off\"", "assert", "float", "(", "hass", ".", "states", ".", "get", "(", "\"sensor.combined_sensor_energy_usage\"", ")", ".", "state", ")", "==", "0" ]
[ 148, 0 ]
[ 193, 83 ]
python
en
['en', 'en', 'en']
True
test_reloadable_multiple_platforms
(hass)
Test that we can reload.
Test that we can reload.
async def test_reloadable_multiple_platforms(hass):
    """Test that we can reload."""
    hass.states.async_set("sensor.test_sensor", "mytest")

    await async_setup_component(
        hass,
        "sensor",
        {
            "sensor": {
                "platform": DOMAIN,
                "sensors": {
                    "state": {
                        "value_template": "{{ states.sensor.test_sensor.state }}"
                    },
                },
            }
        },
    )
    await async_setup_component(
        hass,
        "binary_sensor",
        {
            "binary_sensor": {
                "platform": DOMAIN,
                "sensors": {
                    "state": {
                        "value_template": "{{ states.sensor.test_sensor.state }}"
                    },
                },
            }
        },
    )
    await hass.async_block_till_done()

    await hass.async_start()
    await hass.async_block_till_done()

    assert hass.states.get("sensor.state").state == "mytest"
    assert hass.states.get("binary_sensor.state").state == "off"
    assert len(hass.states.async_all()) == 3

    yaml_path = path.join(
        _get_fixtures_base_path(),
        "fixtures",
        "template/sensor_configuration.yaml",
    )

    with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_RELOAD,
            {},
            blocking=True,
        )
        await hass.async_block_till_done()

    assert len(hass.states.async_all()) == 3

    assert hass.states.get("sensor.state") is None
    assert hass.states.get("sensor.watching_tv_in_master_bedroom").state == "off"
    assert float(hass.states.get("sensor.combined_sensor_energy_usage").state) == 0
[ "async", "def", "test_reloadable_multiple_platforms", "(", "hass", ")", ":", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test_sensor\"", ",", "\"mytest\"", ")", "await", "async_setup_component", "(", "hass", ",", "\"sensor\"", ",", "{", "\"sensor\"", ":", "{", "\"platform\"", ":", "DOMAIN", ",", "\"sensors\"", ":", "{", "\"state\"", ":", "{", "\"value_template\"", ":", "\"{{ states.sensor.test_sensor.state }}\"", "}", ",", "}", ",", "}", "}", ",", ")", "await", "async_setup_component", "(", "hass", ",", "\"binary_sensor\"", ",", "{", "\"binary_sensor\"", ":", "{", "\"platform\"", ":", "DOMAIN", ",", "\"sensors\"", ":", "{", "\"state\"", ":", "{", "\"value_template\"", ":", "\"{{ states.sensor.test_sensor.state }}\"", "}", ",", "}", ",", "}", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_start", "(", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.state\"", ")", ".", "state", "==", "\"mytest\"", "assert", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.state\"", ")", ".", "state", "==", "\"off\"", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "3", "yaml_path", "=", "path", ".", "join", "(", "_get_fixtures_base_path", "(", ")", ",", "\"fixtures\"", ",", "\"template/sensor_configuration.yaml\"", ",", ")", "with", "patch", ".", "object", "(", "config", ",", "\"YAML_CONFIG_FILE\"", ",", "yaml_path", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_RELOAD", ",", "{", "}", ",", "blocking", "=", "True", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "3", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.state\"", ")", "is", "None", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.watching_tv_in_master_bedroom\"", ")", ".", "state", "==", "\"off\"", "assert", "float", "(", "hass", ".", "states", ".", "get", "(", "\"sensor.combined_sensor_energy_usage\"", ")", ".", "state", ")", "==", "0" ]
[ 196, 0 ]
[ 256, 83 ]
python
en
['en', 'en', 'en']
True
test_reload_sensors_that_reference_other_template_sensors
(hass)
Test that we can reload sensor that reference other template sensors.
Test that we can reload sensor that reference other template sensors.
async def test_reload_sensors_that_reference_other_template_sensors(hass):
    """Test that we can reload sensor that reference other template sensors."""

    await async_setup_component(
        hass,
        "sensor",
        {
            "sensor": {
                "platform": DOMAIN,
                "sensors": {
                    "state": {"value_template": "{{ 1 }}"},
                },
            }
        },
    )
    await hass.async_block_till_done()

    yaml_path = path.join(
        _get_fixtures_base_path(),
        "fixtures",
        "template/ref_configuration.yaml",
    )

    with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_RELOAD,
            {},
            blocking=True,
        )
        await hass.async_block_till_done()

    assert len(hass.states.async_all()) == 3

    await hass.async_block_till_done()

    next_time = dt_util.utcnow() + timedelta(seconds=1.2)
    with patch(
        "homeassistant.helpers.ratelimit.dt_util.utcnow", return_value=next_time
    ):
        async_fire_time_changed(hass, next_time)
        await hass.async_block_till_done()

    assert hass.states.get("sensor.test1").state == "3"
    assert hass.states.get("sensor.test2").state == "1"
    assert hass.states.get("sensor.test3").state == "2"
[ "async", "def", "test_reload_sensors_that_reference_other_template_sensors", "(", "hass", ")", ":", "await", "async_setup_component", "(", "hass", ",", "\"sensor\"", ",", "{", "\"sensor\"", ":", "{", "\"platform\"", ":", "DOMAIN", ",", "\"sensors\"", ":", "{", "\"state\"", ":", "{", "\"value_template\"", ":", "\"{{ 1 }}\"", "}", ",", "}", ",", "}", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "yaml_path", "=", "path", ".", "join", "(", "_get_fixtures_base_path", "(", ")", ",", "\"fixtures\"", ",", "\"template/ref_configuration.yaml\"", ",", ")", "with", "patch", ".", "object", "(", "config", ",", "\"YAML_CONFIG_FILE\"", ",", "yaml_path", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_RELOAD", ",", "{", "}", ",", "blocking", "=", "True", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "3", "await", "hass", ".", "async_block_till_done", "(", ")", "next_time", "=", "dt_util", ".", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "1.2", ")", "with", "patch", "(", "\"homeassistant.helpers.ratelimit.dt_util.utcnow\"", ",", "return_value", "=", "next_time", ")", ":", "async_fire_time_changed", "(", "hass", ",", "next_time", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.test1\"", ")", ".", "state", "==", "\"3\"", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.test2\"", ")", ".", "state", "==", "\"1\"", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.test3\"", ")", ".", "state", "==", "\"2\"" ]
[ 259, 0 ]
[ 301, 55 ]
python
en
['en', 'en', 'en']
True
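All of the reload records above share one mechanism: point config.YAML_CONFIG_FILE at a fixture file, then call the template domain's reload service. A minimal sketch of that mechanism follows; it assumes the same imports these tests rely on, and the fixture path argument is illustrative.

# Sketch of the reload pattern used by the tests above; the fixture path is
# whatever YAML file you want Home Assistant to re-read on reload.
from unittest.mock import patch

from homeassistant import config
from homeassistant.components.template import DOMAIN
from homeassistant.const import SERVICE_RELOAD


async def reload_template_from_fixture(hass, fixture_yaml_path):
    """Reload the template integration from a substitute configuration file."""
    with patch.object(config, "YAML_CONFIG_FILE", fixture_yaml_path):
        await hass.services.async_call(DOMAIN, SERVICE_RELOAD, {}, blocking=True)
        await hass.async_block_till_done()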
test_static_vars
()
Test static vars.
Test static vars.
async def test_static_vars():
    """Test static vars."""
    orig = {"hello": "world"}
    var = cv.SCRIPT_VARIABLES_SCHEMA(orig)
    rendered = var.async_render(None, None)
    assert rendered is not orig
    assert rendered == orig
[ "async", "def", "test_static_vars", "(", ")", ":", "orig", "=", "{", "\"hello\"", ":", "\"world\"", "}", "var", "=", "cv", ".", "SCRIPT_VARIABLES_SCHEMA", "(", "orig", ")", "rendered", "=", "var", ".", "async_render", "(", "None", ",", "None", ")", "assert", "rendered", "is", "not", "orig", "assert", "rendered", "==", "orig" ]
[ 6, 0 ]
[ 12, 27 ]
python
en
['en', 'hi-Latn', 'en']
True
test_static_vars_run_args
()
Test static vars.
Test static vars.
async def test_static_vars_run_args():
    """Test static vars."""
    orig = {"hello": "world"}
    orig_copy = dict(orig)
    var = cv.SCRIPT_VARIABLES_SCHEMA(orig)
    rendered = var.async_render(None, {"hello": "override", "run": "var"})
    assert rendered == {"hello": "override", "run": "var"}
    # Make sure we don't change original vars
    assert orig == orig_copy
[ "async", "def", "test_static_vars_run_args", "(", ")", ":", "orig", "=", "{", "\"hello\"", ":", "\"world\"", "}", "orig_copy", "=", "dict", "(", "orig", ")", "var", "=", "cv", ".", "SCRIPT_VARIABLES_SCHEMA", "(", "orig", ")", "rendered", "=", "var", ".", "async_render", "(", "None", ",", "{", "\"hello\"", ":", "\"override\"", ",", "\"run\"", ":", "\"var\"", "}", ")", "assert", "rendered", "==", "{", "\"hello\"", ":", "\"override\"", ",", "\"run\"", ":", "\"var\"", "}", "# Make sure we don't change original vars", "assert", "orig", "==", "orig_copy" ]
[ 15, 0 ]
[ 23, 28 ]
python
en
['en', 'hi-Latn', 'en']
True
test_static_vars_no_default
()
Test static vars.
Test static vars.
async def test_static_vars_no_default():
    """Test static vars."""
    orig = {"hello": "world"}
    var = cv.SCRIPT_VARIABLES_SCHEMA(orig)
    rendered = var.async_render(None, None, render_as_defaults=False)
    assert rendered is not orig
    assert rendered == orig
[ "async", "def", "test_static_vars_no_default", "(", ")", ":", "orig", "=", "{", "\"hello\"", ":", "\"world\"", "}", "var", "=", "cv", ".", "SCRIPT_VARIABLES_SCHEMA", "(", "orig", ")", "rendered", "=", "var", ".", "async_render", "(", "None", ",", "None", ",", "render_as_defaults", "=", "False", ")", "assert", "rendered", "is", "not", "orig", "assert", "rendered", "==", "orig" ]
[ 26, 0 ]
[ 32, 27 ]
python
en
['en', 'hi-Latn', 'en']
True
test_static_vars_run_args_no_default
()
Test static vars.
Test static vars.
async def test_static_vars_run_args_no_default():
    """Test static vars."""
    orig = {"hello": "world"}
    orig_copy = dict(orig)
    var = cv.SCRIPT_VARIABLES_SCHEMA(orig)
    rendered = var.async_render(
        None, {"hello": "override", "run": "var"}, render_as_defaults=False
    )
    assert rendered == {"hello": "world", "run": "var"}
    # Make sure we don't change original vars
    assert orig == orig_copy
[ "async", "def", "test_static_vars_run_args_no_default", "(", ")", ":", "orig", "=", "{", "\"hello\"", ":", "\"world\"", "}", "orig_copy", "=", "dict", "(", "orig", ")", "var", "=", "cv", ".", "SCRIPT_VARIABLES_SCHEMA", "(", "orig", ")", "rendered", "=", "var", ".", "async_render", "(", "None", ",", "{", "\"hello\"", ":", "\"override\"", ",", "\"run\"", ":", "\"var\"", "}", ",", "render_as_defaults", "=", "False", ")", "assert", "rendered", "==", "{", "\"hello\"", ":", "\"world\"", ",", "\"run\"", ":", "\"var\"", "}", "# Make sure we don't change original vars", "assert", "orig", "==", "orig_copy" ]
[ 35, 0 ]
[ 45, 28 ]
python
en
['en', 'hi-Latn', 'en']
True
test_template_vars
(hass)
Test template vars.
Test template vars.
async def test_template_vars(hass):
    """Test template vars."""
    var = cv.SCRIPT_VARIABLES_SCHEMA({"hello": "{{ 1 + 1 }}"})
    rendered = var.async_render(hass, None)
    assert rendered == {"hello": 2}
[ "async", "def", "test_template_vars", "(", "hass", ")", ":", "var", "=", "cv", ".", "SCRIPT_VARIABLES_SCHEMA", "(", "{", "\"hello\"", ":", "\"{{ 1 + 1 }}\"", "}", ")", "rendered", "=", "var", ".", "async_render", "(", "hass", ",", "None", ")", "assert", "rendered", "==", "{", "\"hello\"", ":", "2", "}" ]
[ 48, 0 ]
[ 52, 35 ]
python
en
['en', 'et', 'en']
True
test_template_vars_run_args
(hass)
Test template vars.
Test template vars.
async def test_template_vars_run_args(hass):
    """Test template vars."""
    var = cv.SCRIPT_VARIABLES_SCHEMA(
        {
            "something": "{{ run_var_ex + 1 }}",
            "something_2": "{{ run_var_ex + 1 }}",
        }
    )
    rendered = var.async_render(
        hass,
        {
            "run_var_ex": 5,
            "something_2": 1,
        },
    )
    assert rendered == {
        "run_var_ex": 5,
        "something": 6,
        "something_2": 1,
    }
[ "async", "def", "test_template_vars_run_args", "(", "hass", ")", ":", "var", "=", "cv", ".", "SCRIPT_VARIABLES_SCHEMA", "(", "{", "\"something\"", ":", "\"{{ run_var_ex + 1 }}\"", ",", "\"something_2\"", ":", "\"{{ run_var_ex + 1 }}\"", ",", "}", ")", "rendered", "=", "var", ".", "async_render", "(", "hass", ",", "{", "\"run_var_ex\"", ":", "5", ",", "\"something_2\"", ":", "1", ",", "}", ",", ")", "assert", "rendered", "==", "{", "\"run_var_ex\"", ":", "5", ",", "\"something\"", ":", "6", ",", "\"something_2\"", ":", "1", ",", "}" ]
[ 55, 0 ]
[ 74, 5 ]
python
en
['en', 'et', 'en']
True
test_template_vars_no_default
(hass)
Test template vars.
Test template vars.
async def test_template_vars_no_default(hass):
    """Test template vars."""
    var = cv.SCRIPT_VARIABLES_SCHEMA({"hello": "{{ 1 + 1 }}"})
    rendered = var.async_render(hass, None, render_as_defaults=False)
    assert rendered == {"hello": 2}
[ "async", "def", "test_template_vars_no_default", "(", "hass", ")", ":", "var", "=", "cv", ".", "SCRIPT_VARIABLES_SCHEMA", "(", "{", "\"hello\"", ":", "\"{{ 1 + 1 }}\"", "}", ")", "rendered", "=", "var", ".", "async_render", "(", "hass", ",", "None", ",", "render_as_defaults", "=", "False", ")", "assert", "rendered", "==", "{", "\"hello\"", ":", "2", "}" ]
[ 77, 0 ]
[ 81, 35 ]
python
en
['en', 'et', 'en']
True
test_template_vars_run_args_no_default
(hass)
Test template vars.
Test template vars.
async def test_template_vars_run_args_no_default(hass):
    """Test template vars."""
    var = cv.SCRIPT_VARIABLES_SCHEMA(
        {
            "something": "{{ run_var_ex + 1 }}",
            "something_2": "{{ run_var_ex + 1 }}",
        }
    )
    rendered = var.async_render(
        hass,
        {
            "run_var_ex": 5,
            "something_2": 1,
        },
        render_as_defaults=False,
    )
    assert rendered == {
        "run_var_ex": 5,
        "something": 6,
        "something_2": 6,
    }
[ "async", "def", "test_template_vars_run_args_no_default", "(", "hass", ")", ":", "var", "=", "cv", ".", "SCRIPT_VARIABLES_SCHEMA", "(", "{", "\"something\"", ":", "\"{{ run_var_ex + 1 }}\"", ",", "\"something_2\"", ":", "\"{{ run_var_ex + 1 }}\"", ",", "}", ")", "rendered", "=", "var", ".", "async_render", "(", "hass", ",", "{", "\"run_var_ex\"", ":", "5", ",", "\"something_2\"", ":", "1", ",", "}", ",", "render_as_defaults", "=", "False", ",", ")", "assert", "rendered", "==", "{", "\"run_var_ex\"", ":", "5", ",", "\"something\"", ":", "6", ",", "\"something_2\"", ":", "6", ",", "}" ]
[ 84, 0 ]
[ 104, 5 ]
python
en
['en', 'et', 'en']
True
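The eight script-variables records above reduce to one behavioural contract for SCRIPT_VARIABLES_SCHEMA.async_render: with render_as_defaults left at its default, run variables override the schema's values and templates are skipped for keys the caller already supplied; with render_as_defaults=False, every schema entry is rendered and written over the run variables. A condensed restatement of that contract follows, mirroring the assertions above; it assumes a `hass` instance is available, as in the tests, because the templated entries need one to render.

# Condensed restatement of the assertions in the tests above; `hass` is the
# usual Home Assistant instance (e.g. the pytest fixture), required for templates.
import homeassistant.helpers.config_validation as cv

var = cv.SCRIPT_VARIABLES_SCHEMA(
    {"something": "{{ run_var_ex + 1 }}", "something_2": "{{ run_var_ex + 1 }}"}
)
run_vars = {"run_var_ex": 5, "something_2": 1}

# As defaults (the default mode): keys already present in run_vars keep the
# supplied value, so "something_2" stays 1.
assert var.async_render(hass, run_vars) == {
    "run_var_ex": 5,
    "something": 6,
    "something_2": 1,
}

# Not as defaults: every schema entry is rendered over the run variables,
# so "something_2" becomes 6 as well.
assert var.async_render(hass, run_vars, render_as_defaults=False) == {
    "run_var_ex": 5,
    "something": 6,
    "something_2": 6,
}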