body_hash (string, 64 chars) | body (string, 23-109k chars) | docstring (string, 1-57k chars) | path (string, 4-198 chars) | name (string, 1-115 chars) | repository_name (string, 7-111 chars) | repository_stars (float64, 0-191k) | lang (string, 1 class) | body_without_docstring (string, 14-108k chars) | unified (string, 45-133k chars)
---|---|---|---|---|---|---|---|---|---|
a786b55e16fcfbd357863ec2c0be79aa3510912e7a5aab0d21a2087ba37141c8
|
@terminal_unit_air_outlet_node_name.setter
def terminal_unit_air_outlet_node_name(self, value=None):
'Corresponds to IDD field `Terminal Unit Air Outlet Node Name`'
self['Terminal Unit Air Outlet Node Name'] = value
|
Corresponds to IDD field `Terminal Unit Air Outlet Node Name`
|
pyidf/zone_hvac_forced_air_units.py
|
terminal_unit_air_outlet_node_name
|
marcelosalles/pyidf
| 19 |
python
|
@terminal_unit_air_outlet_node_name.setter
def terminal_unit_air_outlet_node_name(self, value=None):
self['Terminal Unit Air Outlet Node Name'] = value
|
@terminal_unit_air_outlet_node_name.setter
def terminal_unit_air_outlet_node_name(self, value=None):
self['Terminal Unit Air Outlet Node Name'] = value<|docstring|>Corresponds to IDD field `Terminal Unit Air Outlet Node Name`<|endoftext|>
|
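The `body` column in these rows captures pyidf's dict-backed property pattern: each IDD field is exposed as a getter/setter pair that reads and writes a key on the object itself. Below is a minimal, self-contained sketch of that pattern; `IDFObject` is a hypothetical stand-in for pyidf's actual data object, and the `.get()` call is an assumption chosen to match the "None if not set" wording in the docstrings.

```python
# Minimal sketch of the getter/setter pattern shown in the `body` column.
# `IDFObject` is a hypothetical stand-in for pyidf's dict-backed data object.
class IDFObject(dict):
    """Stores IDD field values keyed by their field name."""

    @property
    def terminal_unit_air_outlet_node_name(self):
        """Corresponds to IDD field `Terminal Unit Air Outlet Node Name`."""
        # .get() returns None when the field has not been set (assumption).
        return self.get('Terminal Unit Air Outlet Node Name')

    @terminal_unit_air_outlet_node_name.setter
    def terminal_unit_air_outlet_node_name(self, value=None):
        """Corresponds to IDD field `Terminal Unit Air Outlet Node Name`."""
        self['Terminal Unit Air Outlet Node Name'] = value


obj = IDFObject()
obj.terminal_unit_air_outlet_node_name = "Zone 1 Outlet Node"
print(obj.terminal_unit_air_outlet_node_name)  # -> Zone 1 Outlet Node
```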
d0227e7673c0812796657eaef9c5b2a698b3c7eb98bf5a9761b3bdc271ffe171
|
@property
def cooling_supply_air_flow_rate(self):
'field `Cooling Supply Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `Cooling Supply Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `cooling_supply_air_flow_rate` or None if not set\n\n '
return self['Cooling Supply Air Flow Rate']
|
field `Cooling Supply Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Cooling Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `cooling_supply_air_flow_rate` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
cooling_supply_air_flow_rate
|
marcelosalles/pyidf
| 19 |
python
|
@property
def cooling_supply_air_flow_rate(self):
'field `Cooling Supply Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `Cooling Supply Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `cooling_supply_air_flow_rate` or None if not set\n\n '
return self['Cooling Supply Air Flow Rate']
|
@property
def cooling_supply_air_flow_rate(self):
'field `Cooling Supply Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `Cooling Supply Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `cooling_supply_air_flow_rate` or None if not set\n\n '
return self['Cooling Supply Air Flow Rate']<|docstring|>field `Cooling Supply Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Cooling Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `cooling_supply_air_flow_rate` or None if not set<|endoftext|>
|
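The `unified` column appears to be the `body_without_docstring` text with the docstring re-attached between `<|docstring|>` and `<|endoftext|>` markers. The snippet below illustrates that composition, assuming it is a plain concatenation of the two source columns; `make_unified` is an illustrative helper, not part of the dataset tooling.

```python
# Illustration only: compose a `unified` string from the two source columns,
# assuming unified = body_without_docstring + "<|docstring|>" + docstring + "<|endoftext|>".
def make_unified(body_without_docstring: str, docstring: str) -> str:
    return f"{body_without_docstring}<|docstring|>{docstring}<|endoftext|>"


body_without_docstring = (
    "@cooling_supply_air_flow_rate.setter\n"
    "def cooling_supply_air_flow_rate(self, value=None):\n"
    "    self['Cooling Supply Air Flow Rate'] = value"
)
docstring = "Corresponds to IDD field `Cooling Supply Air Flow Rate`"
print(make_unified(body_without_docstring, docstring))
```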
a4aebf58e628d38393fcbd6d5eccf477ed690a6e904118c854bb1841d0c351a3
|
@cooling_supply_air_flow_rate.setter
def cooling_supply_air_flow_rate(self, value=None):
'Corresponds to IDD field `Cooling Supply Air Flow Rate`'
self['Cooling Supply Air Flow Rate'] = value
|
Corresponds to IDD field `Cooling Supply Air Flow Rate`
|
pyidf/zone_hvac_forced_air_units.py
|
cooling_supply_air_flow_rate
|
marcelosalles/pyidf
| 19 |
python
|
@cooling_supply_air_flow_rate.setter
def cooling_supply_air_flow_rate(self, value=None):
self['Cooling Supply Air Flow Rate'] = value
|
@cooling_supply_air_flow_rate.setter
def cooling_supply_air_flow_rate(self, value=None):
self['Cooling Supply Air Flow Rate'] = value<|docstring|>Corresponds to IDD field `Cooling Supply Air Flow Rate`<|endoftext|>
|
1964d3eb5d37042be62afec57803d4f7044754440bef5d6b28efe660c71c0899
|
@property
def no_cooling_supply_air_flow_rate(self):
'field `No Cooling Supply Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `No Cooling Supply Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `no_cooling_supply_air_flow_rate` or None if not set\n\n '
return self['No Cooling Supply Air Flow Rate']
|
field `No Cooling Supply Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `No Cooling Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `no_cooling_supply_air_flow_rate` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
no_cooling_supply_air_flow_rate
|
marcelosalles/pyidf
| 19 |
python
|
@property
def no_cooling_supply_air_flow_rate(self):
'field `No Cooling Supply Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `No Cooling Supply Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `no_cooling_supply_air_flow_rate` or None if not set\n\n '
return self['No Cooling Supply Air Flow Rate']
|
@property
def no_cooling_supply_air_flow_rate(self):
'field `No Cooling Supply Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `No Cooling Supply Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `no_cooling_supply_air_flow_rate` or None if not set\n\n '
return self['No Cooling Supply Air Flow Rate']<|docstring|>field `No Cooling Supply Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `No Cooling Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `no_cooling_supply_air_flow_rate` or None if not set<|endoftext|>
|
02288d94ac18fdf3391e4d37403211ca16877b599802924fdab6cefef6645472
|
@no_cooling_supply_air_flow_rate.setter
def no_cooling_supply_air_flow_rate(self, value=None):
'Corresponds to IDD field `No Cooling Supply Air Flow Rate`'
self['No Cooling Supply Air Flow Rate'] = value
|
Corresponds to IDD field `No Cooling Supply Air Flow Rate`
|
pyidf/zone_hvac_forced_air_units.py
|
no_cooling_supply_air_flow_rate
|
marcelosalles/pyidf
| 19 |
python
|
@no_cooling_supply_air_flow_rate.setter
def no_cooling_supply_air_flow_rate(self, value=None):
self['No Cooling Supply Air Flow Rate'] = value
|
@no_cooling_supply_air_flow_rate.setter
def no_cooling_supply_air_flow_rate(self, value=None):
self['No Cooling Supply Air Flow Rate'] = value<|docstring|>Corresponds to IDD field `No Cooling Supply Air Flow Rate`<|endoftext|>
|
7a820d9d799050ebe1184a422612e7d91f8b5490babb65f709ecff07fb36d40f
|
@property
def heating_supply_air_flow_rate(self):
'field `Heating Supply Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `Heating Supply Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `heating_supply_air_flow_rate` or None if not set\n\n '
return self['Heating Supply Air Flow Rate']
|
field `Heating Supply Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Heating Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `heating_supply_air_flow_rate` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
heating_supply_air_flow_rate
|
marcelosalles/pyidf
| 19 |
python
|
@property
def heating_supply_air_flow_rate(self):
'field `Heating Supply Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `Heating Supply Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `heating_supply_air_flow_rate` or None if not set\n\n '
return self['Heating Supply Air Flow Rate']
|
@property
def heating_supply_air_flow_rate(self):
'field `Heating Supply Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `Heating Supply Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `heating_supply_air_flow_rate` or None if not set\n\n '
return self['Heating Supply Air Flow Rate']<|docstring|>field `Heating Supply Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Heating Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `heating_supply_air_flow_rate` or None if not set<|endoftext|>
|
c54a695ddd4655044a9b3de7a19e0e9645d0d28ff1fbcd3214301cdaa90ca148
|
@heating_supply_air_flow_rate.setter
def heating_supply_air_flow_rate(self, value=None):
'Corresponds to IDD field `Heating Supply Air Flow Rate`'
self['Heating Supply Air Flow Rate'] = value
|
Corresponds to IDD field `Heating Supply Air Flow Rate`
|
pyidf/zone_hvac_forced_air_units.py
|
heating_supply_air_flow_rate
|
marcelosalles/pyidf
| 19 |
python
|
@heating_supply_air_flow_rate.setter
def heating_supply_air_flow_rate(self, value=None):
self['Heating Supply Air Flow Rate'] = value
|
@heating_supply_air_flow_rate.setter
def heating_supply_air_flow_rate(self, value=None):
self['Heating Supply Air Flow Rate'] = value<|docstring|>Corresponds to IDD field `Heating Supply Air Flow Rate`<|endoftext|>
|
5e176cad650cb18ec7c24488768083f35c1dabfb2451d6788e9dd968d9e2e810
|
@property
def no_heating_supply_air_flow_rate(self):
'field `No Heating Supply Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `No Heating Supply Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `no_heating_supply_air_flow_rate` or None if not set\n\n '
return self['No Heating Supply Air Flow Rate']
|
field `No Heating Supply Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `No Heating Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `no_heating_supply_air_flow_rate` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
no_heating_supply_air_flow_rate
|
marcelosalles/pyidf
| 19 |
python
|
@property
def no_heating_supply_air_flow_rate(self):
'field `No Heating Supply Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `No Heating Supply Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `no_heating_supply_air_flow_rate` or None if not set\n\n '
return self['No Heating Supply Air Flow Rate']
|
@property
def no_heating_supply_air_flow_rate(self):
'field `No Heating Supply Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `No Heating Supply Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `no_heating_supply_air_flow_rate` or None if not set\n\n '
return self['No Heating Supply Air Flow Rate']<|docstring|>field `No Heating Supply Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `No Heating Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `no_heating_supply_air_flow_rate` or None if not set<|endoftext|>
|
02bfcf9999e3534bfacb0b21ae68d8173271dca01d159217f9994b706ea4ff94
|
@no_heating_supply_air_flow_rate.setter
def no_heating_supply_air_flow_rate(self, value=None):
'Corresponds to IDD field `No Heating Supply Air Flow Rate`'
self['No Heating Supply Air Flow Rate'] = value
|
Corresponds to IDD field `No Heating Supply Air Flow Rate`
|
pyidf/zone_hvac_forced_air_units.py
|
no_heating_supply_air_flow_rate
|
marcelosalles/pyidf
| 19 |
python
|
@no_heating_supply_air_flow_rate.setter
def no_heating_supply_air_flow_rate(self, value=None):
self['No Heating Supply Air Flow Rate'] = value
|
@no_heating_supply_air_flow_rate.setter
def no_heating_supply_air_flow_rate(self, value=None):
self['No Heating Supply Air Flow Rate'] = value<|docstring|>Corresponds to IDD field `No Heating Supply Air Flow Rate`<|endoftext|>
|
21873c6990eea1da449ec8fb855c22f2215c917cf2da9c1c498085377d738a01
|
@property
def cooling_outdoor_air_flow_rate(self):
'field `Cooling Outdoor Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `Cooling Outdoor Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `cooling_outdoor_air_flow_rate` or None if not set\n\n '
return self['Cooling Outdoor Air Flow Rate']
|
field `Cooling Outdoor Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Cooling Outdoor Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `cooling_outdoor_air_flow_rate` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
cooling_outdoor_air_flow_rate
|
marcelosalles/pyidf
| 19 |
python
|
@property
def cooling_outdoor_air_flow_rate(self):
'field `Cooling Outdoor Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `Cooling Outdoor Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `cooling_outdoor_air_flow_rate` or None if not set\n\n '
return self['Cooling Outdoor Air Flow Rate']
|
@property
def cooling_outdoor_air_flow_rate(self):
'field `Cooling Outdoor Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `Cooling Outdoor Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `cooling_outdoor_air_flow_rate` or None if not set\n\n '
return self['Cooling Outdoor Air Flow Rate']<|docstring|>field `Cooling Outdoor Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Cooling Outdoor Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `cooling_outdoor_air_flow_rate` or None if not set<|endoftext|>
|
1b41ca81a941adbc452df87f64dbfe4d134b455cb4415a505b9a1614e2c9618f
|
@cooling_outdoor_air_flow_rate.setter
def cooling_outdoor_air_flow_rate(self, value=None):
'Corresponds to IDD field `Cooling Outdoor Air Flow Rate`'
self['Cooling Outdoor Air Flow Rate'] = value
|
Corresponds to IDD field `Cooling Outdoor Air Flow Rate`
|
pyidf/zone_hvac_forced_air_units.py
|
cooling_outdoor_air_flow_rate
|
marcelosalles/pyidf
| 19 |
python
|
@cooling_outdoor_air_flow_rate.setter
def cooling_outdoor_air_flow_rate(self, value=None):
self['Cooling Outdoor Air Flow Rate'] = value
|
@cooling_outdoor_air_flow_rate.setter
def cooling_outdoor_air_flow_rate(self, value=None):
self['Cooling Outdoor Air Flow Rate'] = value<|docstring|>Corresponds to IDD field `Cooling Outdoor Air Flow Rate`<|endoftext|>
|
04e597071508ebc276369320d7bf7108f52a1859d3be6ab130b6b082afae1b53
|
@property
def heating_outdoor_air_flow_rate(self):
'field `Heating Outdoor Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `Heating Outdoor Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `heating_outdoor_air_flow_rate` or None if not set\n\n '
return self['Heating Outdoor Air Flow Rate']
|
field `Heating Outdoor Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Heating Outdoor Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `heating_outdoor_air_flow_rate` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
heating_outdoor_air_flow_rate
|
marcelosalles/pyidf
| 19 |
python
|
@property
def heating_outdoor_air_flow_rate(self):
'field `Heating Outdoor Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `Heating Outdoor Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `heating_outdoor_air_flow_rate` or None if not set\n\n '
return self['Heating Outdoor Air Flow Rate']
|
@property
def heating_outdoor_air_flow_rate(self):
'field `Heating Outdoor Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `Heating Outdoor Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `heating_outdoor_air_flow_rate` or None if not set\n\n '
return self['Heating Outdoor Air Flow Rate']<|docstring|>field `Heating Outdoor Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Heating Outdoor Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `heating_outdoor_air_flow_rate` or None if not set<|endoftext|>
|
59e5751915474a50827d6549106ce17267bf1fcccf8f99b789ff9260822c6311
|
@heating_outdoor_air_flow_rate.setter
def heating_outdoor_air_flow_rate(self, value=None):
'Corresponds to IDD field `Heating Outdoor Air Flow Rate`'
self['Heating Outdoor Air Flow Rate'] = value
|
Corresponds to IDD field `Heating Outdoor Air Flow Rate`
|
pyidf/zone_hvac_forced_air_units.py
|
heating_outdoor_air_flow_rate
|
marcelosalles/pyidf
| 19 |
python
|
@heating_outdoor_air_flow_rate.setter
def heating_outdoor_air_flow_rate(self, value=None):
self['Heating Outdoor Air Flow Rate'] = value
|
@heating_outdoor_air_flow_rate.setter
def heating_outdoor_air_flow_rate(self, value=None):
self['Heating Outdoor Air Flow Rate'] = value<|docstring|>Corresponds to IDD field `Heating Outdoor Air Flow Rate`<|endoftext|>
|
588e330d9150247b96c7ea270d01e7ae86bbb2d04f2acd993e00eec5af3cc2fd
|
@property
def no_load_outdoor_air_flow_rate(self):
'field `No Load Outdoor Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `No Load Outdoor Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `no_load_outdoor_air_flow_rate` or None if not set\n\n '
return self['No Load Outdoor Air Flow Rate']
|
field `No Load Outdoor Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `No Load Outdoor Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `no_load_outdoor_air_flow_rate` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
no_load_outdoor_air_flow_rate
|
marcelosalles/pyidf
| 19 |
python
|
@property
def no_load_outdoor_air_flow_rate(self):
'field `No Load Outdoor Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `No Load Outdoor Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `no_load_outdoor_air_flow_rate` or None if not set\n\n '
return self['No Load Outdoor Air Flow Rate']
|
@property
def no_load_outdoor_air_flow_rate(self):
'field `No Load Outdoor Air Flow Rate`\n\n | Units: m3/s\n\n Args:\n value (float or "Autosize"): value for IDD Field `No Load Outdoor Air Flow Rate`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float or "Autosize": the value of `no_load_outdoor_air_flow_rate` or None if not set\n\n '
return self['No Load Outdoor Air Flow Rate']<|docstring|>field `No Load Outdoor Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `No Load Outdoor Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `no_load_outdoor_air_flow_rate` or None if not set<|endoftext|>
|
3166bfcb4b40e96f666cd8c40653f918f6d61b1de0b2d9de9175dab374284ab7
|
@no_load_outdoor_air_flow_rate.setter
def no_load_outdoor_air_flow_rate(self, value=None):
'Corresponds to IDD field `No Load Outdoor Air Flow Rate`'
self['No Load Outdoor Air Flow Rate'] = value
|
Corresponds to IDD field `No Load Outdoor Air Flow Rate`
|
pyidf/zone_hvac_forced_air_units.py
|
no_load_outdoor_air_flow_rate
|
marcelosalles/pyidf
| 19 |
python
|
@no_load_outdoor_air_flow_rate.setter
def no_load_outdoor_air_flow_rate(self, value=None):
self['No Load Outdoor Air Flow Rate'] = value
|
@no_load_outdoor_air_flow_rate.setter
def no_load_outdoor_air_flow_rate(self, value=None):
self['No Load Outdoor Air Flow Rate'] = value<|docstring|>Corresponds to IDD field `No Load Outdoor Air Flow Rate`<|endoftext|>
|
94dcfa62fa6703bb88e18a3692f4c23bafe7e6b43ef4178e2dd5fc8d66ae29f2
|
@property
def supply_air_fan_operating_mode_schedule_name(self):
'field `Supply Air Fan Operating Mode Schedule Name`\n\n Args:\n value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set\n\n '
return self['Supply Air Fan Operating Mode Schedule Name']
|
field `Supply Air Fan Operating Mode Schedule Name`
Args:
value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
supply_air_fan_operating_mode_schedule_name
|
marcelosalles/pyidf
| 19 |
python
|
@property
def supply_air_fan_operating_mode_schedule_name(self):
'field `Supply Air Fan Operating Mode Schedule Name`\n\n Args:\n value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set\n\n '
return self['Supply Air Fan Operating Mode Schedule Name']
|
@property
def supply_air_fan_operating_mode_schedule_name(self):
'field `Supply Air Fan Operating Mode Schedule Name`\n\n Args:\n value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set\n\n '
return self['Supply Air Fan Operating Mode Schedule Name']<|docstring|>field `Supply Air Fan Operating Mode Schedule Name`
Args:
value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set<|endoftext|>
|
f3a6dd9132f73d7963590301279b60da5d636dd5f69b40f1870b5a66c208de55
|
@supply_air_fan_operating_mode_schedule_name.setter
def supply_air_fan_operating_mode_schedule_name(self, value=None):
'Corresponds to IDD field `Supply Air Fan Operating Mode Schedule\n Name`'
self['Supply Air Fan Operating Mode Schedule Name'] = value
|
Corresponds to IDD field `Supply Air Fan Operating Mode Schedule
Name`
|
pyidf/zone_hvac_forced_air_units.py
|
supply_air_fan_operating_mode_schedule_name
|
marcelosalles/pyidf
| 19 |
python
|
@supply_air_fan_operating_mode_schedule_name.setter
def supply_air_fan_operating_mode_schedule_name(self, value=None):
'Corresponds to IDD field `Supply Air Fan Operating Mode Schedule\n Name`'
self['Supply Air Fan Operating Mode Schedule Name'] = value
|
@supply_air_fan_operating_mode_schedule_name.setter
def supply_air_fan_operating_mode_schedule_name(self, value=None):
'Corresponds to IDD field `Supply Air Fan Operating Mode Schedule\n Name`'
self['Supply Air Fan Operating Mode Schedule Name'] = value<|docstring|>Corresponds to IDD field `Supply Air Fan Operating Mode Schedule
Name`<|endoftext|>
|
81a8fa93fede114f8a4e7c459506b4e4fded15baa48f59d1c0c6e16e79345908
|
@property
def supply_air_fan_placement(self):
'field `Supply Air Fan Placement`\n\n | Select fan placement as either blow through or draw through.\n | Default value: BlowThrough\n\n Args:\n value (str): value for IDD Field `Supply Air Fan Placement`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `supply_air_fan_placement` or None if not set\n\n '
return self['Supply Air Fan Placement']
|
field `Supply Air Fan Placement`
| Select fan placement as either blow through or draw through.
| Default value: BlowThrough
Args:
value (str): value for IDD Field `Supply Air Fan Placement`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_placement` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
supply_air_fan_placement
|
marcelosalles/pyidf
| 19 |
python
|
@property
def supply_air_fan_placement(self):
'field `Supply Air Fan Placement`\n\n | Select fan placement as either blow through or draw through.\n | Default value: BlowThrough\n\n Args:\n value (str): value for IDD Field `Supply Air Fan Placement`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `supply_air_fan_placement` or None if not set\n\n '
return self['Supply Air Fan Placement']
|
@property
def supply_air_fan_placement(self):
'field `Supply Air Fan Placement`\n\n | Select fan placement as either blow through or draw through.\n | Default value: BlowThrough\n\n Args:\n value (str): value for IDD Field `Supply Air Fan Placement`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `supply_air_fan_placement` or None if not set\n\n '
return self['Supply Air Fan Placement']<|docstring|>field `Supply Air Fan Placement`
| Select fan placement as either blow through or draw through.
| Default value: BlowThrough
Args:
value (str): value for IDD Field `Supply Air Fan Placement`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_placement` or None if not set<|endoftext|>
|
b1c8c10112d5e3afd8849d69508de0ee5164a27bdd10659f0505a5901a9ab8e8
|
@supply_air_fan_placement.setter
def supply_air_fan_placement(self, value='BlowThrough'):
'Corresponds to IDD field `Supply Air Fan Placement`'
self['Supply Air Fan Placement'] = value
|
Corresponds to IDD field `Supply Air Fan Placement`
|
pyidf/zone_hvac_forced_air_units.py
|
supply_air_fan_placement
|
marcelosalles/pyidf
| 19 |
python
|
@supply_air_fan_placement.setter
def supply_air_fan_placement(self, value='BlowThrough'):
self['Supply Air Fan Placement'] = value
|
@supply_air_fan_placement.setter
def supply_air_fan_placement(self, value='BlowThrough'):
self['Supply Air Fan Placement'] = value<|docstring|>Corresponds to IDD field `Supply Air Fan Placement`<|endoftext|>
|
c9496355eec1319a99d21f8c313682e03917ffa67f28ec7be8d599e7943571a2
|
@property
def supply_air_fan_object_type(self):
'field `Supply Air Fan Object Type`\n\n | Supply Air Fan Object Type must be\n | Fan:OnOff or Fan:ConstantVolume\n | if AirConditioner:VariableRefrigerantFlow\n | is used to model VRF outdoor unit\n | Supply Air Fan Object Type must be Fan:VariableVolume if\n | AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl\n | is used to model VRF outdoor unit\n | Default value: Fan:ConstantVolume\n\n Args:\n value (str): value for IDD Field `Supply Air Fan Object Type`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `supply_air_fan_object_type` or None if not set\n\n '
return self['Supply Air Fan Object Type']
|
field `Supply Air Fan Object Type`
| Supply Air Fan Object Type must be
| Fan:OnOff or Fan:ConstantVolume
| if AirConditioner:VariableRefrigerantFlow
| is used to model VRF outdoor unit
| Supply Air Fan Object Type must be Fan:VariableVolume if
| AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl
| is used to model VRF outdoor unit
| Default value: Fan:ConstantVolume
Args:
value (str): value for IDD Field `Supply Air Fan Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_object_type` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
supply_air_fan_object_type
|
marcelosalles/pyidf
| 19 |
python
|
@property
def supply_air_fan_object_type(self):
'field `Supply Air Fan Object Type`\n\n | Supply Air Fan Object Type must be\n | Fan:OnOff or Fan:ConstantVolume\n | if AirConditioner:VariableRefrigerantFlow\n | is used to model VRF outdoor unit\n | Supply Air Fan Object Type must be Fan:VariableVolume if\n | AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl\n | is used to model VRF outdoor unit\n | Default value: Fan:ConstantVolume\n\n Args:\n value (str): value for IDD Field `Supply Air Fan Object Type`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `supply_air_fan_object_type` or None if not set\n\n '
return self['Supply Air Fan Object Type']
|
@property
def supply_air_fan_object_type(self):
'field `Supply Air Fan Object Type`\n\n | Supply Air Fan Object Type must be\n | Fan:OnOff or Fan:ConstantVolume\n | if AirConditioner:VariableRefrigerantFlow\n | is used to model VRF outdoor unit\n | Supply Air Fan Object Type must be Fan:VariableVolume if\n | AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl\n | is used to model VRF outdoor unit\n | Default value: Fan:ConstantVolume\n\n Args:\n value (str): value for IDD Field `Supply Air Fan Object Type`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `supply_air_fan_object_type` or None if not set\n\n '
return self['Supply Air Fan Object Type']<|docstring|>field `Supply Air Fan Object Type`
| Supply Air Fan Object Type must be
| Fan:OnOff or Fan:ConstantVolume
| if AirConditioner:VariableRefrigerantFlow
| is used to model VRF outdoor unit
| Supply Air Fan Object Type must be Fan:VariableVolume if
| AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl
| is used to model VRF outdoor unit
| Default value: Fan:ConstantVolume
Args:
value (str): value for IDD Field `Supply Air Fan Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_object_type` or None if not set<|endoftext|>
|
438577a82e989931a07ac67e5acc874ead5e8f117654ed4b8a7754cb6d306e59
|
@supply_air_fan_object_type.setter
def supply_air_fan_object_type(self, value='Fan:ConstantVolume'):
'Corresponds to IDD field `Supply Air Fan Object Type`'
self['Supply Air Fan Object Type'] = value
|
Corresponds to IDD field `Supply Air Fan Object Type`
|
pyidf/zone_hvac_forced_air_units.py
|
supply_air_fan_object_type
|
marcelosalles/pyidf
| 19 |
python
|
@supply_air_fan_object_type.setter
def supply_air_fan_object_type(self, value='Fan:ConstantVolume'):
self['Supply Air Fan Object Type'] = value
|
@supply_air_fan_object_type.setter
def supply_air_fan_object_type(self, value='Fan:ConstantVolume'):
self['Supply Air Fan Object Type'] = value<|docstring|>Corresponds to IDD field `Supply Air Fan Object Type`<|endoftext|>
|
60212763db12cb39ee6ecda4c89ee991d7b9cc9679ea33e6fd1417a0f2b27fc6
|
@property
def supply_air_fan_object_name(self):
'field `Supply Air Fan Object Name`\n\n Args:\n value (str): value for IDD Field `Supply Air Fan Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `supply_air_fan_object_name` or None if not set\n\n '
return self['Supply Air Fan Object Name']
|
field `Supply Air Fan Object Name`
Args:
value (str): value for IDD Field `Supply Air Fan Object Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_object_name` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
supply_air_fan_object_name
|
marcelosalles/pyidf
| 19 |
python
|
@property
def supply_air_fan_object_name(self):
'field `Supply Air Fan Object Name`\n\n Args:\n value (str): value for IDD Field `Supply Air Fan Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `supply_air_fan_object_name` or None if not set\n\n '
return self['Supply Air Fan Object Name']
|
@property
def supply_air_fan_object_name(self):
'field `Supply Air Fan Object Name`\n\n Args:\n value (str): value for IDD Field `Supply Air Fan Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `supply_air_fan_object_name` or None if not set\n\n '
return self['Supply Air Fan Object Name']<|docstring|>field `Supply Air Fan Object Name`
Args:
value (str): value for IDD Field `Supply Air Fan Object Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_object_name` or None if not set<|endoftext|>
|
f41e7d9426fa4789fe0a714de29bc1ef4e3ea169b7946580f5239144cd79d6e5
|
@supply_air_fan_object_name.setter
def supply_air_fan_object_name(self, value=None):
'Corresponds to IDD field `Supply Air Fan Object Name`'
self['Supply Air Fan Object Name'] = value
|
Corresponds to IDD field `Supply Air Fan Object Name`
|
pyidf/zone_hvac_forced_air_units.py
|
supply_air_fan_object_name
|
marcelosalles/pyidf
| 19 |
python
|
@supply_air_fan_object_name.setter
def supply_air_fan_object_name(self, value=None):
self['Supply Air Fan Object Name'] = value
|
@supply_air_fan_object_name.setter
def supply_air_fan_object_name(self, value=None):
self['Supply Air Fan Object Name'] = value<|docstring|>Corresponds to IDD field `Supply Air Fan Object Name`<|endoftext|>
|
6fb173a039dadd9654e021d68f7458ef2bda21d38c46ab3880a0a89dce2028d0
|
@property
def outside_air_mixer_object_type(self):
'field `Outside Air Mixer Object Type`\n\n | If this field is blank, and outside air mixer is not used.\n\n Args:\n value (str): value for IDD Field `Outside Air Mixer Object Type`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `outside_air_mixer_object_type` or None if not set\n\n '
return self['Outside Air Mixer Object Type']
|
field `Outside Air Mixer Object Type`
| If this field is blank, and outside air mixer is not used.
Args:
value (str): value for IDD Field `Outside Air Mixer Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `outside_air_mixer_object_type` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
outside_air_mixer_object_type
|
marcelosalles/pyidf
| 19 |
python
|
@property
def outside_air_mixer_object_type(self):
'field `Outside Air Mixer Object Type`\n\n | If this field is blank, and outside air mixer is not used.\n\n Args:\n value (str): value for IDD Field `Outside Air Mixer Object Type`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `outside_air_mixer_object_type` or None if not set\n\n '
return self['Outside Air Mixer Object Type']
|
@property
def outside_air_mixer_object_type(self):
'field `Outside Air Mixer Object Type`\n\n | If this field is blank, and outside air mixer is not used.\n\n Args:\n value (str): value for IDD Field `Outside Air Mixer Object Type`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `outside_air_mixer_object_type` or None if not set\n\n '
return self['Outside Air Mixer Object Type']<|docstring|>field `Outside Air Mixer Object Type`
| If this field is blank, and outside air mixer is not used.
Args:
value (str): value for IDD Field `Outside Air Mixer Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `outside_air_mixer_object_type` or None if not set<|endoftext|>
|
db96e586633c2ef9ffd36954034c124aa785957ed5526a22bac7a8432a6ca2ff
|
@outside_air_mixer_object_type.setter
def outside_air_mixer_object_type(self, value=None):
'Corresponds to IDD field `Outside Air Mixer Object Type`'
self['Outside Air Mixer Object Type'] = value
|
Corresponds to IDD field `Outside Air Mixer Object Type`
|
pyidf/zone_hvac_forced_air_units.py
|
outside_air_mixer_object_type
|
marcelosalles/pyidf
| 19 |
python
|
@outside_air_mixer_object_type.setter
def outside_air_mixer_object_type(self, value=None):
self['Outside Air Mixer Object Type'] = value
|
@outside_air_mixer_object_type.setter
def outside_air_mixer_object_type(self, value=None):
self['Outside Air Mixer Object Type'] = value<|docstring|>Corresponds to IDD field `Outside Air Mixer Object Type`<|endoftext|>
|
ac93667e1be023f2e56085eb4ee451da3dc093cbfea44c8c7050ad9ffaa7b417
|
@property
def outside_air_mixer_object_name(self):
'field `Outside Air Mixer Object Name`\n\n | If this field is blank, and outside air mixer is not used.\n\n Args:\n value (str): value for IDD Field `Outside Air Mixer Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `outside_air_mixer_object_name` or None if not set\n\n '
return self['Outside Air Mixer Object Name']
|
field `Outside Air Mixer Object Name`
| If this field is blank, and outside air mixer is not used.
Args:
value (str): value for IDD Field `Outside Air Mixer Object Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `outside_air_mixer_object_name` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
outside_air_mixer_object_name
|
marcelosalles/pyidf
| 19 |
python
|
@property
def outside_air_mixer_object_name(self):
'field `Outside Air Mixer Object Name`\n\n | If this field is blank, and outside air mixer is not used.\n\n Args:\n value (str): value for IDD Field `Outside Air Mixer Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `outside_air_mixer_object_name` or None if not set\n\n '
return self['Outside Air Mixer Object Name']
|
@property
def outside_air_mixer_object_name(self):
'field `Outside Air Mixer Object Name`\n\n | If this field is blank, and outside air mixer is not used.\n\n Args:\n value (str): value for IDD Field `Outside Air Mixer Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `outside_air_mixer_object_name` or None if not set\n\n '
return self['Outside Air Mixer Object Name']<|docstring|>field `Outside Air Mixer Object Name`
| If this field is blank, and outside air mixer is not used.
Args:
value (str): value for IDD Field `Outside Air Mixer Object Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `outside_air_mixer_object_name` or None if not set<|endoftext|>
|
b27f45b5e931971e9a2dc5841457cacb64ab467ffeec9a872416693e100ddbf2
|
@outside_air_mixer_object_name.setter
def outside_air_mixer_object_name(self, value=None):
'Corresponds to IDD field `Outside Air Mixer Object Name`'
self['Outside Air Mixer Object Name'] = value
|
Corresponds to IDD field `Outside Air Mixer Object Name`
|
pyidf/zone_hvac_forced_air_units.py
|
outside_air_mixer_object_name
|
marcelosalles/pyidf
| 19 |
python
|
@outside_air_mixer_object_name.setter
def outside_air_mixer_object_name(self, value=None):
self['Outside Air Mixer Object Name'] = value
|
@outside_air_mixer_object_name.setter
def outside_air_mixer_object_name(self, value=None):
self['Outside Air Mixer Object Name'] = value<|docstring|>Corresponds to IDD field `Outside Air Mixer Object Name`<|endoftext|>
|
4e977f9c9be33bfc42c61380f83fb76444fb04b87e86ffbe45f2ad23b8e2acab
|
@property
def cooling_coil_object_type(self):
'field `Cooling Coil Object Type`\n\n | Cooling Coil Type must be Coil:Cooling:DX:VariableRefrigerantFlow\n | if AirConditioner:VariableRefrigerantFlow is used\n | to model VRF outdoor unit\n | Cooling Coil Type must be\n | Coil:Cooling:DX:VariableRefrigerantFlow:FluidTemperatureControl\n | if AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl\n | is used to model VRF outdoor unit\n | This field may be left blank if heating-only mode is used\n\n Args:\n value (str): value for IDD Field `Cooling Coil Object Type`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `cooling_coil_object_type` or None if not set\n\n '
return self['Cooling Coil Object Type']
|
field `Cooling Coil Object Type`
| Cooling Coil Type must be Coil:Cooling:DX:VariableRefrigerantFlow
| if AirConditioner:VariableRefrigerantFlow is used
| to model VRF outdoor unit
| Cooling Coil Type must be
| Coil:Cooling:DX:VariableRefrigerantFlow:FluidTemperatureControl
| if AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl
| is used to model VRF outdoor unit
| This field may be left blank if heating-only mode is used
Args:
value (str): value for IDD Field `Cooling Coil Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooling_coil_object_type` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
cooling_coil_object_type
|
marcelosalles/pyidf
| 19 |
python
|
@property
def cooling_coil_object_type(self):
'field `Cooling Coil Object Type`\n\n | Cooling Coil Type must be Coil:Cooling:DX:VariableRefrigerantFlow\n | if AirConditioner:VariableRefrigerantFlow is used\n | to model VRF outdoor unit\n | Cooling Coil Type must be\n | Coil:Cooling:DX:VariableRefrigerantFlow:FluidTemperatureControl\n | if AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl\n | is used to model VRF outdoor unit\n | This field may be left blank if heating-only mode is used\n\n Args:\n value (str): value for IDD Field `Cooling Coil Object Type`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `cooling_coil_object_type` or None if not set\n\n '
return self['Cooling Coil Object Type']
|
@property
def cooling_coil_object_type(self):
'field `Cooling Coil Object Type`\n\n | Cooling Coil Type must be Coil:Cooling:DX:VariableRefrigerantFlow\n | if AirConditioner:VariableRefrigerantFlow is used\n | to model VRF outdoor unit\n | Cooling Coil Type must be\n | Coil:Cooling:DX:VariableRefrigerantFlow:FluidTemperatureControl\n | if AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl\n | is used to model VRF outdoor unit\n | This field may be left blank if heating-only mode is used\n\n Args:\n value (str): value for IDD Field `Cooling Coil Object Type`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `cooling_coil_object_type` or None if not set\n\n '
return self['Cooling Coil Object Type']<|docstring|>field `Cooling Coil Object Type`
| Cooling Coil Type must be Coil:Cooling:DX:VariableRefrigerantFlow
| if AirConditioner:VariableRefrigerantFlow is used
| to model VRF outdoor unit
| Cooling Coil Type must be
| Coil:Cooling:DX:VariableRefrigerantFlow:FluidTemperatureControl
| if AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl
| is used to model VRF outdoor unit
| This field may be left blank if heating-only mode is used
Args:
value (str): value for IDD Field `Cooling Coil Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooling_coil_object_type` or None if not set<|endoftext|>
|
e3ac5efa41b9efef5448e40e65ec3be3037a428925356f91b53877e2292c5305
|
@cooling_coil_object_type.setter
def cooling_coil_object_type(self, value=None):
'Corresponds to IDD field `Cooling Coil Object Type`'
self['Cooling Coil Object Type'] = value
|
Corresponds to IDD field `Cooling Coil Object Type`
|
pyidf/zone_hvac_forced_air_units.py
|
cooling_coil_object_type
|
marcelosalles/pyidf
| 19 |
python
|
@cooling_coil_object_type.setter
def cooling_coil_object_type(self, value=None):
self['Cooling Coil Object Type'] = value
|
@cooling_coil_object_type.setter
def cooling_coil_object_type(self, value=None):
self['Cooling Coil Object Type'] = value<|docstring|>Corresponds to IDD field `Cooling Coil Object Type`<|endoftext|>
|
6ad00b0f3efa61d75b3343ad3fe22c1ae0de26cd56a5cc7b13468c4fabb1923e
|
@property
def cooling_coil_object_name(self):
'field `Cooling Coil Object Name`\n\n | Cooling Coil Type must be Coil:Cooling:DX:VariableRefrigerantFlow\n | This field may be left blank if heating-only mode is used\n\n Args:\n value (str): value for IDD Field `Cooling Coil Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `cooling_coil_object_name` or None if not set\n\n '
return self['Cooling Coil Object Name']
|
field `Cooling Coil Object Name`
| Cooling Coil Type must be Coil:Cooling:DX:VariableRefrigerantFlow
| This field may be left blank if heating-only mode is used
Args:
value (str): value for IDD Field `Cooling Coil Object Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooling_coil_object_name` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
cooling_coil_object_name
|
marcelosalles/pyidf
| 19 |
python
|
@property
def cooling_coil_object_name(self):
'field `Cooling Coil Object Name`\n\n | Cooling Coil Type must be Coil:Cooling:DX:VariableRefrigerantFlow\n | This field may be left blank if heating-only mode is used\n\n Args:\n value (str): value for IDD Field `Cooling Coil Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `cooling_coil_object_name` or None if not set\n\n '
return self['Cooling Coil Object Name']
|
@property
def cooling_coil_object_name(self):
'field `Cooling Coil Object Name`\n\n | Cooling Coil Type must be Coil:Cooling:DX:VariableRefrigerantFlow\n | This field may be left blank if heating-only mode is used\n\n Args:\n value (str): value for IDD Field `Cooling Coil Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `cooling_coil_object_name` or None if not set\n\n '
return self['Cooling Coil Object Name']<|docstring|>field `Cooling Coil Object Name`
| Cooling Coil Type must be Coil:Cooling:DX:VariableRefrigerantFlow
| This field may be left blank if heating-only mode is used
Args:
value (str): value for IDD Field `Cooling Coil Object Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooling_coil_object_name` or None if not set<|endoftext|>
|
37c39de94597420878cba10356f0afa0573055181678c965ada18dc229add645
|
@cooling_coil_object_name.setter
def cooling_coil_object_name(self, value=None):
'Corresponds to IDD field `Cooling Coil Object Name`'
self['Cooling Coil Object Name'] = value
|
Corresponds to IDD field `Cooling Coil Object Name`
|
pyidf/zone_hvac_forced_air_units.py
|
cooling_coil_object_name
|
marcelosalles/pyidf
| 19 |
python
|
@cooling_coil_object_name.setter
def cooling_coil_object_name(self, value=None):
self['Cooling Coil Object Name'] = value
|
@cooling_coil_object_name.setter
def cooling_coil_object_name(self, value=None):
self['Cooling Coil Object Name'] = value<|docstring|>Corresponds to IDD field `Cooling Coil Object Name`<|endoftext|>
|
b6a315fd929664962ca765dee71cebb5f41b6ebc7e4f53d28255a3c8df138e1f
|
@property
def heating_coil_object_type(self):
'field `Heating Coil Object Type`\n\n | Heating Coil Type must be Coil:Heating:DX:VariableRefrigerantFlow\n | if AirConditioner:VariableRefrigerantFlow is used\n | to model VRF outdoor unit\n | Heating Coil Type must be\n | Coil:Heating:DX:VariableRefrigerantFlow:FluidTemperatureControl\n | if AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl\n | is used to model VRF outdoor unit\n | This field may be left blank if cooling-only mode is used\n\n Args:\n value (str): value for IDD Field `Heating Coil Object Type`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `heating_coil_object_type` or None if not set\n\n '
return self['Heating Coil Object Type']
|
field `Heating Coil Object Type`
| Heating Coil Type must be Coil:Heating:DX:VariableRefrigerantFlow
| if AirConditioner:VariableRefrigerantFlow is used
| to model VRF outdoor unit
| Heating Coil Type must be
| Coil:Heating:DX:VariableRefrigerantFlow:FluidTemperatureControl
| if AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl
| is used to model VRF outdoor unit
| This field may be left blank if cooling-only mode is used
Args:
value (str): value for IDD Field `Heating Coil Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_coil_object_type` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
heating_coil_object_type
|
marcelosalles/pyidf
| 19 |
python
|
@property
def heating_coil_object_type(self):
'field `Heating Coil Object Type`\n\n | Heating Coil Type must be Coil:Heating:DX:VariableRefrigerantFlow\n | if AirConditioner:VariableRefrigerantFlow is used\n | to model VRF outdoor unit\n | Heating Coil Type must be\n | Coil:Heating:DX:VariableRefrigerantFlow:FluidTemperatureControl\n | if AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl\n | is used to model VRF outdoor unit\n | This field may be left blank if cooling-only mode is used\n\n Args:\n value (str): value for IDD Field `Heating Coil Object Type`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `heating_coil_object_type` or None if not set\n\n '
return self['Heating Coil Object Type']
|
@property
def heating_coil_object_type(self):
'field `Heating Coil Object Type`\n\n | Heating Coil Type must be Coil:Heating:DX:VariableRefrigerantFlow\n | if AirConditioner:VariableRefrigerantFlow is used\n | to model VRF outdoor unit\n | Heating Coil Type must be\n | Coil:Heating:DX:VariableRefrigerantFlow:FluidTemperatureControl\n | if AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl\n | is used to model VRF outdoor unit\n | This field may be left blank if cooling-only mode is used\n\n Args:\n value (str): value for IDD Field `Heating Coil Object Type`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `heating_coil_object_type` or None if not set\n\n '
return self['Heating Coil Object Type']<|docstring|>field `Heating Coil Object Type`
| Heating Coil Type must be Coil:Heating:DX:VariableRefrigerantFlow
| if AirConditioner:VariableRefrigerantFlow is used
| to model VRF outdoor unit
| Heating Coil Type must be
| Coil:Heating:DX:VariableRefrigerantFlow:FluidTemperatureControl
| if AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl
| is used to model VRF outdoor unit
| This field may be left blank if cooling-only mode is used
Args:
value (str): value for IDD Field `Heating Coil Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_coil_object_type` or None if not set<|endoftext|>
|
1c42f1a45c6d1dc3d7709b81a0c8ec0628351fbea7c349d7dd5740bdcbae1c17
|
@heating_coil_object_type.setter
def heating_coil_object_type(self, value=None):
'Corresponds to IDD field `Heating Coil Object Type`'
self['Heating Coil Object Type'] = value
|
Corresponds to IDD field `Heating Coil Object Type`
|
pyidf/zone_hvac_forced_air_units.py
|
heating_coil_object_type
|
marcelosalles/pyidf
| 19 |
python
|
@heating_coil_object_type.setter
def heating_coil_object_type(self, value=None):
self['Heating Coil Object Type'] = value
|
@heating_coil_object_type.setter
def heating_coil_object_type(self, value=None):
self['Heating Coil Object Type'] = value<|docstring|>Corresponds to IDD field `Heating Coil Object Type`<|endoftext|>
|
416dbe57ebf84be0431ee781a99ddadaea17f9642334491d898e4b798df8c916
|
@property
def heating_coil_object_name(self):
'field `Heating Coil Object Name`\n\n | Heating Coil Type must be Coil:Heating:DX:VariableRefrigerantFlow\n | This field may be left blank if cooling-only mode is used\n\n Args:\n value (str): value for IDD Field `Heating Coil Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `heating_coil_object_name` or None if not set\n\n '
return self['Heating Coil Object Name']
|
field `Heating Coil Object Name`
| Heating Coil Type must be Coil:Heating:DX:VariableRefrigerantFlow
| This field may be left blank if cooling-only mode is used
Args:
value (str): value for IDD Field `Heating Coil Object Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_coil_object_name` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
heating_coil_object_name
|
marcelosalles/pyidf
| 19 |
python
|
@property
def heating_coil_object_name(self):
'field `Heating Coil Object Name`\n\n | Heating Coil Type must be Coil:Heating:DX:VariableRefrigerantFlow\n | This field may be left blank if cooling-only mode is used\n\n Args:\n value (str): value for IDD Field `Heating Coil Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `heating_coil_object_name` or None if not set\n\n '
return self['Heating Coil Object Name']
|
@property
def heating_coil_object_name(self):
'field `Heating Coil Object Name`\n\n | Heating Coil Type must be Coil:Heating:DX:VariableRefrigerantFlow\n | This field may be left blank if cooling-only mode is used\n\n Args:\n value (str): value for IDD Field `Heating Coil Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `heating_coil_object_name` or None if not set\n\n '
return self['Heating Coil Object Name']<|docstring|>field `Heating Coil Object Name`
| Heating Coil Type must be Coil:Heating:DX:VariableRefrigerantFlow
| This field may be left blank if cooling-only mode is used
Args:
value (str): value for IDD Field `Heating Coil Object Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_coil_object_name` or None if not set<|endoftext|>
|
d6688fafbc67427bd86e5213a0853ec9c9005754f40c5c30c97e8ec9ab24589e
|
@heating_coil_object_name.setter
def heating_coil_object_name(self, value=None):
'Corresponds to IDD field `Heating Coil Object Name`'
self['Heating Coil Object Name'] = value
|
Corresponds to IDD field `Heating Coil Object Name`
|
pyidf/zone_hvac_forced_air_units.py
|
heating_coil_object_name
|
marcelosalles/pyidf
| 19 |
python
|
@heating_coil_object_name.setter
def heating_coil_object_name(self, value=None):
self['Heating Coil Object Name'] = value
|
@heating_coil_object_name.setter
def heating_coil_object_name(self, value=None):
self['Heating Coil Object Name'] = value<|docstring|>Corresponds to IDD field `Heating Coil Object Name`<|endoftext|>
|
6ff08f1cd529a635739de017107f18a2a61dc895aceade9d2fab37f37442f7c2
|
@property
def zone_terminal_unit_on_parasitic_electric_energy_use(self):
'field `Zone Terminal Unit On Parasitic Electric Energy Use`\n\n | Units: W\n\n Args:\n value (float): value for IDD Field `Zone Terminal Unit On Parasitic Electric Energy Use`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float: the value of `zone_terminal_unit_on_parasitic_electric_energy_use` or None if not set\n\n '
return self['Zone Terminal Unit On Parasitic Electric Energy Use']
|
field `Zone Terminal Unit On Parasitic Electric Energy Use`
| Units: W
Args:
value (float): value for IDD Field `Zone Terminal Unit On Parasitic Electric Energy Use`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `zone_terminal_unit_on_parasitic_electric_energy_use` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
zone_terminal_unit_on_parasitic_electric_energy_use
|
marcelosalles/pyidf
| 19 |
python
|
@property
def zone_terminal_unit_on_parasitic_electric_energy_use(self):
'field `Zone Terminal Unit On Parasitic Electric Energy Use`\n\n | Units: W\n\n Args:\n value (float): value for IDD Field `Zone Terminal Unit On Parasitic Electric Energy Use`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float: the value of `zone_terminal_unit_on_parasitic_electric_energy_use` or None if not set\n\n '
return self['Zone Terminal Unit On Parasitic Electric Energy Use']
|
@property
def zone_terminal_unit_on_parasitic_electric_energy_use(self):
'field `Zone Terminal Unit On Parasitic Electric Energy Use`\n\n | Units: W\n\n Args:\n value (float): value for IDD Field `Zone Terminal Unit On Parasitic Electric Energy Use`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float: the value of `zone_terminal_unit_on_parasitic_electric_energy_use` or None if not set\n\n '
return self['Zone Terminal Unit On Parasitic Electric Energy Use']<|docstring|>field `Zone Terminal Unit On Parasitic Electric Energy Use`
| Units: W
Args:
value (float): value for IDD Field `Zone Terminal Unit On Parasitic Electric Energy Use`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `zone_terminal_unit_on_parasitic_electric_energy_use` or None if not set<|endoftext|>
|
c460c516159f22b959f975f3842924b0c9d186a059e332353f3827f58f45fdff
|
@zone_terminal_unit_on_parasitic_electric_energy_use.setter
def zone_terminal_unit_on_parasitic_electric_energy_use(self, value=None):
'Corresponds to IDD field `Zone Terminal Unit On Parasitic Electric\n Energy Use`'
self['Zone Terminal Unit On Parasitic Electric Energy Use'] = value
|
Corresponds to IDD field `Zone Terminal Unit On Parasitic Electric
Energy Use`
|
pyidf/zone_hvac_forced_air_units.py
|
zone_terminal_unit_on_parasitic_electric_energy_use
|
marcelosalles/pyidf
| 19 |
python
|
@zone_terminal_unit_on_parasitic_electric_energy_use.setter
def zone_terminal_unit_on_parasitic_electric_energy_use(self, value=None):
'Corresponds to IDD field `Zone Terminal Unit On Parasitic Electric\n Energy Use`'
self['Zone Terminal Unit On Parasitic Electric Energy Use'] = value
|
@zone_terminal_unit_on_parasitic_electric_energy_use.setter
def zone_terminal_unit_on_parasitic_electric_energy_use(self, value=None):
'Corresponds to IDD field `Zone Terminal Unit On Parasitic Electric\n Energy Use`'
self['Zone Terminal Unit On Parasitic Electric Energy Use'] = value<|docstring|>Corresponds to IDD field `Zone Terminal Unit On Parasitic Electric
Energy Use`<|endoftext|>
|
c3a0a7122229149784ca366856d4fd0675ad2bbf5c09a5d949dc9881562245a0
|
@property
def zone_terminal_unit_off_parasitic_electric_energy_use(self):
'field `Zone Terminal Unit Off Parasitic Electric Energy Use`\n\n | Units: W\n\n Args:\n value (float): value for IDD Field `Zone Terminal Unit Off Parasitic Electric Energy Use`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float: the value of `zone_terminal_unit_off_parasitic_electric_energy_use` or None if not set\n\n '
return self['Zone Terminal Unit Off Parasitic Electric Energy Use']
|
field `Zone Terminal Unit Off Parasitic Electric Energy Use`
| Units: W
Args:
value (float): value for IDD Field `Zone Terminal Unit Off Parasitic Electric Energy Use`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `zone_terminal_unit_off_parasitic_electric_energy_use` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
zone_terminal_unit_off_parasitic_electric_energy_use
|
marcelosalles/pyidf
| 19 |
python
|
@property
def zone_terminal_unit_off_parasitic_electric_energy_use(self):
'field `Zone Terminal Unit Off Parasitic Electric Energy Use`\n\n | Units: W\n\n Args:\n value (float): value for IDD Field `Zone Terminal Unit Off Parasitic Electric Energy Use`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float: the value of `zone_terminal_unit_off_parasitic_electric_energy_use` or None if not set\n\n '
return self['Zone Terminal Unit Off Parasitic Electric Energy Use']
|
@property
def zone_terminal_unit_off_parasitic_electric_energy_use(self):
'field `Zone Terminal Unit Off Parasitic Electric Energy Use`\n\n | Units: W\n\n Args:\n value (float): value for IDD Field `Zone Terminal Unit Off Parasitic Electric Energy Use`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float: the value of `zone_terminal_unit_off_parasitic_electric_energy_use` or None if not set\n\n '
return self['Zone Terminal Unit Off Parasitic Electric Energy Use']<|docstring|>field `Zone Terminal Unit Off Parasitic Electric Energy Use`
| Units: W
Args:
value (float): value for IDD Field `Zone Terminal Unit Off Parasitic Electric Energy Use`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `zone_terminal_unit_off_parasitic_electric_energy_use` or None if not set<|endoftext|>
|
1c5e50444fcd743f654f7f18b84f123ca96c17703398a0066fd2fccd997c5cbb
|
@zone_terminal_unit_off_parasitic_electric_energy_use.setter
def zone_terminal_unit_off_parasitic_electric_energy_use(self, value=None):
'Corresponds to IDD field `Zone Terminal Unit Off Parasitic Electric\n Energy Use`'
self['Zone Terminal Unit Off Parasitic Electric Energy Use'] = value
|
Corresponds to IDD field `Zone Terminal Unit Off Parasitic Electric
Energy Use`
|
pyidf/zone_hvac_forced_air_units.py
|
zone_terminal_unit_off_parasitic_electric_energy_use
|
marcelosalles/pyidf
| 19 |
python
|
@zone_terminal_unit_off_parasitic_electric_energy_use.setter
def zone_terminal_unit_off_parasitic_electric_energy_use(self, value=None):
'Corresponds to IDD field `Zone Terminal Unit Off Parasitic Electric\n Energy Use`'
self['Zone Terminal Unit Off Parasitic Electric Energy Use'] = value
|
@zone_terminal_unit_off_parasitic_electric_energy_use.setter
def zone_terminal_unit_off_parasitic_electric_energy_use(self, value=None):
'Corresponds to IDD field `Zone Terminal Unit Off Parasitic Electric\n Energy Use`'
self['Zone Terminal Unit Off Parasitic Electric Energy Use'] = value<|docstring|>Corresponds to IDD field `Zone Terminal Unit Off Parasitic Electric
Energy Use`<|endoftext|>
|
fd2492a3923140c0d03bfad3186bc894bfd7ae6d383e7f6f8b65fd9cb8d7fe2d
|
@property
def rated_heating_capacity_sizing_ratio(self):
"field `Rated Heating Capacity Sizing Ratio`\n\n | If this terminal unit's heating coil is autosized, the heating capacity is sized\n | to be equal to the cooling capacity multiplied by this sizing ratio.\n | This input applies to the terminal unit heating coil and overrides the sizing\n | ratio entered in the AirConditioner:VariableRefrigerantFlow object.\n | Units: W/W\n | Default value: 1.0\n | value >= 1.0\n\n Args:\n value (float): value for IDD Field `Rated Heating Capacity Sizing Ratio`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float: the value of `rated_heating_capacity_sizing_ratio` or None if not set\n\n "
return self['Rated Heating Capacity Sizing Ratio']
|
field `Rated Heating Capacity Sizing Ratio`
| If this terminal unit's heating coil is autosized, the heating capacity is sized
| to be equal to the cooling capacity multiplied by this sizing ratio.
| This input applies to the terminal unit heating coil and overrides the sizing
| ratio entered in the AirConditioner:VariableRefrigerantFlow object.
| Units: W/W
| Default value: 1.0
| value >= 1.0
Args:
value (float): value for IDD Field `Rated Heating Capacity Sizing Ratio`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `rated_heating_capacity_sizing_ratio` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
rated_heating_capacity_sizing_ratio
|
marcelosalles/pyidf
| 19 |
python
|
@property
def rated_heating_capacity_sizing_ratio(self):
"field `Rated Heating Capacity Sizing Ratio`\n\n | If this terminal unit's heating coil is autosized, the heating capacity is sized\n | to be equal to the cooling capacity multiplied by this sizing ratio.\n | This input applies to the terminal unit heating coil and overrides the sizing\n | ratio entered in the AirConditioner:VariableRefrigerantFlow object.\n | Units: W/W\n | Default value: 1.0\n | value >= 1.0\n\n Args:\n value (float): value for IDD Field `Rated Heating Capacity Sizing Ratio`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float: the value of `rated_heating_capacity_sizing_ratio` or None if not set\n\n "
return self['Rated Heating Capacity Sizing Ratio']
|
@property
def rated_heating_capacity_sizing_ratio(self):
"field `Rated Heating Capacity Sizing Ratio`\n\n | If this terminal unit's heating coil is autosized, the heating capacity is sized\n | to be equal to the cooling capacity multiplied by this sizing ratio.\n | This input applies to the terminal unit heating coil and overrides the sizing\n | ratio entered in the AirConditioner:VariableRefrigerantFlow object.\n | Units: W/W\n | Default value: 1.0\n | value >= 1.0\n\n Args:\n value (float): value for IDD Field `Rated Heating Capacity Sizing Ratio`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n float: the value of `rated_heating_capacity_sizing_ratio` or None if not set\n\n "
return self['Rated Heating Capacity Sizing Ratio']<|docstring|>field `Rated Heating Capacity Sizing Ratio`
| If this terminal unit's heating coil is autosized, the heating capacity is sized
| to be equal to the cooling capacity multiplied by this sizing ratio.
| This input applies to the terminal unit heating coil and overrides the sizing
| ratio entered in the AirConditioner:VariableRefrigerantFlow object.
| Units: W/W
| Default value: 1.0
| value >= 1.0
Args:
value (float): value for IDD Field `Rated Heating Capacity Sizing Ratio`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `rated_heating_capacity_sizing_ratio` or None if not set<|endoftext|>
|
4da0110321f808f125067d4d56f8f98538bd18a713951655c64558db4fb0b149
|
@rated_heating_capacity_sizing_ratio.setter
def rated_heating_capacity_sizing_ratio(self, value=1.0):
'Corresponds to IDD field `Rated Heating Capacity Sizing Ratio`'
self['Rated Heating Capacity Sizing Ratio'] = value
|
Corresponds to IDD field `Rated Heating Capacity Sizing Ratio`
|
pyidf/zone_hvac_forced_air_units.py
|
rated_heating_capacity_sizing_ratio
|
marcelosalles/pyidf
| 19 |
python
|
@rated_heating_capacity_sizing_ratio.setter
def rated_heating_capacity_sizing_ratio(self, value=1.0):
self['Rated Heating Capacity Sizing Ratio'] = value
|
@rated_heating_capacity_sizing_ratio.setter
def rated_heating_capacity_sizing_ratio(self, value=1.0):
self['Rated Heating Capacity Sizing Ratio'] = value<|docstring|>Corresponds to IDD field `Rated Heating Capacity Sizing Ratio`<|endoftext|>
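A minimal usage sketch for the property/setter pairs above. The enclosing class name is assumed from pyidf's naming convention for the ZoneHVAC:TerminalUnit:VariableRefrigerantFlow object; check pyidf/zone_hvac_forced_air_units.py for the exact name.
# Sketch only: the class name and field values are illustrative assumptions.
from pyidf.zone_hvac_forced_air_units import ZoneHvacTerminalUnitVariableRefrigerantFlow

unit = ZoneHvacTerminalUnitVariableRefrigerantFlow()
unit.heating_coil_object_name = 'VRF Heating Coil 1'               # a Coil:Heating:DX:VariableRefrigerantFlow object
unit.zone_terminal_unit_on_parasitic_electric_energy_use = 30.0    # W
unit.zone_terminal_unit_off_parasitic_electric_energy_use = 20.0   # W
unit.rated_heating_capacity_sizing_ratio = 1.2                     # W/W, must be >= 1.0 (default 1.0)
print(unit.rated_heating_capacity_sizing_ratio)                    # -> 1.2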
|
34a6536179864bd69e16dfb58123b60585d7e47147be939cb77f6d2a7aaf5089
|
@property
def availability_manager_list_name(self):
'field `Availability Manager List Name`\n\n | Enter the name of an AvailabilityManagerAssignmentList object.\n\n Args:\n value (str): value for IDD Field `Availability Manager List Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `availability_manager_list_name` or None if not set\n\n '
return self['Availability Manager List Name']
|
field `Availability Manager List Name`
| Enter the name of an AvailabilityManagerAssignmentList object.
Args:
value (str): value for IDD Field `Availability Manager List Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `availability_manager_list_name` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
availability_manager_list_name
|
marcelosalles/pyidf
| 19 |
python
|
@property
def availability_manager_list_name(self):
'field `Availability Manager List Name`\n\n | Enter the name of an AvailabilityManagerAssignmentList object.\n\n Args:\n value (str): value for IDD Field `Availability Manager List Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `availability_manager_list_name` or None if not set\n\n '
return self['Availability Manager List Name']
|
@property
def availability_manager_list_name(self):
'field `Availability Manager List Name`\n\n | Enter the name of an AvailabilityManagerAssignmentList object.\n\n Args:\n value (str): value for IDD Field `Availability Manager List Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `availability_manager_list_name` or None if not set\n\n '
return self['Availability Manager List Name']<|docstring|>field `Availability Manager List Name`
| Enter the name of an AvailabilityManagerAssignmentList object.
Args:
value (str): value for IDD Field `Availability Manager List Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `availability_manager_list_name` or None if not set<|endoftext|>
|
bc2db9236470a5671addc2a0b07d5fb679b2752715f4f954b1795de5078d1081
|
@availability_manager_list_name.setter
def availability_manager_list_name(self, value=None):
'Corresponds to IDD field `Availability Manager List Name`'
self['Availability Manager List Name'] = value
|
Corresponds to IDD field `Availability Manager List Name`
|
pyidf/zone_hvac_forced_air_units.py
|
availability_manager_list_name
|
marcelosalles/pyidf
| 19 |
python
|
@availability_manager_list_name.setter
def availability_manager_list_name(self, value=None):
self['Availability Manager List Name'] = value
|
@availability_manager_list_name.setter
def availability_manager_list_name(self, value=None):
self['Availability Manager List Name'] = value<|docstring|>Corresponds to IDD field `Availability Manager List Name`<|endoftext|>
|
e22dc2ca7581756faf160f37f87e17946e6823b64cba016312c2a5a35936fb33
|
@property
def design_specification_zonehvac_sizing_object_name(self):
'field `Design Specification ZoneHVAC Sizing Object Name`\n\n | Enter the name of a DesignSpecificationZoneHVACSizing object.\n\n Args:\n value (str): value for IDD Field `Design Specification ZoneHVAC Sizing Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `design_specification_zonehvac_sizing_object_name` or None if not set\n\n '
return self['Design Specification ZoneHVAC Sizing Object Name']
|
field `Design Specification ZoneHVAC Sizing Object Name`
| Enter the name of a DesignSpecificationZoneHVACSizing object.
Args:
value (str): value for IDD Field `Design Specification ZoneHVAC Sizing Object Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `design_specification_zonehvac_sizing_object_name` or None if not set
|
pyidf/zone_hvac_forced_air_units.py
|
design_specification_zonehvac_sizing_object_name
|
marcelosalles/pyidf
| 19 |
python
|
@property
def design_specification_zonehvac_sizing_object_name(self):
'field `Design Specification ZoneHVAC Sizing Object Name`\n\n | Enter the name of a DesignSpecificationZoneHVACSizing object.\n\n Args:\n value (str): value for IDD Field `Design Specification ZoneHVAC Sizing Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `design_specification_zonehvac_sizing_object_name` or None if not set\n\n '
return self['Design Specification ZoneHVAC Sizing Object Name']
|
@property
def design_specification_zonehvac_sizing_object_name(self):
'field `Design Specification ZoneHVAC Sizing Object Name`\n\n | Enter the name of a DesignSpecificationZoneHVACSizing object.\n\n Args:\n value (str): value for IDD Field `Design Specification ZoneHVAC Sizing Object Name`\n\n Raises:\n ValueError: if `value` is not a valid value\n\n Returns:\n str: the value of `design_specification_zonehvac_sizing_object_name` or None if not set\n\n '
return self['Design Specification ZoneHVAC Sizing Object Name']<|docstring|>field `Design Specification ZoneHVAC Sizing Object Name`
| Enter the name of a DesignSpecificationZoneHVACSizing object.
Args:
value (str): value for IDD Field `Design Specification ZoneHVAC Sizing Object Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `design_specification_zonehvac_sizing_object_name` or None if not set<|endoftext|>
|
c4a5304706326768fb8ac75260fc557fa4d93624417102dac8ce5bc59056b944
|
@design_specification_zonehvac_sizing_object_name.setter
def design_specification_zonehvac_sizing_object_name(self, value=None):
'Corresponds to IDD field `Design Specification ZoneHVAC Sizing\n Object Name`'
self['Design Specification ZoneHVAC Sizing Object Name'] = value
|
Corresponds to IDD field `Design Specification ZoneHVAC Sizing
Object Name`
|
pyidf/zone_hvac_forced_air_units.py
|
design_specification_zonehvac_sizing_object_name
|
marcelosalles/pyidf
| 19 |
python
|
@design_specification_zonehvac_sizing_object_name.setter
def design_specification_zonehvac_sizing_object_name(self, value=None):
'Corresponds to IDD field `Design Specification ZoneHVAC Sizing\n Object Name`'
self['Design Specification ZoneHVAC Sizing Object Name'] = value
|
@design_specification_zonehvac_sizing_object_name.setter
def design_specification_zonehvac_sizing_object_name(self, value=None):
'Corresponds to IDD field `Design Specification ZoneHVAC Sizing\n Object Name`'
self['Design Specification ZoneHVAC Sizing Object Name'] = value<|docstring|>Corresponds to IDD field `Design Specification ZoneHVAC Sizing
Object Name`<|endoftext|>
|
1243a3d26a989ce7e0aa834cfd823d56c556479e0916f9d4f24db72e26c8bc64
|
def load_trained_model(self, weight_path, remove_module=False):
' another loader method for `model`\n\n It can recover changed model module from dumped `latest.pt` and load\n pre-trained weights.\n\n Args:\n weight_path (str): full path or short weight name to the dumped weight\n remove_module (bool)\n '
folder = self.get_checkpoint_dir()
if (str(folder) not in weight_path):
folder = Path(weight_path).parent
ckpt = torch.load((folder / 'latest.pt'))
full_model = ckpt['model']
if ('latest.pt' not in weight_path):
state_dict = export_checkpoint_weight(weight_path, remove_module)
full_model.load_state_dict(state_dict)
return full_model
|
another loader method for `model`
It can recover changed model module from dumped `latest.pt` and load
pre-trained weights.
Args:
weight_path (str): full path or short weight name to the dumped weight
remove_module (bool)
|
onegan/extension/checkpoint.py
|
load_trained_model
|
leVirve/OneGAN
| 6 |
python
|
def load_trained_model(self, weight_path, remove_module=False):
' another loader method for `model`\n\n It can recover changed model module from dumped `latest.pt` and load\n pre-trained weights.\n\n Args:\n weight_path (str): full path or short weight name to the dumped weight\n remove_module (bool)\n '
folder = self.get_checkpoint_dir()
if (str(folder) not in weight_path):
folder = Path(weight_path).parent
ckpt = torch.load((folder / 'latest.pt'))
full_model = ckpt['model']
if ('latest.pt' not in weight_path):
state_dict = export_checkpoint_weight(weight_path, remove_module)
full_model.load_state_dict(state_dict)
return full_model
|
def load_trained_model(self, weight_path, remove_module=False):
' another loader method for `model`\n\n It can recover changed model module from dumped `latest.pt` and load\n pre-trained weights.\n\n Args:\n weight_path (str): full path or short weight name to the dumped weight\n remove_module (bool)\n '
folder = self.get_checkpoint_dir()
if (str(folder) not in weight_path):
folder = Path(weight_path).parent
ckpt = torch.load((folder / 'latest.pt'))
full_model = ckpt['model']
if ('latest.pt' not in weight_path):
state_dict = export_checkpoint_weight(weight_path, remove_module)
full_model.load_state_dict(state_dict)
return full_model<|docstring|>another loader method for `model`
It can recover changed model module from dumped `latest.pt` and load
pre-trained weights.
Args:
weight_path (str): full path or short weight name to the dumped weight
remove_module (bool)<|endoftext|>
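A hedged usage sketch of load_trained_model(); the checkpoint extension instance and weight path below are hypothetical.
# `checkpoint` is assumed to be an instance of the extension class defining
# load_trained_model(); the path points at a previously dumped weight file.
weight_path = 'output/checkpoints/my_experiment/net-20.pt'
model = checkpoint.load_trained_model(weight_path, remove_module=True)
model.eval()   # recovered full module, ready for inference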
|
6cd300a94f201ed40a5943341de1254cd75b634a4f8f60704b292089c1779626
|
def load(self, path=None, model=None, remove_module=False, resume=False):
' load method for `model` and `optimizer`\n\n If `resume` is True, full `model` and `optimizer` modules will be returned;\n or the loaded model will be returned.\n\n Args:\n path (str): full path to the dumped weight or full module\n model (nn.Module)\n remove_module (bool)\n resume (bool)\n\n Return:\n - dict() of dumped data inside `latest.pt`\n - OrderedDict() of `state_dict`\n - nn.Module of input model with loaded state_dict\n - nn.Module of dumped full module with loaded state_dict\n '
if resume:
latest_ckpt = torch.load(path)
return latest_ckpt
try:
state_dict = export_checkpoint_weight(path, remove_module)
if (model is None):
return state_dict
model.load_state_dict(state_dict)
return model
except KeyError:
self.logger.warn('Use fallback solution: load `latest.pt` as module')
return self.load_trained_model(path, remove_module)
|
load method for `model` and `optimizer`
If `resume` is True, full `model` and `optimizer` modules will be returned;
or the loaded model will be returned.
Args:
path (str): full path to the dumped weight or full module
model (nn.Module)
remove_module (bool)
resume (bool)
Return:
- dict() of dumped data inside `latest.pt`
- OrderedDict() of `state_dict`
- nn.Module of input model with loaded state_dict
- nn.Module of dumped full module with loaded state_dict
|
onegan/extension/checkpoint.py
|
load
|
leVirve/OneGAN
| 6 |
python
|
def load(self, path=None, model=None, remove_module=False, resume=False):
' load method for `model` and `optimizer`\n\n If `resume` is True, full `model` and `optimizer` modules will be returned;\n or the loaded model will be returned.\n\n Args:\n path (str): full path to the dumped weight or full module\n model (nn.Module)\n remove_module (bool)\n resume (bool)\n\n Return:\n - dict() of dumped data inside `latest.pt`\n - OrderedDict() of `state_dict`\n - nn.Module of input model with loaded state_dict\n - nn.Module of dumped full module with loaded state_dict\n '
if resume:
latest_ckpt = torch.load(path)
return latest_ckpt
try:
state_dict = export_checkpoint_weight(path, remove_module)
if (model is None):
return state_dict
model.load_state_dict(state_dict)
return model
except KeyError:
self.logger.warn('Use fallback solution: load `latest.pt` as module')
return self.load_trained_model(path, remove_module)
|
def load(self, path=None, model=None, remove_module=False, resume=False):
' load method for `model` and `optimizer`\n\n If `resume` is True, full `model` and `optimizer` modules will be returned;\n or the loaded model will be returned.\n\n Args:\n path (str): full path to the dumped weight or full module\n model (nn.Module)\n remove_module (bool)\n resume (bool)\n\n Return:\n - dict() of dumped data inside `latest.pt`\n - OrderedDict() of `state_dict`\n - nn.Module of input model with loaded state_dict\n - nn.Module of dumped full module with loaded state_dict\n '
if resume:
latest_ckpt = torch.load(path)
return latest_ckpt
try:
state_dict = export_checkpoint_weight(path, remove_module)
if (model is None):
return state_dict
model.load_state_dict(state_dict)
return model
except KeyError:
self.logger.warn('Use fallback solution: load `latest.pt` as module')
return self.load_trained_model(path, remove_module)<|docstring|>load method for `model` and `optimizer`
If `resume` is True, full `model` and `optimizer` modules will be returned;
or the loaded model will be returned.
Args:
path (str): full path to the dumped weight or full module
model (nn.Module)
remove_module (bool)
resume (bool)
Return:
- dict() of dumped data inside `latest.pt`
- OrderedDict() of `state_dict`
- nn.Module of input model with loaded state_dict
- nn.Module of dumped full module with loaded state_dict<|endoftext|>
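A hedged sketch of the two modes of load(); the `checkpoint` instance, the paths, and the MyNet class are hypothetical.
# Resume mode: returns the whole dict dumped into latest.pt.
ckpt = checkpoint.load(path='output/checkpoints/my_experiment/latest.pt', resume=True)
model, optimizer, start_epoch = ckpt['model'], ckpt['optimizer'], ckpt['epoch']

# Weight mode: fills the given model with the dumped state_dict.
model = checkpoint.load(path='output/checkpoints/my_experiment/net-20.pt',
                        model=MyNet(), remove_module=True)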
|
eecbc20ffb66f0cdc258879a54d66dd09564ffc1d410b6aa69cece775abbf62d
|
def save(self, model, optimizer=None, epoch=None):
' save method for `model` and `optimizer`\n\n Args:\n model (nn.Module)\n optimizer (nn.Module)\n epoch (int): epoch step of training\n '
if ((epoch + 1) % self.save_interval):
return
folder = self.get_checkpoint_dir(unique=True)
torch.save({'weight': model.state_dict()}, (folder / f'net-{epoch}.pt'))
torch.save({'model': model, 'optimizer': optimizer, 'epoch': (epoch + 1)}, (folder / 'latest.pt'))
|
save method for `model` and `optimizer`
Args:
model (nn.Module)
optimizer (nn.Module)
epoch (int): epoch step of training
|
onegan/extension/checkpoint.py
|
save
|
leVirve/OneGAN
| 6 |
python
|
def save(self, model, optimizer=None, epoch=None):
' save method for `model` and `optimizer`\n\n Args:\n model (nn.Module)\n optimizer (nn.Module)\n epoch (int): epoch step of training\n '
if ((epoch + 1) % self.save_interval):
return
folder = self.get_checkpoint_dir(unique=True)
torch.save({'weight': model.state_dict()}, (folder / f'net-{epoch}.pt'))
torch.save({'model': model, 'optimizer': optimizer, 'epoch': (epoch + 1)}, (folder / 'latest.pt'))
|
def save(self, model, optimizer=None, epoch=None):
' save method for `model` and `optimizer`\n\n Args:\n model (nn.Module)\n optimizer (nn.Module)\n epoch (int): epoch step of training\n '
if ((epoch + 1) % self.save_interval):
return
folder = self.get_checkpoint_dir(unique=True)
torch.save({'weight': model.state_dict()}, (folder / f'net-{epoch}.pt'))
torch.save({'model': model, 'optimizer': optimizer, 'epoch': (epoch + 1)}, (folder / 'latest.pt'))<|docstring|>save method for `model` and `optimizer`
Args:
model (nn.Module)
optimizer (nn.Module)
epoch (int): epoch step of training<|endoftext|>
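A short sketch of how the interval gate in save() behaves with a zero-based epoch counter; the training-loop pieces are hypothetical.
# With save_interval = 5, dumps happen when (epoch + 1) % 5 == 0,
# i.e. after epochs 4, 9, 14, ...; other epochs return early.
for epoch in range(num_epochs):              # num_epochs, model, optimizer assumed defined
    train_one_epoch(model, optimizer)        # hypothetical training step
    checkpoint.save(model, optimizer, epoch=epoch)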
|
83205b4726419b4c1e982db563124bc673d6af5a4eba60cb28f31f9de8fe2c01
|
def get_weights(self, weight_path, model=None, remove_module=False, path_only=False):
' model weights searcher\n\n Args:\n weight_path (str): the path to single weight file or the folder of weights\n model (nn.Module): if given, the model will be filled with state_dict\n remove_module (bool): remove the `module.` string from the keys of state_dict\n path_only (bool): if true, the return value will be only path to weights\n Returns:\n - payload, path: if model is given, payload will be loaded model else will be state_dict\n - path: the path to the weight\n '
weight_path = Path(weight_path)
if weight_path.is_file():
path = str(weight_path)
if path_only:
(yield path)
payload = self.load(path, model=model, remove_module=remove_module)
return (payload, path)
paths = list(weight_path.glob('*.pt'))
if weight_path.is_dir():
assert len(paths), 'Weights folder contains nothing.'
for path in paths:
path = str(path)
if ('latest.pt' in path):
continue
if path_only:
(yield path)
continue
payload = self.load(path, model=model, remove_module=remove_module)
model = payload
(yield (payload, path))
|
model weights searcher
Args:
weight_path (str): the path to single weight file or the folder of weights
model (nn.Module): if given, the model will be filled with state_dict
remove_module (bool): remove the `module.` string from the keys of state_dict
path_only (bool): if true, the return value will be only path to weights
Returns:
- payload, path: if model is given, payload will be loaded model else will be state_dict
- path: the path to the weight
|
onegan/extension/checkpoint.py
|
get_weights
|
leVirve/OneGAN
| 6 |
python
|
def get_weights(self, weight_path, model=None, remove_module=False, path_only=False):
' model weights searcher\n\n Args:\n weight_path (str): the path to single weight file or the folder of weights\n model (nn.Module): if given, the model will be filled with state_dict\n remove_module (bool): remove the `module.` string from the keys of state_dict\n path_only (bool): if true, the return value will be only path to weights\n Returns:\n - payload, path: if model is given, payload will be loaded model else will be state_dict\n - path: the path to the weight\n '
weight_path = Path(weight_path)
if weight_path.is_file():
path = str(weight_path)
if path_only:
(yield path)
payload = self.load(path, model=model, remove_module=remove_module)
return (payload, path)
paths = list(weight_path.glob('*.pt'))
if weight_path.is_dir():
assert len(paths), 'Weights folder contains nothing.'
for path in paths:
path = str(path)
if ('latest.pt' in path):
continue
if path_only:
(yield path)
continue
payload = self.load(path, model=model, remove_module=remove_module)
model = payload
(yield (payload, path))
|
def get_weights(self, weight_path, model=None, remove_module=False, path_only=False):
' model weights searcher\n\n Args:\n weight_path (str): the path to single weight file or the folder of weights\n model (nn.Module): if given, the model will be filled with state_dict\n remove_module (bool): remove the `module.` string from the keys of state_dict\n path_only (bool): if true, the return value will be only path to weights\n Returns:\n - payload, path: if model is given, payload will be loaded model else will be state_dict\n - path: the path to the weight\n '
weight_path = Path(weight_path)
if weight_path.is_file():
path = str(weight_path)
if path_only:
(yield path)
payload = self.load(path, model=model, remove_module=remove_module)
return (payload, path)
paths = list(weight_path.glob('*.pt'))
if weight_path.is_dir():
assert len(paths), 'Weights folder contains nothing.'
for path in paths:
path = str(path)
if ('latest.pt' in path):
continue
if path_only:
(yield path)
continue
payload = self.load(path, model=model, remove_module=remove_module)
model = payload
(yield (payload, path))<|docstring|>model weights searcher
Args:
weight_path (str): the path to single weight file or the folder of weights
model (nn.Module): if given, the model will be filled with state_dict
remove_module (bool): remove the `module.` string from the keys of state_dict
path_only (bool): if true, the return value will be only path to weights
Returns:
- payload, path: if model is given, payload will be loaded model else will be state_dict
- path: the path to the weight<|endoftext|>
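A hedged sketch of iterating get_weights() over a folder of dumps; `checkpoint`, MyNet and evaluate() are hypothetical.
# Yields (payload, path) pairs for every *.pt in the folder except latest.pt.
for payload, path in checkpoint.get_weights('output/checkpoints/my_experiment/', model=MyNet()):
    evaluate(payload, path)                  # hypothetical evaluation call

# Or collect only the paths without loading anything:
paths = list(checkpoint.get_weights('output/checkpoints/my_experiment/', path_only=True))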
|
fa50664a80d4cd27a40f173a9f18e69952158714f15656875ca4b234435b92f5
|
def get_versions(self):
"\n Return versions of framework and its plugins.\n\n As 'unittest' is a built-in framework, we return the python version.\n "
import platform
return ['unittest {}'.format(platform.python_version())]
|
Return versions of framework and its plugins.
As 'unittest' is a built-in framework, we return the python version.
|
spyder_unittest/backend/unittestrunner.py
|
get_versions
|
jitseniesen/spyder-unittest
| 61 |
python
|
def get_versions(self):
"\n Return versions of framework and its plugins.\n\n As 'unittest' is a built-in framework, we return the python version.\n "
import platform
return ['unittest {}'.format(platform.python_version())]
|
def get_versions(self):
"\n Return versions of framework and its plugins.\n\n As 'unittest' is a built-in framework, we return the python version.\n "
import platform
return ['unittest {}'.format(platform.python_version())]<|docstring|>Return versions of framework and its plugins.
As 'unittest' is a built-in framework, we return the python version.<|endoftext|>
|
2e26d4b8dd2d056c24471c4d64901dc2c32bbdeefa49415a74a38b13f97e51c4
|
def create_argument_list(self):
'Create argument list for testing process.'
return ['-m', self.module, 'discover', '-v']
|
Create argument list for testing process.
|
spyder_unittest/backend/unittestrunner.py
|
create_argument_list
|
jitseniesen/spyder-unittest
| 61 |
python
|
def create_argument_list(self):
return ['-m', self.module, 'discover', '-v']
|
def create_argument_list(self):
return ['-m', self.module, 'discover', '-v']<|docstring|>Create argument list for testing process.<|endoftext|>
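The argument list above is handed to the Python interpreter by the runner, so the spawned process is equivalent to the command sketched below (assuming self.module is 'unittest'); its verbose output is what load_data() parses.
import subprocess
# Hypothetical stand-alone equivalent of what the runner executes,
# run from the directory containing the tests:
subprocess.run(['python', '-m', 'unittest', 'discover', '-v'], cwd='tests/')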
|
8aefcb16740f9015886b4026e2f55330ba30006b071f56892f1c897ed163be40
|
def finished(self):
'\n Called when the unit test process has finished.\n\n This function reads the results and emits `sig_finished`.\n '
output = self.read_all_process_output()
testresults = self.load_data(output)
self.sig_finished.emit(testresults, output)
|
Called when the unit test process has finished.
This function reads the results and emits `sig_finished`.
|
spyder_unittest/backend/unittestrunner.py
|
finished
|
jitseniesen/spyder-unittest
| 61 |
python
|
def finished(self):
'\n Called when the unit test process has finished.\n\n This function reads the results and emits `sig_finished`.\n '
output = self.read_all_process_output()
testresults = self.load_data(output)
self.sig_finished.emit(testresults, output)
|
def finished(self):
'\n Called when the unit test process has finished.\n\n This function reads the results and emits `sig_finished`.\n '
output = self.read_all_process_output()
testresults = self.load_data(output)
self.sig_finished.emit(testresults, output)<|docstring|>Called when the unit test process has finished.
This function reads the results and emits `sig_finished`.<|endoftext|>
|
cb306ceba6f78324e633970416aa97d9907c537952383d391a7a98c07866f4dc
|
def load_data(self, output):
'\n Read and parse output from unittest module.\n\n Any parsing errors are silently ignored.\n\n Returns\n -------\n list of TestResult\n Unit test results.\n '
res = []
lines = output.splitlines()
line_index = 0
try:
while lines[line_index]:
data = self.try_parse_result(lines, line_index)
if data:
line_index = data[0]
if (data[3] == 'ok'):
cat = Category.OK
elif ((data[3] == 'FAIL') or (data[3] == 'ERROR')):
cat = Category.FAIL
else:
cat = Category.SKIP
name = '{}.{}'.format(data[2], data[1])
tr = TestResult(category=cat, status=data[3], name=name, message=data[4])
res.append(tr)
else:
line_index += 1
line_index += 1
while (not (lines[line_index] and all(((c == '-') for c in lines[line_index])))):
data = self.try_parse_exception_block(lines, line_index)
if data:
line_index = data[0]
test_index = next((i for (i, tr) in enumerate(res) if (tr.name == '{}.{}'.format(data[2], data[1]))))
res[test_index].extra_text = data[3]
else:
line_index += 1
except IndexError:
pass
return res
|
Read and parse output from unittest module.
Any parsing errors are silently ignored.
Returns
-------
list of TestResult
Unit test results.
|
spyder_unittest/backend/unittestrunner.py
|
load_data
|
jitseniesen/spyder-unittest
| 61 |
python
|
def load_data(self, output):
'\n Read and parse output from unittest module.\n\n Any parsing errors are silently ignored.\n\n Returns\n -------\n list of TestResult\n Unit test results.\n '
res = []
lines = output.splitlines()
line_index = 0
try:
while lines[line_index]:
data = self.try_parse_result(lines, line_index)
if data:
line_index = data[0]
if (data[3] == 'ok'):
cat = Category.OK
elif ((data[3] == 'FAIL') or (data[3] == 'ERROR')):
cat = Category.FAIL
else:
cat = Category.SKIP
name = '{}.{}'.format(data[2], data[1])
tr = TestResult(category=cat, status=data[3], name=name, message=data[4])
res.append(tr)
else:
line_index += 1
line_index += 1
while (not (lines[line_index] and all(((c == '-') for c in lines[line_index])))):
data = self.try_parse_exception_block(lines, line_index)
if data:
line_index = data[0]
test_index = next((i for (i, tr) in enumerate(res) if (tr.name == '{}.{}'.format(data[2], data[1]))))
res[test_index].extra_text = data[3]
else:
line_index += 1
except IndexError:
pass
return res
|
def load_data(self, output):
'\n Read and parse output from unittest module.\n\n Any parsing errors are silently ignored.\n\n Returns\n -------\n list of TestResult\n Unit test results.\n '
res = []
lines = output.splitlines()
line_index = 0
try:
while lines[line_index]:
data = self.try_parse_result(lines, line_index)
if data:
line_index = data[0]
if (data[3] == 'ok'):
cat = Category.OK
elif ((data[3] == 'FAIL') or (data[3] == 'ERROR')):
cat = Category.FAIL
else:
cat = Category.SKIP
name = '{}.{}'.format(data[2], data[1])
tr = TestResult(category=cat, status=data[3], name=name, message=data[4])
res.append(tr)
else:
line_index += 1
line_index += 1
while (not (lines[line_index] and all(((c == '-') for c in lines[line_index])))):
data = self.try_parse_exception_block(lines, line_index)
if data:
line_index = data[0]
test_index = next((i for (i, tr) in enumerate(res) if (tr.name == '{}.{}'.format(data[2], data[1]))))
res[test_index].extra_text = data[3]
else:
line_index += 1
except IndexError:
pass
return res<|docstring|>Read and parse output from unittest module.
Any parsing errors are silently ignored.
Returns
-------
list of TestResult
Unit test results.<|endoftext|>
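A sketch of the verbose unittest output load_data() expects and of the results it produces; the test names and the runner instance are illustrative.
# `runner` is assumed to be an instance of the class defining load_data().
sample_output = (
    "test_add (test_calc.TestCalc) ... ok\n"
    "test_div (test_calc.TestCalc) ... FAIL\n"
    "\n"
    "======================================================================\n"
    "FAIL: test_div (test_calc.TestCalc)\n"
    "----------------------------------------------------------------------\n"
    "Traceback (most recent call last):\n"
    "  ...\n"
    "\n"
    "----------------------------------------------------------------------\n"
    "Ran 2 tests in 0.001s\n"
)
results = runner.load_data(sample_output)
# -> roughly [TestResult(Category.OK, 'ok', 'test_calc.TestCalc.test_add', ...),
#             TestResult(Category.FAIL, 'FAIL', 'test_calc.TestCalc.test_div', ...)]
# with the traceback lines attached to the failing test as extra_text.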
|
1c9de0dd4a9f8b71adf3bf6d8224ead289c1ce77a2967b939aeafd3723f7bf81
|
def try_parse_result(self, lines, line_index):
'\n Try to parse one or more lines of text as a test result.\n\n Returns\n -------\n (int, str, str, str, str) or None\n If a test result is parsed successfully then return a tuple with\n the line index of the first line after the test result, the name\n of the test function, the name of the test class, the test result,\n and the reason (if no reason is given, the fourth string is empty).\n Otherwise, return None.\n '
regexp = '([^\\d\\W]\\w*) \\(([^\\d\\W][\\w.]*)\\)'
match = re.match(regexp, lines[line_index])
if match:
function_name = match.group(1)
class_name = match.group(2)
else:
return None
while lines[line_index]:
regexp = " \\.\\.\\. (ok|FAIL|ERROR|skipped|expected failure|unexpected success)( '([^']*)')?\\Z"
match = re.search(regexp, lines[line_index])
if match:
result = match.group(1)
msg = (match.group(3) or '')
return ((line_index + 1), function_name, class_name, result, msg)
line_index += 1
return None
|
Try to parse one or more lines of text as a test result.
Returns
-------
(int, str, str, str, str) or None
If a test result is parsed successfully then return a tuple with
the line index of the first line after the test result, the name
of the test function, the name of the test class, the test result,
and the reason (if no reason is given, the fourth string is empty).
Otherwise, return None.
|
spyder_unittest/backend/unittestrunner.py
|
try_parse_result
|
jitseniesen/spyder-unittest
| 61 |
python
|
def try_parse_result(self, lines, line_index):
'\n Try to parse one or more lines of text as a test result.\n\n Returns\n -------\n (int, str, str, str, str) or None\n If a test result is parsed successfully then return a tuple with\n the line index of the first line after the test result, the name\n of the test function, the name of the test class, the test result,\n and the reason (if no reason is given, the fourth string is empty).\n Otherwise, return None.\n '
regexp = '([^\\d\\W]\\w*) \\(([^\\d\\W][\\w.]*)\\)'
match = re.match(regexp, lines[line_index])
if match:
function_name = match.group(1)
class_name = match.group(2)
else:
return None
while lines[line_index]:
regexp = " \\.\\.\\. (ok|FAIL|ERROR|skipped|expected failure|unexpected success)( '([^']*)')?\\Z"
match = re.search(regexp, lines[line_index])
if match:
result = match.group(1)
            msg = (match.group(3) or '')
return ((line_index + 1), function_name, class_name, result, msg)
line_index += 1
return None
|
def try_parse_result(self, lines, line_index):
'\n Try to parse one or more lines of text as a test result.\n\n Returns\n -------\n (int, str, str, str, str) or None\n If a test result is parsed successfully then return a tuple with\n the line index of the first line after the test result, the name\n of the test function, the name of the test class, the test result,\n and the reason (if no reason is given, the fourth string is empty).\n Otherwise, return None.\n '
regexp = '([^\\d\\W]\\w*) \\(([^\\d\\W][\\w.]*)\\)'
match = re.match(regexp, lines[line_index])
if match:
function_name = match.group(1)
class_name = match.group(2)
else:
return None
while lines[line_index]:
regexp = " \\.\\.\\. (ok|FAIL|ERROR|skipped|expected failure|unexpected success)( '([^']*)')?\\Z"
match = re.search(regexp, lines[line_index])
if match:
result = match.group(1)
            msg = (match.group(3) or '')
return ((line_index + 1), function_name, class_name, result, msg)
line_index += 1
return None<|docstring|>Try to parse one or more lines of text as a test result.
Returns
-------
(int, str, str, str, str) or None
If a test result is parsed successfully then return a tuple with
the line index of the first line after the test result, the name
of the test function, the name of the test class, the test result,
and the reason (if no reason is given, the fourth string is empty).
Otherwise, return None.<|endoftext|>
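A tiny standalone illustration of the two regexes above; the sample line is hypothetical.
import re
line = "test_div (test_calc.TestCalc) ... skipped 'not implemented'"
m1 = re.match(r'([^\d\W]\w*) \(([^\d\W][\w.]*)\)', line)
m2 = re.search(r" \.\.\. (ok|FAIL|ERROR|skipped|expected failure|unexpected success)( '([^']*)')?\Z", line)
print(m1.group(1), m1.group(2), m2.group(1), m2.group(3))
# -> test_div test_calc.TestCalc skipped not implemented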
|
ff9a0b4d224012311dc1e021d4250748dee8ebb61fb99c9d6cf1eb400fb89843
|
def try_parse_exception_block(self, lines, line_index):
'\n Try to parse a block detailing an exception in unittest output.\n\n Returns\n -------\n (int, str, str, list of str) or None\n If an exception block is parsed successfully, then return a tuple\n with the line index of the first line after the block, the name of\n the test function, the name of the test class, and the text of the\n exception. Otherwise, return None.\n '
if (not all(((char == '=') for char in lines[line_index]))):
return None
regexp = '\\w+: ([^\\d\\W]\\w*) \\(([^\\d\\W][\\w.]*)\\)\\Z'
match = re.match(regexp, lines[(line_index + 1)])
if (not match):
return None
line_index += 1
while (not all(((char == '-') for char in lines[line_index]))):
if (not lines[line_index]):
return None
line_index += 1
line_index += 1
exception_text = []
while lines[line_index]:
exception_text.append(lines[line_index])
line_index += 1
return (line_index, match.group(1), match.group(2), exception_text)
|
Try to parse a block detailing an exception in unittest output.
Returns
-------
(int, str, str, list of str) or None
If an exception block is parsed successfully, then return a tuple
with the line index of the first line after the block, the name of
the test function, the name of the test class, and the text of the
exception. Otherwise, return None.
|
spyder_unittest/backend/unittestrunner.py
|
try_parse_exception_block
|
jitseniesen/spyder-unittest
| 61 |
python
|
def try_parse_exception_block(self, lines, line_index):
'\n Try to parse a block detailing an exception in unittest output.\n\n Returns\n -------\n (int, str, str, list of str) or None\n If an exception block is parsed successfully, then return a tuple\n with the line index of the first line after the block, the name of\n the test function, the name of the test class, and the text of the\n exception. Otherwise, return None.\n '
if (not all(((char == '=') for char in lines[line_index]))):
return None
regexp = '\\w+: ([^\\d\\W]\\w*) \\(([^\\d\\W][\\w.]*)\\)\\Z'
match = re.match(regexp, lines[(line_index + 1)])
if (not match):
return None
line_index += 1
while (not all(((char == '-') for char in lines[line_index]))):
if (not lines[line_index]):
return None
line_index += 1
line_index += 1
exception_text = []
while lines[line_index]:
exception_text.append(lines[line_index])
line_index += 1
return (line_index, match.group(1), match.group(2), exception_text)
|
def try_parse_exception_block(self, lines, line_index):
'\n Try to parse a block detailing an exception in unittest output.\n\n Returns\n -------\n (int, str, str, list of str) or None\n If an exception block is parsed successfully, then return a tuple\n with the line index of the first line after the block, the name of\n the test function, the name of the test class, and the text of the\n exception. Otherwise, return None.\n '
if (not all(((char == '=') for char in lines[line_index]))):
return None
regexp = '\\w+: ([^\\d\\W]\\w*) \\(([^\\d\\W][\\w.]*)\\)\\Z'
match = re.match(regexp, lines[(line_index + 1)])
if (not match):
return None
line_index += 1
while (not all(((char == '-') for char in lines[line_index]))):
if (not lines[line_index]):
return None
line_index += 1
line_index += 1
exception_text = []
while lines[line_index]:
exception_text.append(lines[line_index])
line_index += 1
return (line_index, match.group(1), match.group(2), exception_text)<|docstring|>Try to parse a block detailing an exception in unittest output.
Returns
-------
(int, str, str, list of str) or None
If an exception block is parsed successfully, then return a tuple
with the line index of the first line after the block, the name of
the test function, the name of the test class, and the text of the
exception. Otherwise, return None.<|endoftext|>
|
6e3f343e03e3b8f33538500643682f94ee9c1df46d9bb2126ee98c00e09bfcd2
|
def main():
    'Load addresses and count how many have\n    reflections outside of a bracketed sequence\n    and no reflections within a bracketed sequence.\n    '
addresses = load_addresses()
print(sum([is_compatible(address) for address in addresses]))
|
Load addresses and count how many have
reflections outside of a bracketed sequence
and no reflections within a bracketed sequence.
|
07/solve_1.py
|
main
|
machinelearningdeveloper/aoc_2016
| 0 |
python
|
def main():
    'Load addresses and count how many have\n    reflections outside of a bracketed sequence\n    and no reflections within a bracketed sequence.\n    '
addresses = load_addresses()
print(sum([is_compatible(address) for address in addresses]))
|
def main():
    'Load addresses and count how many have\n    reflections outside of a bracketed sequence\n    and no reflections within a bracketed sequence.\n    '
addresses = load_addresses()
    print(sum([is_compatible(address) for address in addresses]))<|docstring|>Load addresses and count how many have
reflections outside of a bracketed sequence
and no reflections within a bracketed sequence.<|endoftext|>
|
e80357ce2dc2dae167828b7b393ffad11be5a4f6fd307d067643f1b7eeba493d
|
def _decompose_(self, qubits):
'An adjacency-respecting decomposition.\n\n 0: ───p───@──────────────@───────@──────────@──────────\n │ │ │ │\n 1: ───p───X───@───p^-1───X───@───X──────@───X──────@───\n │ │ │ │\n 2: ───p───────X───p──────────X───p^-1───X───p^-1───X───\n\n where p = T**self._exponent\n '
(a, b, c) = qubits
if hasattr(b, 'is_adjacent'):
if (not b.is_adjacent(a)):
(b, c) = (c, b)
elif (not b.is_adjacent(c)):
(a, b) = (b, a)
p = (common_gates.T ** self._exponent)
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
(yield (p(a), p(b), p(c)))
(yield sweep_abc)
(yield ((p(b) ** (- 1)), p(c)))
(yield sweep_abc)
(yield (p(c) ** (- 1)))
(yield sweep_abc)
(yield (p(c) ** (- 1)))
(yield sweep_abc)
|
An adjacency-respecting decomposition.
0: ───p───@──────────────@───────@──────────@──────────
│ │ │ │
1: ───p───X───@───p^-1───X───@───X──────@───X──────@───
│ │ │ │
2: ───p───────X───p──────────X───p^-1───X───p^-1───X───
where p = T**self._exponent
|
cirq/ops/three_qubit_gates.py
|
_decompose_
|
mannurulz/Cirq
| 1 |
python
|
def _decompose_(self, qubits):
'An adjacency-respecting decomposition.\n\n 0: ───p───@──────────────@───────@──────────@──────────\n │ │ │ │\n 1: ───p───X───@───p^-1───X───@───X──────@───X──────@───\n │ │ │ │\n 2: ───p───────X───p──────────X───p^-1───X───p^-1───X───\n\n where p = T**self._exponent\n '
(a, b, c) = qubits
if hasattr(b, 'is_adjacent'):
if (not b.is_adjacent(a)):
(b, c) = (c, b)
elif (not b.is_adjacent(c)):
(a, b) = (b, a)
p = (common_gates.T ** self._exponent)
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
(yield (p(a), p(b), p(c)))
(yield sweep_abc)
(yield ((p(b) ** (- 1)), p(c)))
(yield sweep_abc)
(yield (p(c) ** (- 1)))
(yield sweep_abc)
(yield (p(c) ** (- 1)))
(yield sweep_abc)
|
def _decompose_(self, qubits):
'An adjacency-respecting decomposition.\n\n 0: ───p───@──────────────@───────@──────────@──────────\n │ │ │ │\n 1: ───p───X───@───p^-1───X───@───X──────@───X──────@───\n │ │ │ │\n 2: ───p───────X───p──────────X───p^-1───X───p^-1───X───\n\n where p = T**self._exponent\n '
(a, b, c) = qubits
if hasattr(b, 'is_adjacent'):
if (not b.is_adjacent(a)):
(b, c) = (c, b)
elif (not b.is_adjacent(c)):
(a, b) = (b, a)
p = (common_gates.T ** self._exponent)
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
(yield (p(a), p(b), p(c)))
(yield sweep_abc)
(yield ((p(b) ** (- 1)), p(c)))
(yield sweep_abc)
(yield (p(c) ** (- 1)))
(yield sweep_abc)
(yield (p(c) ** (- 1)))
(yield sweep_abc)<|docstring|>An adjacency-respecting decomposition.
0: ───p───@──────────────@───────@──────────@──────────
│ │ │ │
1: ───p───X───@───p^-1───X───@───X──────@───X──────@───
│ │ │ │
2: ───p───────X───p──────────X───p^-1───X───p^-1───X───
where p = T**self._exponent<|endoftext|>
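A hedged Cirq sketch that exercises a decomposition like the one above; exact constructor spellings differ between Cirq releases (older versions use cirq.Circuit.from_ops instead of cirq.Circuit).
import cirq

a, b, c = cirq.LineQubit.range(3)
ops = cirq.decompose(cirq.CCZ(a, b, c))   # expands into T-like powers and CNOTs
print(cirq.Circuit(ops))                  # or cirq.Circuit.from_ops(ops) on older releases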
|
17acade2790467169f305bd22470a7023a48a27363c523552da5d6ccb1925c29
|
def _decompose_inside_control(self, target1: raw_types.QubitId, control: raw_types.QubitId, target2: raw_types.QubitId) -> op_tree.OP_TREE:
'A decomposition assuming the control separates the targets.\n\n target1: ─@─X───────T──────@────────@─────────X───@─────X^-0.5─\n │ │ │ │ │ │\n control: ─X─@─X─────@─T^-1─X─@─T────X─@─X^0.5─@─@─X─@──────────\n │ │ │ │ │ │\n target2: ─────@─H─T─X─T──────X─T^-1───X─T^-1────X───X─H─S^-1───\n '
(a, b, c) = (target1, control, target2)
(yield common_gates.CNOT(a, b))
(yield common_gates.CNOT(b, a))
(yield common_gates.CNOT(c, b))
(yield common_gates.H(c))
(yield common_gates.T(c))
(yield common_gates.CNOT(b, c))
(yield common_gates.T(a))
(yield (common_gates.T(b) ** (- 1)))
(yield common_gates.T(c))
(yield common_gates.CNOT(a, b))
(yield common_gates.CNOT(b, c))
(yield common_gates.T(b))
(yield (common_gates.T(c) ** (- 1)))
(yield common_gates.CNOT(a, b))
(yield common_gates.CNOT(b, c))
(yield (common_gates.X(b) ** 0.5))
(yield (common_gates.T(c) ** (- 1)))
(yield common_gates.CNOT(b, a))
(yield common_gates.CNOT(b, c))
(yield common_gates.CNOT(a, b))
(yield common_gates.CNOT(b, c))
(yield common_gates.H(c))
(yield (common_gates.S(c) ** (- 1)))
(yield (common_gates.X(a) ** (- 0.5)))
|
A decomposition assuming the control separates the targets.
target1: ─@─X───────T──────@────────@─────────X───@─────X^-0.5─
│ │ │ │ │ │
control: ─X─@─X─────@─T^-1─X─@─T────X─@─X^0.5─@─@─X─@──────────
│ │ │ │ │ │
target2: ─────@─H─T─X─T──────X─T^-1───X─T^-1────X───X─H─S^-1───
|
cirq/ops/three_qubit_gates.py
|
_decompose_inside_control
|
mannurulz/Cirq
| 1 |
python
|
def _decompose_inside_control(self, target1: raw_types.QubitId, control: raw_types.QubitId, target2: raw_types.QubitId) -> op_tree.OP_TREE:
'A decomposition assuming the control separates the targets.\n\n target1: ─@─X───────T──────@────────@─────────X───@─────X^-0.5─\n │ │ │ │ │ │\n control: ─X─@─X─────@─T^-1─X─@─T────X─@─X^0.5─@─@─X─@──────────\n │ │ │ │ │ │\n target2: ─────@─H─T─X─T──────X─T^-1───X─T^-1────X───X─H─S^-1───\n '
(a, b, c) = (target1, control, target2)
(yield common_gates.CNOT(a, b))
(yield common_gates.CNOT(b, a))
(yield common_gates.CNOT(c, b))
(yield common_gates.H(c))
(yield common_gates.T(c))
(yield common_gates.CNOT(b, c))
(yield common_gates.T(a))
(yield (common_gates.T(b) ** (- 1)))
(yield common_gates.T(c))
(yield common_gates.CNOT(a, b))
(yield common_gates.CNOT(b, c))
(yield common_gates.T(b))
(yield (common_gates.T(c) ** (- 1)))
(yield common_gates.CNOT(a, b))
(yield common_gates.CNOT(b, c))
(yield (common_gates.X(b) ** 0.5))
(yield (common_gates.T(c) ** (- 1)))
(yield common_gates.CNOT(b, a))
(yield common_gates.CNOT(b, c))
(yield common_gates.CNOT(a, b))
(yield common_gates.CNOT(b, c))
(yield common_gates.H(c))
(yield (common_gates.S(c) ** (- 1)))
(yield (common_gates.X(a) ** (- 0.5)))
|
def _decompose_inside_control(self, target1: raw_types.QubitId, control: raw_types.QubitId, target2: raw_types.QubitId) -> op_tree.OP_TREE:
'A decomposition assuming the control separates the targets.\n\n target1: ─@─X───────T──────@────────@─────────X───@─────X^-0.5─\n │ │ │ │ │ │\n control: ─X─@─X─────@─T^-1─X─@─T────X─@─X^0.5─@─@─X─@──────────\n │ │ │ │ │ │\n target2: ─────@─H─T─X─T──────X─T^-1───X─T^-1────X───X─H─S^-1───\n '
(a, b, c) = (target1, control, target2)
(yield common_gates.CNOT(a, b))
(yield common_gates.CNOT(b, a))
(yield common_gates.CNOT(c, b))
(yield common_gates.H(c))
(yield common_gates.T(c))
(yield common_gates.CNOT(b, c))
(yield common_gates.T(a))
(yield (common_gates.T(b) ** (- 1)))
(yield common_gates.T(c))
(yield common_gates.CNOT(a, b))
(yield common_gates.CNOT(b, c))
(yield common_gates.T(b))
(yield (common_gates.T(c) ** (- 1)))
(yield common_gates.CNOT(a, b))
(yield common_gates.CNOT(b, c))
(yield (common_gates.X(b) ** 0.5))
(yield (common_gates.T(c) ** (- 1)))
(yield common_gates.CNOT(b, a))
(yield common_gates.CNOT(b, c))
(yield common_gates.CNOT(a, b))
(yield common_gates.CNOT(b, c))
(yield common_gates.H(c))
(yield (common_gates.S(c) ** (- 1)))
(yield (common_gates.X(a) ** (- 0.5)))<|docstring|>A decomposition assuming the control separates the targets.
target1: ─@─X───────T──────@────────@─────────X───@─────X^-0.5─
│ │ │ │ │ │
control: ─X─@─X─────@─T^-1─X─@─T────X─@─X^0.5─@─@─X─@──────────
│ │ │ │ │ │
target2: ─────@─H─T─X─T──────X─T^-1───X─T^-1────X───X─H─S^-1───<|endoftext|>
|
bc02b0d3f2531b53d71cde69161b05c8d90f016244a5b2a438110ec5e023f2fd
|
def _decompose_outside_control(self, control: raw_types.QubitId, near_target: raw_types.QubitId, far_target: raw_types.QubitId) -> op_tree.OP_TREE:
'A decomposition assuming one of the targets is in the middle.\n\n control: ───T──────@────────@───@────────────@────────────────\n │ │ │ │\n near: ─X─T──────X─@─T^-1─X─@─X────@─X^0.5─X─@─X^0.5────────\n │ │ │ │ │\n far: ─@─Y^-0.5─T─X─T──────X─T^-1─X─T^-1────X─S─────X^-0.5─\n '
(a, b, c) = (control, near_target, far_target)
t = common_gates.T
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
(yield common_gates.CNOT(c, b))
(yield (common_gates.Y(c) ** (- 0.5)))
(yield (t(a), t(b), t(c)))
(yield sweep_abc)
(yield ((t(b) ** (- 1)), t(c)))
(yield sweep_abc)
(yield (t(c) ** (- 1)))
(yield sweep_abc)
(yield (t(c) ** (- 1)))
(yield (common_gates.X(b) ** 0.5))
(yield sweep_abc)
(yield common_gates.S(c))
(yield (common_gates.X(b) ** 0.5))
(yield (common_gates.X(c) ** (- 0.5)))
|
A decomposition assuming one of the targets is in the middle.
control: ───T──────@────────@───@────────────@────────────────
│ │ │ │
near: ─X─T──────X─@─T^-1─X─@─X────@─X^0.5─X─@─X^0.5────────
│ │ │ │ │
far: ─@─Y^-0.5─T─X─T──────X─T^-1─X─T^-1────X─S─────X^-0.5─
|
cirq/ops/three_qubit_gates.py
|
_decompose_outside_control
|
mannurulz/Cirq
| 1 |
python
|
def _decompose_outside_control(self, control: raw_types.QubitId, near_target: raw_types.QubitId, far_target: raw_types.QubitId) -> op_tree.OP_TREE:
'A decomposition assuming one of the targets is in the middle.\n\n control: ───T──────@────────@───@────────────@────────────────\n │ │ │ │\n near: ─X─T──────X─@─T^-1─X─@─X────@─X^0.5─X─@─X^0.5────────\n │ │ │ │ │\n far: ─@─Y^-0.5─T─X─T──────X─T^-1─X─T^-1────X─S─────X^-0.5─\n '
(a, b, c) = (control, near_target, far_target)
t = common_gates.T
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
(yield common_gates.CNOT(c, b))
(yield (common_gates.Y(c) ** (- 0.5)))
(yield (t(a), t(b), t(c)))
(yield sweep_abc)
(yield ((t(b) ** (- 1)), t(c)))
(yield sweep_abc)
(yield (t(c) ** (- 1)))
(yield sweep_abc)
(yield (t(c) ** (- 1)))
(yield (common_gates.X(b) ** 0.5))
(yield sweep_abc)
(yield common_gates.S(c))
(yield (common_gates.X(b) ** 0.5))
(yield (common_gates.X(c) ** (- 0.5)))
|
def _decompose_outside_control(self, control: raw_types.QubitId, near_target: raw_types.QubitId, far_target: raw_types.QubitId) -> op_tree.OP_TREE:
'A decomposition assuming one of the targets is in the middle.\n\n control: ───T──────@────────@───@────────────@────────────────\n │ │ │ │\n near: ─X─T──────X─@─T^-1─X─@─X────@─X^0.5─X─@─X^0.5────────\n │ │ │ │ │\n far: ─@─Y^-0.5─T─X─T──────X─T^-1─X─T^-1────X─S─────X^-0.5─\n '
(a, b, c) = (control, near_target, far_target)
t = common_gates.T
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
(yield common_gates.CNOT(c, b))
(yield (common_gates.Y(c) ** (- 0.5)))
(yield (t(a), t(b), t(c)))
(yield sweep_abc)
(yield ((t(b) ** (- 1)), t(c)))
(yield sweep_abc)
(yield (t(c) ** (- 1)))
(yield sweep_abc)
(yield (t(c) ** (- 1)))
(yield (common_gates.X(b) ** 0.5))
(yield sweep_abc)
(yield common_gates.S(c))
(yield (common_gates.X(b) ** 0.5))
(yield (common_gates.X(c) ** (- 0.5)))<|docstring|>A decomposition assuming one of the targets is in the middle.
control: ───T──────@────────@───@────────────@────────────────
│ │ │ │
near: ─X─T──────X─@─T^-1─X─@─X────@─X^0.5─X─@─X^0.5────────
│ │ │ │ │
far: ─@─Y^-0.5─T─X─T──────X─T^-1─X─T^-1────X─S─────X^-0.5─<|endoftext|>
|
2346c3cae4142cc64f399ffc89de2211e87dc83024e6ed39c3c40accac6d46ab
|
def normalize_api_path(api_path):
"\n Resolve paths with '..' to normalized paths, raising an error if the final\n result is outside root.\n "
normalized = posixpath.normpath(api_path.strip('/'))
if (normalized == '.'):
normalized = ''
elif normalized.startswith('..'):
raise PathOutsideRoot(normalized)
return normalized
|
Resolve paths with '..' to normalized paths, raising an error if the final
result is outside root.
|
s3contents/hybridmanager.py
|
normalize_api_path
|
cailiang9/s3contents
| 0 |
python
|
def normalize_api_path(api_path):
"\n Resolve paths with '..' to normalized paths, raising an error if the final\n result is outside root.\n "
normalized = posixpath.normpath(api_path.strip('/'))
if (normalized == '.'):
        normalized = ''
elif normalized.startswith('..'):
raise PathOutsideRoot(normalized)
return normalized
|
def normalize_api_path(api_path):
"\n Resolve paths with '..' to normalized paths, raising an error if the final\n result is outside root.\n "
normalized = posixpath.normpath(api_path.strip('/'))
if (normalized == '.'):
        normalized = ''
elif normalized.startswith('..'):
raise PathOutsideRoot(normalized)
return normalized<|docstring|>Resolve paths with '..' to normalized paths, raising an error if the final
result is outside root.<|endoftext|>
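A few illustrative inputs and outputs for normalize_api_path():
normalize_api_path('foo//bar/../baz/')   # -> 'foo/baz'
normalize_api_path('/')                  # -> ''
normalize_api_path('../etc/passwd')      # raises PathOutsideRoot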
|
ec83c4753446603e903a186495d68bb627f98ad996e9fa42583445a7fba54f57
|
def outside_root_to_404(fn):
'\n Decorator for converting PathOutsideRoot errors to 404s.\n '
@wraps(fn)
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except PathOutsideRoot as e:
raise HTTPError(404, ('Path outside root: [%s]' % e.args[0]))
return wrapped
|
Decorator for converting PathOutsideRoot errors to 404s.
|
s3contents/hybridmanager.py
|
outside_root_to_404
|
cailiang9/s3contents
| 0 |
python
|
def outside_root_to_404(fn):
'\n \n '
@wraps(fn)
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except PathOutsideRoot as e:
raise HTTPError(404, ('Path outside root: [%s]' % e.args[0]))
return wrapped
|
def outside_root_to_404(fn):
'\n \n '
@wraps(fn)
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except PathOutsideRoot as e:
raise HTTPError(404, ('Path outside root: [%s]' % e.args[0]))
return wrapped<|docstring|>Decorator for converting PathOutsideRoot errors to 404s.<|endoftext|>
|
5268587c51343d5e85cd65b62dd201f4c6329f2639d09cc992b2d92979ed3218
|
@outside_root_to_404
def _resolve_path(path, manager_dict):
'\n Resolve a path based on a dictionary of manager prefixes.\n Returns a triple of (prefix, manager, manager_relative_path).\n '
path = normalize_api_path(path)
parts = path.split('/')
mgr = manager_dict.get(parts[0])
if (mgr is not None):
return (parts[0], mgr, '/'.join(parts[1:]))
mgr = manager_dict.get('')
if (mgr is not None):
return ('', mgr, path)
raise HTTPError(404, "Couldn't resolve path [{path}] and no root manager supplied!".format(path=path))
|
Resolve a path based on a dictionary of manager prefixes.
Returns a triple of (prefix, manager, manager_relative_path).
|
s3contents/hybridmanager.py
|
_resolve_path
|
cailiang9/s3contents
| 0 |
python
|
@outside_root_to_404
def _resolve_path(path, manager_dict):
'\n Resolve a path based on a dictionary of manager prefixes.\n Returns a triple of (prefix, manager, manager_relative_path).\n '
path = normalize_api_path(path)
parts = path.split('/')
mgr = manager_dict.get(parts[0])
if (mgr is not None):
return (parts[0], mgr, '/'.join(parts[1:]))
    mgr = manager_dict.get('')
if (mgr is not None):
        return ('', mgr, path)
raise HTTPError(404, "Couldn't resolve path [{path}] and no root manager supplied!".format(path=path))
|
@outside_root_to_404
def _resolve_path(path, manager_dict):
'\n Resolve a path based on a dictionary of manager prefixes.\n Returns a triple of (prefix, manager, manager_relative_path).\n '
path = normalize_api_path(path)
parts = path.split('/')
mgr = manager_dict.get(parts[0])
if (mgr is not None):
return (parts[0], mgr, '/'.join(parts[1:]))
    mgr = manager_dict.get('')
if (mgr is not None):
        return ('', mgr, path)
raise HTTPError(404, "Couldn't resolve path [{path}] and no root manager supplied!".format(path=path))<|docstring|>Resolve a path based on a dictionary of manager prefixes.
Returns a triple of (prefix, manager, manager_relative_path).<|endoftext|>
|
7911cbcb425c5da8457777125f9f8a91ac61098994f161e4a7adfa4297a550ed
|
def _get_arg(argname, args, kwargs):
'\n Get an argument, either from kwargs or from the first entry in args.\n Raises a TypeError if argname not in kwargs and len(args) == 0.\n Mutates kwargs in place if the value is found in kwargs.\n '
try:
return (kwargs.pop(argname), args)
except KeyError:
pass
try:
return (args[0], args[1:])
except IndexError:
raise TypeError(('No value passed for %s' % argname))
|
Get an argument, either from kwargs or from the first entry in args.
Raises a TypeError if argname not in kwargs and len(args) == 0.
Mutates kwargs in place if the value is found in kwargs.
|
s3contents/hybridmanager.py
|
_get_arg
|
cailiang9/s3contents
| 0 |
python
|
def _get_arg(argname, args, kwargs):
'\n Get an argument, either from kwargs or from the first entry in args.\n Raises a TypeError if argname not in kwargs and len(args) == 0.\n Mutates kwargs in place if the value is found in kwargs.\n '
try:
return (kwargs.pop(argname), args)
except KeyError:
pass
try:
return (args[0], args[1:])
except IndexError:
raise TypeError(('No value passed for %s' % argname))
|
def _get_arg(argname, args, kwargs):
'\n Get an argument, either from kwargs or from the first entry in args.\n Raises a TypeError if argname not in kwargs and len(args) == 0.\n Mutates kwargs in place if the value is found in kwargs.\n '
try:
return (kwargs.pop(argname), args)
except KeyError:
pass
try:
return (args[0], args[1:])
except IndexError:
raise TypeError(('No value passed for %s' % argname))<|docstring|>Get an argument, either from kwargs or from the first entry in args.
Raises a TypeError if argname not in kwargs and len(args) == 0.
Mutates kwargs in place if the value is found in kwargs.<|endoftext|>
|
e3f8dc2213d9085bf1bc4d3d7bbfcb3ac2e1839d7ab483fac4915cb13c65ee7a
|
def _apply_prefix(prefix, model):
'\n Prefix all path entries in model with the given prefix.\n '
if (not isinstance(model, dict)):
raise TypeError(('Expected dict for model, got %s' % type(model)))
model['path'] = '/'.join((prefix, model['path'])).strip('/')
if (model['type'] in ('notebook', 'file')):
return model
if (model['type'] != 'directory'):
raise ValueError(('Unknown model type %s.' % type(model)))
content = model.get('content', None)
if (content is not None):
for sub_model in content:
_apply_prefix(prefix, sub_model)
return model
|
Prefix all path entries in model with the given prefix.
|
s3contents/hybridmanager.py
|
_apply_prefix
|
cailiang9/s3contents
| 0 |
python
|
def _apply_prefix(prefix, model):
'\n \n '
if (not isinstance(model, dict)):
raise TypeError(('Expected dict for model, got %s' % type(model)))
model['path'] = '/'.join((prefix, model['path'])).strip('/')
if (model['type'] in ('notebook', 'file')):
return model
if (model['type'] != 'directory'):
raise ValueError(('Unknown model type %s.' % type(model)))
content = model.get('content', None)
if (content is not None):
for sub_model in content:
_apply_prefix(prefix, sub_model)
return model
|
def _apply_prefix(prefix, model):
'\n \n '
if (not isinstance(model, dict)):
raise TypeError(('Expected dict for model, got %s' % type(model)))
model['path'] = '/'.join((prefix, model['path'])).strip('/')
if (model['type'] in ('notebook', 'file')):
return model
if (model['type'] != 'directory'):
raise ValueError(('Unknown model type %s.' % type(model)))
content = model.get('content', None)
if (content is not None):
for sub_model in content:
_apply_prefix(prefix, sub_model)
return model<|docstring|>Prefix all path entries in model with the given prefix.<|endoftext|>
|
a06ac088acfdbcca046edadce5946d5c31e3d4008996524b9cb0922444049568
|
def path_dispatch1(mname, returns_model):
'\n Decorator for methods that accept path as a first argument.\n '
def _wrapper(self, *args, **kwargs):
(path, args) = _get_arg('path', args, kwargs)
(prefix, mgr, mgr_path) = _resolve_path(path, self.managers)
result = getattr(mgr, mname)(mgr_path, *args, **kwargs)
if (returns_model and prefix):
return _apply_prefix(prefix, result)
else:
return result
return _wrapper
|
Decorator for methods that accept path as a first argument.
|
s3contents/hybridmanager.py
|
path_dispatch1
|
cailiang9/s3contents
| 0 |
python
|
def path_dispatch1(mname, returns_model):
'\n \n '
def _wrapper(self, *args, **kwargs):
(path, args) = _get_arg('path', args, kwargs)
(prefix, mgr, mgr_path) = _resolve_path(path, self.managers)
result = getattr(mgr, mname)(mgr_path, *args, **kwargs)
if (returns_model and prefix):
return _apply_prefix(prefix, result)
else:
return result
return _wrapper
|
def path_dispatch1(mname, returns_model):
'\n \n '
def _wrapper(self, *args, **kwargs):
(path, args) = _get_arg('path', args, kwargs)
(prefix, mgr, mgr_path) = _resolve_path(path, self.managers)
result = getattr(mgr, mname)(mgr_path, *args, **kwargs)
if (returns_model and prefix):
return _apply_prefix(prefix, result)
else:
return result
return _wrapper<|docstring|>Decorator for methods that accept path as a first argument.<|endoftext|>
|
67a5a826db4102879e0575d10573f676d0361e3fc6bc0d88e65ea837d4f4c305
|
def path_dispatch2(mname, first_argname, returns_model):
'\n Decorator for methods that accept path as a second argument.\n '
def _wrapper(self, *args, **kwargs):
(other, args) = _get_arg(first_argname, args, kwargs)
(path, args) = _get_arg('path', args, kwargs)
(prefix, mgr, mgr_path) = _resolve_path(path, self.managers)
result = getattr(mgr, mname)(other, mgr_path, *args, **kwargs)
if (returns_model and prefix):
return _apply_prefix(prefix, result)
else:
return result
return _wrapper
|
Decorator for methods that accept path as a second argument.
|
s3contents/hybridmanager.py
|
path_dispatch2
|
cailiang9/s3contents
| 0 |
python
|
def path_dispatch2(mname, first_argname, returns_model):
'\n \n '
def _wrapper(self, *args, **kwargs):
(other, args) = _get_arg(first_argname, args, kwargs)
(path, args) = _get_arg('path', args, kwargs)
(prefix, mgr, mgr_path) = _resolve_path(path, self.managers)
result = getattr(mgr, mname)(other, mgr_path, *args, **kwargs)
if (returns_model and prefix):
return _apply_prefix(prefix, result)
else:
return result
return _wrapper
|
def path_dispatch2(mname, first_argname, returns_model):
'\n \n '
def _wrapper(self, *args, **kwargs):
(other, args) = _get_arg(first_argname, args, kwargs)
(path, args) = _get_arg('path', args, kwargs)
(prefix, mgr, mgr_path) = _resolve_path(path, self.managers)
result = getattr(mgr, mname)(other, mgr_path, *args, **kwargs)
if (returns_model and prefix):
return _apply_prefix(prefix, result)
else:
return result
return _wrapper<|docstring|>Decorator for methods that accept path as a second argument.<|endoftext|>
|
31fa57fca2f64bcbd35dba6f22b785404506eed171b2803f1319498c92593e53
|
def path_dispatch_kwarg(mname, path_default, returns_model):
'\n Parameterized decorator for methods that accept path as a second\n argument.\n '
def _wrapper(self, path=path_default, **kwargs):
(prefix, mgr, mgr_path) = _resolve_path(path, self.managers)
result = getattr(mgr, mname)(path=mgr_path, **kwargs)
if (returns_model and prefix):
return _apply_prefix(prefix, result)
else:
return result
return _wrapper
|
Parameterized decorator for methods that accept path as a second
argument.
|
s3contents/hybridmanager.py
|
path_dispatch_kwarg
|
cailiang9/s3contents
| 0 |
python
|
def path_dispatch_kwarg(mname, path_default, returns_model):
'\n Parameterized decorator for methods that accept path as a second\n argument.\n '
def _wrapper(self, path=path_default, **kwargs):
(prefix, mgr, mgr_path) = _resolve_path(path, self.managers)
result = getattr(mgr, mname)(path=mgr_path, **kwargs)
if (returns_model and prefix):
return _apply_prefix(prefix, result)
else:
return result
return _wrapper
|
def path_dispatch_kwarg(mname, path_default, returns_model):
'\n Parameterized decorator for methods that accept path as a second\n argument.\n '
def _wrapper(self, path=path_default, **kwargs):
(prefix, mgr, mgr_path) = _resolve_path(path, self.managers)
result = getattr(mgr, mname)(path=mgr_path, **kwargs)
if (returns_model and prefix):
return _apply_prefix(prefix, result)
else:
return result
return _wrapper<|docstring|>Parameterized decorator for methods that accept path as a second
argument.<|endoftext|>
|
ab1ac0a6b2c259ec80c7c98479ce6106d875f82b8ad21145706f0988aff7489f
|
def path_dispatch_old_new(mname, returns_model):
'\n Decorator for methods accepting old_path and new_path.\n '
def _wrapper(self, old_path, new_path, *args, **kwargs):
(old_prefix, old_mgr, old_mgr_path) = _resolve_path(old_path, self.managers)
(new_prefix, new_mgr, new_mgr_path) = _resolve_path(new_path, self.managers)
if (old_mgr is not new_mgr):
raise HTTPError(400, "Can't move files between backends ({old} -> {new})".format(old=old_path, new=new_path))
assert (new_prefix == old_prefix)
result = getattr(new_mgr, mname)(old_mgr_path, new_mgr_path, *args, **kwargs)
if (returns_model and new_prefix):
return _apply_prefix(new_prefix, result)
else:
return result
return _wrapper
|
Decorator for methods accepting old_path and new_path.
|
s3contents/hybridmanager.py
|
path_dispatch_old_new
|
cailiang9/s3contents
| 0 |
python
|
def path_dispatch_old_new(mname, returns_model):
'\n \n '
def _wrapper(self, old_path, new_path, *args, **kwargs):
(old_prefix, old_mgr, old_mgr_path) = _resolve_path(old_path, self.managers)
(new_prefix, new_mgr, new_mgr_path) = _resolve_path(new_path, self.managers)
if (old_mgr is not new_mgr):
raise HTTPError(400, "Can't move files between backends ({old} -> {new})".format(old=old_path, new=new_path))
assert (new_prefix == old_prefix)
result = getattr(new_mgr, mname)(old_mgr_path, new_mgr_path, *args, **kwargs)
if (returns_model and new_prefix):
return _apply_prefix(new_prefix, result)
else:
return result
return _wrapper
|
def path_dispatch_old_new(mname, returns_model):
'\n \n '
def _wrapper(self, old_path, new_path, *args, **kwargs):
(old_prefix, old_mgr, old_mgr_path) = _resolve_path(old_path, self.managers)
(new_prefix, new_mgr, new_mgr_path) = _resolve_path(new_path, self.managers)
if (old_mgr is not new_mgr):
raise HTTPError(400, "Can't move files between backends ({old} -> {new})".format(old=old_path, new=new_path))
assert (new_prefix == old_prefix)
result = getattr(new_mgr, mname)(old_mgr_path, new_mgr_path, *args, **kwargs)
if (returns_model and new_prefix):
return _apply_prefix(new_prefix, result)
else:
return result
return _wrapper<|docstring|>Decorator for methods accepting old_path and new_path.<|endoftext|>
|
d24287d8b4946b47dd04810c393fd23fe191fdf2bb23563eaa7489bf7744d2b8
|
def _managers_changed(self, name, old, new):
'\n Strip slashes from directories before updating.\n '
for key in new:
if ('/' in key):
raise ValueError(('Expected directory names w/o slashes. Got [%s]' % key))
self.managers = {k.strip('/'): v for (k, v) in new.items()}
|
Strip slashes from directories before updating.
|
s3contents/hybridmanager.py
|
_managers_changed
|
cailiang9/s3contents
| 0 |
python
|
def _managers_changed(self, name, old, new):
'\n \n '
for key in new:
if ('/' in key):
raise ValueError(('Expected directory names w/o slashes. Got [%s]' % key))
self.managers = {k.strip('/'): v for (k, v) in new.items()}
|
def _managers_changed(self, name, old, new):
'\n \n '
for key in new:
if ('/' in key):
raise ValueError(('Expected directory names w/o slashes. Got [%s]' % key))
self.managers = {k.strip('/'): v for (k, v) in new.items()}<|docstring|>Strip slashes from directories before updating.<|endoftext|>
|
13d11972b1321d5d809574a292956767d0b242b4e74d3fdd5f438fab4b5fe5a3
|
@outside_root_to_404
def get(self, path, content=True, type=None, format=None):
'\n Special case handling for listing root dir.\n '
path = normalize_api_path(path)
if path:
return self.__get(path, content=content, type=type, format=format)
if (not content):
return base_directory_model('')
extra_content = self._extra_root_dirs()
rm = self.root_manager
if (rm is None):
root_model = base_directory_model('')
root_model.update(format='json', content=extra_content)
else:
root_model = rm.get(path, content=content, type=type, format=format)
root_model['content'].extend(extra_content)
return root_model
|
Special case handling for listing root dir.
|
s3contents/hybridmanager.py
|
get
|
cailiang9/s3contents
| 0 |
python
|
@outside_root_to_404
def get(self, path, content=True, type=None, format=None):
'\n \n '
path = normalize_api_path(path)
if path:
return self.__get(path, content=content, type=type, format=format)
if (not content):
        return base_directory_model('')
extra_content = self._extra_root_dirs()
rm = self.root_manager
if (rm is None):
        root_model = base_directory_model('')
root_model.update(format='json', content=extra_content)
else:
root_model = rm.get(path, content=content, type=type, format=format)
root_model['content'].extend(extra_content)
return root_model
|
@outside_root_to_404
def get(self, path, content=True, type=None, format=None):
'\n \n '
path = normalize_api_path(path)
if path:
return self.__get(path, content=content, type=type, format=format)
if (not content):
        return base_directory_model('')
extra_content = self._extra_root_dirs()
rm = self.root_manager
if (rm is None):
        root_model = base_directory_model('')
root_model.update(format='json', content=extra_content)
else:
root_model = rm.get(path, content=content, type=type, format=format)
root_model['content'].extend(extra_content)
return root_model<|docstring|>Special case handling for listing root dir.<|endoftext|>
|
798e32b976b536be5e7b08207fb4c090ddb9b3dc29885ece4b15f8b349fd2c53
|
@outside_root_to_404
def delete(self, path):
"\n Ensure that roots of our managers can't be deleted. This should be\n enforced by https://github.com/ipython/ipython/pull/8168, but rogue\n implementations might override this behavior.\n "
path = normalize_api_path(path)
if (path in self.managers):
raise HTTPError(400, ("Can't delete root of %s" % self.managers[path]))
return self.__delete(path)
|
Ensure that roots of our managers can't be deleted. This should be
enforced by https://github.com/ipython/ipython/pull/8168, but rogue
implementations might override this behavior.
|
s3contents/hybridmanager.py
|
delete
|
cailiang9/s3contents
| 0 |
python
|
@outside_root_to_404
def delete(self, path):
"\n Ensure that roots of our managers can't be deleted. This should be\n enforced by https://github.com/ipython/ipython/pull/8168, but rogue\n implementations might override this behavior.\n "
path = normalize_api_path(path)
if (path in self.managers):
raise HTTPError(400, ("Can't delete root of %s" % self.managers[path]))
return self.__delete(path)
|
@outside_root_to_404
def delete(self, path):
"\n Ensure that roots of our managers can't be deleted. This should be\n enforced by https://github.com/ipython/ipython/pull/8168, but rogue\n implementations might override this behavior.\n "
path = normalize_api_path(path)
if (path in self.managers):
raise HTTPError(400, ("Can't delete root of %s" % self.managers[path]))
return self.__delete(path)<|docstring|>Ensure that roots of our managers can't be deleted. This should be
enforced by https://github.com/ipython/ipython/pull/8168, but rogue
implementations might override this behavior.<|endoftext|>
|
5921f4323854e4eb9f8569397861898bdaceba6a589b17f28f099bd7c3795015
|
def get_kernel_path(self, path, model=None):
'Return the initial API path of a kernel associated with a given notebook'
if self.dir_exists(path):
return path
if ('/' in path):
parent_dir = path.rsplit('/', 1)[0]
else:
parent_dir = ''
return parent_dir
|
Return the initial API path of a kernel associated with a given notebook
|
s3contents/hybridmanager.py
|
get_kernel_path
|
cailiang9/s3contents
| 0 |
python
|
def get_kernel_path(self, path, model=None):
if self.dir_exists(path):
return path
if ('/' in path):
parent_dir = path.rsplit('/', 1)[0]
else:
        parent_dir = ''
return parent_dir
|
def get_kernel_path(self, path, model=None):
if self.dir_exists(path):
return path
if ('/' in path):
parent_dir = path.rsplit('/', 1)[0]
else:
        parent_dir = ''
return parent_dir<|docstring|>Return the initial API path of a kernel associated with a given notebook<|endoftext|>
|
a75d059e645995cb91d0da72207b23e8ab62caef41e54cf50e77524099775591
|
def __getitem__(self, item):
'__getitem__: slow, copy full array. returns numpy.ndarray'
d = object.__getattribute__(self, d)
if d.has_key(item):
return self.d[item]
else:
return object.__getattribute__(self, item)
|
__getitem__: slow, copy full array. returns numpy.ndarray
|
RTRBM/rtrbm/std/named_vec.py
|
__getitem__
|
liuzc188/Convolutional-LSTM-in-Tensorflow-master
| 1 |
python
|
def __getitem__(self, item):
d = object.__getattribute__(self, d)
if d.has_key(item):
return self.d[item]
else:
return object.__getattribute__(self, item)
|
def __getitem__(self, item):
d = object.__getattribute__(self, d)
if d.has_key(item):
return self.d[item]
else:
return object.__getattribute__(self, item)<|docstring|>__getitem__: slow, copy full array. returns numpy.ndarray<|endoftext|>
|
7b4ed2071472418689d2ae5612b95daa981a08c5130be32480f7f9893fd7f017
|
def __setitem__(self, item, val):
'__getitem__: slow, copy full array. returns numpy.ndarray'
self.d[item] = val
|
__getitem__: slow, copy full array. returns numpy.ndarray
|
RTRBM/rtrbm/std/named_vec.py
|
__setitem__
|
liuzc188/Convolutional-LSTM-in-Tensorflow-master
| 1 |
python
|
def __setitem__(self, item, val):
self.d[item] = val
|
def __setitem__(self, item, val):
self.d[item] = val<|docstring|>__getitem__: slow, copy full array. returns numpy.ndarray<|endoftext|>
|
b7fb90afb85a473619d712ae471f2891852cd774ead0153668bb9dde44bc8dd7
|
def T(self):
'same as transpose'
a = self.soft_copy()
a.TR = (not a.TR)
return a
|
same as transpose
|
RTRBM/rtrbm/std/named_vec.py
|
T
|
liuzc188/Convolutional-LSTM-in-Tensorflow-master
| 1 |
python
|
def T(self):
a = self.soft_copy()
a.TR = (not a.TR)
return a
|
def T(self):
a = self.soft_copy()
a.TR = (not a.TR)
return a<|docstring|>same as transpose<|endoftext|>
|
3b714650f54d621ba1fa7ab17bdc0e7d7c1a10f5abf02c5e1af83398ce3d029e
|
def _as_type_list(dtypes):
'Convert dtypes to a list of types.'
assert (dtypes is not None)
if (not (isinstance(dtypes, list) or isinstance(dtypes, tuple))):
return [dtypes]
else:
return list(dtypes)
|
Convert dtypes to a list of types.
|
tensorflow/python/ops/data_flow_ops.py
|
_as_type_list
|
habangar/tensorflow
| 73 |
python
|
def _as_type_list(dtypes):
assert (dtypes is not None)
if (not (isinstance(dtypes, list) or isinstance(dtypes, tuple))):
return [dtypes]
else:
return list(dtypes)
|
def _as_type_list(dtypes):
assert (dtypes is not None)
if (not (isinstance(dtypes, list) or isinstance(dtypes, tuple))):
return [dtypes]
else:
return list(dtypes)<|docstring|>Convert dtypes to a list of types.<|endoftext|>
|
13f0385e21077b4f87be31b0c6c9c2bbeaa3d398542cac8940e6ab3944cff333
|
def _as_shape_list(shapes, dtypes, unknown_dim_allowed=False, unknown_rank_allowed=False):
'Convert shapes to a list of tuples of int (or None).'
if unknown_dim_allowed:
if ((not isinstance(shapes, collections.Sequence)) or (not shapes) or any((((shape is None) or isinstance(shape, int)) for shape in shapes))):
raise ValueError('When providing partial shapes, a list of shapes must be provided.')
if (shapes is None):
return None
if isinstance(shapes, tensor_shape.TensorShape):
shapes = [shapes]
if (not isinstance(shapes, (tuple, list))):
raise TypeError('shapes must be a TensorShape or a list or tuple of TensorShapes.')
if all((((shape is None) or isinstance(shape, int)) for shape in shapes)):
shapes = [shapes]
shapes = [tensor_shape.as_shape(shape) for shape in shapes]
if (not unknown_dim_allowed):
if any([(not shape.is_fully_defined()) for shape in shapes]):
raise ValueError(('All shapes must be fully defined: %s' % shapes))
if (not unknown_rank_allowed):
if any([(shape.dims is None) for shape in shapes]):
raise ValueError(('All shapes must have a defined rank: %s' % shapes))
return shapes
|
Convert shapes to a list of tuples of int (or None).
|
tensorflow/python/ops/data_flow_ops.py
|
_as_shape_list
|
habangar/tensorflow
| 73 |
python
|
def _as_shape_list(shapes, dtypes, unknown_dim_allowed=False, unknown_rank_allowed=False):
if unknown_dim_allowed:
if ((not isinstance(shapes, collections.Sequence)) or (not shapes) or any((((shape is None) or isinstance(shape, int)) for shape in shapes))):
raise ValueError('When providing partial shapes, a list of shapes must be provided.')
if (shapes is None):
return None
if isinstance(shapes, tensor_shape.TensorShape):
shapes = [shapes]
if (not isinstance(shapes, (tuple, list))):
raise TypeError('shapes must be a TensorShape or a list or tuple of TensorShapes.')
if all((((shape is None) or isinstance(shape, int)) for shape in shapes)):
shapes = [shapes]
shapes = [tensor_shape.as_shape(shape) for shape in shapes]
if (not unknown_dim_allowed):
if any([(not shape.is_fully_defined()) for shape in shapes]):
raise ValueError(('All shapes must be fully defined: %s' % shapes))
if (not unknown_rank_allowed):
if any([(shape.dims is None) for shape in shapes]):
raise ValueError(('All shapes must have a defined rank: %s' % shapes))
return shapes
|
def _as_shape_list(shapes, dtypes, unknown_dim_allowed=False, unknown_rank_allowed=False):
if unknown_dim_allowed:
if ((not isinstance(shapes, collections.Sequence)) or (not shapes) or any((((shape is None) or isinstance(shape, int)) for shape in shapes))):
raise ValueError('When providing partial shapes, a list of shapes must be provided.')
if (shapes is None):
return None
if isinstance(shapes, tensor_shape.TensorShape):
shapes = [shapes]
if (not isinstance(shapes, (tuple, list))):
raise TypeError('shapes must be a TensorShape or a list or tuple of TensorShapes.')
if all((((shape is None) or isinstance(shape, int)) for shape in shapes)):
shapes = [shapes]
shapes = [tensor_shape.as_shape(shape) for shape in shapes]
if (not unknown_dim_allowed):
if any([(not shape.is_fully_defined()) for shape in shapes]):
raise ValueError(('All shapes must be fully defined: %s' % shapes))
if (not unknown_rank_allowed):
if any([(shape.dims is None) for shape in shapes]):
raise ValueError(('All shapes must have a defined rank: %s' % shapes))
return shapes<|docstring|>Convert shapes to a list of tuples of int (or None).<|endoftext|>
|
f667776f50063cdf8ced997ebd44ebb0b8cc161a47237395c701459c8b847e86
|
def initialize_all_tables(name='init_all_tables'):
'Returns an Op that initializes all tables of the default graph.\n\n Args:\n name: Optional name for the initialization op.\n\n Returns:\n An Op that initializes all tables. Note that if there are\n not tables the returned Op is a NoOp.\n '
initializers = ops.get_collection(ops.GraphKeys.TABLE_INITIALIZERS)
if initializers:
return control_flow_ops.group(*initializers, name=name)
return control_flow_ops.no_op(name=name)
|
Returns an Op that initializes all tables of the default graph.
Args:
name: Optional name for the initialization op.
Returns:
An Op that initializes all tables. Note that if there are
not tables the returned Op is a NoOp.
|
tensorflow/python/ops/data_flow_ops.py
|
initialize_all_tables
|
habangar/tensorflow
| 73 |
python
|
def initialize_all_tables(name='init_all_tables'):
'Returns an Op that initializes all tables of the default graph.\n\n Args:\n name: Optional name for the initialization op.\n\n Returns:\n An Op that initializes all tables. Note that if there are\n not tables the returned Op is a NoOp.\n '
initializers = ops.get_collection(ops.GraphKeys.TABLE_INITIALIZERS)
if initializers:
return control_flow_ops.group(*initializers, name=name)
return control_flow_ops.no_op(name=name)
|
def initialize_all_tables(name='init_all_tables'):
'Returns an Op that initializes all tables of the default graph.\n\n Args:\n name: Optional name for the initialization op.\n\n Returns:\n An Op that initializes all tables. Note that if there are\n not tables the returned Op is a NoOp.\n '
initializers = ops.get_collection(ops.GraphKeys.TABLE_INITIALIZERS)
if initializers:
return control_flow_ops.group(*initializers, name=name)
return control_flow_ops.no_op(name=name)<|docstring|>Returns an Op that initializes all tables of the default graph.
Args:
name: Optional name for the initialization op.
Returns:
An Op that initializes all tables. Note that if there are
not tables the returned Op is a NoOp.<|endoftext|>
|
2b3894d7469a3e6e98e88cd5ad3dd3ed6f9e8718a2232bc11be6105277c38d48
|
def _ScalarToVoidShape(op):
'Shape function for ops that take a scalar and produce no outputs.'
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
return []
|
Shape function for ops that take a scalar and produce no outputs.
|
tensorflow/python/ops/data_flow_ops.py
|
_ScalarToVoidShape
|
habangar/tensorflow
| 73 |
python
|
def _ScalarToVoidShape(op):
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
return []
|
def _ScalarToVoidShape(op):
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
return []<|docstring|>Shape function for ops that take a scalar and produce no outputs.<|endoftext|>
|
eb7b32090c9fcd33504128d5091f6eaa2affa4f2914a77bfa0a7f55153408f51
|
@ops.RegisterShape('DynamicPartition')
def _DynamicPartitionShape(op):
'Shape function for data_flow_ops.dynamic_partition.'
data_shape = op.inputs[0].get_shape()
partitions_shape = op.inputs[1].get_shape()
mid = partitions_shape.ndims
if (mid is None):
result_shape = tensor_shape.unknown_shape()
else:
partitions_shape.assert_is_compatible_with(data_shape[:mid])
result_shape = tensor_shape.TensorShape([None]).concatenate(data_shape[mid:])
return ([result_shape] * op.get_attr('num_partitions'))
|
Shape function for data_flow_ops.dynamic_partition.
|
tensorflow/python/ops/data_flow_ops.py
|
_DynamicPartitionShape
|
habangar/tensorflow
| 73 |
python
|
@ops.RegisterShape('DynamicPartition')
def _DynamicPartitionShape(op):
data_shape = op.inputs[0].get_shape()
partitions_shape = op.inputs[1].get_shape()
mid = partitions_shape.ndims
if (mid is None):
result_shape = tensor_shape.unknown_shape()
else:
partitions_shape.assert_is_compatible_with(data_shape[:mid])
result_shape = tensor_shape.TensorShape([None]).concatenate(data_shape[mid:])
return ([result_shape] * op.get_attr('num_partitions'))
|
@ops.RegisterShape('DynamicPartition')
def _DynamicPartitionShape(op):
data_shape = op.inputs[0].get_shape()
partitions_shape = op.inputs[1].get_shape()
mid = partitions_shape.ndims
if (mid is None):
result_shape = tensor_shape.unknown_shape()
else:
partitions_shape.assert_is_compatible_with(data_shape[:mid])
result_shape = tensor_shape.TensorShape([None]).concatenate(data_shape[mid:])
return ([result_shape] * op.get_attr('num_partitions'))<|docstring|>Shape function for data_flow_ops.dynamic_partition.<|endoftext|>
|
1ce8efc838d8456e7c43b68ff4563ed4f020ddc8b28ca07b238efd06d9cd8da6
|
@ops.RegisterShape('DynamicStitch')
def _DynamicStitchShape(op):
'Shape function for data_flow_ops.dynamic_stitch.'
num_partitions = op.get_attr('N')
indices_shapes = [t.get_shape() for t in op.inputs[0:num_partitions]]
data_shapes = [t.get_shape() for t in op.inputs[num_partitions:]]
output_shape = tensor_shape.unknown_shape()
extra_shape = tensor_shape.TensorShape(None)
for (indices_shape, data_shape) in zip(indices_shapes, data_shapes):
indices_ndims = indices_shape.ndims
if (indices_ndims is not None):
indices_shape.merge_with(data_shape[:indices_ndims])
extra_shape = extra_shape.merge_with(data_shape[indices_ndims:])
return [tensor_shape.TensorShape([None]).concatenate(extra_shape)]
|
Shape function for data_flow_ops.dynamic_stitch.
|
tensorflow/python/ops/data_flow_ops.py
|
_DynamicStitchShape
|
habangar/tensorflow
| 73 |
python
|
@ops.RegisterShape('DynamicStitch')
def _DynamicStitchShape(op):
num_partitions = op.get_attr('N')
indices_shapes = [t.get_shape() for t in op.inputs[0:num_partitions]]
data_shapes = [t.get_shape() for t in op.inputs[num_partitions:]]
output_shape = tensor_shape.unknown_shape()
extra_shape = tensor_shape.TensorShape(None)
for (indices_shape, data_shape) in zip(indices_shapes, data_shapes):
indices_ndims = indices_shape.ndims
if (indices_ndims is not None):
indices_shape.merge_with(data_shape[:indices_ndims])
extra_shape = extra_shape.merge_with(data_shape[indices_ndims:])
return [tensor_shape.TensorShape([None]).concatenate(extra_shape)]
|
@ops.RegisterShape('DynamicStitch')
def _DynamicStitchShape(op):
num_partitions = op.get_attr('N')
indices_shapes = [t.get_shape() for t in op.inputs[0:num_partitions]]
data_shapes = [t.get_shape() for t in op.inputs[num_partitions:]]
output_shape = tensor_shape.unknown_shape()
extra_shape = tensor_shape.TensorShape(None)
for (indices_shape, data_shape) in zip(indices_shapes, data_shapes):
indices_ndims = indices_shape.ndims
if (indices_ndims is not None):
indices_shape.merge_with(data_shape[:indices_ndims])
extra_shape = extra_shape.merge_with(data_shape[indices_ndims:])
return [tensor_shape.TensorShape([None]).concatenate(extra_shape)]<|docstring|>Shape function for data_flow_ops.dynamic_stitch.<|endoftext|>
|
3153b1a790b7206b1f28658e8d357a2012328bf2e6cacfefbd903f2b2533b154
|
@ops.RegisterShape('LookupTableFind')
def _LookupTableFindShape(op):
'Shape function for data_flow_ops._lookup_table_find.'
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
shape_in = op.inputs[1].get_shape()
return [shape_in]
|
Shape function for data_flow_ops._lookup_table_find.
|
tensorflow/python/ops/data_flow_ops.py
|
_LookupTableFindShape
|
habangar/tensorflow
| 73 |
python
|
@ops.RegisterShape('LookupTableFind')
def _LookupTableFindShape(op):
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
shape_in = op.inputs[1].get_shape()
return [shape_in]
|
@ops.RegisterShape('LookupTableFind')
def _LookupTableFindShape(op):
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
shape_in = op.inputs[1].get_shape()
return [shape_in]<|docstring|>Shape function for data_flow_ops._lookup_table_find.<|endoftext|>
|
9901d876717cb17b9b0a06722ad46ec45882cf997f94582d21fa4f1289e71136
|
@ops.RegisterShape('LookupTableSize')
def _LookupTableSizeShape(op):
'Shape function for data_flow_ops._lookup_table_find.'
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
return [tensor_shape.scalar()]
|
Shape function for data_flow_ops._lookup_table_find.
|
tensorflow/python/ops/data_flow_ops.py
|
_LookupTableSizeShape
|
habangar/tensorflow
| 73 |
python
|
@ops.RegisterShape('LookupTableSize')
def _LookupTableSizeShape(op):
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
return [tensor_shape.scalar()]
|
@ops.RegisterShape('LookupTableSize')
def _LookupTableSizeShape(op):
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
return [tensor_shape.scalar()]<|docstring|>Shape function for data_flow_ops._lookup_table_find.<|endoftext|>
|
ff7900aa70ee007bc6e32fa32a3cd292623ed6aa81c0d51bf8c2fc8f0669123a
|
@ops.RegisterShape('HashTable')
def _HashTableShape(_):
'Shape function for data_flow_ops._hash_table.'
return [tensor_shape.scalar()]
|
Shape function for data_flow_ops._hash_table.
|
tensorflow/python/ops/data_flow_ops.py
|
_HashTableShape
|
habangar/tensorflow
| 73 |
python
|
@ops.RegisterShape('HashTable')
def _HashTableShape(_):
return [tensor_shape.scalar()]
|
@ops.RegisterShape('HashTable')
def _HashTableShape(_):
return [tensor_shape.scalar()]<|docstring|>Shape function for data_flow_ops._hash_table.<|endoftext|>
|
c0e8bfb17029fa1a162f26aade33535897dc5fd29577dd655a10e6754da50a11
|
@ops.RegisterShape('InitializeTable')
def _InitializeLookupTableShape(op):
'Shape function for data_flow_ops._initialize_table.'
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
keys_shape = op.inputs[1].get_shape().with_rank(1)
op.inputs[2].get_shape().merge_with(keys_shape)
return []
|
Shape function for data_flow_ops._initialize_table.
|
tensorflow/python/ops/data_flow_ops.py
|
_InitializeLookupTableShape
|
habangar/tensorflow
| 73 |
python
|
@ops.RegisterShape('InitializeTable')
def _InitializeLookupTableShape(op):
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
keys_shape = op.inputs[1].get_shape().with_rank(1)
op.inputs[2].get_shape().merge_with(keys_shape)
return []
|
@ops.RegisterShape('InitializeTable')
def _InitializeLookupTableShape(op):
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
keys_shape = op.inputs[1].get_shape().with_rank(1)
op.inputs[2].get_shape().merge_with(keys_shape)
return []<|docstring|>Shape function for data_flow_ops._initialize_table.<|endoftext|>
|
95d33ba08c831e4f982d0d86c1e86b9836344024ad1cb1fb5b9c0fd5cc0aefb2
|
def __init__(self, dtypes, shapes, names, queue_ref):
'Constructs a queue object from a queue reference.\n\n The two optional lists, `shapes` and `names`, must be of the same length\n as `dtypes` if provided. The values at a given index `i` indicate the\n shape and name to use for the corresponding queue component in `dtypes`.\n\n Args:\n dtypes: A list of types. The length of dtypes must equal the number\n of tensors in each element.\n shapes: Constraints on the shapes of tensors in an element:\n A list of shape tuples or None. This list is the same length\n as dtypes. If the shape of any tensors in the element are constrained,\n all must be; shapes can be None if the shapes should not be constrained.\n names: Optional list of names. If provided, the `enqueue()` and\n `dequeue()` methods will use dictionaries with these names as keys.\n Must be None or a list or tuple of the same length as `dtypes`.\n queue_ref: The queue reference, i.e. the output of the queue op.\n\n Raises:\n ValueError: If one of the arguments is invalid.\n '
self._dtypes = dtypes
if (shapes is not None):
if (len(shapes) != len(dtypes)):
raise ValueError('Queue shapes must have the same length as dtypes')
self._shapes = [tensor_shape.TensorShape(s) for s in shapes]
else:
self._shapes = [tensor_shape.unknown_shape() for _ in self._dtypes]
if (names is not None):
if (len(names) != len(dtypes)):
raise ValueError('Queue names must have the same length as dtypes')
self._names = names
else:
self._names = None
self._queue_ref = queue_ref
self._name = self._queue_ref.op.name.split('/')[(- 1)]
|
Constructs a queue object from a queue reference.
The two optional lists, `shapes` and `names`, must be of the same length
as `dtypes` if provided. The values at a given index `i` indicate the
shape and name to use for the corresponding queue component in `dtypes`.
Args:
dtypes: A list of types. The length of dtypes must equal the number
of tensors in each element.
shapes: Constraints on the shapes of tensors in an element:
A list of shape tuples or None. This list is the same length
as dtypes. If the shape of any tensors in the element are constrained,
all must be; shapes can be None if the shapes should not be constrained.
names: Optional list of names. If provided, the `enqueue()` and
`dequeue()` methods will use dictionaries with these names as keys.
Must be None or a list or tuple of the same length as `dtypes`.
queue_ref: The queue reference, i.e. the output of the queue op.
Raises:
ValueError: If one of the arguments is invalid.
|
tensorflow/python/ops/data_flow_ops.py
|
__init__
|
habangar/tensorflow
| 73 |
python
|
def __init__(self, dtypes, shapes, names, queue_ref):
'Constructs a queue object from a queue reference.\n\n The two optional lists, `shapes` and `names`, must be of the same length\n as `dtypes` if provided. The values at a given index `i` indicate the\n shape and name to use for the corresponding queue component in `dtypes`.\n\n Args:\n dtypes: A list of types. The length of dtypes must equal the number\n of tensors in each element.\n shapes: Constraints on the shapes of tensors in an element:\n A list of shape tuples or None. This list is the same length\n as dtypes. If the shape of any tensors in the element are constrained,\n all must be; shapes can be None if the shapes should not be constrained.\n names: Optional list of names. If provided, the `enqueue()` and\n `dequeue()` methods will use dictionaries with these names as keys.\n Must be None or a list or tuple of the same length as `dtypes`.\n queue_ref: The queue reference, i.e. the output of the queue op.\n\n Raises:\n ValueError: If one of the arguments is invalid.\n '
self._dtypes = dtypes
if (shapes is not None):
if (len(shapes) != len(dtypes)):
raise ValueError('Queue shapes must have the same length as dtypes')
self._shapes = [tensor_shape.TensorShape(s) for s in shapes]
else:
self._shapes = [tensor_shape.unknown_shape() for _ in self._dtypes]
if (names is not None):
if (len(names) != len(dtypes)):
raise ValueError('Queue names must have the same length as dtypes')
self._names = names
else:
self._names = None
self._queue_ref = queue_ref
self._name = self._queue_ref.op.name.split('/')[(- 1)]
|
def __init__(self, dtypes, shapes, names, queue_ref):
'Constructs a queue object from a queue reference.\n\n The two optional lists, `shapes` and `names`, must be of the same length\n as `dtypes` if provided. The values at a given index `i` indicate the\n shape and name to use for the corresponding queue component in `dtypes`.\n\n Args:\n dtypes: A list of types. The length of dtypes must equal the number\n of tensors in each element.\n shapes: Constraints on the shapes of tensors in an element:\n A list of shape tuples or None. This list is the same length\n as dtypes. If the shape of any tensors in the element are constrained,\n all must be; shapes can be None if the shapes should not be constrained.\n names: Optional list of names. If provided, the `enqueue()` and\n `dequeue()` methods will use dictionaries with these names as keys.\n Must be None or a list or tuple of the same length as `dtypes`.\n queue_ref: The queue reference, i.e. the output of the queue op.\n\n Raises:\n ValueError: If one of the arguments is invalid.\n '
self._dtypes = dtypes
if (shapes is not None):
if (len(shapes) != len(dtypes)):
raise ValueError('Queue shapes must have the same length as dtypes')
self._shapes = [tensor_shape.TensorShape(s) for s in shapes]
else:
self._shapes = [tensor_shape.unknown_shape() for _ in self._dtypes]
if (names is not None):
if (len(names) != len(dtypes)):
raise ValueError('Queue names must have the same length as dtypes')
self._names = names
else:
self._names = None
self._queue_ref = queue_ref
self._name = self._queue_ref.op.name.split('/')[(- 1)]<|docstring|>Constructs a queue object from a queue reference.
The two optional lists, `shapes` and `names`, must be of the same length
as `dtypes` if provided. The values at a given index `i` indicate the
shape and name to use for the corresponding queue component in `dtypes`.
Args:
dtypes: A list of types. The length of dtypes must equal the number
of tensors in each element.
shapes: Constraints on the shapes of tensors in an element:
A list of shape tuples or None. This list is the same length
as dtypes. If the shape of any tensors in the element are constrained,
all must be; shapes can be None if the shapes should not be constrained.
names: Optional list of names. If provided, the `enqueue()` and
`dequeue()` methods will use dictionaries with these names as keys.
Must be None or a list or tuple of the same length as `dtypes`.
queue_ref: The queue reference, i.e. the output of the queue op.
Raises:
ValueError: If one of the arguments is invalid.<|endoftext|>
|
99f7f641fc6e1a01c876cbea5c8a1f1c5bbd935b1dd51571884dc9b421674cac
|
@staticmethod
def from_list(index, queues):
'Create a queue using the queue reference from `queues[index]`.\n\n Args:\n index: An integer scalar tensor that determines the input that gets\n selected.\n queues: A list of `QueueBase` objects.\n\n Returns:\n A `QueueBase` object.\n\n Raises:\n TypeError: When `queues` is not a list of `QueueBase` objects,\n or when the data types of `queues` are not all the same.\n '
if ((not queues) or (not isinstance(queues, list)) or (not all((isinstance(x, QueueBase) for x in queues)))):
raise TypeError('A list of queues expected')
dtypes = queues[0].dtypes
if (not all([(dtypes == q.dtypes) for q in queues[1:]])):
raise TypeError('Queues do not have matching component dtypes.')
names = queues[0].names
if (not all([(names == q.names) for q in queues[1:]])):
raise TypeError('Queues do not have matching component names.')
queue_refs = [x.queue_ref for x in queues]
selected_queue = control_flow_ops.ref_select(index, queue_refs)
return QueueBase(dtypes=dtypes, shapes=None, names=names, queue_ref=selected_queue)
|
Create a queue using the queue reference from `queues[index]`.
Args:
index: An integer scalar tensor that determines the input that gets
selected.
queues: A list of `QueueBase` objects.
Returns:
A `QueueBase` object.
Raises:
TypeError: When `queues` is not a list of `QueueBase` objects,
or when the data types of `queues` are not all the same.
|
tensorflow/python/ops/data_flow_ops.py
|
from_list
|
habangar/tensorflow
| 73 |
python
|
@staticmethod
def from_list(index, queues):
'Create a queue using the queue reference from `queues[index]`.\n\n Args:\n index: An integer scalar tensor that determines the input that gets\n selected.\n queues: A list of `QueueBase` objects.\n\n Returns:\n A `QueueBase` object.\n\n Raises:\n TypeError: When `queues` is not a list of `QueueBase` objects,\n or when the data types of `queues` are not all the same.\n '
if ((not queues) or (not isinstance(queues, list)) or (not all((isinstance(x, QueueBase) for x in queues)))):
raise TypeError('A list of queues expected')
dtypes = queues[0].dtypes
if (not all([(dtypes == q.dtypes) for q in queues[1:]])):
raise TypeError('Queues do not have matching component dtypes.')
names = queues[0].names
if (not all([(names == q.names) for q in queues[1:]])):
raise TypeError('Queues do not have matching component names.')
queue_refs = [x.queue_ref for x in queues]
selected_queue = control_flow_ops.ref_select(index, queue_refs)
return QueueBase(dtypes=dtypes, shapes=None, names=names, queue_ref=selected_queue)
|
@staticmethod
def from_list(index, queues):
'Create a queue using the queue reference from `queues[index]`.\n\n Args:\n index: An integer scalar tensor that determines the input that gets\n selected.\n queues: A list of `QueueBase` objects.\n\n Returns:\n A `QueueBase` object.\n\n Raises:\n TypeError: When `queues` is not a list of `QueueBase` objects,\n or when the data types of `queues` are not all the same.\n '
if ((not queues) or (not isinstance(queues, list)) or (not all((isinstance(x, QueueBase) for x in queues)))):
raise TypeError('A list of queues expected')
dtypes = queues[0].dtypes
if (not all([(dtypes == q.dtypes) for q in queues[1:]])):
raise TypeError('Queues do not have matching component dtypes.')
names = queues[0].names
if (not all([(names == q.names) for q in queues[1:]])):
raise TypeError('Queues do not have matching component names.')
queue_refs = [x.queue_ref for x in queues]
selected_queue = control_flow_ops.ref_select(index, queue_refs)
return QueueBase(dtypes=dtypes, shapes=None, names=names, queue_ref=selected_queue)<|docstring|>Create a queue using the queue reference from `queues[index]`.
Args:
index: An integer scalar tensor that determines the input that gets
selected.
queues: A list of `QueueBase` objects.
Returns:
A `QueueBase` object.
Raises:
TypeError: When `queues` is not a list of `QueueBase` objects,
or when the data types of `queues` are not all the same.<|endoftext|>
|
1a7c5c9f83af8f9f158c21589019ecb03e155f256ed5d37721f4708c7b62d396
|
@property
def queue_ref(self):
'The underlying queue reference.'
return self._queue_ref
|
The underlying queue reference.
|
tensorflow/python/ops/data_flow_ops.py
|
queue_ref
|
habangar/tensorflow
| 73 |
python
|
@property
def queue_ref(self):
return self._queue_ref
|
@property
def queue_ref(self):
return self._queue_ref<|docstring|>The underlying queue reference.<|endoftext|>
|
463209841548d5c8cc3601e05c3c7b59ac0b4eba44b639b53fc672b5b2b50dd5
|
@property
def name(self):
'The name of the underlying queue.'
return self._queue_ref.op.name
|
The name of the underlying queue.
|
tensorflow/python/ops/data_flow_ops.py
|
name
|
habangar/tensorflow
| 73 |
python
|
@property
def name(self):
return self._queue_ref.op.name
|
@property
def name(self):
return self._queue_ref.op.name<|docstring|>The name of the underlying queue.<|endoftext|>
|
ff3fe1ef0cec34857a6a9e5094ab0e27ebca9c85300584b5f62cac0b5199ee43
|
@property
def dtypes(self):
'The list of dtypes for each component of a queue element.'
return self._dtypes
|
The list of dtypes for each component of a queue element.
|
tensorflow/python/ops/data_flow_ops.py
|
dtypes
|
habangar/tensorflow
| 73 |
python
|
@property
def dtypes(self):
return self._dtypes
|
@property
def dtypes(self):
return self._dtypes<|docstring|>The list of dtypes for each component of a queue element.<|endoftext|>
|
caf0e0211df488371e0035f3cfdff7281ba7f316b62bc06b6d1542c286864fdb
|
@property
def names(self):
'The list of names for each component of a queue element.'
return self._names
|
The list of names for each component of a queue element.
|
tensorflow/python/ops/data_flow_ops.py
|
names
|
habangar/tensorflow
| 73 |
python
|
@property
def names(self):
return self._names
|
@property
def names(self):
return self._names<|docstring|>The list of names for each component of a queue element.<|endoftext|>
|
943003c12f6bb4fea92d9037bd957e559d9a01d4351d447d8e45d76a75616b38
|
def _check_enqueue_dtypes(self, vals):
'Validate and convert `vals` to a list of `Tensor`s.\n\n The `vals` argument can be a Tensor, a list or tuple of tensors, or a\n dictionary with tensor values.\n\n If it is a dictionary, the queue must have been constructed with a\n `names` attribute and the dictionary keys must math the queue names.\n If the queue was constructed with a `names` attribute, `vals` must\n be a dictionary.\n\n Args:\n vals: A tensor, a list or tuple of tensors, or a dictionary..\n\n Returns:\n A list of `Tensor` objects.\n\n Raises:\n ValueError: If `vals` is invalid.\n '
if isinstance(vals, dict):
if (not self._names):
raise ValueError('Queue must have names to enqueue a dictionary')
if (sorted(self._names) != sorted(vals.keys())):
raise ValueError(('Keys in dictionary to enqueue do not match names of Queue. Dictionary: (%s), Queue: (%s)' % (sorted(vals.keys()), sorted(self._names))))
vals = [vals[k] for k in self._names]
else:
if self._names:
raise ValueError('You must enqueue a dictionary in a Queue with names')
if (not isinstance(vals, (list, tuple))):
vals = [vals]
tensors = []
for (i, (val, dtype)) in enumerate(zip(vals, self._dtypes)):
tensors.append(ops.convert_to_tensor(val, dtype=dtype, name=('component_%d' % i)))
return tensors
|
Validate and convert `vals` to a list of `Tensor`s.
The `vals` argument can be a Tensor, a list or tuple of tensors, or a
dictionary with tensor values.
If it is a dictionary, the queue must have been constructed with a
`names` attribute and the dictionary keys must math the queue names.
If the queue was constructed with a `names` attribute, `vals` must
be a dictionary.
Args:
vals: A tensor, a list or tuple of tensors, or a dictionary..
Returns:
A list of `Tensor` objects.
Raises:
ValueError: If `vals` is invalid.
|
tensorflow/python/ops/data_flow_ops.py
|
_check_enqueue_dtypes
|
habangar/tensorflow
| 73 |
python
|
def _check_enqueue_dtypes(self, vals):
'Validate and convert `vals` to a list of `Tensor`s.\n\n The `vals` argument can be a Tensor, a list or tuple of tensors, or a\n dictionary with tensor values.\n\n If it is a dictionary, the queue must have been constructed with a\n `names` attribute and the dictionary keys must math the queue names.\n If the queue was constructed with a `names` attribute, `vals` must\n be a dictionary.\n\n Args:\n vals: A tensor, a list or tuple of tensors, or a dictionary..\n\n Returns:\n A list of `Tensor` objects.\n\n Raises:\n ValueError: If `vals` is invalid.\n '
if isinstance(vals, dict):
if (not self._names):
raise ValueError('Queue must have names to enqueue a dictionary')
if (sorted(self._names) != sorted(vals.keys())):
raise ValueError(('Keys in dictionary to enqueue do not match names of Queue. Dictionary: (%s), Queue: (%s)' % (sorted(vals.keys()), sorted(self._names))))
vals = [vals[k] for k in self._names]
else:
if self._names:
raise ValueError('You must enqueue a dictionary in a Queue with names')
if (not isinstance(vals, (list, tuple))):
vals = [vals]
tensors = []
for (i, (val, dtype)) in enumerate(zip(vals, self._dtypes)):
tensors.append(ops.convert_to_tensor(val, dtype=dtype, name=('component_%d' % i)))
return tensors
|
def _check_enqueue_dtypes(self, vals):
'Validate and convert `vals` to a list of `Tensor`s.\n\n The `vals` argument can be a Tensor, a list or tuple of tensors, or a\n dictionary with tensor values.\n\n If it is a dictionary, the queue must have been constructed with a\n `names` attribute and the dictionary keys must math the queue names.\n If the queue was constructed with a `names` attribute, `vals` must\n be a dictionary.\n\n Args:\n vals: A tensor, a list or tuple of tensors, or a dictionary..\n\n Returns:\n A list of `Tensor` objects.\n\n Raises:\n ValueError: If `vals` is invalid.\n '
if isinstance(vals, dict):
if (not self._names):
raise ValueError('Queue must have names to enqueue a dictionary')
if (sorted(self._names) != sorted(vals.keys())):
raise ValueError(('Keys in dictionary to enqueue do not match names of Queue. Dictionary: (%s), Queue: (%s)' % (sorted(vals.keys()), sorted(self._names))))
vals = [vals[k] for k in self._names]
else:
if self._names:
raise ValueError('You must enqueue a dictionary in a Queue with names')
if (not isinstance(vals, (list, tuple))):
vals = [vals]
tensors = []
for (i, (val, dtype)) in enumerate(zip(vals, self._dtypes)):
tensors.append(ops.convert_to_tensor(val, dtype=dtype, name=('component_%d' % i)))
return tensors<|docstring|>Validate and convert `vals` to a list of `Tensor`s.
The `vals` argument can be a Tensor, a list or tuple of tensors, or a
dictionary with tensor values.
If it is a dictionary, the queue must have been constructed with a
`names` attribute and the dictionary keys must math the queue names.
If the queue was constructed with a `names` attribute, `vals` must
be a dictionary.
Args:
vals: A tensor, a list or tuple of tensors, or a dictionary..
Returns:
A list of `Tensor` objects.
Raises:
ValueError: If `vals` is invalid.<|endoftext|>
|
887bb5871479709a5d8d3645288ed8e498e1c49d9de5795006b6bc1ff68b7576
|
def _scope_vals(self, vals):
'Return a list of values to pass to `op_scope()`.\n\n Args:\n vals: A tensor, a list or tuple of tensors, or a dictionary.\n\n Returns:\n The values in vals as a list.\n '
if isinstance(vals, (list, tuple)):
return vals
elif isinstance(vals, dict):
return vals.values()
else:
return [vals]
|
Return a list of values to pass to `op_scope()`.
Args:
vals: A tensor, a list or tuple of tensors, or a dictionary.
Returns:
The values in vals as a list.
|
tensorflow/python/ops/data_flow_ops.py
|
_scope_vals
|
habangar/tensorflow
| 73 |
python
|
def _scope_vals(self, vals):
'Return a list of values to pass to `op_scope()`.\n\n Args:\n vals: A tensor, a list or tuple of tensors, or a dictionary.\n\n Returns:\n The values in vals as a list.\n '
if isinstance(vals, (list, tuple)):
return vals
elif isinstance(vals, dict):
return vals.values()
else:
return [vals]
|
def _scope_vals(self, vals):
'Return a list of values to pass to `op_scope()`.\n\n Args:\n vals: A tensor, a list or tuple of tensors, or a dictionary.\n\n Returns:\n The values in vals as a list.\n '
if isinstance(vals, (list, tuple)):
return vals
elif isinstance(vals, dict):
return vals.values()
else:
return [vals]<|docstring|>Return a list of values to pass to `op_scope()`.
Args:
vals: A tensor, a list or tuple of tensors, or a dictionary.
Returns:
The values in vals as a list.<|endoftext|>
|
ecbc9404dc5cbf46100d141bae166f23da1ac328eae3b49836c9cc6bcdf06af8
|
def enqueue(self, vals, name=None):
'Enqueues one element to this queue.\n\n If the queue is full when this operation executes, it will block\n until the element has been enqueued.\n\n Args:\n vals: A tensor, a list or tuple of tensors, or a dictionary containing\n the values to enqueue.\n name: A name for the operation (optional).\n\n Returns:\n The operation that enqueues a new tuple of tensors to the queue.\n '
with ops.op_scope(self._scope_vals(vals), name, ('%s_enqueue' % self._name)) as scope:
vals = self._check_enqueue_dtypes(vals)
for (val, shape) in zip(vals, self._shapes):
val.get_shape().assert_is_compatible_with(shape)
return gen_data_flow_ops._queue_enqueue(self._queue_ref, vals, name=scope)
|
Enqueues one element to this queue.
If the queue is full when this operation executes, it will block
until the element has been enqueued.
Args:
vals: A tensor, a list or tuple of tensors, or a dictionary containing
the values to enqueue.
name: A name for the operation (optional).
Returns:
The operation that enqueues a new tuple of tensors to the queue.
|
tensorflow/python/ops/data_flow_ops.py
|
enqueue
|
habangar/tensorflow
| 73 |
python
|
def enqueue(self, vals, name=None):
'Enqueues one element to this queue.\n\n If the queue is full when this operation executes, it will block\n until the element has been enqueued.\n\n Args:\n vals: A tensor, a list or tuple of tensors, or a dictionary containing\n the values to enqueue.\n name: A name for the operation (optional).\n\n Returns:\n The operation that enqueues a new tuple of tensors to the queue.\n '
with ops.op_scope(self._scope_vals(vals), name, ('%s_enqueue' % self._name)) as scope:
vals = self._check_enqueue_dtypes(vals)
for (val, shape) in zip(vals, self._shapes):
val.get_shape().assert_is_compatible_with(shape)
return gen_data_flow_ops._queue_enqueue(self._queue_ref, vals, name=scope)
|
def enqueue(self, vals, name=None):
'Enqueues one element to this queue.\n\n If the queue is full when this operation executes, it will block\n until the element has been enqueued.\n\n Args:\n vals: A tensor, a list or tuple of tensors, or a dictionary containing\n the values to enqueue.\n name: A name for the operation (optional).\n\n Returns:\n The operation that enqueues a new tuple of tensors to the queue.\n '
with ops.op_scope(self._scope_vals(vals), name, ('%s_enqueue' % self._name)) as scope:
vals = self._check_enqueue_dtypes(vals)
for (val, shape) in zip(vals, self._shapes):
val.get_shape().assert_is_compatible_with(shape)
return gen_data_flow_ops._queue_enqueue(self._queue_ref, vals, name=scope)<|docstring|>Enqueues one element to this queue.
If the queue is full when this operation executes, it will block
until the element has been enqueued.
Args:
vals: A tensor, a list or tuple of tensors, or a dictionary containing
the values to enqueue.
name: A name for the operation (optional).
Returns:
The operation that enqueues a new tuple of tensors to the queue.<|endoftext|>
|
ec5a7b812c0d02ef9756b539186ded52fdc017576619bcc64952f4a6ce17b8a0
|
def enqueue_many(self, vals, name=None):
'Enqueues zero or more elements to this queue.\n\n This operation slices each component tensor along the 0th dimension to\n make multiple queue elements. All of the tensors in `vals` must have the\n same size in the 0th dimension.\n\n If the queue is full when this operation executes, it will block\n until all of the elements have been enqueued.\n\n Args:\n vals: A tensor, a list or tuple of tensors, or a dictionary\n from which the queue elements are taken.\n name: A name for the operation (optional).\n\n Returns:\n The operation that enqueues a batch of tuples of tensors to the queue.\n '
with ops.op_scope(self._scope_vals(vals), name, ('%s_EnqueueMany' % self._name)) as scope:
vals = self._check_enqueue_dtypes(vals)
batch_dim = vals[0].get_shape().with_rank_at_least(1)[0]
for (val, shape) in zip(vals, self._shapes):
batch_dim = batch_dim.merge_with(val.get_shape().with_rank_at_least(1)[0])
val.get_shape()[1:].assert_is_compatible_with(shape)
return gen_data_flow_ops._queue_enqueue_many(self._queue_ref, vals, name=scope)
|
Enqueues zero or more elements to this queue.
This operation slices each component tensor along the 0th dimension to
make multiple queue elements. All of the tensors in `vals` must have the
same size in the 0th dimension.
If the queue is full when this operation executes, it will block
until all of the elements have been enqueued.
Args:
vals: A tensor, a list or tuple of tensors, or a dictionary
from which the queue elements are taken.
name: A name for the operation (optional).
Returns:
The operation that enqueues a batch of tuples of tensors to the queue.
|
tensorflow/python/ops/data_flow_ops.py
|
enqueue_many
|
habangar/tensorflow
| 73 |
python
|
def enqueue_many(self, vals, name=None):
'Enqueues zero or more elements to this queue.\n\n This operation slices each component tensor along the 0th dimension to\n make multiple queue elements. All of the tensors in `vals` must have the\n same size in the 0th dimension.\n\n If the queue is full when this operation executes, it will block\n until all of the elements have been enqueued.\n\n Args:\n vals: A tensor, a list or tuple of tensors, or a dictionary\n from which the queue elements are taken.\n name: A name for the operation (optional).\n\n Returns:\n The operation that enqueues a batch of tuples of tensors to the queue.\n '
with ops.op_scope(self._scope_vals(vals), name, ('%s_EnqueueMany' % self._name)) as scope:
vals = self._check_enqueue_dtypes(vals)
batch_dim = vals[0].get_shape().with_rank_at_least(1)[0]
for (val, shape) in zip(vals, self._shapes):
batch_dim = batch_dim.merge_with(val.get_shape().with_rank_at_least(1)[0])
val.get_shape()[1:].assert_is_compatible_with(shape)
return gen_data_flow_ops._queue_enqueue_many(self._queue_ref, vals, name=scope)
|
def enqueue_many(self, vals, name=None):
'Enqueues zero or more elements to this queue.\n\n This operation slices each component tensor along the 0th dimension to\n make multiple queue elements. All of the tensors in `vals` must have the\n same size in the 0th dimension.\n\n If the queue is full when this operation executes, it will block\n until all of the elements have been enqueued.\n\n Args:\n vals: A tensor, a list or tuple of tensors, or a dictionary\n from which the queue elements are taken.\n name: A name for the operation (optional).\n\n Returns:\n The operation that enqueues a batch of tuples of tensors to the queue.\n '
with ops.op_scope(self._scope_vals(vals), name, ('%s_EnqueueMany' % self._name)) as scope:
vals = self._check_enqueue_dtypes(vals)
batch_dim = vals[0].get_shape().with_rank_at_least(1)[0]
for (val, shape) in zip(vals, self._shapes):
batch_dim = batch_dim.merge_with(val.get_shape().with_rank_at_least(1)[0])
val.get_shape()[1:].assert_is_compatible_with(shape)
return gen_data_flow_ops._queue_enqueue_many(self._queue_ref, vals, name=scope)<|docstring|>Enqueues zero or more elements to this queue.
This operation slices each component tensor along the 0th dimension to
make multiple queue elements. All of the tensors in `vals` must have the
same size in the 0th dimension.
If the queue is full when this operation executes, it will block
until all of the elements have been enqueued.
Args:
vals: A tensor, a list or tuple of tensors, or a dictionary
from which the queue elements are taken.
name: A name for the operation (optional).
Returns:
The operation that enqueues a batch of tuples of tensors to the queue.<|endoftext|>
|
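A hedged sketch of enqueue_many under the same TF 1.x assumptions; the 0th dimension of the input becomes the number of enqueued elements:

import tensorflow as tf

q = tf.FIFOQueue(capacity=10, dtypes=[tf.int32], shapes=[[]])
# One component of shape [3] becomes three scalar queue elements.
enqueue_batch = q.enqueue_many([[1, 2, 3]])

with tf.Session() as sess:
    sess.run(enqueue_batch)
    print(sess.run(q.size()))    # 3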
0453558f4d28cd7e0207754c777ebddd693c841decceb63334d5604e0ab33dd7
|
def _dequeue_return_value(self, tensors):
'Return the value to return from a dequeue op.\n\n If the queue has names, return a dictionary with the\n names as keys. Otherwise return either a single tensor\n or a list of tensors depending on the length of `tensors`.\n\n Args:\n tensors: List of tensors from the dequeue op.\n\n Returns:\n A single tensor, a list of tensors, or a dictionary\n of tensors.\n '
if self._names:
return {n: tensors[i] for (i, n) in enumerate(self._names)}
elif (len(tensors) == 1):
return tensors[0]
else:
return tensors
|
Return the value to return from a dequeue op.
If the queue has names, return a dictionary with the
names as keys. Otherwise return either a single tensor
or a list of tensors depending on the length of `tensors`.
Args:
tensors: List of tensors from the dequeue op.
Returns:
A single tensor, a list of tensors, or a dictionary
of tensors.
|
tensorflow/python/ops/data_flow_ops.py
|
_dequeue_return_value
|
habangar/tensorflow
| 73 |
python
|
def _dequeue_return_value(self, tensors):
'Return the value to return from a dequeue op.\n\n If the queue has names, return a dictionary with the\n names as keys. Otherwise return either a single tensor\n or a list of tensors depending on the length of `tensors`.\n\n Args:\n tensors: List of tensors from the dequeue op.\n\n Returns:\n A single tensor, a list of tensors, or a dictionary\n of tensors.\n '
if self._names:
return {n: tensors[i] for (i, n) in enumerate(self._names)}
elif (len(tensors) == 1):
return tensors[0]
else:
return tensors
|
def _dequeue_return_value(self, tensors):
'Return the value to return from a dequeue op.\n\n If the queue has names, return a dictionary with the\n names as keys. Otherwise return either a single tensor\n or a list of tensors depending on the length of `tensors`.\n\n Args:\n tensors: List of tensors from the dequeue op.\n\n Returns:\n A single tensor, a list of tensors, or a dictionary\n of tensors.\n '
if self._names:
return {n: tensors[i] for (i, n) in enumerate(self._names)}
elif (len(tensors) == 1):
return tensors[0]
else:
return tensors<|docstring|>Return the value to return from a dequeue op.
If the queue has names, return a dictionary with the
names as keys. Otherwise return either a single tensor
or a list of tensors depending on the length of `tensors`.
Args:
tensors: List of tensors from the dequeue op.
Returns:
A single tensor, a list of tensors, or a dictionary
of tensors.<|endoftext|>
|
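A plain-Python sketch of the return-value rule described above; the tensors and names here are ordinary Python values used purely for illustration:

def dequeue_return_value(tensors, names=None):
    # With names: a dict keyed by component name.
    # Without names: unwrap a single component, otherwise return the list.
    if names:
        return {n: tensors[i] for i, n in enumerate(names)}
    elif len(tensors) == 1:
        return tensors[0]
    else:
        return tensors

assert dequeue_return_value([42]) == 42
assert dequeue_return_value([1, 2]) == [1, 2]
assert dequeue_return_value([1, 2], names=['x', 'y']) == {'x': 1, 'y': 2}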
a4cd9c16a0ca431f9c4db0efc560b9321d44a86464cc5f005872c2440746526f
|
def dequeue(self, name=None):
'Dequeues one element from this queue.\n\n If the queue is empty when this operation executes, it will block\n until there is an element to dequeue.\n\n Args:\n name: A name for the operation (optional).\n\n Returns:\n The tuple of tensors that was dequeued.\n '
if (name is None):
name = ('%s_Dequeue' % self._name)
ret = gen_data_flow_ops._queue_dequeue(self._queue_ref, self._dtypes, name=name)
op = ret[0].op
for (output, shape) in zip(op.values(), self._shapes):
output.set_shape(shape)
return self._dequeue_return_value(ret)
|
Dequeues one element from this queue.
If the queue is empty when this operation executes, it will block
until there is an element to dequeue.
Args:
name: A name for the operation (optional).
Returns:
The tuple of tensors that was dequeued.
|
tensorflow/python/ops/data_flow_ops.py
|
dequeue
|
habangar/tensorflow
| 73 |
python
|
def dequeue(self, name=None):
'Dequeues one element from this queue.\n\n If the queue is empty when this operation executes, it will block\n until there is an element to dequeue.\n\n Args:\n name: A name for the operation (optional).\n\n Returns:\n The tuple of tensors that was dequeued.\n '
if (name is None):
name = ('%s_Dequeue' % self._name)
ret = gen_data_flow_ops._queue_dequeue(self._queue_ref, self._dtypes, name=name)
op = ret[0].op
for (output, shape) in zip(op.values(), self._shapes):
output.set_shape(shape)
return self._dequeue_return_value(ret)
|
def dequeue(self, name=None):
'Dequeues one element from this queue.\n\n If the queue is empty when this operation executes, it will block\n until there is an element to dequeue.\n\n Args:\n name: A name for the operation (optional).\n\n Returns:\n The tuple of tensors that was dequeued.\n '
if (name is None):
name = ('%s_Dequeue' % self._name)
ret = gen_data_flow_ops._queue_dequeue(self._queue_ref, self._dtypes, name=name)
op = ret[0].op
for (output, shape) in zip(op.values(), self._shapes):
output.set_shape(shape)
return self._dequeue_return_value(ret)<|docstring|>Dequeues one element from this queue.
If the queue is empty when this operation executes, it will block
until there is an element to dequeue.
Args:
name: A name for the operation (optional).
Returns:
The tuple of tensors that was dequeued.<|endoftext|>
|
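A usage sketch pairing dequeue with an enqueue, again assuming the TF 1.x graph-mode API:

import tensorflow as tf

q = tf.FIFOQueue(capacity=10, dtypes=[tf.float32], shapes=[[]])
enq = q.enqueue(2.0)
deq = q.dequeue()            # blocks at run time until an element is available

with tf.Session() as sess:
    sess.run(enq)
    print(sess.run(deq))     # 2.0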
8cee0b0be9401329e56003ad98f99fc867e00853f904b816c49e7934d634645f
|
def dequeue_many(self, n, name=None):
'Dequeues and concatenates `n` elements from this queue.\n\n This operation concatenates queue-element component tensors along\n the 0th dimension to make a single component tensor. All of the\n components in the dequeued tuple will have size `n` in the 0th dimension.\n\n If the queue is closed and there are less than `n` elements left, then an\n `OutOfRange` exception is raised.\n\n Args:\n n: A scalar `Tensor` containing the number of elements to dequeue.\n name: A name for the operation (optional).\n\n Returns:\n The tuple of concatenated tensors that was dequeued.\n '
if (name is None):
name = ('%s_DequeueMany' % self._name)
ret = gen_data_flow_ops._queue_dequeue_many(self._queue_ref, n=n, component_types=self._dtypes, name=name)
op = ret[0].op
batch_dim = tensor_shape.Dimension(tensor_util.constant_value(op.inputs[1]))
for (output, shape) in zip(op.values(), self._shapes):
output.set_shape(tensor_shape.TensorShape([batch_dim]).concatenate(shape))
return self._dequeue_return_value(ret)
|
Dequeues and concatenates `n` elements from this queue.
This operation concatenates queue-element component tensors along
the 0th dimension to make a single component tensor. All of the
components in the dequeued tuple will have size `n` in the 0th dimension.
If the queue is closed and there are less than `n` elements left, then an
`OutOfRange` exception is raised.
Args:
n: A scalar `Tensor` containing the number of elements to dequeue.
name: A name for the operation (optional).
Returns:
The tuple of concatenated tensors that was dequeued.
|
tensorflow/python/ops/data_flow_ops.py
|
dequeue_many
|
habangar/tensorflow
| 73 |
python
|
def dequeue_many(self, n, name=None):
'Dequeues and concatenates `n` elements from this queue.\n\n This operation concatenates queue-element component tensors along\n the 0th dimension to make a single component tensor. All of the\n components in the dequeued tuple will have size `n` in the 0th dimension.\n\n If the queue is closed and there are less than `n` elements left, then an\n `OutOfRange` exception is raised.\n\n Args:\n n: A scalar `Tensor` containing the number of elements to dequeue.\n name: A name for the operation (optional).\n\n Returns:\n The tuple of concatenated tensors that was dequeued.\n '
if (name is None):
name = ('%s_DequeueMany' % self._name)
ret = gen_data_flow_ops._queue_dequeue_many(self._queue_ref, n=n, component_types=self._dtypes, name=name)
op = ret[0].op
batch_dim = tensor_shape.Dimension(tensor_util.constant_value(op.inputs[1]))
for (output, shape) in zip(op.values(), self._shapes):
output.set_shape(tensor_shape.TensorShape([batch_dim]).concatenate(shape))
return self._dequeue_return_value(ret)
|
def dequeue_many(self, n, name=None):
'Dequeues and concatenates `n` elements from this queue.\n\n This operation concatenates queue-element component tensors along\n the 0th dimension to make a single component tensor. All of the\n components in the dequeued tuple will have size `n` in the 0th dimension.\n\n If the queue is closed and there are less than `n` elements left, then an\n `OutOfRange` exception is raised.\n\n Args:\n n: A scalar `Tensor` containing the number of elements to dequeue.\n name: A name for the operation (optional).\n\n Returns:\n The tuple of concatenated tensors that was dequeued.\n '
if (name is None):
name = ('%s_DequeueMany' % self._name)
ret = gen_data_flow_ops._queue_dequeue_many(self._queue_ref, n=n, component_types=self._dtypes, name=name)
op = ret[0].op
batch_dim = tensor_shape.Dimension(tensor_util.constant_value(op.inputs[1]))
for (output, shape) in zip(op.values(), self._shapes):
output.set_shape(tensor_shape.TensorShape([batch_dim]).concatenate(shape))
return self._dequeue_return_value(ret)<|docstring|>Dequeues and concatenates `n` elements from this queue.
This operation concatenates queue-element component tensors along
the 0th dimension to make a single component tensor. All of the
components in the dequeued tuple will have size `n` in the 0th dimension.
If the queue is closed and there are less than `n` elements left, then an
`OutOfRange` exception is raised.
Args:
n: A scalar `Tensor` containing the number of elements to dequeue.
name: A name for the operation (optional).
Returns:
The tuple of concatenated tensors that was dequeued.<|endoftext|>
|
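A sketch of dequeue_many under the same assumptions; dequeuing exactly as many elements as were enqueued avoids blocking:

import tensorflow as tf

q = tf.FIFOQueue(capacity=10, dtypes=[tf.int32], shapes=[[]])
enq = q.enqueue_many([[10, 20, 30]])
deq3 = q.dequeue_many(3)     # concatenates three scalars into a shape-[3] tensor

with tf.Session() as sess:
    sess.run(enq)
    print(sess.run(deq3))    # [10 20 30]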
ec83222ddbcc42dce823c9014a806c354c70c36eaef280f4551068b9a577289c
|
def dequeue_up_to(self, n, name=None):
'Dequeues and concatenates `n` elements from this queue.\n\n **Note** This operation is not supported by all queues. If a queue does not\n support DequeueUpTo, then an Unimplemented exception is raised.\n\n This operation concatenates queue-element component tensors along the\n 0th dimension to make a single component tensor. All of the components\n in the dequeued tuple will have size `n` in the 0th dimension.\n\n If the queue is closed and there are more than `0` but less than `n`\n elements remaining, then instead of raising an `OutOfRange` exception like\n `dequeue_many`, the remaining elements are returned immediately.\n If the queue is closed and there are `0` elements left in the queue, then\n an `OutOfRange` exception is raised just like in `dequeue_many`.\n Otherwise the behavior is identical to `dequeue_many`:\n\n Args:\n n: A scalar `Tensor` containing the number of elements to dequeue.\n name: A name for the operation (optional).\n\n Returns:\n The tuple of concatenated tensors that was dequeued.\n '
if (name is None):
name = ('%s_DequeueUpTo' % self._name)
ret = gen_data_flow_ops._queue_dequeue_up_to(self._queue_ref, n=n, component_types=self._dtypes, name=name)
op = ret[0].op
for (output, shape) in zip(op.values(), self._shapes):
output.set_shape(tensor_shape.TensorShape([None]).concatenate(shape))
return self._dequeue_return_value(ret)
|
Dequeues and concatenates `n` elements from this queue.
**Note** This operation is not supported by all queues. If a queue does not
support DequeueUpTo, then an Unimplemented exception is raised.
This operation concatenates queue-element component tensors along the
0th dimension to make a single component tensor. All of the components
in the dequeued tuple will have size `n` in the 0th dimension.
If the queue is closed and there are more than `0` but less than `n`
elements remaining, then instead of raising an `OutOfRange` exception like
`dequeue_many`, the remaining elements are returned immediately.
If the queue is closed and there are `0` elements left in the queue, then
an `OutOfRange` exception is raised just like in `dequeue_many`.
Otherwise the behavior is identical to `dequeue_many`:
Args:
n: A scalar `Tensor` containing the number of elements to dequeue.
name: A name for the operation (optional).
Returns:
The tuple of concatenated tensors that was dequeued.
|
tensorflow/python/ops/data_flow_ops.py
|
dequeue_up_to
|
habangar/tensorflow
| 73 |
python
|
def dequeue_up_to(self, n, name=None):
'Dequeues and concatenates `n` elements from this queue.\n\n **Note** This operation is not supported by all queues. If a queue does not\n support DequeueUpTo, then an Unimplemented exception is raised.\n\n This operation concatenates queue-element component tensors along the\n 0th dimension to make a single component tensor. All of the components\n in the dequeued tuple will have size `n` in the 0th dimension.\n\n If the queue is closed and there are more than `0` but less than `n`\n elements remaining, then instead of raising an `OutOfRange` exception like\n `dequeue_many`, the remaining elements are returned immediately.\n If the queue is closed and there are `0` elements left in the queue, then\n an `OutOfRange` exception is raised just like in `dequeue_many`.\n Otherwise the behavior is identical to `dequeue_many`:\n\n Args:\n n: A scalar `Tensor` containing the number of elements to dequeue.\n name: A name for the operation (optional).\n\n Returns:\n The tuple of concatenated tensors that was dequeued.\n '
if (name is None):
name = ('%s_DequeueUpTo' % self._name)
ret = gen_data_flow_ops._queue_dequeue_up_to(self._queue_ref, n=n, component_types=self._dtypes, name=name)
op = ret[0].op
for (output, shape) in zip(op.values(), self._shapes):
output.set_shape(tensor_shape.TensorShape([None]).concatenate(shape))
return self._dequeue_return_value(ret)
|
def dequeue_up_to(self, n, name=None):
'Dequeues and concatenates `n` elements from this queue.\n\n **Note** This operation is not supported by all queues. If a queue does not\n support DequeueUpTo, then an Unimplemented exception is raised.\n\n This operation concatenates queue-element component tensors along the\n 0th dimension to make a single component tensor. All of the components\n in the dequeued tuple will have size `n` in the 0th dimension.\n\n If the queue is closed and there are more than `0` but less than `n`\n elements remaining, then instead of raising an `OutOfRange` exception like\n `dequeue_many`, the remaining elements are returned immediately.\n If the queue is closed and there are `0` elements left in the queue, then\n an `OutOfRange` exception is raised just like in `dequeue_many`.\n Otherwise the behavior is identical to `dequeue_many`:\n\n Args:\n n: A scalar `Tensor` containing the number of elements to dequeue.\n name: A name for the operation (optional).\n\n Returns:\n The tuple of concatenated tensors that was dequeued.\n '
if (name is None):
name = ('%s_DequeueUpTo' % self._name)
ret = gen_data_flow_ops._queue_dequeue_up_to(self._queue_ref, n=n, component_types=self._dtypes, name=name)
op = ret[0].op
for (output, shape) in zip(op.values(), self._shapes):
output.set_shape(tensor_shape.TensorShape([None]).concatenate(shape))
return self._dequeue_return_value(ret)<|docstring|>Dequeues and concatenates `n` elements from this queue.
**Note** This operation is not supported by all queues. If a queue does not
support DequeueUpTo, then an Unimplemented exception is raised.
This operation concatenates queue-element component tensors along the
0th dimension to make a single component tensor. All of the components
in the dequeued tuple will have size `n` in the 0th dimension.
If the queue is closed and there are more than `0` but less than `n`
elements remaining, then instead of raising an `OutOfRange` exception like
`dequeue_many`, the remaining elements are returned immediately.
If the queue is closed and there are `0` elements left in the queue, then
an `OutOfRange` exception is raised just like in `dequeue_many`.
Otherwise the behavior is identical to `dequeue_many`:
Args:
n: A scalar `Tensor` containing the number of elements to dequeue.
name: A name for the operation (optional).
Returns:
The tuple of concatenated tensors that was dequeued.<|endoftext|>
|
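A sketch of dequeue_up_to on a closed queue, assuming TF 1.x and a queue type that implements DequeueUpTo (FIFOQueue does in later 1.x releases; older builds may raise Unimplemented, as the docstring warns):

import tensorflow as tf

q = tf.FIFOQueue(capacity=10, dtypes=[tf.int32], shapes=[[]])
enq = q.enqueue_many([[1, 2]])
close_op = q.close()
deq = q.dequeue_up_to(5)     # asks for 5, but only 2 will remain

with tf.Session() as sess:
    sess.run(enq)
    sess.run(close_op)
    print(sess.run(deq))     # [1 2] -- the remaining elements, no OutOfRange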
fdc57f7e098d25baf506e63b7a78fe9722c1f9e8df72eda3b72da3fd95fd6114
|
def close(self, cancel_pending_enqueues=False, name=None):
'Closes this queue.\n\n This operation signals that no more elements will be enqueued in\n the given queue. Subsequent `enqueue` and `enqueue_many`\n operations will fail. Subsequent `dequeue` and `dequeue_many`\n operations will continue to succeed if sufficient elements remain\n in the queue. Subsequent `dequeue` and `dequeue_many` operations\n that would block will fail immediately.\n\n If `cancel_pending_enqueues` is `True`, all pending requests will also\n be cancelled.\n\n Args:\n cancel_pending_enqueues: (Optional.) A boolean, defaulting to\n `False` (described above).\n name: A name for the operation (optional).\n\n Returns:\n The operation that closes the queue.\n '
if (name is None):
name = ('%s_Close' % self._name)
return gen_data_flow_ops._queue_close(self._queue_ref, cancel_pending_enqueues=cancel_pending_enqueues, name=name)
|
Closes this queue.
This operation signals that no more elements will be enqueued in
the given queue. Subsequent `enqueue` and `enqueue_many`
operations will fail. Subsequent `dequeue` and `dequeue_many`
operations will continue to succeed if sufficient elements remain
in the queue. Subsequent `dequeue` and `dequeue_many` operations
that would block will fail immediately.
If `cancel_pending_enqueues` is `True`, all pending requests will also
be cancelled.
Args:
cancel_pending_enqueues: (Optional.) A boolean, defaulting to
`False` (described above).
name: A name for the operation (optional).
Returns:
The operation that closes the queue.
|
tensorflow/python/ops/data_flow_ops.py
|
close
|
habangar/tensorflow
| 73 |
python
|
def close(self, cancel_pending_enqueues=False, name=None):
'Closes this queue.\n\n This operation signals that no more elements will be enqueued in\n the given queue. Subsequent `enqueue` and `enqueue_many`\n operations will fail. Subsequent `dequeue` and `dequeue_many`\n operations will continue to succeed if sufficient elements remain\n in the queue. Subsequent `dequeue` and `dequeue_many` operations\n that would block will fail immediately.\n\n If `cancel_pending_enqueues` is `True`, all pending requests will also\n be cancelled.\n\n Args:\n cancel_pending_enqueues: (Optional.) A boolean, defaulting to\n `False` (described above).\n name: A name for the operation (optional).\n\n Returns:\n The operation that closes the queue.\n '
if (name is None):
name = ('%s_Close' % self._name)
return gen_data_flow_ops._queue_close(self._queue_ref, cancel_pending_enqueues=cancel_pending_enqueues, name=name)
|
def close(self, cancel_pending_enqueues=False, name=None):
'Closes this queue.\n\n This operation signals that no more elements will be enqueued in\n the given queue. Subsequent `enqueue` and `enqueue_many`\n operations will fail. Subsequent `dequeue` and `dequeue_many`\n operations will continue to succeed if sufficient elements remain\n in the queue. Subsequent `dequeue` and `dequeue_many` operations\n that would block will fail immediately.\n\n If `cancel_pending_enqueues` is `True`, all pending requests will also\n be cancelled.\n\n Args:\n cancel_pending_enqueues: (Optional.) A boolean, defaulting to\n `False` (described above).\n name: A name for the operation (optional).\n\n Returns:\n The operation that closes the queue.\n '
if (name is None):
name = ('%s_Close' % self._name)
return gen_data_flow_ops._queue_close(self._queue_ref, cancel_pending_enqueues=cancel_pending_enqueues, name=name)<|docstring|>Closes this queue.
This operation signals that no more elements will be enqueued in
the given queue. Subsequent `enqueue` and `enqueue_many`
operations will fail. Subsequent `dequeue` and `dequeue_many`
operations will continue to succeed if sufficient elements remain
in the queue. Subsequent `dequeue` and `dequeue_many` operations
that would block will fail immediately.
If `cancel_pending_enqueues` is `True`, all pending requests will also
be cancelled.
Args:
cancel_pending_enqueues: (Optional.) A boolean, defaulting to
`False` (described above).
name: A name for the operation (optional).
Returns:
The operation that closes the queue.<|endoftext|>
|
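A closing sketch, assuming TF 1.x, showing cancel_pending_enqueues; once the close op runs, further enqueues fail while elements already in the queue can still be dequeued:

import tensorflow as tf

q = tf.FIFOQueue(capacity=10, dtypes=[tf.float32], shapes=[[]])
enq = q.enqueue(3.0)
close_op = q.close(cancel_pending_enqueues=True)
deq = q.dequeue()

with tf.Session() as sess:
    sess.run(enq)
    sess.run(close_op)
    print(sess.run(deq))     # 3.0 -- dequeue still succeeds on remaining elements
    # A further sess.run(enq) here would fail because the queue is closed.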