Search is not available for this dataset
identifier
stringlengths
1
155
parameters
stringlengths
2
6.09k
docstring
stringlengths
11
63.4k
docstring_summary
stringlengths
0
63.4k
function
stringlengths
29
99.8k
function_tokens
sequence
start_point
sequence
end_point
sequence
language
stringclasses
1 value
docstring_language
stringlengths
2
7
docstring_language_predictions
stringlengths
18
23
is_langid_reliable
stringclasses
2 values
isnumberlike
(text)
Returns true if `text` can be interpreted as a floating point number.
Returns true if `text` can be interpreted as a floating point number.
def isnumberlike(text):
    """
    Returns true if `text` can be interpreted as a floating point number.
    """
    try:
        float(text)
    except ValueError:
        return False
    return True
[ "def", "isnumberlike", "(", "text", ")", ":", "try", ":", "float", "(", "text", ")", "return", "True", "except", "ValueError", ":", "return", "False" ]
[ 847, 0 ]
[ 853, 20 ]
python
en
['en', 'en', 'en']
True
get_index
(seq, value)
Find the first location in *seq* which contains a case-insensitive, whitespace-insensitive match for *value*. Returns *None* if no match is found.
Find the first location in *seq* which contains a case-insensitive, whitespace-insensitive match for *value*. Returns *None* if no match is found.
def get_index(seq, value):
    """
    Find the first location in *seq* which contains a case-insensitive,
    whitespace-insensitive match for *value*. Returns *None* if no match
    is found.
    """
    if isinstance(seq, string_types):
        seq = seq.split()
    target = value.lower().strip()
    # First index whose item matches case-insensitively, or None.
    return next((pos for pos, entry in enumerate(seq)
                 if entry.lower() == target), None)
[ "def", "get_index", "(", "seq", ",", "value", ")", ":", "if", "isinstance", "(", "seq", ",", "string_types", ")", ":", "seq", "=", "seq", ".", "split", "(", ")", "value", "=", "value", ".", "lower", "(", ")", ".", "strip", "(", ")", "for", "i", ",", "item", "in", "enumerate", "(", "seq", ")", ":", "if", "item", ".", "lower", "(", ")", "==", "value", ":", "return", "i", "return", "None" ]
[ 855, 0 ]
[ 867, 15 ]
python
en
['en', 'error', 'th']
False
main
(options)
import getopt longOptions = ['input=', 'thermo=', 'transport=', 'id=', 'output=', 'permissive', 'help', 'debug'] try: optlist, args = getopt.getopt(argv, 'dh', longOptions) options = dict() for o,a in optlist: options[o] = a if args: raise getopt.GetoptError('Unexpected command line option: ' + repr(' '.join(args))) except getopt.GetoptError as e: print('ck2cti.py: Error parsing arguments:') print(e) print('Run "ck2cti.py --help" to see usage help.') sys.exit(1)
import getopt
def main(options):
    """
    Convert a Chemkin-format mechanism to Cantera's CTI format.

    :param options:
        A dict of already-parsed command-line options, keyed by the long
        option name (e.g. ``'--input'``, ``'--thermo'``, ``'--transport'``,
        ``'--id'``, ``'--output'``, ``'--permissive'``, ``'-h'``/``'--help'``)
        -- presumably the dict produced by ``getopt`` in the script entry
        point; verify against the caller.

    Exits with status 0 after showing help (or when Cantera is unavailable
    for validation), and with status 1 if validation of the generated
    mechanism fails.
    """
    parser = Parser()
    # No options, or an explicit help flag: print usage and stop.
    if not options or '-h' in options or '--help' in options:
        parser.showHelp()
        sys.exit(0)

    if '--input' in options:
        inputFile = options['--input']
    else:
        inputFile = None

    thermoFile = options.get('--thermo')

    # Output name: explicit --output (with '.cti' appended if missing),
    # otherwise derived from the input file, otherwise from the thermo file.
    if '--output' in options:
        outName = options['--output']
        if not outName.endswith('.cti'):
            outName += '.cti'
    elif inputFile:
        outName = os.path.splitext(inputFile)[0] + '.cti'
    else:
        outName = os.path.splitext(thermoFile)[0] + '.cti'

    permissive = '--permissive' in options
    transportFile = options.get('--transport')
    phaseName = options.get('--id', 'gas')

    parser.convertMech(inputFile, thermoFile, transportFile, phaseName,
                       outName, permissive=permissive)

    # Do full validation by importing the resulting mechanism
    if not inputFile:
        # Can't validate a thermo-only conversion that doesn't define a phase
        return

    try:
        import cantera as ct
    except ImportError:
        # Validation is best-effort: missing Cantera is a warning, not an error.
        print('WARNING: Unable to import Cantera Python module. Output '
              'mechanism has not been validated')
        sys.exit(0)

    try:
        print('Validating mechanism...', end='')
        gas = ct.Solution(outName)
        print('PASSED.')
    except RuntimeError as e:
        print('FAILED.')
        print(e)
        sys.exit(1)
[ "def", "main", "(", "options", ")", ":", "parser", "=", "Parser", "(", ")", "if", "not", "options", "or", "'-h'", "in", "options", "or", "'--help'", "in", "options", ":", "parser", ".", "showHelp", "(", ")", "sys", ".", "exit", "(", "0", ")", "if", "'--input'", "in", "options", ":", "inputFile", "=", "options", "[", "'--input'", "]", "else", ":", "inputFile", "=", "None", "thermoFile", "=", "options", ".", "get", "(", "'--thermo'", ")", "if", "'--output'", "in", "options", ":", "outName", "=", "options", "[", "'--output'", "]", "if", "not", "outName", ".", "endswith", "(", "'.cti'", ")", ":", "outName", "+=", "'.cti'", "elif", "inputFile", ":", "outName", "=", "os", ".", "path", ".", "splitext", "(", "inputFile", ")", "[", "0", "]", "+", "'.cti'", "else", ":", "outName", "=", "os", ".", "path", ".", "splitext", "(", "thermoFile", ")", "[", "0", "]", "+", "'.cti'", "permissive", "=", "'--permissive'", "in", "options", "transportFile", "=", "options", ".", "get", "(", "'--transport'", ")", "phaseName", "=", "options", ".", "get", "(", "'--id'", ",", "'gas'", ")", "parser", ".", "convertMech", "(", "inputFile", ",", "thermoFile", ",", "transportFile", ",", "phaseName", ",", "outName", ",", "permissive", "=", "permissive", ")", "# Do full validation by importing the resulting mechanism", "if", "not", "inputFile", ":", "# Can't validate input file that don't define a phase", "return", "try", ":", "import", "cantera", "as", "ct", "except", "ImportError", ":", "print", "(", "'WARNING: Unable to import Cantera Python module. Output '", "'mechanism has not been validated'", ")", "sys", ".", "exit", "(", "0", ")", "try", ":", "print", "(", "'Validating mechanism...'", ",", "end", "=", "''", ")", "gas", "=", "ct", ".", "Solution", "(", "outName", ")", "print", "(", "'PASSED.'", ")", "except", "RuntimeError", "as", "e", ":", "print", "(", "'FAILED.'", ")", "print", "(", "e", ")", "sys", ".", "exit", "(", "1", ")" ]
[ 1940, 0 ]
[ 2013, 19 ]
python
en
['en', 'error', 'th']
False
Reaction.__str__
(self)
Return a string representation of the reaction, in the form 'A + B <=> C + D'.
Return a string representation of the reaction, in the form 'A + B <=> C + D'.
def __str__(self):
    """
    Return a string representation of the reaction, in the form
    'A + B <=> C + D'.
    """
    if self.reversible:
        arrow = ' <=> '
    else:
        arrow = ' -> '
    return self.reactantString + arrow + self.productString
[ "def", "__str__", "(", "self", ")", ":", "arrow", "=", "' <=> '", "if", "self", ".", "reversible", "else", "' -> '", "return", "arrow", ".", "join", "(", "[", "self", ".", "reactantString", ",", "self", ".", "productString", "]", ")" ]
[ 284, 4 ]
[ 289, 68 ]
python
en
['en', 'error', 'th']
False
KineticsModel.isPressureDependent
(self)
Return ``True`` if the kinetics are pressure-dependent or ``False`` if they are pressure-independent. This method must be overloaded in the derived class.
Return ``True`` if the kinetics are pressure-dependent or ``False`` if they are pressure-independent. This method must be overloaded in the derived class.
def isPressureDependent(self):
    """
    Return ``True`` if the kinetics are pressure-dependent or ``False`` if
    they are pressure-independent. Abstract: concrete subclasses of
    KineticsModel must override this method.
    """
    message = ('Unexpected call to KineticsModel.isPressureDependent();'
               ' you should be using a class derived from KineticsModel.')
    raise InputParseError(message)
[ "def", "isPressureDependent", "(", "self", ")", ":", "raise", "InputParseError", "(", "'Unexpected call to KineticsModel.isPressureDependent();'", "' you should be using a class derived from KineticsModel.'", ")" ]
[ 345, 4 ]
[ 352, 89 ]
python
en
['en', 'error', 'th']
False
KineticsData.isPressureDependent
(self)
Returns ``False`` since KineticsData kinetics are not pressure-dependent.
Returns ``False`` since KineticsData kinetics are not pressure-dependent.
def isPressureDependent(self):
    """
    Return ``False``: tabulated KineticsData rates have no pressure
    dependence.
    """
    return False
[ "def", "isPressureDependent", "(", "self", ")", ":", "return", "False" ]
[ 384, 4 ]
[ 389, 20 ]
python
en
['en', 'error', 'th']
False
Arrhenius.isPressureDependent
(self)
Returns ``False`` since Arrhenius kinetics are not pressure-dependent.
Returns ``False`` since Arrhenius kinetics are not pressure-dependent.
def isPressureDependent(self):
    """
    Return ``False``: a simple Arrhenius rate has no pressure dependence.
    """
    return False
[ "def", "isPressureDependent", "(", "self", ")", ":", "return", "False" ]
[ 421, 4 ]
[ 425, 20 ]
python
en
['en', 'error', 'th']
False
PDepArrhenius.isPressureDependent
(self)
Returns ``True`` since PDepArrhenius kinetics are pressure-dependent.
Returns ``True`` since PDepArrhenius kinetics are pressure-dependent.
def isPressureDependent(self):
    """
    Return ``True``: PDepArrhenius rates are pressure-dependent by
    definition.
    """
    return True
[ "def", "isPressureDependent", "(", "self", ")", ":", "return", "True" ]
[ 478, 4 ]
[ 482, 19 ]
python
en
['en', 'error', 'th']
False
Chebyshev.isPressureDependent
(self)
Returns ``True`` since Chebyshev polynomial kinetics are pressure-dependent.
Returns ``True`` since Chebyshev polynomial kinetics are pressure-dependent.
def isPressureDependent(self):
    """
    Return ``True``: Chebyshev polynomial rates are expressed in both
    temperature and pressure.
    """
    return True
[ "def", "isPressureDependent", "(", "self", ")", ":", "return", "True" ]
[ 539, 4 ]
[ 544, 19 ]
python
en
['en', 'error', 'th']
False
ThirdBody.isPressureDependent
(self)
Returns ``True`` since third-body kinetics are pressure-dependent.
Returns ``True`` since third-body kinetics are pressure-dependent.
def isPressureDependent(self):
    """
    Return ``True``: third-body reactions depend on the total concentration,
    and hence on pressure.
    """
    return True
[ "def", "isPressureDependent", "(", "self", ")", ":", "return", "True" ]
[ 594, 4 ]
[ 598, 19 ]
python
en
['en', 'error', 'th']
False
Parser.parseComposition
(self, elements, nElements, width)
Parse the elemental composition from a 7 or 9 coefficient NASA polynomial entry.
Parse the elemental composition from a 7 or 9 coefficient NASA polynomial entry.
def parseComposition(self, elements, nElements, width):
    """
    Parse the elemental composition from a 7 or 9 coefficient NASA
    polynomial entry.

    Each of the `nElements` fixed-width fields in `elements` holds a
    two-character element symbol followed by a (possibly fractional)
    count. Blank symbols and zero/unparseable counts are skipped.
    """
    composition = {}
    for start in range(0, nElements * width, width):
        field = elements[start:start + width]
        symbol = field[:2].strip()
        amount = field[2:].strip()
        if not symbol:
            continue
        try:
            # Counts may be written as floats (e.g. '2.00'); truncate to int.
            quantity = int(float(amount))
        except ValueError:
            continue
        if quantity:
            # Normalize symbol case, e.g. 'HE' -> 'He'.
            composition[symbol.capitalize()] = quantity
    return composition
[ "def", "parseComposition", "(", "self", ",", "elements", ",", "nElements", ",", "width", ")", ":", "composition", "=", "{", "}", "for", "i", "in", "range", "(", "nElements", ")", ":", "symbol", "=", "elements", "[", "width", "*", "i", ":", "width", "*", "i", "+", "2", "]", ".", "strip", "(", ")", "count", "=", "elements", "[", "width", "*", "i", "+", "2", ":", "width", "*", "i", "+", "width", "]", ".", "strip", "(", ")", "if", "not", "symbol", ":", "continue", "try", ":", "count", "=", "int", "(", "float", "(", "count", ")", ")", "if", "count", ":", "composition", "[", "symbol", ".", "capitalize", "(", ")", "]", "=", "count", "except", "ValueError", ":", "pass", "return", "composition" ]
[ 896, 4 ]
[ 913, 26 ]
python
en
['en', 'error', 'th']
False
Parser.readThermoEntry
(self, lines, TintDefault)
Read a thermodynamics entry for one species in a Chemkin-format file (consisting of two 7-coefficient NASA polynomials). Returns the label of the species, the thermodynamics model as a :class:`MultiNASA` object, the elemental composition of the species, and the comment/note associated with the thermo entry.
Read a thermodynamics entry for one species in a Chemkin-format file (consisting of two 7-coefficient NASA polynomials). Returns the label of the species, the thermodynamics model as a :class:`MultiNASA` object, the elemental composition of the species, and the comment/note associated with the thermo entry.
def readThermoEntry(self, lines, TintDefault):
    """
    Read a thermodynamics entry for one species in a Chemkin-format file
    (consisting of two 7-coefficient NASA polynomials). Returns the label of
    the species, the thermodynamics model as a :class:`MultiNASA` object, the
    elemental composition of the species, and the comment/note associated with
    the thermo entry.

    :param lines: the (at least four) fixed-column lines of the entry.
    :param TintDefault: common-temperature used when columns 66-75 of the
        first line are blank or unparseable.
    :raises InputParseError: if the coefficient fields cannot be parsed, or
        if no elemental composition can be extracted.
    """
    # Columns 1-24 of the first line: species label plus optional note.
    identifier = lines[0][0:24].split()
    species = identifier[0].strip()

    if len(identifier) > 1:
        note = ''.join(identifier[1:]).strip()
    else:
        note = ''

    # Extract the NASA polynomial coefficients
    # Remember that the high-T polynomial comes first!
    try:
        Tmin = fortFloat(lines[0][45:55])
        Tmax = fortFloat(lines[0][55:65])
        try:
            # Common (intermediate) temperature; optional in the format.
            Tint = fortFloat(lines[0][65:75])
        except ValueError:
            Tint = TintDefault
        # Seven high-T coefficients: five on line 2, two on line 3
        # (each in a 15-column field).
        coeffs_high = [fortFloat(lines[i][j:k])
                       for i,j,k in [(1,0,15), (1,15,30), (1,30,45),
                                     (1,45,60), (1,60,75), (2,0,15),
                                     (2,15,30)]]
        # Seven low-T coefficients: three on line 3, four on line 4.
        coeffs_low = [fortFloat(lines[i][j:k])
                      for i,j,k in [(2,30,45), (2,45,60), (2,60,75),
                                    (3,0,15), (3,15,30), (3,30,45),
                                    (3,45,60)]]
    except (IndexError, ValueError) as err:
        raise InputParseError('Error while reading thermo entry for species {0}:\n{1}'.format(species, err))

    # Standard composition: four 5-column fields in columns 25-44.
    composition = self.parseComposition(lines[0][24:44], 4, 5)

    # Non-standard extended elemental composition data may be located beyond
    # column 80 on the first line of the thermo entry
    if len(lines[0]) > 80:
        elements = lines[0][80:]
        composition2 = self.parseComposition(elements, len(elements)//10, 10)
        composition.update(composition2)

    if not composition:
        raise InputParseError("Error parsing elemental composition for "
                              "species '{0}'".format(species))

    # Construct and return the thermodynamics model
    thermo = MultiNASA(
        polynomials=[
            NASA(Tmin=(Tmin,"K"), Tmax=(Tint,"K"), coeffs=coeffs_low),
            NASA(Tmin=(Tint,"K"), Tmax=(Tmax,"K"), coeffs=coeffs_high)
        ],
        Tmin=(Tmin,"K"),
        Tmax=(Tmax,"K"),
    )

    return species, thermo, composition, note
[ "def", "readThermoEntry", "(", "self", ",", "lines", ",", "TintDefault", ")", ":", "identifier", "=", "lines", "[", "0", "]", "[", "0", ":", "24", "]", ".", "split", "(", ")", "species", "=", "identifier", "[", "0", "]", ".", "strip", "(", ")", "if", "len", "(", "identifier", ")", ">", "1", ":", "note", "=", "''", ".", "join", "(", "identifier", "[", "1", ":", "]", ")", ".", "strip", "(", ")", "else", ":", "note", "=", "''", "# Extract the NASA polynomial coefficients", "# Remember that the high-T polynomial comes first!", "try", ":", "Tmin", "=", "fortFloat", "(", "lines", "[", "0", "]", "[", "45", ":", "55", "]", ")", "Tmax", "=", "fortFloat", "(", "lines", "[", "0", "]", "[", "55", ":", "65", "]", ")", "try", ":", "Tint", "=", "fortFloat", "(", "lines", "[", "0", "]", "[", "65", ":", "75", "]", ")", "except", "ValueError", ":", "Tint", "=", "TintDefault", "coeffs_high", "=", "[", "fortFloat", "(", "lines", "[", "i", "]", "[", "j", ":", "k", "]", ")", "for", "i", ",", "j", ",", "k", "in", "[", "(", "1", ",", "0", ",", "15", ")", ",", "(", "1", ",", "15", ",", "30", ")", ",", "(", "1", ",", "30", ",", "45", ")", ",", "(", "1", ",", "45", ",", "60", ")", ",", "(", "1", ",", "60", ",", "75", ")", ",", "(", "2", ",", "0", ",", "15", ")", ",", "(", "2", ",", "15", ",", "30", ")", "]", "]", "coeffs_low", "=", "[", "fortFloat", "(", "lines", "[", "i", "]", "[", "j", ":", "k", "]", ")", "for", "i", ",", "j", ",", "k", "in", "[", "(", "2", ",", "30", ",", "45", ")", ",", "(", "2", ",", "45", ",", "60", ")", ",", "(", "2", ",", "60", ",", "75", ")", ",", "(", "3", ",", "0", ",", "15", ")", ",", "(", "3", ",", "15", ",", "30", ")", ",", "(", "3", ",", "30", ",", "45", ")", ",", "(", "3", ",", "45", ",", "60", ")", "]", "]", "except", "(", "IndexError", ",", "ValueError", ")", "as", "err", ":", "raise", "InputParseError", "(", "'Error while reading thermo entry for species {0}:\\n{1}'", ".", "format", "(", "species", ",", "err", ")", ")", "composition", "=", "self", ".", 
"parseComposition", "(", "lines", "[", "0", "]", "[", "24", ":", "44", "]", ",", "4", ",", "5", ")", "# Non-standard extended elemental composition data may be located beyond", "# column 80 on the first line of the thermo entry", "if", "len", "(", "lines", "[", "0", "]", ")", ">", "80", ":", "elements", "=", "lines", "[", "0", "]", "[", "80", ":", "]", "composition2", "=", "self", ".", "parseComposition", "(", "elements", ",", "len", "(", "elements", ")", "//", "10", ",", "10", ")", "composition", ".", "update", "(", "composition2", ")", "if", "not", "composition", ":", "raise", "InputParseError", "(", "\"Error parsing elemental composition for \"", "\"species '{0}'\"", ".", "format", "(", "species", ")", ")", "# Construct and return the thermodynamics model", "thermo", "=", "MultiNASA", "(", "polynomials", "=", "[", "NASA", "(", "Tmin", "=", "(", "Tmin", ",", "\"K\"", ")", ",", "Tmax", "=", "(", "Tint", ",", "\"K\"", ")", ",", "coeffs", "=", "coeffs_low", ")", ",", "NASA", "(", "Tmin", "=", "(", "Tint", ",", "\"K\"", ")", ",", "Tmax", "=", "(", "Tmax", ",", "\"K\"", ")", ",", "coeffs", "=", "coeffs_high", ")", "]", ",", "Tmin", "=", "(", "Tmin", ",", "\"K\"", ")", ",", "Tmax", "=", "(", "Tmax", ",", "\"K\"", ")", ",", ")", "return", "species", ",", "thermo", ",", "composition", ",", "note" ]
[ 935, 4 ]
[ 995, 49 ]
python
en
['en', 'error', 'th']
False
Parser.readNasa9Entry
(self, entry)
Read a thermodynamics `entry` for one species given as one or more 9-coefficient NASA polynomials, written in the format described in Appendix A of NASA Reference Publication 1311 (McBride and Gordon, 1996). Returns the label of the species, the thermodynamics model as a :class:`MultiNASA` object, the elemental composition of the species, and the comment/note associated with the thermo entry.
Read a thermodynamics `entry` for one species given as one or more 9-coefficient NASA polynomials, written in the format described in Appendix A of NASA Reference Publication 1311 (McBride and Gordon, 1996). Returns the label of the species, the thermodynamics model as a :class:`MultiNASA` object, the elemental composition of the species, and the comment/note associated with the thermo entry.
def readNasa9Entry(self, entry):
    """
    Read a thermodynamics `entry` for one species given as one or more
    9-coefficient NASA polynomials, written in the format described in
    Appendix A of NASA Reference Publication 1311 (McBride and Gordon, 1996).
    Returns the label of the species, the thermodynamics model as a
    :class:`MultiNASA` object, the elemental composition of the species, and
    the comment/note associated with the thermo entry.

    :param entry: list of the fixed-column lines making up one species record.
    :raises InputParseError: if a temperature range or coefficient field
        cannot be parsed.
    """
    # First line: species label followed by a free-form note.
    tokens = entry[0].split()
    species = tokens[0]
    note = ' '.join(tokens[1:])

    # Second line: number of temperature intervals, a secondary note,
    # and the elemental composition (five 8-column fields).
    N = int(entry[1][:2])
    note2 = entry[1][3:9].strip()
    if note and note2:
        note = '{0} [{1}]'.format(note, note2)
    elif note2:
        note = note2
    composition = self.parseComposition(entry[1][10:50], 5, 8)

    polys = []
    # Running envelope of the temperature ranges of all intervals.
    totalTmin = 1e100
    totalTmax = -1e100
    try:
        for i in range(N):
            # Each interval spans three lines: range/metadata (A) and two
            # coefficient lines (B, C) of 16-column fields.
            A,B,C = entry[2+3*i:2+3*(i+1)]
            Tmin = fortFloat(A[1:11])
            Tmax = fortFloat(A[11:21])
            # NOTE(review): C[32:48] is deliberately skipped -- that field
            # appears unused in this record layout (presumably the blank
            # third slot of the RP-1311 format); confirm against the spec.
            coeffs = [fortFloat(B[0:16]), fortFloat(B[16:32]),
                      fortFloat(B[32:48]), fortFloat(B[48:64]),
                      fortFloat(B[64:80]), fortFloat(C[0:16]),
                      fortFloat(C[16:32]), fortFloat(C[48:64]),
                      fortFloat(C[64:80])]
            polys.append(NASA(Tmin=(Tmin,"K"), Tmax=(Tmax,"K"), coeffs=coeffs))
            totalTmin = min(Tmin, totalTmin)
            totalTmax = max(Tmax, totalTmax)
    except (IndexError, ValueError) as err:
        raise InputParseError('Error while reading thermo entry for species {0}:\n{1}'.format(species, err))

    thermo = MultiNASA(polynomials=polys,
                       Tmin=(totalTmin,"K"),
                       Tmax=(totalTmax,"K"))
    return species, thermo, composition, note
[ "def", "readNasa9Entry", "(", "self", ",", "entry", ")", ":", "tokens", "=", "entry", "[", "0", "]", ".", "split", "(", ")", "species", "=", "tokens", "[", "0", "]", "note", "=", "' '", ".", "join", "(", "tokens", "[", "1", ":", "]", ")", "N", "=", "int", "(", "entry", "[", "1", "]", "[", ":", "2", "]", ")", "note2", "=", "entry", "[", "1", "]", "[", "3", ":", "9", "]", ".", "strip", "(", ")", "if", "note", "and", "note2", ":", "note", "=", "'{0} [{1}]'", ".", "format", "(", "note", ",", "note2", ")", "elif", "note2", ":", "note", "=", "note2", "composition", "=", "self", ".", "parseComposition", "(", "entry", "[", "1", "]", "[", "10", ":", "50", "]", ",", "5", ",", "8", ")", "polys", "=", "[", "]", "totalTmin", "=", "1e100", "totalTmax", "=", "-", "1e100", "try", ":", "for", "i", "in", "range", "(", "N", ")", ":", "A", ",", "B", ",", "C", "=", "entry", "[", "2", "+", "3", "*", "i", ":", "2", "+", "3", "*", "(", "i", "+", "1", ")", "]", "Tmin", "=", "fortFloat", "(", "A", "[", "1", ":", "11", "]", ")", "Tmax", "=", "fortFloat", "(", "A", "[", "11", ":", "21", "]", ")", "coeffs", "=", "[", "fortFloat", "(", "B", "[", "0", ":", "16", "]", ")", ",", "fortFloat", "(", "B", "[", "16", ":", "32", "]", ")", ",", "fortFloat", "(", "B", "[", "32", ":", "48", "]", ")", ",", "fortFloat", "(", "B", "[", "48", ":", "64", "]", ")", ",", "fortFloat", "(", "B", "[", "64", ":", "80", "]", ")", ",", "fortFloat", "(", "C", "[", "0", ":", "16", "]", ")", ",", "fortFloat", "(", "C", "[", "16", ":", "32", "]", ")", ",", "fortFloat", "(", "C", "[", "48", ":", "64", "]", ")", ",", "fortFloat", "(", "C", "[", "64", ":", "80", "]", ")", "]", "polys", ".", "append", "(", "NASA", "(", "Tmin", "=", "(", "Tmin", ",", "\"K\"", ")", ",", "Tmax", "=", "(", "Tmax", ",", "\"K\"", ")", ",", "coeffs", "=", "coeffs", ")", ")", "totalTmin", "=", "min", "(", "Tmin", ",", "totalTmin", ")", "totalTmax", "=", "max", "(", "Tmax", ",", "totalTmax", ")", "except", "(", "IndexError", ",", "ValueError", ")", 
"as", "err", ":", "raise", "InputParseError", "(", "'Error while reading thermo entry for species {0}:\\n{1}'", ".", "format", "(", "species", ",", "err", ")", ")", "thermo", "=", "MultiNASA", "(", "polynomials", "=", "polys", ",", "Tmin", "=", "(", "totalTmin", ",", "\"K\"", ")", ",", "Tmax", "=", "(", "totalTmax", ",", "\"K\"", ")", ")", "return", "species", ",", "thermo", ",", "composition", ",", "note" ]
[ 997, 4 ]
[ 1041, 49 ]
python
en
['en', 'error', 'th']
False
Parser.readKineticsEntry
(self, entry)
Read a kinetics `entry` for a single reaction as loaded from a Chemkin-format file. Returns a :class:`Reaction` object with the reaction and its associated kinetics.
Read a kinetics `entry` for a single reaction as loaded from a Chemkin-format file. Returns a :class:`Reaction` object with the reaction and its associated kinetics.
def readKineticsEntry(self, entry): """ Read a kinetics `entry` for a single reaction as loaded from a Chemkin-format file. Returns a :class:`Reaction` object with the reaction and its associated kinetics. """ # Handle non-default units which apply to this entry energy_units = self.energy_units quantity_units = self.quantity_units if 'units' in entry.lower(): for units in sorted(QUANTITY_UNITS, key=lambda k: -len(k)): pattern = re.compile(r'units *\/ *%s *\/' % re.escape(units), flags=re.IGNORECASE) m = pattern.search(entry) if m: entry = pattern.sub('', entry) quantity_units = QUANTITY_UNITS[units] break for units in sorted(ENERGY_UNITS, key=lambda k: -len(k)): pattern = re.compile(r'units *\/ *%s *\/' % re.escape(units), re.IGNORECASE) m = pattern.search(entry) if m: entry = pattern.sub('', entry) energy_units = ENERGY_UNITS[units] break lines = entry.strip().splitlines() # The first line contains the reaction equation and a set of modified Arrhenius parameters tokens = lines[0].split() A = float(tokens[-3]) b = float(tokens[-2]) Ea = float(tokens[-1]) reaction = ''.join(tokens[:-3]) + '\n' # Identify species tokens in the reaction expression in order of # decreasing length locs = {} for i in range(self.Slen, 0, -1): for j in range(len(reaction)-i+1): test = reaction[j:j+i] if test in self.species_tokens: reaction = reaction[:j] + ' '*(i-1) + reaction[j+i-1:] locs[j] = test[:-1], 'species' # Identify other tokens in the reaction expression in order of # descending length for i in range(self.Slen, 0, -1): for j in range(len(reaction)-i+1): test = reaction[j:j+i] if test in self.other_tokens: reaction = reaction[:j] + ' '*i + reaction[j+i:] locs[j] = test, self.other_tokens[test] # Anything that's left should be a stoichiometric coefficient or a '+' # between species for token in reaction.split(): j = reaction.find(token) i = len(token) reaction = reaction[:j] + ' '*i + reaction[j+i:] if token == '+': continue try: locs[j] = int(token), 'coeff' except ValueError: 
try: locs[j] = float(token), 'coeff' except ValueError: raise InputParseError('Unexpected token "{0}" in reaction expression "{1}".'.format(token, reaction)) reactants = [] products = [] stoichiometry = 1 lhs = True for token,kind in [v for k,v in sorted(locs.items())]: if kind == 'equal': reversible = token in ('<=>', '=') lhs = False elif kind == 'coeff': stoichiometry = token elif lhs: reactants.append((stoichiometry,token,kind)) stoichiometry = 1 else: products.append((stoichiometry,token,kind)) stoichiometry = 1 if lhs is True: raise InputParseError("Failed to find reactant/product delimiter in reaction string.") # Create a new Reaction object for this reaction reaction = Reaction(reactants=[], products=[], reversible=reversible) def parseExpression(expression, dest): falloff3b = None thirdBody = False # simple third body reaction (non-falloff) for stoichiometry,species,kind in expression: if kind == 'third-body': thirdBody = True elif kind == 'falloff3b': falloff3b = 'M' elif kind.startswith('falloff3b:'): falloff3b = kind.split()[1] else: dest.append((stoichiometry, self.speciesDict[species])) return falloff3b, thirdBody falloff_3b_r, thirdBody = parseExpression(reactants, reaction.reactants) falloff_3b_p, thirdBody = parseExpression(products, reaction.products) if falloff_3b_r != falloff_3b_p: raise InputParseError('Third bodies do not match: "{0}" and "{1}" in' ' reaction entry:\n\n{2}'.format(falloff_3b_r, falloff_3b_p, entry)) reaction.thirdBody = falloff_3b_r # Determine the appropriate units for k(T) and k(T,P) based on the number of reactants # This assumes elementary kinetics for all reactions rStoich = sum(r[0] for r in reaction.reactants) + (1 if thirdBody else 0) if rStoich < 1: raise InputParseError('No reactant species for reaction {1}.'.format(reaction)) length_dim = 3 * (rStoich - 1) quantity_dim = rStoich - 1 kunits = self.getRateConstantUnits(length_dim, 'cm', quantity_dim, quantity_units) klow_units = self.getRateConstantUnits(length_dim + 
3, 'cm', quantity_dim + 1, quantity_units) # The rest of the first line contains Arrhenius parameters arrhenius = Arrhenius( A=(A,kunits), b=b, Ea=(Ea, energy_units), T0=(1,"K"), parser=self ) arrheniusLow = None arrheniusHigh = None falloff = None chebyshev = None pdepArrhenius = None efficiencies = {} chebyshevCoeffs = [] revReaction = None # Note that the subsequent lines could be in any order for line in lines[1:]: tokens = line.split('/') if 'dup' in line.lower(): # Duplicate reaction reaction.duplicate = True elif 'low' in line.lower(): # Low-pressure-limit Arrhenius parameters for "falloff" reaction tokens = tokens[1].split() arrheniusLow = Arrhenius( A=(float(tokens[0].strip()),klow_units), b=float(tokens[1].strip()), Ea=(float(tokens[2].strip()),energy_units), T0=(1,"K"), parser=self ) elif 'high' in line.lower(): # High-pressure-limit Arrhenius parameters for "chemically # activated" reaction tokens = tokens[1].split() arrheniusHigh = Arrhenius( A=(float(tokens[0].strip()),kunits), b=float(tokens[1].strip()), Ea=(float(tokens[2].strip()),energy_units), T0=(1,"K"), parser=self ) # Need to fix units on the base reaction: arrhenius.A = (arrhenius.A[0], klow_units) elif 'rev' in line.lower(): reaction.reversible = False # Create a reaction proceeding in the opposite direction revReaction = Reaction(reactants=reaction.products, products=reaction.reactants, thirdBody=reaction.thirdBody, reversible=False) tokens = tokens[1].split() revReaction.kinetics = Arrhenius( A=(float(tokens[0].strip()),klow_units), b=float(tokens[1].strip()), Ea=(float(tokens[2].strip()),energy_units), T0=(1,"K"), parser=self ) if thirdBody: revReaction.kinetics = ThirdBody( arrheniusHigh=revReaction.kinetics, parser=self) elif 'ford' in line.lower(): tokens = tokens[1].split() reaction.fwdOrders[tokens[0].strip()] = tokens[1].strip() elif 'troe' in line.lower(): # Troe falloff parameters tokens = tokens[1].split() alpha = float(tokens[0].strip()) T3 = float(tokens[1].strip()) T1 = 
float(tokens[2].strip()) try: T2 = float(tokens[3].strip()) except (IndexError, ValueError): T2 = None falloff = Troe( alpha=(alpha,''), T3=(T3,"K"), T1=(T1,"K"), T2=(T2,"K") if T2 is not None else None, ) elif 'sri' in line.lower(): # SRI falloff parameters tokens = tokens[1].split() A = float(tokens[0].strip()) B = float(tokens[1].strip()) C = float(tokens[2].strip()) try: D = float(tokens[3].strip()) E = float(tokens[4].strip()) except (IndexError, ValueError): D = None E = None if D is None or E is None: falloff = Sri(A=A, B=B, C=C) else: falloff = Sri(A=A, B=B, C=C, D=D, E=E) elif 'cheb' in line.lower(): # Chebyshev parameters if chebyshev is None: chebyshev = Chebyshev() tokens = [t.strip() for t in tokens] if contains(tokens, 'TCHEB'): index = get_index(tokens, 'TCHEB') tokens2 = tokens[index+1].split() chebyshev.Tmin = float(tokens2[0].strip()) chebyshev.Tmax = float(tokens2[1].strip()) if contains(tokens, 'PCHEB'): index = get_index(tokens, 'PCHEB') tokens2 = tokens[index+1].split() chebyshev.Pmin = (float(tokens2[0].strip()), 'atm') chebyshev.Pmax = (float(tokens2[1].strip()), 'atm') if contains(tokens, 'TCHEB') or contains(tokens, 'PCHEB'): pass elif chebyshev.degreeT == 0 or chebyshev.degreeP == 0: tokens2 = tokens[1].split() chebyshev.degreeT = int(float(tokens2[0].strip())) chebyshev.degreeP = int(float(tokens2[1].strip())) chebyshev.coeffs = np.zeros((chebyshev.degreeT,chebyshev.degreeP), np.float64) chebyshevCoeffs.extend([float(t.strip()) for t in tokens2[2:]]) else: tokens2 = tokens[1].split() chebyshevCoeffs.extend([float(t.strip()) for t in tokens2]) elif 'plog' in line.lower(): # Pressure-dependent Arrhenius parameters if pdepArrhenius is None: pdepArrhenius = [] tokens = tokens[1].split() pdepArrhenius.append([float(tokens[0].strip()), Arrhenius( A=(float(tokens[1].strip()),kunits), b=float(tokens[2].strip()), Ea=(float(tokens[3].strip()),energy_units), T0=(1,"K"), parser=self )]) else: # Assume a list of collider efficiencies for collider, 
efficiency in zip(tokens[0::2], tokens[1::2]): efficiencies[collider.strip()] = float(efficiency.strip()) # Decide which kinetics to keep and store them on the reaction object # Only one of these should be true at a time! if chebyshev is not None: if chebyshev.Tmin is None or chebyshev.Tmax is None: raise InputParseError('Missing TCHEB line for reaction {0}'.format(reaction)) if chebyshev.Pmin is None or chebyshev.Pmax is None: raise InputParseError('Missing PCHEB line for reaction {0}'.format(reaction)) index = 0 for t in range(chebyshev.degreeT): for p in range(chebyshev.degreeP): chebyshev.coeffs[t,p] = chebyshevCoeffs[index] index += 1 reaction.kinetics = chebyshev elif pdepArrhenius is not None: reaction.kinetics = PDepArrhenius( pressures=([P for P, arrh in pdepArrhenius],"atm"), arrhenius=[arrh for P, arrh in pdepArrhenius], parser=self ) elif arrheniusLow is not None: reaction.kinetics = Falloff(arrheniusHigh=arrhenius, arrheniusLow=arrheniusLow, F=falloff, parser=self, efficiencies=efficiencies) elif arrheniusHigh is not None: reaction.kinetics = ChemicallyActivated(arrheniusHigh=arrheniusHigh, arrheniusLow=arrhenius, F=falloff, parser=self, efficiencies=efficiencies) elif thirdBody: reaction.kinetics = ThirdBody(arrheniusHigh=arrhenius, parser=self, efficiencies=efficiencies) else: reaction.kinetics = arrhenius if revReaction: revReaction.duplicate = reaction.duplicate revReaction.kinetics.efficiencies = reaction.kinetics.efficiencies return reaction, revReaction
[ "def", "readKineticsEntry", "(", "self", ",", "entry", ")", ":", "# Handle non-default units which apply to this entry", "energy_units", "=", "self", ".", "energy_units", "quantity_units", "=", "self", ".", "quantity_units", "if", "'units'", "in", "entry", ".", "lower", "(", ")", ":", "for", "units", "in", "sorted", "(", "QUANTITY_UNITS", ",", "key", "=", "lambda", "k", ":", "-", "len", "(", "k", ")", ")", ":", "pattern", "=", "re", ".", "compile", "(", "r'units *\\/ *%s *\\/'", "%", "re", ".", "escape", "(", "units", ")", ",", "flags", "=", "re", ".", "IGNORECASE", ")", "m", "=", "pattern", ".", "search", "(", "entry", ")", "if", "m", ":", "entry", "=", "pattern", ".", "sub", "(", "''", ",", "entry", ")", "quantity_units", "=", "QUANTITY_UNITS", "[", "units", "]", "break", "for", "units", "in", "sorted", "(", "ENERGY_UNITS", ",", "key", "=", "lambda", "k", ":", "-", "len", "(", "k", ")", ")", ":", "pattern", "=", "re", ".", "compile", "(", "r'units *\\/ *%s *\\/'", "%", "re", ".", "escape", "(", "units", ")", ",", "re", ".", "IGNORECASE", ")", "m", "=", "pattern", ".", "search", "(", "entry", ")", "if", "m", ":", "entry", "=", "pattern", ".", "sub", "(", "''", ",", "entry", ")", "energy_units", "=", "ENERGY_UNITS", "[", "units", "]", "break", "lines", "=", "entry", ".", "strip", "(", ")", ".", "splitlines", "(", ")", "# The first line contains the reaction equation and a set of modified Arrhenius parameters", "tokens", "=", "lines", "[", "0", "]", ".", "split", "(", ")", "A", "=", "float", "(", "tokens", "[", "-", "3", "]", ")", "b", "=", "float", "(", "tokens", "[", "-", "2", "]", ")", "Ea", "=", "float", "(", "tokens", "[", "-", "1", "]", ")", "reaction", "=", "''", ".", "join", "(", "tokens", "[", ":", "-", "3", "]", ")", "+", "'\\n'", "# Identify species tokens in the reaction expression in order of", "# decreasing length", "locs", "=", "{", "}", "for", "i", "in", "range", "(", "self", ".", "Slen", ",", "0", ",", "-", "1", ")", ":", "for", "j", "in", 
"range", "(", "len", "(", "reaction", ")", "-", "i", "+", "1", ")", ":", "test", "=", "reaction", "[", "j", ":", "j", "+", "i", "]", "if", "test", "in", "self", ".", "species_tokens", ":", "reaction", "=", "reaction", "[", ":", "j", "]", "+", "' '", "*", "(", "i", "-", "1", ")", "+", "reaction", "[", "j", "+", "i", "-", "1", ":", "]", "locs", "[", "j", "]", "=", "test", "[", ":", "-", "1", "]", ",", "'species'", "# Identify other tokens in the reaction expression in order of", "# descending length", "for", "i", "in", "range", "(", "self", ".", "Slen", ",", "0", ",", "-", "1", ")", ":", "for", "j", "in", "range", "(", "len", "(", "reaction", ")", "-", "i", "+", "1", ")", ":", "test", "=", "reaction", "[", "j", ":", "j", "+", "i", "]", "if", "test", "in", "self", ".", "other_tokens", ":", "reaction", "=", "reaction", "[", ":", "j", "]", "+", "' '", "*", "i", "+", "reaction", "[", "j", "+", "i", ":", "]", "locs", "[", "j", "]", "=", "test", ",", "self", ".", "other_tokens", "[", "test", "]", "# Anything that's left should be a stoichiometric coefficient or a '+'", "# between species", "for", "token", "in", "reaction", ".", "split", "(", ")", ":", "j", "=", "reaction", ".", "find", "(", "token", ")", "i", "=", "len", "(", "token", ")", "reaction", "=", "reaction", "[", ":", "j", "]", "+", "' '", "*", "i", "+", "reaction", "[", "j", "+", "i", ":", "]", "if", "token", "==", "'+'", ":", "continue", "try", ":", "locs", "[", "j", "]", "=", "int", "(", "token", ")", ",", "'coeff'", "except", "ValueError", ":", "try", ":", "locs", "[", "j", "]", "=", "float", "(", "token", ")", ",", "'coeff'", "except", "ValueError", ":", "raise", "InputParseError", "(", "'Unexpected token \"{0}\" in reaction expression \"{1}\".'", ".", "format", "(", "token", ",", "reaction", ")", ")", "reactants", "=", "[", "]", "products", "=", "[", "]", "stoichiometry", "=", "1", "lhs", "=", "True", "for", "token", ",", "kind", "in", "[", "v", "for", "k", ",", "v", "in", "sorted", "(", "locs", ".", 
"items", "(", ")", ")", "]", ":", "if", "kind", "==", "'equal'", ":", "reversible", "=", "token", "in", "(", "'<=>'", ",", "'='", ")", "lhs", "=", "False", "elif", "kind", "==", "'coeff'", ":", "stoichiometry", "=", "token", "elif", "lhs", ":", "reactants", ".", "append", "(", "(", "stoichiometry", ",", "token", ",", "kind", ")", ")", "stoichiometry", "=", "1", "else", ":", "products", ".", "append", "(", "(", "stoichiometry", ",", "token", ",", "kind", ")", ")", "stoichiometry", "=", "1", "if", "lhs", "is", "True", ":", "raise", "InputParseError", "(", "\"Failed to find reactant/product delimiter in reaction string.\"", ")", "# Create a new Reaction object for this reaction", "reaction", "=", "Reaction", "(", "reactants", "=", "[", "]", ",", "products", "=", "[", "]", ",", "reversible", "=", "reversible", ")", "def", "parseExpression", "(", "expression", ",", "dest", ")", ":", "falloff3b", "=", "None", "thirdBody", "=", "False", "# simple third body reaction (non-falloff)", "for", "stoichiometry", ",", "species", ",", "kind", "in", "expression", ":", "if", "kind", "==", "'third-body'", ":", "thirdBody", "=", "True", "elif", "kind", "==", "'falloff3b'", ":", "falloff3b", "=", "'M'", "elif", "kind", ".", "startswith", "(", "'falloff3b:'", ")", ":", "falloff3b", "=", "kind", ".", "split", "(", ")", "[", "1", "]", "else", ":", "dest", ".", "append", "(", "(", "stoichiometry", ",", "self", ".", "speciesDict", "[", "species", "]", ")", ")", "return", "falloff3b", ",", "thirdBody", "falloff_3b_r", ",", "thirdBody", "=", "parseExpression", "(", "reactants", ",", "reaction", ".", "reactants", ")", "falloff_3b_p", ",", "thirdBody", "=", "parseExpression", "(", "products", ",", "reaction", ".", "products", ")", "if", "falloff_3b_r", "!=", "falloff_3b_p", ":", "raise", "InputParseError", "(", "'Third bodies do not match: \"{0}\" and \"{1}\" in'", "' reaction entry:\\n\\n{2}'", ".", "format", "(", "falloff_3b_r", ",", "falloff_3b_p", ",", "entry", ")", ")", "reaction", ".", 
"thirdBody", "=", "falloff_3b_r", "# Determine the appropriate units for k(T) and k(T,P) based on the number of reactants", "# This assumes elementary kinetics for all reactions", "rStoich", "=", "sum", "(", "r", "[", "0", "]", "for", "r", "in", "reaction", ".", "reactants", ")", "+", "(", "1", "if", "thirdBody", "else", "0", ")", "if", "rStoich", "<", "1", ":", "raise", "InputParseError", "(", "'No reactant species for reaction {1}.'", ".", "format", "(", "reaction", ")", ")", "length_dim", "=", "3", "*", "(", "rStoich", "-", "1", ")", "quantity_dim", "=", "rStoich", "-", "1", "kunits", "=", "self", ".", "getRateConstantUnits", "(", "length_dim", ",", "'cm'", ",", "quantity_dim", ",", "quantity_units", ")", "klow_units", "=", "self", ".", "getRateConstantUnits", "(", "length_dim", "+", "3", ",", "'cm'", ",", "quantity_dim", "+", "1", ",", "quantity_units", ")", "# The rest of the first line contains Arrhenius parameters", "arrhenius", "=", "Arrhenius", "(", "A", "=", "(", "A", ",", "kunits", ")", ",", "b", "=", "b", ",", "Ea", "=", "(", "Ea", ",", "energy_units", ")", ",", "T0", "=", "(", "1", ",", "\"K\"", ")", ",", "parser", "=", "self", ")", "arrheniusLow", "=", "None", "arrheniusHigh", "=", "None", "falloff", "=", "None", "chebyshev", "=", "None", "pdepArrhenius", "=", "None", "efficiencies", "=", "{", "}", "chebyshevCoeffs", "=", "[", "]", "revReaction", "=", "None", "# Note that the subsequent lines could be in any order", "for", "line", "in", "lines", "[", "1", ":", "]", ":", "tokens", "=", "line", ".", "split", "(", "'/'", ")", "if", "'dup'", "in", "line", ".", "lower", "(", ")", ":", "# Duplicate reaction", "reaction", ".", "duplicate", "=", "True", "elif", "'low'", "in", "line", ".", "lower", "(", ")", ":", "# Low-pressure-limit Arrhenius parameters for \"falloff\" reaction", "tokens", "=", "tokens", "[", "1", "]", ".", "split", "(", ")", "arrheniusLow", "=", "Arrhenius", "(", "A", "=", "(", "float", "(", "tokens", "[", "0", "]", ".", "strip", "(", ")", 
")", ",", "klow_units", ")", ",", "b", "=", "float", "(", "tokens", "[", "1", "]", ".", "strip", "(", ")", ")", ",", "Ea", "=", "(", "float", "(", "tokens", "[", "2", "]", ".", "strip", "(", ")", ")", ",", "energy_units", ")", ",", "T0", "=", "(", "1", ",", "\"K\"", ")", ",", "parser", "=", "self", ")", "elif", "'high'", "in", "line", ".", "lower", "(", ")", ":", "# High-pressure-limit Arrhenius parameters for \"chemically", "# activated\" reaction", "tokens", "=", "tokens", "[", "1", "]", ".", "split", "(", ")", "arrheniusHigh", "=", "Arrhenius", "(", "A", "=", "(", "float", "(", "tokens", "[", "0", "]", ".", "strip", "(", ")", ")", ",", "kunits", ")", ",", "b", "=", "float", "(", "tokens", "[", "1", "]", ".", "strip", "(", ")", ")", ",", "Ea", "=", "(", "float", "(", "tokens", "[", "2", "]", ".", "strip", "(", ")", ")", ",", "energy_units", ")", ",", "T0", "=", "(", "1", ",", "\"K\"", ")", ",", "parser", "=", "self", ")", "# Need to fix units on the base reaction:", "arrhenius", ".", "A", "=", "(", "arrhenius", ".", "A", "[", "0", "]", ",", "klow_units", ")", "elif", "'rev'", "in", "line", ".", "lower", "(", ")", ":", "reaction", ".", "reversible", "=", "False", "# Create a reaction proceeding in the opposite direction", "revReaction", "=", "Reaction", "(", "reactants", "=", "reaction", ".", "products", ",", "products", "=", "reaction", ".", "reactants", ",", "thirdBody", "=", "reaction", ".", "thirdBody", ",", "reversible", "=", "False", ")", "tokens", "=", "tokens", "[", "1", "]", ".", "split", "(", ")", "revReaction", ".", "kinetics", "=", "Arrhenius", "(", "A", "=", "(", "float", "(", "tokens", "[", "0", "]", ".", "strip", "(", ")", ")", ",", "klow_units", ")", ",", "b", "=", "float", "(", "tokens", "[", "1", "]", ".", "strip", "(", ")", ")", ",", "Ea", "=", "(", "float", "(", "tokens", "[", "2", "]", ".", "strip", "(", ")", ")", ",", "energy_units", ")", ",", "T0", "=", "(", "1", ",", "\"K\"", ")", ",", "parser", "=", "self", ")", "if", "thirdBody", ":", 
"revReaction", ".", "kinetics", "=", "ThirdBody", "(", "arrheniusHigh", "=", "revReaction", ".", "kinetics", ",", "parser", "=", "self", ")", "elif", "'ford'", "in", "line", ".", "lower", "(", ")", ":", "tokens", "=", "tokens", "[", "1", "]", ".", "split", "(", ")", "reaction", ".", "fwdOrders", "[", "tokens", "[", "0", "]", ".", "strip", "(", ")", "]", "=", "tokens", "[", "1", "]", ".", "strip", "(", ")", "elif", "'troe'", "in", "line", ".", "lower", "(", ")", ":", "# Troe falloff parameters", "tokens", "=", "tokens", "[", "1", "]", ".", "split", "(", ")", "alpha", "=", "float", "(", "tokens", "[", "0", "]", ".", "strip", "(", ")", ")", "T3", "=", "float", "(", "tokens", "[", "1", "]", ".", "strip", "(", ")", ")", "T1", "=", "float", "(", "tokens", "[", "2", "]", ".", "strip", "(", ")", ")", "try", ":", "T2", "=", "float", "(", "tokens", "[", "3", "]", ".", "strip", "(", ")", ")", "except", "(", "IndexError", ",", "ValueError", ")", ":", "T2", "=", "None", "falloff", "=", "Troe", "(", "alpha", "=", "(", "alpha", ",", "''", ")", ",", "T3", "=", "(", "T3", ",", "\"K\"", ")", ",", "T1", "=", "(", "T1", ",", "\"K\"", ")", ",", "T2", "=", "(", "T2", ",", "\"K\"", ")", "if", "T2", "is", "not", "None", "else", "None", ",", ")", "elif", "'sri'", "in", "line", ".", "lower", "(", ")", ":", "# SRI falloff parameters", "tokens", "=", "tokens", "[", "1", "]", ".", "split", "(", ")", "A", "=", "float", "(", "tokens", "[", "0", "]", ".", "strip", "(", ")", ")", "B", "=", "float", "(", "tokens", "[", "1", "]", ".", "strip", "(", ")", ")", "C", "=", "float", "(", "tokens", "[", "2", "]", ".", "strip", "(", ")", ")", "try", ":", "D", "=", "float", "(", "tokens", "[", "3", "]", ".", "strip", "(", ")", ")", "E", "=", "float", "(", "tokens", "[", "4", "]", ".", "strip", "(", ")", ")", "except", "(", "IndexError", ",", "ValueError", ")", ":", "D", "=", "None", "E", "=", "None", "if", "D", "is", "None", "or", "E", "is", "None", ":", "falloff", "=", "Sri", "(", "A", "=", "A", ",", "B", 
"=", "B", ",", "C", "=", "C", ")", "else", ":", "falloff", "=", "Sri", "(", "A", "=", "A", ",", "B", "=", "B", ",", "C", "=", "C", ",", "D", "=", "D", ",", "E", "=", "E", ")", "elif", "'cheb'", "in", "line", ".", "lower", "(", ")", ":", "# Chebyshev parameters", "if", "chebyshev", "is", "None", ":", "chebyshev", "=", "Chebyshev", "(", ")", "tokens", "=", "[", "t", ".", "strip", "(", ")", "for", "t", "in", "tokens", "]", "if", "contains", "(", "tokens", ",", "'TCHEB'", ")", ":", "index", "=", "get_index", "(", "tokens", ",", "'TCHEB'", ")", "tokens2", "=", "tokens", "[", "index", "+", "1", "]", ".", "split", "(", ")", "chebyshev", ".", "Tmin", "=", "float", "(", "tokens2", "[", "0", "]", ".", "strip", "(", ")", ")", "chebyshev", ".", "Tmax", "=", "float", "(", "tokens2", "[", "1", "]", ".", "strip", "(", ")", ")", "if", "contains", "(", "tokens", ",", "'PCHEB'", ")", ":", "index", "=", "get_index", "(", "tokens", ",", "'PCHEB'", ")", "tokens2", "=", "tokens", "[", "index", "+", "1", "]", ".", "split", "(", ")", "chebyshev", ".", "Pmin", "=", "(", "float", "(", "tokens2", "[", "0", "]", ".", "strip", "(", ")", ")", ",", "'atm'", ")", "chebyshev", ".", "Pmax", "=", "(", "float", "(", "tokens2", "[", "1", "]", ".", "strip", "(", ")", ")", ",", "'atm'", ")", "if", "contains", "(", "tokens", ",", "'TCHEB'", ")", "or", "contains", "(", "tokens", ",", "'PCHEB'", ")", ":", "pass", "elif", "chebyshev", ".", "degreeT", "==", "0", "or", "chebyshev", ".", "degreeP", "==", "0", ":", "tokens2", "=", "tokens", "[", "1", "]", ".", "split", "(", ")", "chebyshev", ".", "degreeT", "=", "int", "(", "float", "(", "tokens2", "[", "0", "]", ".", "strip", "(", ")", ")", ")", "chebyshev", ".", "degreeP", "=", "int", "(", "float", "(", "tokens2", "[", "1", "]", ".", "strip", "(", ")", ")", ")", "chebyshev", ".", "coeffs", "=", "np", ".", "zeros", "(", "(", "chebyshev", ".", "degreeT", ",", "chebyshev", ".", "degreeP", ")", ",", "np", ".", "float64", ")", "chebyshevCoeffs", ".", "extend", 
"(", "[", "float", "(", "t", ".", "strip", "(", ")", ")", "for", "t", "in", "tokens2", "[", "2", ":", "]", "]", ")", "else", ":", "tokens2", "=", "tokens", "[", "1", "]", ".", "split", "(", ")", "chebyshevCoeffs", ".", "extend", "(", "[", "float", "(", "t", ".", "strip", "(", ")", ")", "for", "t", "in", "tokens2", "]", ")", "elif", "'plog'", "in", "line", ".", "lower", "(", ")", ":", "# Pressure-dependent Arrhenius parameters", "if", "pdepArrhenius", "is", "None", ":", "pdepArrhenius", "=", "[", "]", "tokens", "=", "tokens", "[", "1", "]", ".", "split", "(", ")", "pdepArrhenius", ".", "append", "(", "[", "float", "(", "tokens", "[", "0", "]", ".", "strip", "(", ")", ")", ",", "Arrhenius", "(", "A", "=", "(", "float", "(", "tokens", "[", "1", "]", ".", "strip", "(", ")", ")", ",", "kunits", ")", ",", "b", "=", "float", "(", "tokens", "[", "2", "]", ".", "strip", "(", ")", ")", ",", "Ea", "=", "(", "float", "(", "tokens", "[", "3", "]", ".", "strip", "(", ")", ")", ",", "energy_units", ")", ",", "T0", "=", "(", "1", ",", "\"K\"", ")", ",", "parser", "=", "self", ")", "]", ")", "else", ":", "# Assume a list of collider efficiencies", "for", "collider", ",", "efficiency", "in", "zip", "(", "tokens", "[", "0", ":", ":", "2", "]", ",", "tokens", "[", "1", ":", ":", "2", "]", ")", ":", "efficiencies", "[", "collider", ".", "strip", "(", ")", "]", "=", "float", "(", "efficiency", ".", "strip", "(", ")", ")", "# Decide which kinetics to keep and store them on the reaction object", "# Only one of these should be true at a time!", "if", "chebyshev", "is", "not", "None", ":", "if", "chebyshev", ".", "Tmin", "is", "None", "or", "chebyshev", ".", "Tmax", "is", "None", ":", "raise", "InputParseError", "(", "'Missing TCHEB line for reaction {0}'", ".", "format", "(", "reaction", ")", ")", "if", "chebyshev", ".", "Pmin", "is", "None", "or", "chebyshev", ".", "Pmax", "is", "None", ":", "raise", "InputParseError", "(", "'Missing PCHEB line for reaction {0}'", ".", "format", "(", 
"reaction", ")", ")", "index", "=", "0", "for", "t", "in", "range", "(", "chebyshev", ".", "degreeT", ")", ":", "for", "p", "in", "range", "(", "chebyshev", ".", "degreeP", ")", ":", "chebyshev", ".", "coeffs", "[", "t", ",", "p", "]", "=", "chebyshevCoeffs", "[", "index", "]", "index", "+=", "1", "reaction", ".", "kinetics", "=", "chebyshev", "elif", "pdepArrhenius", "is", "not", "None", ":", "reaction", ".", "kinetics", "=", "PDepArrhenius", "(", "pressures", "=", "(", "[", "P", "for", "P", ",", "arrh", "in", "pdepArrhenius", "]", ",", "\"atm\"", ")", ",", "arrhenius", "=", "[", "arrh", "for", "P", ",", "arrh", "in", "pdepArrhenius", "]", ",", "parser", "=", "self", ")", "elif", "arrheniusLow", "is", "not", "None", ":", "reaction", ".", "kinetics", "=", "Falloff", "(", "arrheniusHigh", "=", "arrhenius", ",", "arrheniusLow", "=", "arrheniusLow", ",", "F", "=", "falloff", ",", "parser", "=", "self", ",", "efficiencies", "=", "efficiencies", ")", "elif", "arrheniusHigh", "is", "not", "None", ":", "reaction", ".", "kinetics", "=", "ChemicallyActivated", "(", "arrheniusHigh", "=", "arrheniusHigh", ",", "arrheniusLow", "=", "arrhenius", ",", "F", "=", "falloff", ",", "parser", "=", "self", ",", "efficiencies", "=", "efficiencies", ")", "elif", "thirdBody", ":", "reaction", ".", "kinetics", "=", "ThirdBody", "(", "arrheniusHigh", "=", "arrhenius", ",", "parser", "=", "self", ",", "efficiencies", "=", "efficiencies", ")", "else", ":", "reaction", ".", "kinetics", "=", "arrhenius", "if", "revReaction", ":", "revReaction", ".", "duplicate", "=", "reaction", ".", "duplicate", "revReaction", ".", "kinetics", ".", "efficiencies", "=", "reaction", ".", "kinetics", ".", "efficiencies", "return", "reaction", ",", "revReaction" ]
[ 1057, 4 ]
[ 1387, 36 ]
python
en
['en', 'error', 'th']
False
Parser.loadChemkinFile
(self, path, skipUndeclaredSpecies=True)
Load a Chemkin-format input file to `path` on disk.
Load a Chemkin-format input file to `path` on disk.
def loadChemkinFile(self, path, skipUndeclaredSpecies=True): """ Load a Chemkin-format input file to `path` on disk. """ transportLines = [] self.line_number = 0 with open(path, 'rU') as ck_file: def readline(): self.line_number += 1 line = strip_nonascii(ck_file.readline()) if '!' in line: return line.split('!', 1) elif line: return line, '' else: return None, None line, comment = readline() advance = True inHeader = True while line is not None: tokens = line.split() or [''] if inHeader and not line.strip(): self.headerLines.append(comment.rstrip()) if tokens[0].upper().startswith('ELEM'): inHeader = False tokens = tokens[1:] while line is not None and not contains(line, 'END'): # Grudging support for implicit end of section if line.strip()[:4].upper() == 'SPEC': self.warn('"ELEMENTS" section implicitly ended by start of ' 'next section on line {0}.'.format(self.line_number)) advance = False tokens.pop() break line, comment = readline() tokens.extend(line.split()) for token in tokens: if token.upper() == 'END': break self.elements.append(token.capitalize()) elif tokens[0].upper().startswith('SPEC'): # List of species identifiers tokens = tokens[1:] inHeader = False while line is not None and not contains(line, 'END'): # Grudging support for implicit end of section if line.strip()[:4].upper() in ('REAC', 'TRAN', 'THER'): self.warn('"SPECIES" section implicitly ended by start of ' 'next section on line {0}.'.format(self.line_number)) advance = False tokens.pop() # Fix the case where there THERMO ALL or REAC UNITS # ends the species section if (tokens[-1].upper().startswith('THER') or tokens[-1].upper().startswith('REAC')): tokens.pop() break line, comment = readline() tokens.extend(line.split()) for token in tokens: if token.upper() == 'END': break if token in self.speciesDict: species = self.speciesDict[token] self.warn('Found additional declaration of species {0}'.format(species)) else: species = Species(label=token) self.speciesDict[token] = species 
self.speciesList.append(species) elif tokens[0].upper().startswith('THER') and contains(line, 'NASA9'): inHeader = False entryPosition = 0 entryLength = None entry = [] while line is not None and not get_index(line, 'END') == 0: # Grudging support for implicit end of section if line.strip()[:4].upper() in ('REAC', 'TRAN'): self.warn('"THERMO" section implicitly ended by start of ' 'next section on line {0}.'.format(self.line_number)) advance = False tokens.pop() break line, comment = readline() if not line: continue if entryLength is None: entryLength = 0 # special case if (redundant) temperature ranges are # given as the first line try: s = line.split() float(s[0]), float(s[1]), float(s[2]) continue except (IndexError, ValueError): pass if entryPosition == 0: entry.append(line) elif entryPosition == 1: entryLength = 2 + 3 * int(line.split()[0]) entry.append(line) elif entryPosition < entryLength: entry.append(line) if entryPosition == entryLength-1: label, thermo, comp, note = self.readNasa9Entry(entry) if label not in self.speciesDict: if skipUndeclaredSpecies: logging.info('Skipping unexpected species "{0}" while reading thermodynamics entry.'.format(label)) thermo = [] continue else: # Add a new species entry species = Species(label=label) self.speciesDict[label] = species self.speciesList.append(species) else: species = self.speciesDict[label] # use the first set of thermo data found if species.thermo is not None: self.warn('Found additional thermo entry for species {0}. ' 'If --permissive was given, the first entry is used.'.format(label)) else: species.thermo = thermo species.composition = comp species.note = note entryPosition = -1 entry = [] entryPosition += 1 elif tokens[0].upper().startswith('THER'): # List of thermodynamics (hopefully one per species!) 
inHeader = False line, comment = readline() if line is not None and not contains(line, 'END'): TintDefault = float(line.split()[1]) thermo = [] while line is not None and not contains(line, 'END'): # Grudging support for implicit end of section if line.strip()[:4].upper() in ('REAC', 'TRAN'): self.warn('"THERMO" section implicitly ended by start of ' 'next section on line {0}.'.format(self.line_number)) advance = False tokens.pop() break if len(line) >= 80 and line[79] in ['1', '2', '3', '4']: thermo.append(line) if line[79] == '4': label, thermo, comp, note = self.readThermoEntry(thermo, TintDefault) if label not in self.speciesDict: if skipUndeclaredSpecies: logging.info('Skipping unexpected species "{0}" while reading thermodynamics entry.'.format(label)) thermo = [] line, comment = readline() continue else: # Add a new species entry species = Species(label=label) self.speciesDict[label] = species self.speciesList.append(species) else: species = self.speciesDict[label] # use the first set of thermo data found if species.thermo is not None: self.warn('Found additional thermo entry for species {0}. 
' 'If --permissive was given, the first entry is used.'.format(label)) else: species.thermo = thermo species.composition = comp species.note = note thermo = [] line, comment = readline() elif tokens[0].upper().startswith('REAC'): # Reactions section inHeader = False for token in tokens[1:]: units = token.upper() if units in ENERGY_UNITS: if (self.processed_units and self.energy_units != ENERGY_UNITS[units]): raise InputParseError("Multiple REACTIONS sections with " "different units are not supported.") self.energy_units = ENERGY_UNITS[units] elif units in QUANTITY_UNITS: if (self.processed_units and self.quantity_units != QUANTITY_UNITS[units]): raise InputParseError("Multiple REACTIONS sections with " "different units are not supported.") self.quantity_units = QUANTITY_UNITS[units] else: raise InputParseError("Unrecognized energy or quantity unit, {0!r}".format(units)) if len(tokens) > 1: self.processed_units = True kineticsList = [] commentsList = [] startLines = [] kinetics = '' comments = '' line, comment = readline() while line is not None and not contains(line, 'END'): # Grudging support for implicit end of section if line.strip()[:4].upper() == 'TRAN': self.warn('"REACTIONS" section implicitly ended by start of ' 'next section on line {0}.'.format(self.line_number)) advance = False break lineStartsWithComment = not line and comment line = line.strip() comment = comment.rstrip() if '=' in line and not lineStartsWithComment: # Finish previous record kineticsList.append(kinetics) commentsList.append(comments) startLines.append(self.line_number) kinetics = '' comments = '' if line: kinetics += line + '\n' if comment: comments += comment + '\n' line, comment = readline() # Don't forget the last reaction! 
if kinetics.strip() != '': kineticsList.append(kinetics) commentsList.append(comments) # We don't actually know whether comments belong to the # previous or next reaction, but to keep them positioned # correctly, we associate them with the next reaction (and # keep track of the final trailing comment separately) if kineticsList and kineticsList[0] == '': kineticsList.pop(0) self.finalReactionComment = commentsList.pop() self.setupKinetics() for kinetics, comment, line_number in zip(kineticsList, commentsList, startLines): try: reaction,revReaction = self.readKineticsEntry(kinetics) except Exception as e: logging.error('Error reading reaction entry starting on line {0}:'.format(line_number)) raise reaction.line_number = line_number reaction.comment = comment self.reactions.append(reaction) if revReaction is not None: revReaction.line_number = line_number self.reactions.append(revReaction) elif tokens[0].upper().startswith('TRAN'): inHeader = False line, comment = readline() transport_start_line = self.line_number while line is not None and not contains(line, 'END'): # Grudging support for implicit end of section if line.strip()[:4].upper() == 'REAC': self.warn('"TRANSPORT" section implicitly ended by start of ' 'next section on line {0}.'.format(self.line_number)) advance = False tokens.pop() break if comment: transportLines.append('!'.join((line, comment))) else: transportLines.append(line) line, comment = readline() if advance: line, comment = readline() else: advance = True self.checkDuplicateReactions() index = 0 for reaction in self.reactions: index += 1 reaction.index = index if transportLines: self.parseTransportData(transportLines, path, transport_start_line)
[ "def", "loadChemkinFile", "(", "self", ",", "path", ",", "skipUndeclaredSpecies", "=", "True", ")", ":", "transportLines", "=", "[", "]", "self", ".", "line_number", "=", "0", "with", "open", "(", "path", ",", "'rU'", ")", "as", "ck_file", ":", "def", "readline", "(", ")", ":", "self", ".", "line_number", "+=", "1", "line", "=", "strip_nonascii", "(", "ck_file", ".", "readline", "(", ")", ")", "if", "'!'", "in", "line", ":", "return", "line", ".", "split", "(", "'!'", ",", "1", ")", "elif", "line", ":", "return", "line", ",", "''", "else", ":", "return", "None", ",", "None", "line", ",", "comment", "=", "readline", "(", ")", "advance", "=", "True", "inHeader", "=", "True", "while", "line", "is", "not", "None", ":", "tokens", "=", "line", ".", "split", "(", ")", "or", "[", "''", "]", "if", "inHeader", "and", "not", "line", ".", "strip", "(", ")", ":", "self", ".", "headerLines", ".", "append", "(", "comment", ".", "rstrip", "(", ")", ")", "if", "tokens", "[", "0", "]", ".", "upper", "(", ")", ".", "startswith", "(", "'ELEM'", ")", ":", "inHeader", "=", "False", "tokens", "=", "tokens", "[", "1", ":", "]", "while", "line", "is", "not", "None", "and", "not", "contains", "(", "line", ",", "'END'", ")", ":", "# Grudging support for implicit end of section", "if", "line", ".", "strip", "(", ")", "[", ":", "4", "]", ".", "upper", "(", ")", "==", "'SPEC'", ":", "self", ".", "warn", "(", "'\"ELEMENTS\" section implicitly ended by start of '", "'next section on line {0}.'", ".", "format", "(", "self", ".", "line_number", ")", ")", "advance", "=", "False", "tokens", ".", "pop", "(", ")", "break", "line", ",", "comment", "=", "readline", "(", ")", "tokens", ".", "extend", "(", "line", ".", "split", "(", ")", ")", "for", "token", "in", "tokens", ":", "if", "token", ".", "upper", "(", ")", "==", "'END'", ":", "break", "self", ".", "elements", ".", "append", "(", "token", ".", "capitalize", "(", ")", ")", "elif", "tokens", "[", "0", "]", ".", "upper", "(", ")", ".", 
"startswith", "(", "'SPEC'", ")", ":", "# List of species identifiers", "tokens", "=", "tokens", "[", "1", ":", "]", "inHeader", "=", "False", "while", "line", "is", "not", "None", "and", "not", "contains", "(", "line", ",", "'END'", ")", ":", "# Grudging support for implicit end of section", "if", "line", ".", "strip", "(", ")", "[", ":", "4", "]", ".", "upper", "(", ")", "in", "(", "'REAC'", ",", "'TRAN'", ",", "'THER'", ")", ":", "self", ".", "warn", "(", "'\"SPECIES\" section implicitly ended by start of '", "'next section on line {0}.'", ".", "format", "(", "self", ".", "line_number", ")", ")", "advance", "=", "False", "tokens", ".", "pop", "(", ")", "# Fix the case where there THERMO ALL or REAC UNITS", "# ends the species section", "if", "(", "tokens", "[", "-", "1", "]", ".", "upper", "(", ")", ".", "startswith", "(", "'THER'", ")", "or", "tokens", "[", "-", "1", "]", ".", "upper", "(", ")", ".", "startswith", "(", "'REAC'", ")", ")", ":", "tokens", ".", "pop", "(", ")", "break", "line", ",", "comment", "=", "readline", "(", ")", "tokens", ".", "extend", "(", "line", ".", "split", "(", ")", ")", "for", "token", "in", "tokens", ":", "if", "token", ".", "upper", "(", ")", "==", "'END'", ":", "break", "if", "token", "in", "self", ".", "speciesDict", ":", "species", "=", "self", ".", "speciesDict", "[", "token", "]", "self", ".", "warn", "(", "'Found additional declaration of species {0}'", ".", "format", "(", "species", ")", ")", "else", ":", "species", "=", "Species", "(", "label", "=", "token", ")", "self", ".", "speciesDict", "[", "token", "]", "=", "species", "self", ".", "speciesList", ".", "append", "(", "species", ")", "elif", "tokens", "[", "0", "]", ".", "upper", "(", ")", ".", "startswith", "(", "'THER'", ")", "and", "contains", "(", "line", ",", "'NASA9'", ")", ":", "inHeader", "=", "False", "entryPosition", "=", "0", "entryLength", "=", "None", "entry", "=", "[", "]", "while", "line", "is", "not", "None", "and", "not", "get_index", "(", "line", 
",", "'END'", ")", "==", "0", ":", "# Grudging support for implicit end of section", "if", "line", ".", "strip", "(", ")", "[", ":", "4", "]", ".", "upper", "(", ")", "in", "(", "'REAC'", ",", "'TRAN'", ")", ":", "self", ".", "warn", "(", "'\"THERMO\" section implicitly ended by start of '", "'next section on line {0}.'", ".", "format", "(", "self", ".", "line_number", ")", ")", "advance", "=", "False", "tokens", ".", "pop", "(", ")", "break", "line", ",", "comment", "=", "readline", "(", ")", "if", "not", "line", ":", "continue", "if", "entryLength", "is", "None", ":", "entryLength", "=", "0", "# special case if (redundant) temperature ranges are", "# given as the first line", "try", ":", "s", "=", "line", ".", "split", "(", ")", "float", "(", "s", "[", "0", "]", ")", ",", "float", "(", "s", "[", "1", "]", ")", ",", "float", "(", "s", "[", "2", "]", ")", "continue", "except", "(", "IndexError", ",", "ValueError", ")", ":", "pass", "if", "entryPosition", "==", "0", ":", "entry", ".", "append", "(", "line", ")", "elif", "entryPosition", "==", "1", ":", "entryLength", "=", "2", "+", "3", "*", "int", "(", "line", ".", "split", "(", ")", "[", "0", "]", ")", "entry", ".", "append", "(", "line", ")", "elif", "entryPosition", "<", "entryLength", ":", "entry", ".", "append", "(", "line", ")", "if", "entryPosition", "==", "entryLength", "-", "1", ":", "label", ",", "thermo", ",", "comp", ",", "note", "=", "self", ".", "readNasa9Entry", "(", "entry", ")", "if", "label", "not", "in", "self", ".", "speciesDict", ":", "if", "skipUndeclaredSpecies", ":", "logging", ".", "info", "(", "'Skipping unexpected species \"{0}\" while reading thermodynamics entry.'", ".", "format", "(", "label", ")", ")", "thermo", "=", "[", "]", "continue", "else", ":", "# Add a new species entry", "species", "=", "Species", "(", "label", "=", "label", ")", "self", ".", "speciesDict", "[", "label", "]", "=", "species", "self", ".", "speciesList", ".", "append", "(", "species", ")", "else", ":", 
"species", "=", "self", ".", "speciesDict", "[", "label", "]", "# use the first set of thermo data found", "if", "species", ".", "thermo", "is", "not", "None", ":", "self", ".", "warn", "(", "'Found additional thermo entry for species {0}. '", "'If --permissive was given, the first entry is used.'", ".", "format", "(", "label", ")", ")", "else", ":", "species", ".", "thermo", "=", "thermo", "species", ".", "composition", "=", "comp", "species", ".", "note", "=", "note", "entryPosition", "=", "-", "1", "entry", "=", "[", "]", "entryPosition", "+=", "1", "elif", "tokens", "[", "0", "]", ".", "upper", "(", ")", ".", "startswith", "(", "'THER'", ")", ":", "# List of thermodynamics (hopefully one per species!)", "inHeader", "=", "False", "line", ",", "comment", "=", "readline", "(", ")", "if", "line", "is", "not", "None", "and", "not", "contains", "(", "line", ",", "'END'", ")", ":", "TintDefault", "=", "float", "(", "line", ".", "split", "(", ")", "[", "1", "]", ")", "thermo", "=", "[", "]", "while", "line", "is", "not", "None", "and", "not", "contains", "(", "line", ",", "'END'", ")", ":", "# Grudging support for implicit end of section", "if", "line", ".", "strip", "(", ")", "[", ":", "4", "]", ".", "upper", "(", ")", "in", "(", "'REAC'", ",", "'TRAN'", ")", ":", "self", ".", "warn", "(", "'\"THERMO\" section implicitly ended by start of '", "'next section on line {0}.'", ".", "format", "(", "self", ".", "line_number", ")", ")", "advance", "=", "False", "tokens", ".", "pop", "(", ")", "break", "if", "len", "(", "line", ")", ">=", "80", "and", "line", "[", "79", "]", "in", "[", "'1'", ",", "'2'", ",", "'3'", ",", "'4'", "]", ":", "thermo", ".", "append", "(", "line", ")", "if", "line", "[", "79", "]", "==", "'4'", ":", "label", ",", "thermo", ",", "comp", ",", "note", "=", "self", ".", "readThermoEntry", "(", "thermo", ",", "TintDefault", ")", "if", "label", "not", "in", "self", ".", "speciesDict", ":", "if", "skipUndeclaredSpecies", ":", "logging", ".", "info", "(", 
"'Skipping unexpected species \"{0}\" while reading thermodynamics entry.'", ".", "format", "(", "label", ")", ")", "thermo", "=", "[", "]", "line", ",", "comment", "=", "readline", "(", ")", "continue", "else", ":", "# Add a new species entry", "species", "=", "Species", "(", "label", "=", "label", ")", "self", ".", "speciesDict", "[", "label", "]", "=", "species", "self", ".", "speciesList", ".", "append", "(", "species", ")", "else", ":", "species", "=", "self", ".", "speciesDict", "[", "label", "]", "# use the first set of thermo data found", "if", "species", ".", "thermo", "is", "not", "None", ":", "self", ".", "warn", "(", "'Found additional thermo entry for species {0}. '", "'If --permissive was given, the first entry is used.'", ".", "format", "(", "label", ")", ")", "else", ":", "species", ".", "thermo", "=", "thermo", "species", ".", "composition", "=", "comp", "species", ".", "note", "=", "note", "thermo", "=", "[", "]", "line", ",", "comment", "=", "readline", "(", ")", "elif", "tokens", "[", "0", "]", ".", "upper", "(", ")", ".", "startswith", "(", "'REAC'", ")", ":", "# Reactions section", "inHeader", "=", "False", "for", "token", "in", "tokens", "[", "1", ":", "]", ":", "units", "=", "token", ".", "upper", "(", ")", "if", "units", "in", "ENERGY_UNITS", ":", "if", "(", "self", ".", "processed_units", "and", "self", ".", "energy_units", "!=", "ENERGY_UNITS", "[", "units", "]", ")", ":", "raise", "InputParseError", "(", "\"Multiple REACTIONS sections with \"", "\"different units are not supported.\"", ")", "self", ".", "energy_units", "=", "ENERGY_UNITS", "[", "units", "]", "elif", "units", "in", "QUANTITY_UNITS", ":", "if", "(", "self", ".", "processed_units", "and", "self", ".", "quantity_units", "!=", "QUANTITY_UNITS", "[", "units", "]", ")", ":", "raise", "InputParseError", "(", "\"Multiple REACTIONS sections with \"", "\"different units are not supported.\"", ")", "self", ".", "quantity_units", "=", "QUANTITY_UNITS", "[", "units", "]", "else", ":", 
"raise", "InputParseError", "(", "\"Unrecognized energy or quantity unit, {0!r}\"", ".", "format", "(", "units", ")", ")", "if", "len", "(", "tokens", ")", ">", "1", ":", "self", ".", "processed_units", "=", "True", "kineticsList", "=", "[", "]", "commentsList", "=", "[", "]", "startLines", "=", "[", "]", "kinetics", "=", "''", "comments", "=", "''", "line", ",", "comment", "=", "readline", "(", ")", "while", "line", "is", "not", "None", "and", "not", "contains", "(", "line", ",", "'END'", ")", ":", "# Grudging support for implicit end of section", "if", "line", ".", "strip", "(", ")", "[", ":", "4", "]", ".", "upper", "(", ")", "==", "'TRAN'", ":", "self", ".", "warn", "(", "'\"REACTIONS\" section implicitly ended by start of '", "'next section on line {0}.'", ".", "format", "(", "self", ".", "line_number", ")", ")", "advance", "=", "False", "break", "lineStartsWithComment", "=", "not", "line", "and", "comment", "line", "=", "line", ".", "strip", "(", ")", "comment", "=", "comment", ".", "rstrip", "(", ")", "if", "'='", "in", "line", "and", "not", "lineStartsWithComment", ":", "# Finish previous record", "kineticsList", ".", "append", "(", "kinetics", ")", "commentsList", ".", "append", "(", "comments", ")", "startLines", ".", "append", "(", "self", ".", "line_number", ")", "kinetics", "=", "''", "comments", "=", "''", "if", "line", ":", "kinetics", "+=", "line", "+", "'\\n'", "if", "comment", ":", "comments", "+=", "comment", "+", "'\\n'", "line", ",", "comment", "=", "readline", "(", ")", "# Don't forget the last reaction!", "if", "kinetics", ".", "strip", "(", ")", "!=", "''", ":", "kineticsList", ".", "append", "(", "kinetics", ")", "commentsList", ".", "append", "(", "comments", ")", "# We don't actually know whether comments belong to the", "# previous or next reaction, but to keep them positioned", "# correctly, we associate them with the next reaction (and", "# keep track of the final trailing comment separately)", "if", "kineticsList", "and", 
"kineticsList", "[", "0", "]", "==", "''", ":", "kineticsList", ".", "pop", "(", "0", ")", "self", ".", "finalReactionComment", "=", "commentsList", ".", "pop", "(", ")", "self", ".", "setupKinetics", "(", ")", "for", "kinetics", ",", "comment", ",", "line_number", "in", "zip", "(", "kineticsList", ",", "commentsList", ",", "startLines", ")", ":", "try", ":", "reaction", ",", "revReaction", "=", "self", ".", "readKineticsEntry", "(", "kinetics", ")", "except", "Exception", "as", "e", ":", "logging", ".", "error", "(", "'Error reading reaction entry starting on line {0}:'", ".", "format", "(", "line_number", ")", ")", "raise", "reaction", ".", "line_number", "=", "line_number", "reaction", ".", "comment", "=", "comment", "self", ".", "reactions", ".", "append", "(", "reaction", ")", "if", "revReaction", "is", "not", "None", ":", "revReaction", ".", "line_number", "=", "line_number", "self", ".", "reactions", ".", "append", "(", "revReaction", ")", "elif", "tokens", "[", "0", "]", ".", "upper", "(", ")", ".", "startswith", "(", "'TRAN'", ")", ":", "inHeader", "=", "False", "line", ",", "comment", "=", "readline", "(", ")", "transport_start_line", "=", "self", ".", "line_number", "while", "line", "is", "not", "None", "and", "not", "contains", "(", "line", ",", "'END'", ")", ":", "# Grudging support for implicit end of section", "if", "line", ".", "strip", "(", ")", "[", ":", "4", "]", ".", "upper", "(", ")", "==", "'REAC'", ":", "self", ".", "warn", "(", "'\"TRANSPORT\" section implicitly ended by start of '", "'next section on line {0}.'", ".", "format", "(", "self", ".", "line_number", ")", ")", "advance", "=", "False", "tokens", ".", "pop", "(", ")", "break", "if", "comment", ":", "transportLines", ".", "append", "(", "'!'", ".", "join", "(", "(", "line", ",", "comment", ")", ")", ")", "else", ":", "transportLines", ".", "append", "(", "line", ")", "line", ",", "comment", "=", "readline", "(", ")", "if", "advance", ":", "line", ",", "comment", "=", "readline", "(", 
")", "else", ":", "advance", "=", "True", "self", ".", "checkDuplicateReactions", "(", ")", "index", "=", "0", "for", "reaction", "in", "self", ".", "reactions", ":", "index", "+=", "1", "reaction", ".", "index", "=", "index", "if", "transportLines", ":", "self", ".", "parseTransportData", "(", "transportLines", ",", "path", ",", "transport_start_line", ")" ]
[ 1389, 4 ]
[ 1697, 79 ]
python
en
['en', 'error', 'th']
False
Parser.checkDuplicateReactions
(self)
Check for marked (and unmarked!) duplicate reactions. Raise exception for unmarked duplicate reactions. Pressure-independent and pressure-dependent reactions are treated as different, so they don't need to be marked as duplicate.
Check for marked (and unmarked!) duplicate reactions. Raise exception for unmarked duplicate reactions.
def checkDuplicateReactions(self): """ Check for marked (and unmarked!) duplicate reactions. Raise exception for unmarked duplicate reactions. Pressure-independent and pressure-dependent reactions are treated as different, so they don't need to be marked as duplicate. """ message = ('Encountered unmarked duplicate reaction {0} ' '(See lines {1} and {2} of the input file.).') possible_duplicates = defaultdict(list) for r in self.reactions: k = (tuple(r.reactants), tuple(r.products), r.kinetics.isPressureDependent()) possible_duplicates[k].append(r) for reactions in possible_duplicates.values(): for r1,r2 in itertools.combinations(reactions, 2): if r1.duplicate and r2.duplicate: pass # marked duplicate reaction elif (r1.thirdBody and r1.thirdBody.upper() == 'M' and r1.kinetics.efficiencies.get(r2.thirdBody) == 0): pass # explicit zero efficiency elif (r2.thirdBody and r2.thirdBody.upper() == 'M' and r2.kinetics.efficiencies.get(r1.thirdBody) == 0): pass # explicit zero efficiency elif r1.thirdBody != r2.thirdBody: pass # distinct third bodies else: raise InputParseError(message.format(r1, r1.line_number, r2.line_number))
[ "def", "checkDuplicateReactions", "(", "self", ")", ":", "message", "=", "(", "'Encountered unmarked duplicate reaction {0} '", "'(See lines {1} and {2} of the input file.).'", ")", "possible_duplicates", "=", "defaultdict", "(", "list", ")", "for", "r", "in", "self", ".", "reactions", ":", "k", "=", "(", "tuple", "(", "r", ".", "reactants", ")", ",", "tuple", "(", "r", ".", "products", ")", ",", "r", ".", "kinetics", ".", "isPressureDependent", "(", ")", ")", "possible_duplicates", "[", "k", "]", ".", "append", "(", "r", ")", "for", "reactions", "in", "possible_duplicates", ".", "values", "(", ")", ":", "for", "r1", ",", "r2", "in", "itertools", ".", "combinations", "(", "reactions", ",", "2", ")", ":", "if", "r1", ".", "duplicate", "and", "r2", ".", "duplicate", ":", "pass", "# marked duplicate reaction", "elif", "(", "r1", ".", "thirdBody", "and", "r1", ".", "thirdBody", ".", "upper", "(", ")", "==", "'M'", "and", "r1", ".", "kinetics", ".", "efficiencies", ".", "get", "(", "r2", ".", "thirdBody", ")", "==", "0", ")", ":", "pass", "# explicit zero efficiency", "elif", "(", "r2", ".", "thirdBody", "and", "r2", ".", "thirdBody", ".", "upper", "(", ")", "==", "'M'", "and", "r2", ".", "kinetics", ".", "efficiencies", ".", "get", "(", "r1", ".", "thirdBody", ")", "==", "0", ")", ":", "pass", "# explicit zero efficiency", "elif", "r1", ".", "thirdBody", "!=", "r2", ".", "thirdBody", ":", "pass", "# distinct third bodies", "else", ":", "raise", "InputParseError", "(", "message", ".", "format", "(", "r1", ",", "r1", ".", "line_number", ",", "r2", ".", "line_number", ")", ")" ]
[ 1699, 4 ]
[ 1728, 93 ]
python
en
['en', 'error', 'th']
False
Parser.parseTransportData
(self, lines, filename, line_offset)
Parse the Chemkin-format transport data in ``lines`` (a list of strings) and add that transport data to the previously-loaded species.
Parse the Chemkin-format transport data in ``lines`` (a list of strings) and add that transport data to the previously-loaded species.
def parseTransportData(self, lines, filename, line_offset): """ Parse the Chemkin-format transport data in ``lines`` (a list of strings) and add that transport data to the previously-loaded species. """ for i,line in enumerate(lines): line = line.strip() if not line or line.startswith('!'): continue if get_index(line, 'END') == 0: break if '!' in line: line, comment = line.split('!', 1) data = line.split() + [comment] else: data = line.split() if len(data) < 7: raise InputParseError('Unable to parse transport data: not' ' enough parameters on line {0} of "{1}".'.format( line_offset + i, filename)) speciesName = data[0] if speciesName in self.speciesDict: if self.speciesDict[speciesName].transport is None: self.speciesDict[speciesName].transport = TransportData(*data) else: self.warn('Ignoring duplicate transport data' ' for species "{0} on line {1} of "{2}".'.format( speciesName, line_offset + i, filename))
[ "def", "parseTransportData", "(", "self", ",", "lines", ",", "filename", ",", "line_offset", ")", ":", "for", "i", ",", "line", "in", "enumerate", "(", "lines", ")", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "not", "line", "or", "line", ".", "startswith", "(", "'!'", ")", ":", "continue", "if", "get_index", "(", "line", ",", "'END'", ")", "==", "0", ":", "break", "if", "'!'", "in", "line", ":", "line", ",", "comment", "=", "line", ".", "split", "(", "'!'", ",", "1", ")", "data", "=", "line", ".", "split", "(", ")", "+", "[", "comment", "]", "else", ":", "data", "=", "line", ".", "split", "(", ")", "if", "len", "(", "data", ")", "<", "7", ":", "raise", "InputParseError", "(", "'Unable to parse transport data: not'", "' enough parameters on line {0} of \"{1}\".'", ".", "format", "(", "line_offset", "+", "i", ",", "filename", ")", ")", "speciesName", "=", "data", "[", "0", "]", "if", "speciesName", "in", "self", ".", "speciesDict", ":", "if", "self", ".", "speciesDict", "[", "speciesName", "]", ".", "transport", "is", "None", ":", "self", ".", "speciesDict", "[", "speciesName", "]", ".", "transport", "=", "TransportData", "(", "*", "data", ")", "else", ":", "self", ".", "warn", "(", "'Ignoring duplicate transport data'", "' for species \"{0} on line {1} of \"{2}\".'", ".", "format", "(", "speciesName", ",", "line_offset", "+", "i", ",", "filename", ")", ")" ]
[ 1730, 4 ]
[ 1760, 68 ]
python
en
['en', 'error', 'th']
False
get_cipher
()
Return decryption function and length of key. Async friendly.
Return decryption function and length of key.
def get_cipher(): """Return decryption function and length of key. Async friendly. """ def decrypt(ciphertext, key): """Decrypt ciphertext using key.""" return SecretBox(key).decrypt(ciphertext, encoder=Base64Encoder) return (SecretBox.KEY_SIZE, decrypt)
[ "def", "get_cipher", "(", ")", ":", "def", "decrypt", "(", "ciphertext", ",", "key", ")", ":", "\"\"\"Decrypt ciphertext using key.\"\"\"", "return", "SecretBox", "(", "key", ")", ".", "decrypt", "(", "ciphertext", ",", "encoder", "=", "Base64Encoder", ")", "return", "(", "SecretBox", ".", "KEY_SIZE", ",", "decrypt", ")" ]
[ 22, 0 ]
[ 32, 40 ]
python
en
['en', 'en', 'en']
True
_parse_topic
(topic, subscribe_topic)
Parse an MQTT topic {sub_topic}/user/dev, return (user, dev) tuple. Async friendly.
Parse an MQTT topic {sub_topic}/user/dev, return (user, dev) tuple.
def _parse_topic(topic, subscribe_topic): """Parse an MQTT topic {sub_topic}/user/dev, return (user, dev) tuple. Async friendly. """ subscription = subscribe_topic.split("/") try: user_index = subscription.index("#") except ValueError: _LOGGER.error("Can't parse subscription topic: '%s'", subscribe_topic) raise topic_list = topic.split("/") try: user, device = topic_list[user_index], topic_list[user_index + 1] except IndexError: _LOGGER.error("Can't parse topic: '%s'", topic) raise return user, device
[ "def", "_parse_topic", "(", "topic", ",", "subscribe_topic", ")", ":", "subscription", "=", "subscribe_topic", ".", "split", "(", "\"/\"", ")", "try", ":", "user_index", "=", "subscription", ".", "index", "(", "\"#\"", ")", "except", "ValueError", ":", "_LOGGER", ".", "error", "(", "\"Can't parse subscription topic: '%s'\"", ",", "subscribe_topic", ")", "raise", "topic_list", "=", "topic", ".", "split", "(", "\"/\"", ")", "try", ":", "user", ",", "device", "=", "topic_list", "[", "user_index", "]", ",", "topic_list", "[", "user_index", "+", "1", "]", "except", "IndexError", ":", "_LOGGER", ".", "error", "(", "\"Can't parse topic: '%s'\"", ",", "topic", ")", "raise", "return", "user", ",", "device" ]
[ 35, 0 ]
[ 54, 23 ]
python
en
['en', 'mt', 'en']
True
_parse_see_args
(message, subscribe_topic)
Parse the OwnTracks location parameters, into the format see expects. Async friendly.
Parse the OwnTracks location parameters, into the format see expects.
def _parse_see_args(message, subscribe_topic): """Parse the OwnTracks location parameters, into the format see expects. Async friendly. """ user, device = _parse_topic(message["topic"], subscribe_topic) dev_id = slugify(f"{user}_{device}") kwargs = {"dev_id": dev_id, "host_name": user, "attributes": {}} if message["lat"] is not None and message["lon"] is not None: kwargs["gps"] = (message["lat"], message["lon"]) else: kwargs["gps"] = None if "acc" in message: kwargs["gps_accuracy"] = message["acc"] if "batt" in message: kwargs["battery"] = message["batt"] if "vel" in message: kwargs["attributes"]["velocity"] = message["vel"] if "tid" in message: kwargs["attributes"]["tid"] = message["tid"] if "addr" in message: kwargs["attributes"]["address"] = message["addr"] if "cog" in message: kwargs["attributes"]["course"] = message["cog"] if "bs" in message: kwargs["attributes"]["battery_status"] = message["bs"] if "t" in message: if message["t"] in ("c", "u"): kwargs["source_type"] = SOURCE_TYPE_GPS if message["t"] == "b": kwargs["source_type"] = SOURCE_TYPE_BLUETOOTH_LE return dev_id, kwargs
[ "def", "_parse_see_args", "(", "message", ",", "subscribe_topic", ")", ":", "user", ",", "device", "=", "_parse_topic", "(", "message", "[", "\"topic\"", "]", ",", "subscribe_topic", ")", "dev_id", "=", "slugify", "(", "f\"{user}_{device}\"", ")", "kwargs", "=", "{", "\"dev_id\"", ":", "dev_id", ",", "\"host_name\"", ":", "user", ",", "\"attributes\"", ":", "{", "}", "}", "if", "message", "[", "\"lat\"", "]", "is", "not", "None", "and", "message", "[", "\"lon\"", "]", "is", "not", "None", ":", "kwargs", "[", "\"gps\"", "]", "=", "(", "message", "[", "\"lat\"", "]", ",", "message", "[", "\"lon\"", "]", ")", "else", ":", "kwargs", "[", "\"gps\"", "]", "=", "None", "if", "\"acc\"", "in", "message", ":", "kwargs", "[", "\"gps_accuracy\"", "]", "=", "message", "[", "\"acc\"", "]", "if", "\"batt\"", "in", "message", ":", "kwargs", "[", "\"battery\"", "]", "=", "message", "[", "\"batt\"", "]", "if", "\"vel\"", "in", "message", ":", "kwargs", "[", "\"attributes\"", "]", "[", "\"velocity\"", "]", "=", "message", "[", "\"vel\"", "]", "if", "\"tid\"", "in", "message", ":", "kwargs", "[", "\"attributes\"", "]", "[", "\"tid\"", "]", "=", "message", "[", "\"tid\"", "]", "if", "\"addr\"", "in", "message", ":", "kwargs", "[", "\"attributes\"", "]", "[", "\"address\"", "]", "=", "message", "[", "\"addr\"", "]", "if", "\"cog\"", "in", "message", ":", "kwargs", "[", "\"attributes\"", "]", "[", "\"course\"", "]", "=", "message", "[", "\"cog\"", "]", "if", "\"bs\"", "in", "message", ":", "kwargs", "[", "\"attributes\"", "]", "[", "\"battery_status\"", "]", "=", "message", "[", "\"bs\"", "]", "if", "\"t\"", "in", "message", ":", "if", "message", "[", "\"t\"", "]", "in", "(", "\"c\"", ",", "\"u\"", ")", ":", "kwargs", "[", "\"source_type\"", "]", "=", "SOURCE_TYPE_GPS", "if", "message", "[", "\"t\"", "]", "==", "\"b\"", ":", "kwargs", "[", "\"source_type\"", "]", "=", "SOURCE_TYPE_BLUETOOTH_LE", "return", "dev_id", ",", "kwargs" ]
[ 57, 0 ]
[ 90, 25 ]
python
en
['en', 'en', 'en']
True
_set_gps_from_zone
(kwargs, location, zone)
Set the see parameters from the zone parameters. Async friendly.
Set the see parameters from the zone parameters.
def _set_gps_from_zone(kwargs, location, zone): """Set the see parameters from the zone parameters. Async friendly. """ if zone is not None: kwargs["gps"] = ( zone.attributes[ATTR_LATITUDE], zone.attributes[ATTR_LONGITUDE], ) kwargs["gps_accuracy"] = zone.attributes["radius"] kwargs["location_name"] = location return kwargs
[ "def", "_set_gps_from_zone", "(", "kwargs", ",", "location", ",", "zone", ")", ":", "if", "zone", "is", "not", "None", ":", "kwargs", "[", "\"gps\"", "]", "=", "(", "zone", ".", "attributes", "[", "ATTR_LATITUDE", "]", ",", "zone", ".", "attributes", "[", "ATTR_LONGITUDE", "]", ",", ")", "kwargs", "[", "\"gps_accuracy\"", "]", "=", "zone", ".", "attributes", "[", "\"radius\"", "]", "kwargs", "[", "\"location_name\"", "]", "=", "location", "return", "kwargs" ]
[ 93, 0 ]
[ 105, 17 ]
python
en
['en', 'en', 'en']
True
_decrypt_payload
(secret, topic, ciphertext)
Decrypt encrypted payload.
Decrypt encrypted payload.
def _decrypt_payload(secret, topic, ciphertext): """Decrypt encrypted payload.""" try: if supports_encryption(): keylen, decrypt = get_cipher() else: _LOGGER.warning("Ignoring encrypted payload because nacl not installed") return None except OSError: _LOGGER.warning("Ignoring encrypted payload because nacl not installed") return None if isinstance(secret, dict): key = secret.get(topic) else: key = secret if key is None: _LOGGER.warning( "Ignoring encrypted payload because no decryption key known for topic %s", topic, ) return None key = key.encode("utf-8") key = key[:keylen] key = key.ljust(keylen, b"\0") try: message = decrypt(ciphertext, key) message = message.decode("utf-8") _LOGGER.debug("Decrypted payload: %s", message) return message except ValueError: _LOGGER.warning( "Ignoring encrypted payload because unable to decrypt using key for topic %s", topic, ) return None
[ "def", "_decrypt_payload", "(", "secret", ",", "topic", ",", "ciphertext", ")", ":", "try", ":", "if", "supports_encryption", "(", ")", ":", "keylen", ",", "decrypt", "=", "get_cipher", "(", ")", "else", ":", "_LOGGER", ".", "warning", "(", "\"Ignoring encrypted payload because nacl not installed\"", ")", "return", "None", "except", "OSError", ":", "_LOGGER", ".", "warning", "(", "\"Ignoring encrypted payload because nacl not installed\"", ")", "return", "None", "if", "isinstance", "(", "secret", ",", "dict", ")", ":", "key", "=", "secret", ".", "get", "(", "topic", ")", "else", ":", "key", "=", "secret", "if", "key", "is", "None", ":", "_LOGGER", ".", "warning", "(", "\"Ignoring encrypted payload because no decryption key known for topic %s\"", ",", "topic", ",", ")", "return", "None", "key", "=", "key", ".", "encode", "(", "\"utf-8\"", ")", "key", "=", "key", "[", ":", "keylen", "]", "key", "=", "key", ".", "ljust", "(", "keylen", ",", "b\"\\0\"", ")", "try", ":", "message", "=", "decrypt", "(", "ciphertext", ",", "key", ")", "message", "=", "message", ".", "decode", "(", "\"utf-8\"", ")", "_LOGGER", ".", "debug", "(", "\"Decrypted payload: %s\"", ",", "message", ")", "return", "message", "except", "ValueError", ":", "_LOGGER", ".", "warning", "(", "\"Ignoring encrypted payload because unable to decrypt using key for topic %s\"", ",", "topic", ",", ")", "return", "None" ]
[ 108, 0 ]
[ 146, 19 ]
python
en
['fr', 'en', 'en']
True
encrypt_message
(secret, topic, message)
Encrypt message.
Encrypt message.
def encrypt_message(secret, topic, message): """Encrypt message.""" keylen = SecretBox.KEY_SIZE if isinstance(secret, dict): key = secret.get(topic) else: key = secret if key is None: _LOGGER.warning( "Unable to encrypt payload because no decryption key known " "for topic %s", topic, ) return None key = key.encode("utf-8") key = key[:keylen] key = key.ljust(keylen, b"\0") try: message = message.encode("utf-8") payload = SecretBox(key).encrypt(message, encoder=Base64Encoder) _LOGGER.debug("Encrypted message: %s to %s", message, payload) return payload.decode("utf-8") except ValueError: _LOGGER.warning("Unable to encrypt message for topic %s", topic) return None
[ "def", "encrypt_message", "(", "secret", ",", "topic", ",", "message", ")", ":", "keylen", "=", "SecretBox", ".", "KEY_SIZE", "if", "isinstance", "(", "secret", ",", "dict", ")", ":", "key", "=", "secret", ".", "get", "(", "topic", ")", "else", ":", "key", "=", "secret", "if", "key", "is", "None", ":", "_LOGGER", ".", "warning", "(", "\"Unable to encrypt payload because no decryption key known \"", "\"for topic %s\"", ",", "topic", ",", ")", "return", "None", "key", "=", "key", ".", "encode", "(", "\"utf-8\"", ")", "key", "=", "key", "[", ":", "keylen", "]", "key", "=", "key", ".", "ljust", "(", "keylen", ",", "b\"\\0\"", ")", "try", ":", "message", "=", "message", ".", "encode", "(", "\"utf-8\"", ")", "payload", "=", "SecretBox", "(", "key", ")", ".", "encrypt", "(", "message", ",", "encoder", "=", "Base64Encoder", ")", "_LOGGER", ".", "debug", "(", "\"Encrypted message: %s to %s\"", ",", "message", ",", "payload", ")", "return", "payload", ".", "decode", "(", "\"utf-8\"", ")", "except", "ValueError", ":", "_LOGGER", ".", "warning", "(", "\"Unable to encrypt message for topic %s\"", ",", "topic", ")", "return", "None" ]
[ 149, 0 ]
[ 177, 19 ]
python
en
['en', 'el-Latn', 'en']
False
async_handle_location_message
(hass, context, message)
Handle a location message.
Handle a location message.
async def async_handle_location_message(hass, context, message): """Handle a location message.""" if not context.async_valid_accuracy(message): return if context.events_only: _LOGGER.debug("Location update ignored due to events_only setting") return dev_id, kwargs = _parse_see_args(message, context.mqtt_topic) if context.regions_entered[dev_id]: _LOGGER.debug( "Location update ignored, inside region %s", context.regions_entered[-1] ) return context.async_see(**kwargs) context.async_see_beacons(hass, dev_id, kwargs)
[ "async", "def", "async_handle_location_message", "(", "hass", ",", "context", ",", "message", ")", ":", "if", "not", "context", ".", "async_valid_accuracy", "(", "message", ")", ":", "return", "if", "context", ".", "events_only", ":", "_LOGGER", ".", "debug", "(", "\"Location update ignored due to events_only setting\"", ")", "return", "dev_id", ",", "kwargs", "=", "_parse_see_args", "(", "message", ",", "context", ".", "mqtt_topic", ")", "if", "context", ".", "regions_entered", "[", "dev_id", "]", ":", "_LOGGER", ".", "debug", "(", "\"Location update ignored, inside region %s\"", ",", "context", ".", "regions_entered", "[", "-", "1", "]", ")", "return", "context", ".", "async_see", "(", "*", "*", "kwargs", ")", "context", ".", "async_see_beacons", "(", "hass", ",", "dev_id", ",", "kwargs", ")" ]
[ 181, 0 ]
[ 199, 51 ]
python
en
['es', 'en', 'en']
True
_async_transition_message_enter
(hass, context, message, location)
Execute enter event.
Execute enter event.
async def _async_transition_message_enter(hass, context, message, location): """Execute enter event.""" zone = hass.states.get(f"zone.{slugify(location)}") dev_id, kwargs = _parse_see_args(message, context.mqtt_topic) if zone is None and message.get("t") == "b": # Not a HA zone, and a beacon so mobile beacon. # kwargs will contain the lat/lon of the beacon # which is not where the beacon actually is # and is probably set to 0/0 beacons = context.mobile_beacons_active[dev_id] if location not in beacons: beacons.add(location) _LOGGER.info("Added beacon %s", location) context.async_see_beacons(hass, dev_id, kwargs) else: # Normal region regions = context.regions_entered[dev_id] if location not in regions: regions.append(location) _LOGGER.info("Enter region %s", location) _set_gps_from_zone(kwargs, location, zone) context.async_see(**kwargs) context.async_see_beacons(hass, dev_id, kwargs)
[ "async", "def", "_async_transition_message_enter", "(", "hass", ",", "context", ",", "message", ",", "location", ")", ":", "zone", "=", "hass", ".", "states", ".", "get", "(", "f\"zone.{slugify(location)}\"", ")", "dev_id", ",", "kwargs", "=", "_parse_see_args", "(", "message", ",", "context", ".", "mqtt_topic", ")", "if", "zone", "is", "None", "and", "message", ".", "get", "(", "\"t\"", ")", "==", "\"b\"", ":", "# Not a HA zone, and a beacon so mobile beacon.", "# kwargs will contain the lat/lon of the beacon", "# which is not where the beacon actually is", "# and is probably set to 0/0", "beacons", "=", "context", ".", "mobile_beacons_active", "[", "dev_id", "]", "if", "location", "not", "in", "beacons", ":", "beacons", ".", "add", "(", "location", ")", "_LOGGER", ".", "info", "(", "\"Added beacon %s\"", ",", "location", ")", "context", ".", "async_see_beacons", "(", "hass", ",", "dev_id", ",", "kwargs", ")", "else", ":", "# Normal region", "regions", "=", "context", ".", "regions_entered", "[", "dev_id", "]", "if", "location", "not", "in", "regions", ":", "regions", ".", "append", "(", "location", ")", "_LOGGER", ".", "info", "(", "\"Enter region %s\"", ",", "location", ")", "_set_gps_from_zone", "(", "kwargs", ",", "location", ",", "zone", ")", "context", ".", "async_see", "(", "*", "*", "kwargs", ")", "context", ".", "async_see_beacons", "(", "hass", ",", "dev_id", ",", "kwargs", ")" ]
[ 202, 0 ]
[ 225, 55 ]
python
en
['fr', 'gl', 'en']
False
_async_transition_message_leave
(hass, context, message, location)
Execute leave event.
Execute leave event.
async def _async_transition_message_leave(hass, context, message, location): """Execute leave event.""" dev_id, kwargs = _parse_see_args(message, context.mqtt_topic) regions = context.regions_entered[dev_id] if location in regions: regions.remove(location) beacons = context.mobile_beacons_active[dev_id] if location in beacons: beacons.remove(location) _LOGGER.info("Remove beacon %s", location) context.async_see_beacons(hass, dev_id, kwargs) else: new_region = regions[-1] if regions else None if new_region: # Exit to previous region zone = hass.states.get(f"zone.{slugify(new_region)}") _set_gps_from_zone(kwargs, new_region, zone) _LOGGER.info("Exit to %s", new_region) context.async_see(**kwargs) context.async_see_beacons(hass, dev_id, kwargs) return _LOGGER.info("Exit to GPS") # Check for GPS accuracy if context.async_valid_accuracy(message): context.async_see(**kwargs) context.async_see_beacons(hass, dev_id, kwargs)
[ "async", "def", "_async_transition_message_leave", "(", "hass", ",", "context", ",", "message", ",", "location", ")", ":", "dev_id", ",", "kwargs", "=", "_parse_see_args", "(", "message", ",", "context", ".", "mqtt_topic", ")", "regions", "=", "context", ".", "regions_entered", "[", "dev_id", "]", "if", "location", "in", "regions", ":", "regions", ".", "remove", "(", "location", ")", "beacons", "=", "context", ".", "mobile_beacons_active", "[", "dev_id", "]", "if", "location", "in", "beacons", ":", "beacons", ".", "remove", "(", "location", ")", "_LOGGER", ".", "info", "(", "\"Remove beacon %s\"", ",", "location", ")", "context", ".", "async_see_beacons", "(", "hass", ",", "dev_id", ",", "kwargs", ")", "else", ":", "new_region", "=", "regions", "[", "-", "1", "]", "if", "regions", "else", "None", "if", "new_region", ":", "# Exit to previous region", "zone", "=", "hass", ".", "states", ".", "get", "(", "f\"zone.{slugify(new_region)}\"", ")", "_set_gps_from_zone", "(", "kwargs", ",", "new_region", ",", "zone", ")", "_LOGGER", ".", "info", "(", "\"Exit to %s\"", ",", "new_region", ")", "context", ".", "async_see", "(", "*", "*", "kwargs", ")", "context", ".", "async_see_beacons", "(", "hass", ",", "dev_id", ",", "kwargs", ")", "return", "_LOGGER", ".", "info", "(", "\"Exit to GPS\"", ")", "# Check for GPS accuracy", "if", "context", ".", "async_valid_accuracy", "(", "message", ")", ":", "context", ".", "async_see", "(", "*", "*", "kwargs", ")", "context", ".", "async_see_beacons", "(", "hass", ",", "dev_id", ",", "kwargs", ")" ]
[ 228, 0 ]
[ 257, 59 ]
python
en
['en', 'en', 'en']
True
async_handle_transition_message
(hass, context, message)
Handle a transition message.
Handle a transition message.
async def async_handle_transition_message(hass, context, message): """Handle a transition message.""" if message.get("desc") is None: _LOGGER.error( "Location missing from `Entering/Leaving` message - " "please turn `Share` on in OwnTracks app" ) return # OwnTracks uses - at the start of a beacon zone # to switch on 'hold mode' - ignore this location = message["desc"].lstrip("-") # Create a layer of indirection for Owntracks instances that may name # regions differently than their HA names if location in context.region_mapping: location = context.region_mapping[location] if location.lower() == "home": location = STATE_HOME if message["event"] == "enter": await _async_transition_message_enter(hass, context, message, location) elif message["event"] == "leave": await _async_transition_message_leave(hass, context, message, location) else: _LOGGER.error( "Misformatted mqtt msgs, _type=transition, event=%s", message["event"] )
[ "async", "def", "async_handle_transition_message", "(", "hass", ",", "context", ",", "message", ")", ":", "if", "message", ".", "get", "(", "\"desc\"", ")", "is", "None", ":", "_LOGGER", ".", "error", "(", "\"Location missing from `Entering/Leaving` message - \"", "\"please turn `Share` on in OwnTracks app\"", ")", "return", "# OwnTracks uses - at the start of a beacon zone", "# to switch on 'hold mode' - ignore this", "location", "=", "message", "[", "\"desc\"", "]", ".", "lstrip", "(", "\"-\"", ")", "# Create a layer of indirection for Owntracks instances that may name", "# regions differently than their HA names", "if", "location", "in", "context", ".", "region_mapping", ":", "location", "=", "context", ".", "region_mapping", "[", "location", "]", "if", "location", ".", "lower", "(", ")", "==", "\"home\"", ":", "location", "=", "STATE_HOME", "if", "message", "[", "\"event\"", "]", "==", "\"enter\"", ":", "await", "_async_transition_message_enter", "(", "hass", ",", "context", ",", "message", ",", "location", ")", "elif", "message", "[", "\"event\"", "]", "==", "\"leave\"", ":", "await", "_async_transition_message_leave", "(", "hass", ",", "context", ",", "message", ",", "location", ")", "else", ":", "_LOGGER", ".", "error", "(", "\"Misformatted mqtt msgs, _type=transition, event=%s\"", ",", "message", "[", "\"event\"", "]", ")" ]
[ 261, 0 ]
[ 288, 9 ]
python
en
['en', 'en', 'en']
True
async_handle_waypoint
(hass, name_base, waypoint)
Handle a waypoint.
Handle a waypoint.
async def async_handle_waypoint(hass, name_base, waypoint): """Handle a waypoint.""" name = waypoint["desc"] pretty_name = f"{name_base} - {name}" lat = waypoint["lat"] lon = waypoint["lon"] rad = waypoint["rad"] # check zone exists entity_id = zone_comp.ENTITY_ID_FORMAT.format(slugify(pretty_name)) # Check if state already exists if hass.states.get(entity_id) is not None: return zone = zone_comp.Zone( { zone_comp.CONF_NAME: pretty_name, zone_comp.CONF_LATITUDE: lat, zone_comp.CONF_LONGITUDE: lon, zone_comp.CONF_RADIUS: rad, zone_comp.CONF_ICON: zone_comp.ICON_IMPORT, zone_comp.CONF_PASSIVE: False, }, False, ) zone.hass = hass zone.entity_id = entity_id zone.async_write_ha_state()
[ "async", "def", "async_handle_waypoint", "(", "hass", ",", "name_base", ",", "waypoint", ")", ":", "name", "=", "waypoint", "[", "\"desc\"", "]", "pretty_name", "=", "f\"{name_base} - {name}\"", "lat", "=", "waypoint", "[", "\"lat\"", "]", "lon", "=", "waypoint", "[", "\"lon\"", "]", "rad", "=", "waypoint", "[", "\"rad\"", "]", "# check zone exists", "entity_id", "=", "zone_comp", ".", "ENTITY_ID_FORMAT", ".", "format", "(", "slugify", "(", "pretty_name", ")", ")", "# Check if state already exists", "if", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "is", "not", "None", ":", "return", "zone", "=", "zone_comp", ".", "Zone", "(", "{", "zone_comp", ".", "CONF_NAME", ":", "pretty_name", ",", "zone_comp", ".", "CONF_LATITUDE", ":", "lat", ",", "zone_comp", ".", "CONF_LONGITUDE", ":", "lon", ",", "zone_comp", ".", "CONF_RADIUS", ":", "rad", ",", "zone_comp", ".", "CONF_ICON", ":", "zone_comp", ".", "ICON_IMPORT", ",", "zone_comp", ".", "CONF_PASSIVE", ":", "False", ",", "}", ",", "False", ",", ")", "zone", ".", "hass", "=", "hass", "zone", ".", "entity_id", "=", "entity_id", "zone", ".", "async_write_ha_state", "(", ")" ]
[ 291, 0 ]
[ 319, 31 ]
python
en
['en', 'en', 'en']
True
async_handle_waypoints_message
(hass, context, message)
Handle a waypoints message.
Handle a waypoints message.
async def async_handle_waypoints_message(hass, context, message): """Handle a waypoints message.""" if not context.import_waypoints: return if context.waypoint_whitelist is not None: user = _parse_topic(message["topic"], context.mqtt_topic)[0] if user not in context.waypoint_whitelist: return if "waypoints" in message: wayps = message["waypoints"] else: wayps = [message] _LOGGER.info("Got %d waypoints from %s", len(wayps), message["topic"]) name_base = " ".join(_parse_topic(message["topic"], context.mqtt_topic)) for wayp in wayps: await async_handle_waypoint(hass, name_base, wayp)
[ "async", "def", "async_handle_waypoints_message", "(", "hass", ",", "context", ",", "message", ")", ":", "if", "not", "context", ".", "import_waypoints", ":", "return", "if", "context", ".", "waypoint_whitelist", "is", "not", "None", ":", "user", "=", "_parse_topic", "(", "message", "[", "\"topic\"", "]", ",", "context", ".", "mqtt_topic", ")", "[", "0", "]", "if", "user", "not", "in", "context", ".", "waypoint_whitelist", ":", "return", "if", "\"waypoints\"", "in", "message", ":", "wayps", "=", "message", "[", "\"waypoints\"", "]", "else", ":", "wayps", "=", "[", "message", "]", "_LOGGER", ".", "info", "(", "\"Got %d waypoints from %s\"", ",", "len", "(", "wayps", ")", ",", "message", "[", "\"topic\"", "]", ")", "name_base", "=", "\" \"", ".", "join", "(", "_parse_topic", "(", "message", "[", "\"topic\"", "]", ",", "context", ".", "mqtt_topic", ")", ")", "for", "wayp", "in", "wayps", ":", "await", "async_handle_waypoint", "(", "hass", ",", "name_base", ",", "wayp", ")" ]
[ 324, 0 ]
[ 345, 58 ]
python
en
['en', 'en', 'en']
True
async_handle_encrypted_message
(hass, context, message)
Handle an encrypted message.
Handle an encrypted message.
async def async_handle_encrypted_message(hass, context, message): """Handle an encrypted message.""" if "topic" not in message and isinstance(context.secret, dict): _LOGGER.error("You cannot set per topic secrets when using HTTP") return plaintext_payload = _decrypt_payload( context.secret, message.get("topic"), message["data"] ) if plaintext_payload is None: return decrypted = json.loads(plaintext_payload) if "topic" in message and "topic" not in decrypted: decrypted["topic"] = message["topic"] await async_handle_message(hass, context, decrypted)
[ "async", "def", "async_handle_encrypted_message", "(", "hass", ",", "context", ",", "message", ")", ":", "if", "\"topic\"", "not", "in", "message", "and", "isinstance", "(", "context", ".", "secret", ",", "dict", ")", ":", "_LOGGER", ".", "error", "(", "\"You cannot set per topic secrets when using HTTP\"", ")", "return", "plaintext_payload", "=", "_decrypt_payload", "(", "context", ".", "secret", ",", "message", ".", "get", "(", "\"topic\"", ")", ",", "message", "[", "\"data\"", "]", ")", "if", "plaintext_payload", "is", "None", ":", "return", "decrypted", "=", "json", ".", "loads", "(", "plaintext_payload", ")", "if", "\"topic\"", "in", "message", "and", "\"topic\"", "not", "in", "decrypted", ":", "decrypted", "[", "\"topic\"", "]", "=", "message", "[", "\"topic\"", "]", "await", "async_handle_message", "(", "hass", ",", "context", ",", "decrypted", ")" ]
[ 349, 0 ]
[ 366, 56 ]
python
en
['br', 'en', 'en']
True
async_handle_not_impl_msg
(hass, context, message)
Handle valid but not implemented message types.
Handle valid but not implemented message types.
async def async_handle_not_impl_msg(hass, context, message): """Handle valid but not implemented message types.""" _LOGGER.debug("Not handling %s message: %s", message.get("_type"), message)
[ "async", "def", "async_handle_not_impl_msg", "(", "hass", ",", "context", ",", "message", ")", ":", "_LOGGER", ".", "debug", "(", "\"Not handling %s message: %s\"", ",", "message", ".", "get", "(", "\"_type\"", ")", ",", "message", ")" ]
[ 375, 0 ]
[ 377, 79 ]
python
en
['en', 'en', 'en']
True
async_handle_unsupported_msg
(hass, context, message)
Handle an unsupported or invalid message type.
Handle an unsupported or invalid message type.
async def async_handle_unsupported_msg(hass, context, message): """Handle an unsupported or invalid message type.""" _LOGGER.warning("Received unsupported message type: %s", message.get("_type"))
[ "async", "def", "async_handle_unsupported_msg", "(", "hass", ",", "context", ",", "message", ")", ":", "_LOGGER", ".", "warning", "(", "\"Received unsupported message type: %s\"", ",", "message", ".", "get", "(", "\"_type\"", ")", ")" ]
[ 380, 0 ]
[ 382, 82 ]
python
en
['en', 'en', 'en']
True
async_handle_message
(hass, context, message)
Handle an OwnTracks message.
Handle an OwnTracks message.
async def async_handle_message(hass, context, message): """Handle an OwnTracks message.""" msgtype = message.get("_type") _LOGGER.debug("Received %s", message) handler = HANDLERS.get(msgtype, async_handle_unsupported_msg) await handler(hass, context, message)
[ "async", "def", "async_handle_message", "(", "hass", ",", "context", ",", "message", ")", ":", "msgtype", "=", "message", ".", "get", "(", "\"_type\"", ")", "_LOGGER", ".", "debug", "(", "\"Received %s\"", ",", "message", ")", "handler", "=", "HANDLERS", ".", "get", "(", "msgtype", ",", "async_handle_unsupported_msg", ")", "await", "handler", "(", "hass", ",", "context", ",", "message", ")" ]
[ 385, 0 ]
[ 393, 41 ]
python
en
['en', 'en', 'en']
True
test_async_setup_entry
(hass)
Test the sensor.
Test the sensor.
async def test_async_setup_entry(hass): """Test the sensor.""" fake_async_add_entities = MagicMock() fake_srp_energy_client = MagicMock() fake_srp_energy_client.usage.return_value = [{1, 2, 3, 1.999, 4}] fake_config = MagicMock( data={ "name": "SRP Energy", "is_tou": False, "id": "0123456789", "username": "[email protected]", "password": "mypassword", } ) hass.data[SRP_ENERGY_DOMAIN] = fake_srp_energy_client await async_setup_entry(hass, fake_config, fake_async_add_entities)
[ "async", "def", "test_async_setup_entry", "(", "hass", ")", ":", "fake_async_add_entities", "=", "MagicMock", "(", ")", "fake_srp_energy_client", "=", "MagicMock", "(", ")", "fake_srp_energy_client", ".", "usage", ".", "return_value", "=", "[", "{", "1", ",", "2", ",", "3", ",", "1.999", ",", "4", "}", "]", "fake_config", "=", "MagicMock", "(", "data", "=", "{", "\"name\"", ":", "\"SRP Energy\"", ",", "\"is_tou\"", ":", "False", ",", "\"id\"", ":", "\"0123456789\"", ",", "\"username\"", ":", "\"[email protected]\"", ",", "\"password\"", ":", "\"mypassword\"", ",", "}", ")", "hass", ".", "data", "[", "SRP_ENERGY_DOMAIN", "]", "=", "fake_srp_energy_client", "await", "async_setup_entry", "(", "hass", ",", "fake_config", ",", "fake_async_add_entities", ")" ]
[ 15, 0 ]
[ 31, 71 ]
python
en
['en', 'sq', 'en']
True
test_async_setup_entry_timeout_error
(hass)
Test fetching usage data. Failed the first time because was too get response.
Test fetching usage data. Failed the first time because was too get response.
async def test_async_setup_entry_timeout_error(hass): """Test fetching usage data. Failed the first time because was too get response.""" fake_async_add_entities = MagicMock() fake_srp_energy_client = MagicMock() fake_srp_energy_client.usage.return_value = [{1, 2, 3, 1.999, 4}] fake_config = MagicMock( data={ "name": "SRP Energy", "is_tou": False, "id": "0123456789", "username": "[email protected]", "password": "mypassword", } ) hass.data[SRP_ENERGY_DOMAIN] = fake_srp_energy_client fake_srp_energy_client.usage.side_effect = TimeoutError() await async_setup_entry(hass, fake_config, fake_async_add_entities) assert not fake_async_add_entities.call_args[0][0][ 0 ].coordinator.last_update_success
[ "async", "def", "test_async_setup_entry_timeout_error", "(", "hass", ")", ":", "fake_async_add_entities", "=", "MagicMock", "(", ")", "fake_srp_energy_client", "=", "MagicMock", "(", ")", "fake_srp_energy_client", ".", "usage", ".", "return_value", "=", "[", "{", "1", ",", "2", ",", "3", ",", "1.999", ",", "4", "}", "]", "fake_config", "=", "MagicMock", "(", "data", "=", "{", "\"name\"", ":", "\"SRP Energy\"", ",", "\"is_tou\"", ":", "False", ",", "\"id\"", ":", "\"0123456789\"", ",", "\"username\"", ":", "\"[email protected]\"", ",", "\"password\"", ":", "\"mypassword\"", ",", "}", ")", "hass", ".", "data", "[", "SRP_ENERGY_DOMAIN", "]", "=", "fake_srp_energy_client", "fake_srp_energy_client", ".", "usage", ".", "side_effect", "=", "TimeoutError", "(", ")", "await", "async_setup_entry", "(", "hass", ",", "fake_config", ",", "fake_async_add_entities", ")", "assert", "not", "fake_async_add_entities", ".", "call_args", "[", "0", "]", "[", "0", "]", "[", "0", "]", ".", "coordinator", ".", "last_update_success" ]
[ 34, 0 ]
[ 54, 37 ]
python
en
['en', 'en', 'en']
True
test_async_setup_entry_connect_error
(hass)
Test fetching usage data. Failed the first time because was too get response.
Test fetching usage data. Failed the first time because was too get response.
async def test_async_setup_entry_connect_error(hass): """Test fetching usage data. Failed the first time because was too get response.""" fake_async_add_entities = MagicMock() fake_srp_energy_client = MagicMock() fake_srp_energy_client.usage.return_value = [{1, 2, 3, 1.999, 4}] fake_config = MagicMock( data={ "name": "SRP Energy", "is_tou": False, "id": "0123456789", "username": "[email protected]", "password": "mypassword", } ) hass.data[SRP_ENERGY_DOMAIN] = fake_srp_energy_client fake_srp_energy_client.usage.side_effect = ValueError() await async_setup_entry(hass, fake_config, fake_async_add_entities) assert not fake_async_add_entities.call_args[0][0][ 0 ].coordinator.last_update_success
[ "async", "def", "test_async_setup_entry_connect_error", "(", "hass", ")", ":", "fake_async_add_entities", "=", "MagicMock", "(", ")", "fake_srp_energy_client", "=", "MagicMock", "(", ")", "fake_srp_energy_client", ".", "usage", ".", "return_value", "=", "[", "{", "1", ",", "2", ",", "3", ",", "1.999", ",", "4", "}", "]", "fake_config", "=", "MagicMock", "(", "data", "=", "{", "\"name\"", ":", "\"SRP Energy\"", ",", "\"is_tou\"", ":", "False", ",", "\"id\"", ":", "\"0123456789\"", ",", "\"username\"", ":", "\"[email protected]\"", ",", "\"password\"", ":", "\"mypassword\"", ",", "}", ")", "hass", ".", "data", "[", "SRP_ENERGY_DOMAIN", "]", "=", "fake_srp_energy_client", "fake_srp_energy_client", ".", "usage", ".", "side_effect", "=", "ValueError", "(", ")", "await", "async_setup_entry", "(", "hass", ",", "fake_config", ",", "fake_async_add_entities", ")", "assert", "not", "fake_async_add_entities", ".", "call_args", "[", "0", "]", "[", "0", "]", "[", "0", "]", ".", "coordinator", ".", "last_update_success" ]
[ 57, 0 ]
[ 77, 37 ]
python
en
['en', 'en', 'en']
True
test_srp_entity
(hass)
Test the SrpEntity.
Test the SrpEntity.
async def test_srp_entity(hass): """Test the SrpEntity.""" fake_coordinator = MagicMock(data=1.99999999999) srp_entity = SrpEntity(fake_coordinator) assert srp_entity is not None assert srp_entity.name == f"{DEFAULT_NAME} {SENSOR_NAME}" assert srp_entity.unique_id == SENSOR_TYPE assert srp_entity.state is None assert srp_entity.unit_of_measurement == ENERGY_KILO_WATT_HOUR assert srp_entity.icon == ICON assert srp_entity.usage == "2.00" assert srp_entity.should_poll is False assert srp_entity.device_state_attributes[ATTR_ATTRIBUTION] == ATTRIBUTION assert srp_entity.available is not None await srp_entity.async_added_to_hass() assert srp_entity.state is not None assert fake_coordinator.async_add_listener.called assert not fake_coordinator.async_add_listener.data.called
[ "async", "def", "test_srp_entity", "(", "hass", ")", ":", "fake_coordinator", "=", "MagicMock", "(", "data", "=", "1.99999999999", ")", "srp_entity", "=", "SrpEntity", "(", "fake_coordinator", ")", "assert", "srp_entity", "is", "not", "None", "assert", "srp_entity", ".", "name", "==", "f\"{DEFAULT_NAME} {SENSOR_NAME}\"", "assert", "srp_entity", ".", "unique_id", "==", "SENSOR_TYPE", "assert", "srp_entity", ".", "state", "is", "None", "assert", "srp_entity", ".", "unit_of_measurement", "==", "ENERGY_KILO_WATT_HOUR", "assert", "srp_entity", ".", "icon", "==", "ICON", "assert", "srp_entity", ".", "usage", "==", "\"2.00\"", "assert", "srp_entity", ".", "should_poll", "is", "False", "assert", "srp_entity", ".", "device_state_attributes", "[", "ATTR_ATTRIBUTION", "]", "==", "ATTRIBUTION", "assert", "srp_entity", ".", "available", "is", "not", "None", "await", "srp_entity", ".", "async_added_to_hass", "(", ")", "assert", "srp_entity", ".", "state", "is", "not", "None", "assert", "fake_coordinator", ".", "async_add_listener", ".", "called", "assert", "not", "fake_coordinator", ".", "async_add_listener", ".", "data", ".", "called" ]
[ 80, 0 ]
[ 99, 62 ]
python
en
['en', 'en', 'en']
True
test_srp_entity_no_data
(hass)
Test the SrpEntity.
Test the SrpEntity.
async def test_srp_entity_no_data(hass): """Test the SrpEntity.""" fake_coordinator = MagicMock(data=False) srp_entity = SrpEntity(fake_coordinator) assert srp_entity.device_state_attributes is None
[ "async", "def", "test_srp_entity_no_data", "(", "hass", ")", ":", "fake_coordinator", "=", "MagicMock", "(", "data", "=", "False", ")", "srp_entity", "=", "SrpEntity", "(", "fake_coordinator", ")", "assert", "srp_entity", ".", "device_state_attributes", "is", "None" ]
[ 102, 0 ]
[ 106, 53 ]
python
en
['en', 'en', 'en']
True
test_srp_entity_no_coord_data
(hass)
Test the SrpEntity.
Test the SrpEntity.
async def test_srp_entity_no_coord_data(hass): """Test the SrpEntity.""" fake_coordinator = MagicMock(data=False) srp_entity = SrpEntity(fake_coordinator) assert srp_entity.usage is None
[ "async", "def", "test_srp_entity_no_coord_data", "(", "hass", ")", ":", "fake_coordinator", "=", "MagicMock", "(", "data", "=", "False", ")", "srp_entity", "=", "SrpEntity", "(", "fake_coordinator", ")", "assert", "srp_entity", ".", "usage", "is", "None" ]
[ 109, 0 ]
[ 114, 35 ]
python
en
['en', 'en', 'en']
True
test_srp_entity_async_update
(hass)
Test the SrpEntity.
Test the SrpEntity.
async def test_srp_entity_async_update(hass): """Test the SrpEntity.""" async def async_magic(): pass MagicMock.__await__ = lambda x: async_magic().__await__() fake_coordinator = MagicMock(data=False) srp_entity = SrpEntity(fake_coordinator) await srp_entity.async_update() assert fake_coordinator.async_request_refresh.called
[ "async", "def", "test_srp_entity_async_update", "(", "hass", ")", ":", "async", "def", "async_magic", "(", ")", ":", "pass", "MagicMock", ".", "__await__", "=", "lambda", "x", ":", "async_magic", "(", ")", ".", "__await__", "(", ")", "fake_coordinator", "=", "MagicMock", "(", "data", "=", "False", ")", "srp_entity", "=", "SrpEntity", "(", "fake_coordinator", ")", "await", "srp_entity", ".", "async_update", "(", ")", "assert", "fake_coordinator", ".", "async_request_refresh", ".", "called" ]
[ 117, 0 ]
[ 128, 56 ]
python
en
['en', 'en', 'en']
True
build_nn
()
Create a function that returns a compiled neural network :return: compiled Keras neural network model
Create a function that returns a compiled neural network :return: compiled Keras neural network model
def build_nn(): """ Create a function that returns a compiled neural network :return: compiled Keras neural network model """ nn = Sequential() nn.add(Dense(500, activation='relu', input_shape=(N_FEATURES,))) nn.add(Dense(150, activation='relu')) nn.add(Dense(N_CLASSES, activation='softmax')) nn.compile( loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'] ) return nn
[ "def", "build_nn", "(", ")", ":", "nn", "=", "Sequential", "(", ")", "nn", ".", "add", "(", "Dense", "(", "500", ",", "activation", "=", "'relu'", ",", "input_shape", "=", "(", "N_FEATURES", ",", ")", ")", ")", "nn", ".", "add", "(", "Dense", "(", "150", ",", "activation", "=", "'relu'", ")", ")", "nn", ".", "add", "(", "Dense", "(", "N_CLASSES", ",", "activation", "=", "'softmax'", ")", ")", "nn", ".", "compile", "(", "loss", "=", "'categorical_crossentropy'", ",", "optimizer", "=", "'adam'", ",", "metrics", "=", "[", "'accuracy'", "]", ")", "return", "nn" ]
[ 40, 0 ]
[ 54, 13 ]
python
en
['en', 'error', 'th']
False
train_model
(path, model, saveto=None, cv=12, **kwargs)
Trains model from corpus at specified path; fitting the model on the full data and writing it to disk at the saveto directory if specified. Returns the scores.
Trains model from corpus at specified path; fitting the model on the full data and writing it to disk at the saveto directory if specified. Returns the scores.
def train_model(path, model, saveto=None, cv=12, **kwargs): """ Trains model from corpus at specified path; fitting the model on the full data and writing it to disk at the saveto directory if specified. Returns the scores. """ # Load the corpus data and labels for classification # corpus = PickledReviewsReader(path) # for Pitchfork corpus = PickledAmazonReviewsReader(path) X = documents(corpus) # y = categorical(corpus) # for Pitchfork y = binarize(corpus) # Compute cross validation scores # mp note: http://scikit-learn.org/stable/faq.html#why-do-i-sometime-get-a-crash-freeze-with-n-jobs-1-under-osx-or-linux scores = cross_val_score(model, X, y, cv=cv, scoring='accuracy') # Fit the model on entire data set model.fit(X, y) # Write to disk if specified if saveto: # have to save the keras part using keras' save method model.steps[-1][1].model.save(saveto['keras_model']) model.steps.pop(-1) # ... and use joblib to save the rest of the pipeline joblib.dump(model, saveto['sklearn_pipe']) # Return scores as well as training time via decorator return scores
[ "def", "train_model", "(", "path", ",", "model", ",", "saveto", "=", "None", ",", "cv", "=", "12", ",", "*", "*", "kwargs", ")", ":", "# Load the corpus data and labels for classification", "# corpus = PickledReviewsReader(path) # for Pitchfork", "corpus", "=", "PickledAmazonReviewsReader", "(", "path", ")", "X", "=", "documents", "(", "corpus", ")", "# y = categorical(corpus) # for Pitchfork", "y", "=", "binarize", "(", "corpus", ")", "# Compute cross validation scores", "# mp note: http://scikit-learn.org/stable/faq.html#why-do-i-sometime-get-a-crash-freeze-with-n-jobs-1-under-osx-or-linux", "scores", "=", "cross_val_score", "(", "model", ",", "X", ",", "y", ",", "cv", "=", "cv", ",", "scoring", "=", "'accuracy'", ")", "# Fit the model on entire data set", "model", ".", "fit", "(", "X", ",", "y", ")", "# Write to disk if specified", "if", "saveto", ":", "# have to save the keras part using keras' save method", "model", ".", "steps", "[", "-", "1", "]", "[", "1", "]", ".", "model", ".", "save", "(", "saveto", "[", "'keras_model'", "]", ")", "model", ".", "steps", ".", "pop", "(", "-", "1", ")", "# ... and use joblib to save the rest of the pipeline", "joblib", ".", "dump", "(", "model", ",", "saveto", "[", "'sklearn_pipe'", "]", ")", "# Return scores as well as training time via decorator", "return", "scores" ]
[ 71, 0 ]
[ 101, 17 ]
python
en
['en', 'error', 'th']
False
gen_cim_frame
(port_num: int, vessel_num: int, stop_nums: tuple, snapshots_num: int)
Define and generate cim frame. Args: port_num (int): Number of ports. vessel_num (int): Number of vessels. stop_nums (tuple): Past stops number and future stop number.
Define and generate cim frame.
def gen_cim_frame(port_num: int, vessel_num: int, stop_nums: tuple, snapshots_num: int): """Define and generate cim frame. Args: port_num (int): Number of ports. vessel_num (int): Number of vessels. stop_nums (tuple): Past stops number and future stop number. """ vessel_cls = gen_vessel_definition(stop_nums) matrix_cls = gen_matrix(port_num, vessel_num) class CimFrame(FrameBase): """Our cim frame that contains vessels, ports, and a general matrix.""" vessels = FrameNode(vessel_cls, vessel_num) ports = FrameNode(Port, port_num) matrix = FrameNode(matrix_cls, 1) def __init__(self): super().__init__(enable_snapshot=True, total_snapshot=snapshots_num) return CimFrame()
[ "def", "gen_cim_frame", "(", "port_num", ":", "int", ",", "vessel_num", ":", "int", ",", "stop_nums", ":", "tuple", ",", "snapshots_num", ":", "int", ")", ":", "vessel_cls", "=", "gen_vessel_definition", "(", "stop_nums", ")", "matrix_cls", "=", "gen_matrix", "(", "port_num", ",", "vessel_num", ")", "class", "CimFrame", "(", "FrameBase", ")", ":", "\"\"\"Our cim frame that contains vessels, ports, and a general matrix.\"\"\"", "vessels", "=", "FrameNode", "(", "vessel_cls", ",", "vessel_num", ")", "ports", "=", "FrameNode", "(", "Port", ",", "port_num", ")", "matrix", "=", "FrameNode", "(", "matrix_cls", ",", "1", ")", "def", "__init__", "(", "self", ")", ":", "super", "(", ")", ".", "__init__", "(", "enable_snapshot", "=", "True", ",", "total_snapshot", "=", "snapshots_num", ")", "return", "CimFrame", "(", ")" ]
[ 10, 0 ]
[ 30, 21 ]
python
en
['en', 'co', 'en']
True
test_form
(hass)
Test we get the form.
Test we get the form.
async def test_form(hass): """Test we get the form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} with patch( "homeassistant.components.juicenet.config_flow.Api.get_devices", return_value=MagicMock(), ), patch( "homeassistant.components.juicenet.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.juicenet.async_setup_entry", return_value=True ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_ACCESS_TOKEN: "access_token"} ) await hass.async_block_till_done() assert result2["type"] == "create_entry" assert result2["title"] == "JuiceNet" assert result2["data"] == {CONF_ACCESS_TOKEN: "access_token"} assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
[ "async", "def", "test_form", "(", "hass", ")", ":", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "with", "patch", "(", "\"homeassistant.components.juicenet.config_flow.Api.get_devices\"", ",", "return_value", "=", "MagicMock", "(", ")", ",", ")", ",", "patch", "(", "\"homeassistant.components.juicenet.async_setup\"", ",", "return_value", "=", "True", ")", "as", "mock_setup", ",", "patch", "(", "\"homeassistant.components.juicenet.async_setup_entry\"", ",", "return_value", "=", "True", ")", "as", "mock_setup_entry", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "CONF_ACCESS_TOKEN", ":", "\"access_token\"", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"create_entry\"", "assert", "result2", "[", "\"title\"", "]", "==", "\"JuiceNet\"", "assert", "result2", "[", "\"data\"", "]", "==", "{", "CONF_ACCESS_TOKEN", ":", "\"access_token\"", "}", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1", "assert", "len", "(", "mock_setup_entry", ".", "mock_calls", ")", "==", "1" ]
[ 17, 0 ]
[ 43, 48 ]
python
en
['en', 'en', 'en']
True
test_form_invalid_auth
(hass)
Test we handle invalid auth.
Test we handle invalid auth.
async def test_form_invalid_auth(hass): """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( "homeassistant.components.juicenet.config_flow.Api.get_devices", side_effect=TokenError, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_ACCESS_TOKEN: "access_token"} ) assert result2["type"] == "form" assert result2["errors"] == {"base": "invalid_auth"}
[ "async", "def", "test_form_invalid_auth", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"homeassistant.components.juicenet.config_flow.Api.get_devices\"", ",", "side_effect", "=", "TokenError", ",", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "CONF_ACCESS_TOKEN", ":", "\"access_token\"", "}", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"invalid_auth\"", "}" ]
[ 46, 0 ]
[ 61, 56 ]
python
en
['en', 'en', 'en']
True
test_form_cannot_connect
(hass)
Test we handle cannot connect error.
Test we handle cannot connect error.
async def test_form_cannot_connect(hass): """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( "homeassistant.components.juicenet.config_flow.Api.get_devices", side_effect=aiohttp.ClientError, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_ACCESS_TOKEN: "access_token"} ) assert result2["type"] == "form" assert result2["errors"] == {"base": "cannot_connect"}
[ "async", "def", "test_form_cannot_connect", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"homeassistant.components.juicenet.config_flow.Api.get_devices\"", ",", "side_effect", "=", "aiohttp", ".", "ClientError", ",", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "CONF_ACCESS_TOKEN", ":", "\"access_token\"", "}", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"cannot_connect\"", "}" ]
[ 64, 0 ]
[ 79, 58 ]
python
en
['en', 'en', 'en']
True
test_form_catch_unknown_errors
(hass)
Test we handle cannot connect error.
Test we handle cannot connect error.
async def test_form_catch_unknown_errors(hass): """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( "homeassistant.components.juicenet.config_flow.Api.get_devices", side_effect=Exception, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_ACCESS_TOKEN: "access_token"} ) assert result2["type"] == "form" assert result2["errors"] == {"base": "unknown"}
[ "async", "def", "test_form_catch_unknown_errors", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"homeassistant.components.juicenet.config_flow.Api.get_devices\"", ",", "side_effect", "=", "Exception", ",", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "CONF_ACCESS_TOKEN", ":", "\"access_token\"", "}", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"unknown\"", "}" ]
[ 82, 0 ]
[ 97, 51 ]
python
en
['en', 'en', 'en']
True
test_import
(hass)
Test that import works as expected.
Test that import works as expected.
async def test_import(hass): """Test that import works as expected.""" with patch( "homeassistant.components.juicenet.config_flow.Api.get_devices", return_value=MagicMock(), ), patch( "homeassistant.components.juicenet.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.juicenet.async_setup_entry", return_value=True ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data={CONF_ACCESS_TOKEN: "access_token"}, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "JuiceNet" assert result["data"] == {CONF_ACCESS_TOKEN: "access_token"} assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
[ "async", "def", "test_import", "(", "hass", ")", ":", "with", "patch", "(", "\"homeassistant.components.juicenet.config_flow.Api.get_devices\"", ",", "return_value", "=", "MagicMock", "(", ")", ",", ")", ",", "patch", "(", "\"homeassistant.components.juicenet.async_setup\"", ",", "return_value", "=", "True", ")", "as", "mock_setup", ",", "patch", "(", "\"homeassistant.components.juicenet.async_setup_entry\"", ",", "return_value", "=", "True", ")", "as", "mock_setup_entry", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_IMPORT", "}", ",", "data", "=", "{", "CONF_ACCESS_TOKEN", ":", "\"access_token\"", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"create_entry\"", "assert", "result", "[", "\"title\"", "]", "==", "\"JuiceNet\"", "assert", "result", "[", "\"data\"", "]", "==", "{", "CONF_ACCESS_TOKEN", ":", "\"access_token\"", "}", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1", "assert", "len", "(", "mock_setup_entry", ".", "mock_calls", ")", "==", "1" ]
[ 100, 0 ]
[ 122, 48 ]
python
en
['en', 'en', 'en']
True
AugustSubscriberMixin.__init__
(self, hass, update_interval)
Initialize an subscriber.
Initialize an subscriber.
def __init__(self, hass, update_interval): """Initialize an subscriber.""" super().__init__() self._hass = hass self._update_interval = update_interval self._subscriptions = {} self._unsub_interval = None
[ "def", "__init__", "(", "self", ",", "hass", ",", "update_interval", ")", ":", "super", "(", ")", ".", "__init__", "(", ")", "self", ".", "_hass", "=", "hass", "self", ".", "_update_interval", "=", "update_interval", "self", ".", "_subscriptions", "=", "{", "}", "self", ".", "_unsub_interval", "=", "None" ]
[ 10, 4 ]
[ 16, 35 ]
python
en
['en', 'en', 'en']
True
AugustSubscriberMixin.async_subscribe_device_id
(self, device_id, update_callback)
Add an callback subscriber. Returns a callable that can be used to unsubscribe.
Add an callback subscriber.
def async_subscribe_device_id(self, device_id, update_callback): """Add an callback subscriber. Returns a callable that can be used to unsubscribe. """ if not self._subscriptions: self._unsub_interval = async_track_time_interval( self._hass, self._async_refresh, self._update_interval ) self._subscriptions.setdefault(device_id, []).append(update_callback) def _unsubscribe(): self.async_unsubscribe_device_id(device_id, update_callback) return _unsubscribe
[ "def", "async_subscribe_device_id", "(", "self", ",", "device_id", ",", "update_callback", ")", ":", "if", "not", "self", ".", "_subscriptions", ":", "self", ".", "_unsub_interval", "=", "async_track_time_interval", "(", "self", ".", "_hass", ",", "self", ".", "_async_refresh", ",", "self", ".", "_update_interval", ")", "self", ".", "_subscriptions", ".", "setdefault", "(", "device_id", ",", "[", "]", ")", ".", "append", "(", "update_callback", ")", "def", "_unsubscribe", "(", ")", ":", "self", ".", "async_unsubscribe_device_id", "(", "device_id", ",", "update_callback", ")", "return", "_unsubscribe" ]
[ 19, 4 ]
[ 33, 27 ]
python
en
['en', 'en', 'en']
True
AugustSubscriberMixin.async_unsubscribe_device_id
(self, device_id, update_callback)
Remove a callback subscriber.
Remove a callback subscriber.
def async_unsubscribe_device_id(self, device_id, update_callback): """Remove a callback subscriber.""" self._subscriptions[device_id].remove(update_callback) if not self._subscriptions[device_id]: del self._subscriptions[device_id] if not self._subscriptions: self._unsub_interval() self._unsub_interval = None
[ "def", "async_unsubscribe_device_id", "(", "self", ",", "device_id", ",", "update_callback", ")", ":", "self", ".", "_subscriptions", "[", "device_id", "]", ".", "remove", "(", "update_callback", ")", "if", "not", "self", ".", "_subscriptions", "[", "device_id", "]", ":", "del", "self", ".", "_subscriptions", "[", "device_id", "]", "if", "not", "self", ".", "_subscriptions", ":", "self", ".", "_unsub_interval", "(", ")", "self", ".", "_unsub_interval", "=", "None" ]
[ 36, 4 ]
[ 43, 39 ]
python
en
['es', 'it', 'en']
False
AugustSubscriberMixin.async_signal_device_id_update
(self, device_id)
Call the callbacks for a device_id.
Call the callbacks for a device_id.
def async_signal_device_id_update(self, device_id): """Call the callbacks for a device_id.""" if not self._subscriptions.get(device_id): return for update_callback in self._subscriptions[device_id]: update_callback()
[ "def", "async_signal_device_id_update", "(", "self", ",", "device_id", ")", ":", "if", "not", "self", ".", "_subscriptions", ".", "get", "(", "device_id", ")", ":", "return", "for", "update_callback", "in", "self", ".", "_subscriptions", "[", "device_id", "]", ":", "update_callback", "(", ")" ]
[ 46, 4 ]
[ 52, 29 ]
python
en
['en', 'en', 'en']
True
mock_controller_client_single
()
Mock a successful client.
Mock a successful client.
def mock_controller_client_single(): """Mock a successful client.""" with patch( "homeassistant.components.meteo_france.config_flow.MeteoFranceClient", update=False, ) as service_mock: service_mock.return_value.search_places.return_value = [CITY_1] yield service_mock
[ "def", "mock_controller_client_single", "(", ")", ":", "with", "patch", "(", "\"homeassistant.components.meteo_france.config_flow.MeteoFranceClient\"", ",", "update", "=", "False", ",", ")", "as", "service_mock", ":", "service_mock", ".", "return_value", ".", "search_places", ".", "return_value", "=", "[", "CITY_1", "]", "yield", "service_mock" ]
[ 72, 0 ]
[ 79, 26 ]
python
en
['en', 'en', 'en']
True
mock_setup
()
Prevent setup.
Prevent setup.
def mock_setup(): """Prevent setup.""" with patch( "homeassistant.components.meteo_france.async_setup", return_value=True, ), patch( "homeassistant.components.meteo_france.async_setup_entry", return_value=True, ): yield
[ "def", "mock_setup", "(", ")", ":", "with", "patch", "(", "\"homeassistant.components.meteo_france.async_setup\"", ",", "return_value", "=", "True", ",", ")", ",", "patch", "(", "\"homeassistant.components.meteo_france.async_setup_entry\"", ",", "return_value", "=", "True", ",", ")", ":", "yield" ]
[ 83, 0 ]
[ 92, 13 ]
python
en
['en', 'pt', 'en']
False
mock_controller_client_multiple
()
Mock a successful client.
Mock a successful client.
def mock_controller_client_multiple(): """Mock a successful client.""" with patch( "homeassistant.components.meteo_france.config_flow.MeteoFranceClient", update=False, ) as service_mock: service_mock.return_value.search_places.return_value = [CITY_2, CITY_3] yield service_mock
[ "def", "mock_controller_client_multiple", "(", ")", ":", "with", "patch", "(", "\"homeassistant.components.meteo_france.config_flow.MeteoFranceClient\"", ",", "update", "=", "False", ",", ")", "as", "service_mock", ":", "service_mock", ".", "return_value", ".", "search_places", ".", "return_value", "=", "[", "CITY_2", ",", "CITY_3", "]", "yield", "service_mock" ]
[ 96, 0 ]
[ 103, 26 ]
python
en
['en', 'en', 'en']
True
mock_controller_client_empty
()
Mock a successful client.
Mock a successful client.
def mock_controller_client_empty(): """Mock a successful client.""" with patch( "homeassistant.components.meteo_france.config_flow.MeteoFranceClient", update=False, ) as service_mock: service_mock.return_value.search_places.return_value = [] yield service_mock
[ "def", "mock_controller_client_empty", "(", ")", ":", "with", "patch", "(", "\"homeassistant.components.meteo_france.config_flow.MeteoFranceClient\"", ",", "update", "=", "False", ",", ")", "as", "service_mock", ":", "service_mock", ".", "return_value", ".", "search_places", ".", "return_value", "=", "[", "]", "yield", "service_mock" ]
[ 107, 0 ]
[ 114, 26 ]
python
en
['en', 'en', 'en']
True
test_user
(hass, client_single)
Test user config.
Test user config.
async def test_user(hass, client_single): """Test user config.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # test with all provided with search returning only 1 place result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data={CONF_CITY: CITY_1_POSTAL}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["result"].unique_id == f"{CITY_1_LAT}, {CITY_1_LON}" assert result["title"] == f"{CITY_1}" assert result["data"][CONF_LATITUDE] == str(CITY_1_LAT) assert result["data"][CONF_LONGITUDE] == str(CITY_1_LON)
[ "async", "def", "test_user", "(", "hass", ",", "client_single", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_USER", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"step_id\"", "]", "==", "\"user\"", "# test with all provided with search returning only 1 place", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_USER", "}", ",", "data", "=", "{", "CONF_CITY", ":", "CITY_1_POSTAL", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"result\"", "]", ".", "unique_id", "==", "f\"{CITY_1_LAT}, {CITY_1_LON}\"", "assert", "result", "[", "\"title\"", "]", "==", "f\"{CITY_1}\"", "assert", "result", "[", "\"data\"", "]", "[", "CONF_LATITUDE", "]", "==", "str", "(", "CITY_1_LAT", ")", "assert", "result", "[", "\"data\"", "]", "[", "CONF_LONGITUDE", "]", "==", "str", "(", "CITY_1_LON", ")" ]
[ 117, 0 ]
[ 135, 60 ]
python
en
['en', 'da', 'en']
True
test_user_list
(hass, client_multiple)
Test user config.
Test user config.
async def test_user_list(hass, client_multiple): """Test user config.""" # test with all provided with search returning more than 1 place result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data={CONF_CITY: CITY_2_NAME}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "cities" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_CITY: f"{CITY_3};{CITY_3_LAT};{CITY_3_LON}"}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["result"].unique_id == f"{CITY_3_LAT}, {CITY_3_LON}" assert result["title"] == f"{CITY_3}" assert result["data"][CONF_LATITUDE] == str(CITY_3_LAT) assert result["data"][CONF_LONGITUDE] == str(CITY_3_LON)
[ "async", "def", "test_user_list", "(", "hass", ",", "client_multiple", ")", ":", "# test with all provided with search returning more than 1 place", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_USER", "}", ",", "data", "=", "{", "CONF_CITY", ":", "CITY_2_NAME", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"step_id\"", "]", "==", "\"cities\"", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "user_input", "=", "{", "CONF_CITY", ":", "f\"{CITY_3};{CITY_3_LAT};{CITY_3_LON}\"", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"result\"", "]", ".", "unique_id", "==", "f\"{CITY_3_LAT}, {CITY_3_LON}\"", "assert", "result", "[", "\"title\"", "]", "==", "f\"{CITY_3}\"", "assert", "result", "[", "\"data\"", "]", "[", "CONF_LATITUDE", "]", "==", "str", "(", "CITY_3_LAT", ")", "assert", "result", "[", "\"data\"", "]", "[", "CONF_LONGITUDE", "]", "==", "str", "(", "CITY_3_LON", ")" ]
[ 138, 0 ]
[ 158, 60 ]
python
en
['en', 'da', 'en']
True
test_import
(hass, client_multiple)
Test import step.
Test import step.
async def test_import(hass, client_multiple): """Test import step.""" # import with all result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_CITY: CITY_2_NAME}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["result"].unique_id == f"{CITY_2_LAT}, {CITY_2_LON}" assert result["title"] == f"{CITY_2}" assert result["data"][CONF_LATITUDE] == str(CITY_2_LAT) assert result["data"][CONF_LONGITUDE] == str(CITY_2_LON)
[ "async", "def", "test_import", "(", "hass", ",", "client_multiple", ")", ":", "# import with all", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_IMPORT", "}", ",", "data", "=", "{", "CONF_CITY", ":", "CITY_2_NAME", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"result\"", "]", ".", "unique_id", "==", "f\"{CITY_2_LAT}, {CITY_2_LON}\"", "assert", "result", "[", "\"title\"", "]", "==", "f\"{CITY_2}\"", "assert", "result", "[", "\"data\"", "]", "[", "CONF_LATITUDE", "]", "==", "str", "(", "CITY_2_LAT", ")", "assert", "result", "[", "\"data\"", "]", "[", "CONF_LONGITUDE", "]", "==", "str", "(", "CITY_2_LON", ")" ]
[ 161, 0 ]
[ 173, 60 ]
python
de
['de', 'sd', 'en']
False
test_search_failed
(hass, client_empty)
Test error displayed if no result in search.
Test error displayed if no result in search.
async def test_search_failed(hass, client_empty): """Test error displayed if no result in search.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data={CONF_CITY: CITY_1_POSTAL}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_CITY: "empty"}
[ "async", "def", "test_search_failed", "(", "hass", ",", "client_empty", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_USER", "}", ",", "data", "=", "{", "CONF_CITY", ":", "CITY_1_POSTAL", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"errors\"", "]", "==", "{", "CONF_CITY", ":", "\"empty\"", "}" ]
[ 176, 0 ]
[ 185, 51 ]
python
en
['en', 'en', 'en']
True
test_abort_if_already_setup
(hass, client_single)
Test we abort if already setup.
Test we abort if already setup.
async def test_abort_if_already_setup(hass, client_single): """Test we abort if already setup.""" MockConfigEntry( domain=DOMAIN, data={CONF_LATITUDE: CITY_1_LAT, CONF_LONGITUDE: CITY_1_LON}, unique_id=f"{CITY_1_LAT}, {CITY_1_LON}", ).add_to_hass(hass) # Should fail, same CITY same postal code (import) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_CITY: CITY_1_POSTAL}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" # Should fail, same CITY same postal code (flow) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data={CONF_CITY: CITY_1_POSTAL}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured"
[ "async", "def", "test_abort_if_already_setup", "(", "hass", ",", "client_single", ")", ":", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "data", "=", "{", "CONF_LATITUDE", ":", "CITY_1_LAT", ",", "CONF_LONGITUDE", ":", "CITY_1_LON", "}", ",", "unique_id", "=", "f\"{CITY_1_LAT}, {CITY_1_LON}\"", ",", ")", ".", "add_to_hass", "(", "hass", ")", "# Should fail, same CITY same postal code (import)", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_IMPORT", "}", ",", "data", "=", "{", "CONF_CITY", ":", "CITY_1_POSTAL", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_ABORT", "assert", "result", "[", "\"reason\"", "]", "==", "\"already_configured\"", "# Should fail, same CITY same postal code (flow)", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_USER", "}", ",", "data", "=", "{", "CONF_CITY", ":", "CITY_1_POSTAL", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_ABORT", "assert", "result", "[", "\"reason\"", "]", "==", "\"already_configured\"" ]
[ 188, 0 ]
[ 212, 51 ]
python
en
['en', 'zu', 'en']
True
test_options_flow
(hass: HomeAssistantType)
Test config flow options.
Test config flow options.
async def test_options_flow(hass: HomeAssistantType): """Test config flow options.""" config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_LATITUDE: CITY_1_LAT, CONF_LONGITUDE: CITY_1_LON}, unique_id=f"{CITY_1_LAT}, {CITY_1_LON}", ) config_entry.add_to_hass(hass) assert config_entry.options == {} result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "init" # Default result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert config_entry.options[CONF_MODE] == FORECAST_MODE_DAILY # Manual result = await hass.config_entries.options.async_init(config_entry.entry_id) result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_MODE: FORECAST_MODE_HOURLY}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert config_entry.options[CONF_MODE] == FORECAST_MODE_HOURLY
[ "async", "def", "test_options_flow", "(", "hass", ":", "HomeAssistantType", ")", ":", "config_entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "data", "=", "{", "CONF_LATITUDE", ":", "CITY_1_LAT", ",", "CONF_LONGITUDE", ":", "CITY_1_LON", "}", ",", "unique_id", "=", "f\"{CITY_1_LAT}, {CITY_1_LON}\"", ",", ")", "config_entry", ".", "add_to_hass", "(", "hass", ")", "assert", "config_entry", ".", "options", "==", "{", "}", "result", "=", "await", "hass", ".", "config_entries", ".", "options", ".", "async_init", "(", "config_entry", ".", "entry_id", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"step_id\"", "]", "==", "\"init\"", "# Default", "result", "=", "await", "hass", ".", "config_entries", ".", "options", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "user_input", "=", "{", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "config_entry", ".", "options", "[", "CONF_MODE", "]", "==", "FORECAST_MODE_DAILY", "# Manual", "result", "=", "await", "hass", ".", "config_entries", ".", "options", ".", "async_init", "(", "config_entry", ".", "entry_id", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "options", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "user_input", "=", "{", "CONF_MODE", ":", "FORECAST_MODE_HOURLY", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "config_entry", ".", "options", "[", "CONF_MODE", "]", "==", "FORECAST_MODE_HOURLY" ]
[ 215, 0 ]
[ 245, 66 ]
python
en
['en', 'fr', 'en']
True
async_setup
(hass, config)
Initialize the Mythic Beasts component.
Initialize the Mythic Beasts component.
async def async_setup(hass, config): """Initialize the Mythic Beasts component.""" domain = config[DOMAIN][CONF_DOMAIN] password = config[DOMAIN][CONF_PASSWORD] host = config[DOMAIN][CONF_HOST] update_interval = config[DOMAIN][CONF_SCAN_INTERVAL] session = async_get_clientsession(hass) result = await mbddns.update(domain, password, host, session=session) if not result: return False async def update_domain_interval(now): """Update the DNS entry.""" await mbddns.update(domain, password, host, session=session) async_track_time_interval(hass, update_domain_interval, update_interval) return True
[ "async", "def", "async_setup", "(", "hass", ",", "config", ")", ":", "domain", "=", "config", "[", "DOMAIN", "]", "[", "CONF_DOMAIN", "]", "password", "=", "config", "[", "DOMAIN", "]", "[", "CONF_PASSWORD", "]", "host", "=", "config", "[", "DOMAIN", "]", "[", "CONF_HOST", "]", "update_interval", "=", "config", "[", "DOMAIN", "]", "[", "CONF_SCAN_INTERVAL", "]", "session", "=", "async_get_clientsession", "(", "hass", ")", "result", "=", "await", "mbddns", ".", "update", "(", "domain", ",", "password", ",", "host", ",", "session", "=", "session", ")", "if", "not", "result", ":", "return", "False", "async", "def", "update_domain_interval", "(", "now", ")", ":", "\"\"\"Update the DNS entry.\"\"\"", "await", "mbddns", ".", "update", "(", "domain", ",", "password", ",", "host", ",", "session", "=", "session", ")", "async_track_time_interval", "(", "hass", ",", "update_domain_interval", ",", "update_interval", ")", "return", "True" ]
[ 37, 0 ]
[ 57, 15 ]
python
en
['en', 'en', 'en']
True
async_setup_platform
( hass, config, async_add_entities_callback, discovery_info=None )
Find and return test switches.
Find and return test switches.
async def async_setup_platform( hass, config, async_add_entities_callback, discovery_info=None ): """Find and return test switches.""" pass
[ "async", "def", "async_setup_platform", "(", "hass", ",", "config", ",", "async_add_entities_callback", ",", "discovery_info", "=", "None", ")", ":", "pass" ]
[ 3, 0 ]
[ 7, 8 ]
python
en
['en', 'en', 'en']
True
ServiceHandler._set_headers
(self)
Setting up the header response upon connection establishment.
Setting up the header response upon connection establishment.
def _set_headers(self): '''Setting up the header response upon connection establishment. ''' self.send_response(200) self.send_header('Content-type','text/json') # reads the length of the Headers length = int(self.headers['Content-Length']) # reads the contents of the request content = self.rfile.read(length) data_strm = str(content).strip('b\'') self.end_headers() return data_strm
[ "def", "_set_headers", "(", "self", ")", ":", "self", ".", "send_response", "(", "200", ")", "self", ".", "send_header", "(", "'Content-type'", ",", "'text/json'", ")", "# reads the length of the Headers", "length", "=", "int", "(", "self", ".", "headers", "[", "'Content-Length'", "]", ")", "# reads the contents of the request", "content", "=", "self", ".", "rfile", ".", "read", "(", "length", ")", "data_strm", "=", "str", "(", "content", ")", ".", "strip", "(", "'b\\''", ")", "self", ".", "end_headers", "(", ")", "return", "data_strm" ]
[ 19, 4 ]
[ 32, 24 ]
python
en
['en', 'en', 'en']
True
ServiceHandler.do_VIEW
(self)
Uses VIEW Method to show the value for a given Key from the cb-exporter's targets.json file. Sample Curl Query: curl -X VIEW --data "tild23.pld.ai" server-url:port-no
Uses VIEW Method to show the value for a given Key from the cb-exporter's targets.json file. Sample Curl Query: curl -X VIEW --data "tild23.pld.ai" server-url:port-no
def do_VIEW(self): '''Uses VIEW Method to show the value for a given Key from the cb-exporter's targets.json file. Sample Curl Query: curl -X VIEW --data "tild23.pld.ai" server-url:port-no ''' # defining all the headers. display = {} data_strm = self._set_headers() display[data_strm] = get_util(data_strm) self.send_response(200) self.send_header('Content-type','text/json') self.end_headers() # prints values from given key as input. self.wfile.write(json.dumps(display).encode())
[ "def", "do_VIEW", "(", "self", ")", ":", "# defining all the headers.", "display", "=", "{", "}", "data_strm", "=", "self", ".", "_set_headers", "(", ")", "display", "[", "data_strm", "]", "=", "get_util", "(", "data_strm", ")", "self", ".", "send_response", "(", "200", ")", "self", ".", "send_header", "(", "'Content-type'", ",", "'text/json'", ")", "self", ".", "end_headers", "(", ")", "# prints values from given key as input.", "self", ".", "wfile", ".", "write", "(", "json", ".", "dumps", "(", "display", ")", ".", "encode", "(", ")", ")" ]
[ 35, 4 ]
[ 48, 54 ]
python
en
['en', 'en', 'en']
True
ServiceHandler.do_GET
(self)
Uses GET Method to show all the keys and their respective values from the cb-exporter's targets.json file. Sample Curl Query: curl -X GET server-url:port-no
Uses GET Method to show all the keys and their respective values from the cb-exporter's targets.json file. Sample Curl Query: curl -X GET server-url:port-no
def do_GET(self): '''Uses GET Method to show all the keys and their respective values from the cb-exporter's targets.json file. Sample Curl Query: curl -X GET server-url:port-no ''' # defining all the headers. self.send_response(200) self.send_header('Content-type','text/json') self.end_headers() # prints all the keys and values of the json file. self.wfile.write(json.dumps(get_view()).encode())
[ "def", "do_GET", "(", "self", ")", ":", "# defining all the headers.", "self", ".", "send_response", "(", "200", ")", "self", ".", "send_header", "(", "'Content-type'", ",", "'text/json'", ")", "self", ".", "end_headers", "(", ")", "# prints all the keys and values of the json file.", "self", ".", "wfile", ".", "write", "(", "json", ".", "dumps", "(", "get_view", "(", ")", ")", ".", "encode", "(", ")", ")" ]
[ 51, 4 ]
[ 62, 57 ]
python
en
['en', 'en', 'en']
True
ServiceHandler.do_POST
(self)
Uses POST Method to get empty port-no to start cb-exporter process & update targets.json for both our HTTP server and Prometheus. Sample Curl Query: curl -X POST --data "Hostname,UserName,Password" server-url:port-no
Uses POST Method to get empty port-no to start cb-exporter process & update targets.json for both our HTTP server and Prometheus. Sample Curl Query: curl -X POST --data "Hostname,UserName,Password" server-url:port-no
def do_POST(self): '''Uses POST Method to get empty port-no to start cb-exporter process & update targets.json for both our HTTP server and Prometheus. Sample Curl Query: curl -X POST --data "Hostname,UserName,Password" server-url:port-no ''' data_strm = self._set_headers() data_strm_list = data_strm.split(',') # get free port number port_no = get_empty_port() # store key, values in reference targets.json file write_util(data_strm_list[0], data_strm_list[1], data_strm_list[2], port_no) # change Prometheus server's target file. write_targets(port_no,data_strm_list[0], data_strm_list[1], data_strm_list[2]) # get messages & command that is executed to be posted as server response. eff_command, mssg_dgst = cbexport_start(data_strm_list[0]) eff_mssg = "Command Executed: "+str(eff_command)+" Command Execution Message: "+str(mssg_dgst) # Curl command gets hanged after Popen process execution. Hence, every change is made before it. # change Prometheus server's target file. # write_targets(port_no," "," ") self.send_response(200) self.wfile.write(bytes(eff_mssg,'utf-8'))
[ "def", "do_POST", "(", "self", ")", ":", "data_strm", "=", "self", ".", "_set_headers", "(", ")", "data_strm_list", "=", "data_strm", ".", "split", "(", "','", ")", "# get free port number", "port_no", "=", "get_empty_port", "(", ")", "# store key, values in reference targets.json file", "write_util", "(", "data_strm_list", "[", "0", "]", ",", "data_strm_list", "[", "1", "]", ",", "data_strm_list", "[", "2", "]", ",", "port_no", ")", "# change Prometheus server's target file.", "write_targets", "(", "port_no", ",", "data_strm_list", "[", "0", "]", ",", "data_strm_list", "[", "1", "]", ",", "data_strm_list", "[", "2", "]", ")", "# get messages & command that is executed to be posted as server response.", "eff_command", ",", "mssg_dgst", "=", "cbexport_start", "(", "data_strm_list", "[", "0", "]", ")", "eff_mssg", "=", "\"Command Executed: \"", "+", "str", "(", "eff_command", ")", "+", "\" Command Execution Message: \"", "+", "str", "(", "mssg_dgst", ")", "# Curl command gets hanged after Popen process execution. Hence, every change is made before it.", "# change Prometheus server's target file.", "# write_targets(port_no,\" \",\" \")", "self", ".", "send_response", "(", "200", ")", "self", ".", "wfile", ".", "write", "(", "bytes", "(", "eff_mssg", ",", "'utf-8'", ")", ")" ]
[ 65, 4 ]
[ 86, 49 ]
python
en
['en', 'en', 'en']
True
ServiceHandler.do_DELETE
(self)
Uses DELETE Method to kill a cb-exporter process baed on port-no & update targets.json for both our HTTP server and Prometheus. Sample Curl Query: curl -X DELETE --data "Hostname" server-url:port-no
Uses DELETE Method to kill a cb-exporter process baed on port-no & update targets.json for both our HTTP server and Prometheus. Sample Curl Query: curl -X DELETE --data "Hostname" server-url:port-no
def do_DELETE(self): '''Uses DELETE Method to kill a cb-exporter process baed on port-no & update targets.json for both our HTTP server and Prometheus. Sample Curl Query: curl -X DELETE --data "Hostname" server-url:port-no ''' # receive hostname value as key data_strm = self._set_headers() # delete value from cb-exporter's reference file. str_val, port_no = del_util(data_strm) eff_mssg = str_val+" Port Number in use: "+port_no # delete from targets.json file of Prometheus server del_targets(port_no) # kill the process with id in use cbexport_del(port_no) # delete from targets.json file of Prometheus server # del_targets(port_no) self.send_response(200) self.wfile.write(bytes(eff_mssg,'utf-8'))
[ "def", "do_DELETE", "(", "self", ")", ":", "# receive hostname value as key", "data_strm", "=", "self", ".", "_set_headers", "(", ")", "# delete value from cb-exporter's reference file.", "str_val", ",", "port_no", "=", "del_util", "(", "data_strm", ")", "eff_mssg", "=", "str_val", "+", "\" Port Number in use: \"", "+", "port_no", "# delete from targets.json file of Prometheus server", "del_targets", "(", "port_no", ")", "# kill the process with id in use", "cbexport_del", "(", "port_no", ")", "# delete from targets.json file of Prometheus server", "# del_targets(port_no)", "self", ".", "send_response", "(", "200", ")", "self", ".", "wfile", ".", "write", "(", "bytes", "(", "eff_mssg", ",", "'utf-8'", ")", ")" ]
[ 89, 4 ]
[ 107, 49 ]
python
en
['en', 'en', 'en']
True
get_mock_remote
(device_info=MOCK_DEVICE_INFO)
Return a mock remote.
Return a mock remote.
def get_mock_remote(device_info=MOCK_DEVICE_INFO): """Return a mock remote.""" mock_remote = Mock() async def async_create_remote_control(during_setup=False): return mock_remote.async_create_remote_control = async_create_remote_control async def async_get_device_info(): return device_info mock_remote.async_get_device_info = async_get_device_info return mock_remote
[ "def", "get_mock_remote", "(", "device_info", "=", "MOCK_DEVICE_INFO", ")", ":", "mock_remote", "=", "Mock", "(", ")", "async", "def", "async_create_remote_control", "(", "during_setup", "=", "False", ")", ":", "return", "mock_remote", ".", "async_create_remote_control", "=", "async_create_remote_control", "async", "def", "async_get_device_info", "(", ")", ":", "return", "device_info", "mock_remote", ".", "async_get_device_info", "=", "async_get_device_info", "return", "mock_remote" ]
[ 43, 0 ]
[ 57, 22 ]
python
en
['en', 'co', 'en']
True
test_setup_entry_encrypted
(hass)
Test setup with encrypted config entry.
Test setup with encrypted config entry.
async def test_setup_entry_encrypted(hass): """Test setup with encrypted config entry.""" mock_entry = MockConfigEntry( domain=DOMAIN, unique_id=MOCK_DEVICE_INFO[ATTR_UDN], data={**MOCK_CONFIG_DATA, **MOCK_ENCRYPTION_DATA, **MOCK_DEVICE_INFO}, ) mock_entry.add_to_hass(hass) mock_remote = get_mock_remote() with patch( "homeassistant.components.panasonic_viera.Remote", return_value=mock_remote, ): await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() state = hass.states.get("media_player.panasonic_viera_tv") assert state assert state.name == DEFAULT_NAME
[ "async", "def", "test_setup_entry_encrypted", "(", "hass", ")", ":", "mock_entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "unique_id", "=", "MOCK_DEVICE_INFO", "[", "ATTR_UDN", "]", ",", "data", "=", "{", "*", "*", "MOCK_CONFIG_DATA", ",", "*", "*", "MOCK_ENCRYPTION_DATA", ",", "*", "*", "MOCK_DEVICE_INFO", "}", ",", ")", "mock_entry", ".", "add_to_hass", "(", "hass", ")", "mock_remote", "=", "get_mock_remote", "(", ")", "with", "patch", "(", "\"homeassistant.components.panasonic_viera.Remote\"", ",", "return_value", "=", "mock_remote", ",", ")", ":", "await", "hass", ".", "config_entries", ".", "async_setup", "(", "mock_entry", ".", "entry_id", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"media_player.panasonic_viera_tv\"", ")", "assert", "state", "assert", "state", ".", "name", "==", "DEFAULT_NAME" ]
[ 60, 0 ]
[ 82, 41 ]
python
en
['en', 'en', 'en']
True
test_setup_entry_encrypted_missing_device_info
(hass)
Test setup with encrypted config entry and missing device info.
Test setup with encrypted config entry and missing device info.
async def test_setup_entry_encrypted_missing_device_info(hass): """Test setup with encrypted config entry and missing device info.""" mock_entry = MockConfigEntry( domain=DOMAIN, unique_id=MOCK_CONFIG_DATA[CONF_HOST], data={**MOCK_CONFIG_DATA, **MOCK_ENCRYPTION_DATA}, ) mock_entry.add_to_hass(hass) mock_remote = get_mock_remote() with patch( "homeassistant.components.panasonic_viera.Remote", return_value=mock_remote, ): await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() assert mock_entry.data[ATTR_DEVICE_INFO] == MOCK_DEVICE_INFO assert mock_entry.unique_id == MOCK_DEVICE_INFO[ATTR_UDN] state = hass.states.get("media_player.panasonic_viera_tv") assert state assert state.name == DEFAULT_NAME
[ "async", "def", "test_setup_entry_encrypted_missing_device_info", "(", "hass", ")", ":", "mock_entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "unique_id", "=", "MOCK_CONFIG_DATA", "[", "CONF_HOST", "]", ",", "data", "=", "{", "*", "*", "MOCK_CONFIG_DATA", ",", "*", "*", "MOCK_ENCRYPTION_DATA", "}", ",", ")", "mock_entry", ".", "add_to_hass", "(", "hass", ")", "mock_remote", "=", "get_mock_remote", "(", ")", "with", "patch", "(", "\"homeassistant.components.panasonic_viera.Remote\"", ",", "return_value", "=", "mock_remote", ",", ")", ":", "await", "hass", ".", "config_entries", ".", "async_setup", "(", "mock_entry", ".", "entry_id", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "mock_entry", ".", "data", "[", "ATTR_DEVICE_INFO", "]", "==", "MOCK_DEVICE_INFO", "assert", "mock_entry", ".", "unique_id", "==", "MOCK_DEVICE_INFO", "[", "ATTR_UDN", "]", "state", "=", "hass", ".", "states", ".", "get", "(", "\"media_player.panasonic_viera_tv\"", ")", "assert", "state", "assert", "state", ".", "name", "==", "DEFAULT_NAME" ]
[ 85, 0 ]
[ 110, 41 ]
python
en
['en', 'en', 'en']
True
test_setup_entry_encrypted_missing_device_info_none
(hass)
Test setup with encrypted config entry and device info set to None.
Test setup with encrypted config entry and device info set to None.
async def test_setup_entry_encrypted_missing_device_info_none(hass): """Test setup with encrypted config entry and device info set to None.""" mock_entry = MockConfigEntry( domain=DOMAIN, unique_id=MOCK_CONFIG_DATA[CONF_HOST], data={**MOCK_CONFIG_DATA, **MOCK_ENCRYPTION_DATA}, ) mock_entry.add_to_hass(hass) mock_remote = get_mock_remote(device_info=None) with patch( "homeassistant.components.panasonic_viera.Remote", return_value=mock_remote, ): await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() assert mock_entry.data[ATTR_DEVICE_INFO] is None assert mock_entry.unique_id == MOCK_CONFIG_DATA[CONF_HOST] state = hass.states.get("media_player.panasonic_viera_tv") assert state assert state.name == DEFAULT_NAME
[ "async", "def", "test_setup_entry_encrypted_missing_device_info_none", "(", "hass", ")", ":", "mock_entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "unique_id", "=", "MOCK_CONFIG_DATA", "[", "CONF_HOST", "]", ",", "data", "=", "{", "*", "*", "MOCK_CONFIG_DATA", ",", "*", "*", "MOCK_ENCRYPTION_DATA", "}", ",", ")", "mock_entry", ".", "add_to_hass", "(", "hass", ")", "mock_remote", "=", "get_mock_remote", "(", "device_info", "=", "None", ")", "with", "patch", "(", "\"homeassistant.components.panasonic_viera.Remote\"", ",", "return_value", "=", "mock_remote", ",", ")", ":", "await", "hass", ".", "config_entries", ".", "async_setup", "(", "mock_entry", ".", "entry_id", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "mock_entry", ".", "data", "[", "ATTR_DEVICE_INFO", "]", "is", "None", "assert", "mock_entry", ".", "unique_id", "==", "MOCK_CONFIG_DATA", "[", "CONF_HOST", "]", "state", "=", "hass", ".", "states", ".", "get", "(", "\"media_player.panasonic_viera_tv\"", ")", "assert", "state", "assert", "state", ".", "name", "==", "DEFAULT_NAME" ]
[ 113, 0 ]
[ 138, 41 ]
python
en
['en', 'en', 'en']
True
test_setup_entry_unencrypted
(hass)
Test setup with unencrypted config entry.
Test setup with unencrypted config entry.
async def test_setup_entry_unencrypted(hass): """Test setup with unencrypted config entry.""" mock_entry = MockConfigEntry( domain=DOMAIN, unique_id=MOCK_DEVICE_INFO[ATTR_UDN], data={**MOCK_CONFIG_DATA, **MOCK_DEVICE_INFO}, ) mock_entry.add_to_hass(hass) mock_remote = get_mock_remote() with patch( "homeassistant.components.panasonic_viera.Remote", return_value=mock_remote, ): await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() state = hass.states.get("media_player.panasonic_viera_tv") assert state assert state.name == DEFAULT_NAME
[ "async", "def", "test_setup_entry_unencrypted", "(", "hass", ")", ":", "mock_entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "unique_id", "=", "MOCK_DEVICE_INFO", "[", "ATTR_UDN", "]", ",", "data", "=", "{", "*", "*", "MOCK_CONFIG_DATA", ",", "*", "*", "MOCK_DEVICE_INFO", "}", ",", ")", "mock_entry", ".", "add_to_hass", "(", "hass", ")", "mock_remote", "=", "get_mock_remote", "(", ")", "with", "patch", "(", "\"homeassistant.components.panasonic_viera.Remote\"", ",", "return_value", "=", "mock_remote", ",", ")", ":", "await", "hass", ".", "config_entries", ".", "async_setup", "(", "mock_entry", ".", "entry_id", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"media_player.panasonic_viera_tv\"", ")", "assert", "state", "assert", "state", ".", "name", "==", "DEFAULT_NAME" ]
[ 141, 0 ]
[ 163, 41 ]
python
en
['en', 'en', 'en']
True
test_setup_entry_unencrypted_missing_device_info
(hass)
Test setup with unencrypted config entry and missing device info.
Test setup with unencrypted config entry and missing device info.
async def test_setup_entry_unencrypted_missing_device_info(hass): """Test setup with unencrypted config entry and missing device info.""" mock_entry = MockConfigEntry( domain=DOMAIN, unique_id=MOCK_CONFIG_DATA[CONF_HOST], data=MOCK_CONFIG_DATA, ) mock_entry.add_to_hass(hass) mock_remote = get_mock_remote() with patch( "homeassistant.components.panasonic_viera.Remote", return_value=mock_remote, ): await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() assert mock_entry.data[ATTR_DEVICE_INFO] == MOCK_DEVICE_INFO assert mock_entry.unique_id == MOCK_DEVICE_INFO[ATTR_UDN] state = hass.states.get("media_player.panasonic_viera_tv") assert state assert state.name == DEFAULT_NAME
[ "async", "def", "test_setup_entry_unencrypted_missing_device_info", "(", "hass", ")", ":", "mock_entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "unique_id", "=", "MOCK_CONFIG_DATA", "[", "CONF_HOST", "]", ",", "data", "=", "MOCK_CONFIG_DATA", ",", ")", "mock_entry", ".", "add_to_hass", "(", "hass", ")", "mock_remote", "=", "get_mock_remote", "(", ")", "with", "patch", "(", "\"homeassistant.components.panasonic_viera.Remote\"", ",", "return_value", "=", "mock_remote", ",", ")", ":", "await", "hass", ".", "config_entries", ".", "async_setup", "(", "mock_entry", ".", "entry_id", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "mock_entry", ".", "data", "[", "ATTR_DEVICE_INFO", "]", "==", "MOCK_DEVICE_INFO", "assert", "mock_entry", ".", "unique_id", "==", "MOCK_DEVICE_INFO", "[", "ATTR_UDN", "]", "state", "=", "hass", ".", "states", ".", "get", "(", "\"media_player.panasonic_viera_tv\"", ")", "assert", "state", "assert", "state", ".", "name", "==", "DEFAULT_NAME" ]
[ 166, 0 ]
[ 191, 41 ]
python
en
['en', 'en', 'en']
True
test_setup_entry_unencrypted_missing_device_info_none
(hass)
Test setup with unencrypted config entry and device info set to None.
Test setup with unencrypted config entry and device info set to None.
async def test_setup_entry_unencrypted_missing_device_info_none(hass): """Test setup with unencrypted config entry and device info set to None.""" mock_entry = MockConfigEntry( domain=DOMAIN, unique_id=MOCK_CONFIG_DATA[CONF_HOST], data=MOCK_CONFIG_DATA, ) mock_entry.add_to_hass(hass) mock_remote = get_mock_remote(device_info=None) with patch( "homeassistant.components.panasonic_viera.Remote", return_value=mock_remote, ): await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() assert mock_entry.data[ATTR_DEVICE_INFO] is None assert mock_entry.unique_id == MOCK_CONFIG_DATA[CONF_HOST] state = hass.states.get("media_player.panasonic_viera_tv") assert state assert state.name == DEFAULT_NAME
[ "async", "def", "test_setup_entry_unencrypted_missing_device_info_none", "(", "hass", ")", ":", "mock_entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "unique_id", "=", "MOCK_CONFIG_DATA", "[", "CONF_HOST", "]", ",", "data", "=", "MOCK_CONFIG_DATA", ",", ")", "mock_entry", ".", "add_to_hass", "(", "hass", ")", "mock_remote", "=", "get_mock_remote", "(", "device_info", "=", "None", ")", "with", "patch", "(", "\"homeassistant.components.panasonic_viera.Remote\"", ",", "return_value", "=", "mock_remote", ",", ")", ":", "await", "hass", ".", "config_entries", ".", "async_setup", "(", "mock_entry", ".", "entry_id", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "mock_entry", ".", "data", "[", "ATTR_DEVICE_INFO", "]", "is", "None", "assert", "mock_entry", ".", "unique_id", "==", "MOCK_CONFIG_DATA", "[", "CONF_HOST", "]", "state", "=", "hass", ".", "states", ".", "get", "(", "\"media_player.panasonic_viera_tv\"", ")", "assert", "state", "assert", "state", ".", "name", "==", "DEFAULT_NAME" ]
[ 194, 0 ]
[ 219, 41 ]
python
en
['en', 'en', 'en']
True
test_setup_config_flow_initiated
(hass)
Test if config flow is initiated in setup.
Test if config flow is initiated in setup.
async def test_setup_config_flow_initiated(hass): """Test if config flow is initiated in setup.""" assert ( await async_setup_component( hass, DOMAIN, {DOMAIN: {CONF_HOST: "0.0.0.0"}}, ) is True ) assert len(hass.config_entries.flow.async_progress()) == 1
[ "async", "def", "test_setup_config_flow_initiated", "(", "hass", ")", ":", "assert", "(", "await", "async_setup_component", "(", "hass", ",", "DOMAIN", ",", "{", "DOMAIN", ":", "{", "CONF_HOST", ":", "\"0.0.0.0\"", "}", "}", ",", ")", "is", "True", ")", "assert", "len", "(", "hass", ".", "config_entries", ".", "flow", ".", "async_progress", "(", ")", ")", "==", "1" ]
[ 222, 0 ]
[ 233, 62 ]
python
en
['en', 'en', 'en']
True
test_setup_unload_entry
(hass)
Test if config entry is unloaded.
Test if config entry is unloaded.
async def test_setup_unload_entry(hass): """Test if config entry is unloaded.""" mock_entry = MockConfigEntry( domain=DOMAIN, unique_id=MOCK_DEVICE_INFO[ATTR_UDN], data=MOCK_CONFIG_DATA ) mock_entry.add_to_hass(hass) mock_remote = get_mock_remote() with patch( "homeassistant.components.panasonic_viera.Remote", return_value=mock_remote, ): await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() await hass.config_entries.async_unload(mock_entry.entry_id) assert mock_entry.state == ENTRY_STATE_NOT_LOADED state = hass.states.get("media_player.panasonic_viera_tv") assert state is None
[ "async", "def", "test_setup_unload_entry", "(", "hass", ")", ":", "mock_entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "unique_id", "=", "MOCK_DEVICE_INFO", "[", "ATTR_UDN", "]", ",", "data", "=", "MOCK_CONFIG_DATA", ")", "mock_entry", ".", "add_to_hass", "(", "hass", ")", "mock_remote", "=", "get_mock_remote", "(", ")", "with", "patch", "(", "\"homeassistant.components.panasonic_viera.Remote\"", ",", "return_value", "=", "mock_remote", ",", ")", ":", "await", "hass", ".", "config_entries", ".", "async_setup", "(", "mock_entry", ".", "entry_id", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "config_entries", ".", "async_unload", "(", "mock_entry", ".", "entry_id", ")", "assert", "mock_entry", ".", "state", "==", "ENTRY_STATE_NOT_LOADED", "state", "=", "hass", ".", "states", ".", "get", "(", "\"media_player.panasonic_viera_tv\"", ")", "assert", "state", "is", "None" ]
[ 236, 0 ]
[ 259, 24 ]
python
en
['en', 'en', 'en']
True
setup_platform
(hass, config, add_devices, discovery_info=None)
Set up the Essent platform.
Set up the Essent platform.
def setup_platform(hass, config, add_devices, discovery_info=None): """Set up the Essent platform.""" username = config[CONF_USERNAME] password = config[CONF_PASSWORD] essent = EssentBase(username, password) meters = [] for meter in essent.retrieve_meters(): data = essent.retrieve_meter_data(meter) for tariff in data["values"]["LVR"]: meters.append( EssentMeter( essent, meter, data["type"], tariff, data["values"]["LVR"][tariff]["unit"], ) ) if not meters: hass.components.persistent_notification.create( "Couldn't find any meter readings. " "Please ensure Verbruiks Manager is enabled in Mijn Essent " "and at least one reading has been logged to Meterstanden.", title="Essent", notification_id="essent_notification", ) return add_devices(meters, True)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_devices", ",", "discovery_info", "=", "None", ")", ":", "username", "=", "config", "[", "CONF_USERNAME", "]", "password", "=", "config", "[", "CONF_PASSWORD", "]", "essent", "=", "EssentBase", "(", "username", ",", "password", ")", "meters", "=", "[", "]", "for", "meter", "in", "essent", ".", "retrieve_meters", "(", ")", ":", "data", "=", "essent", ".", "retrieve_meter_data", "(", "meter", ")", "for", "tariff", "in", "data", "[", "\"values\"", "]", "[", "\"LVR\"", "]", ":", "meters", ".", "append", "(", "EssentMeter", "(", "essent", ",", "meter", ",", "data", "[", "\"type\"", "]", ",", "tariff", ",", "data", "[", "\"values\"", "]", "[", "\"LVR\"", "]", "[", "tariff", "]", "[", "\"unit\"", "]", ",", ")", ")", "if", "not", "meters", ":", "hass", ".", "components", ".", "persistent_notification", ".", "create", "(", "\"Couldn't find any meter readings. \"", "\"Please ensure Verbruiks Manager is enabled in Mijn Essent \"", "\"and at least one reading has been logged to Meterstanden.\"", ",", "title", "=", "\"Essent\"", ",", "notification_id", "=", "\"essent_notification\"", ",", ")", "return", "add_devices", "(", "meters", ",", "True", ")" ]
[ 20, 0 ]
[ 50, 29 ]
python
en
['en', 'lv', 'en']
True
EssentBase.__init__
(self, username, password)
Initialize the Essent API.
Initialize the Essent API.
def __init__(self, username, password): """Initialize the Essent API.""" self._username = username self._password = password self._meter_data = {} self.update()
[ "def", "__init__", "(", "self", ",", "username", ",", "password", ")", ":", "self", ".", "_username", "=", "username", "self", ".", "_password", "=", "password", "self", ".", "_meter_data", "=", "{", "}", "self", ".", "update", "(", ")" ]
[ 56, 4 ]
[ 62, 21 ]
python
en
['en', 'pt', 'en']
True
EssentBase.retrieve_meters
(self)
Retrieve the list of meters.
Retrieve the list of meters.
def retrieve_meters(self): """Retrieve the list of meters.""" return self._meter_data.keys()
[ "def", "retrieve_meters", "(", "self", ")", ":", "return", "self", ".", "_meter_data", ".", "keys", "(", ")" ]
[ 64, 4 ]
[ 66, 38 ]
python
en
['en', 'af', 'en']
True
EssentBase.retrieve_meter_data
(self, meter)
Retrieve the data for this meter.
Retrieve the data for this meter.
def retrieve_meter_data(self, meter): """Retrieve the data for this meter.""" return self._meter_data[meter]
[ "def", "retrieve_meter_data", "(", "self", ",", "meter", ")", ":", "return", "self", ".", "_meter_data", "[", "meter", "]" ]
[ 68, 4 ]
[ 70, 38 ]
python
en
['en', 'en', 'en']
True
EssentBase.update
(self)
Retrieve the latest meter data from Essent.
Retrieve the latest meter data from Essent.
def update(self): """Retrieve the latest meter data from Essent.""" essent = PyEssent(self._username, self._password) eans = set(essent.get_EANs()) for possible_meter in eans: meter_data = essent.read_meter(possible_meter, only_last_meter_reading=True) if meter_data: self._meter_data[possible_meter] = meter_data
[ "def", "update", "(", "self", ")", ":", "essent", "=", "PyEssent", "(", "self", ".", "_username", ",", "self", ".", "_password", ")", "eans", "=", "set", "(", "essent", ".", "get_EANs", "(", ")", ")", "for", "possible_meter", "in", "eans", ":", "meter_data", "=", "essent", ".", "read_meter", "(", "possible_meter", ",", "only_last_meter_reading", "=", "True", ")", "if", "meter_data", ":", "self", ".", "_meter_data", "[", "possible_meter", "]", "=", "meter_data" ]
[ 73, 4 ]
[ 80, 61 ]
python
en
['en', 'en', 'en']
True
EssentMeter.__init__
(self, essent_base, meter, meter_type, tariff, unit)
Initialize the sensor.
Initialize the sensor.
def __init__(self, essent_base, meter, meter_type, tariff, unit): """Initialize the sensor.""" self._state = None self._essent_base = essent_base self._meter = meter self._type = meter_type self._tariff = tariff self._unit = unit
[ "def", "__init__", "(", "self", ",", "essent_base", ",", "meter", ",", "meter_type", ",", "tariff", ",", "unit", ")", ":", "self", ".", "_state", "=", "None", "self", ".", "_essent_base", "=", "essent_base", "self", ".", "_meter", "=", "meter", "self", ".", "_type", "=", "meter_type", "self", ".", "_tariff", "=", "tariff", "self", ".", "_unit", "=", "unit" ]
[ 86, 4 ]
[ 93, 25 ]
python
en
['en', 'en', 'en']
True
EssentMeter.unique_id
(self)
Return a unique ID.
Return a unique ID.
def unique_id(self) -> Optional[str]: """Return a unique ID.""" return f"{self._meter}-{self._type}-{self._tariff}"
[ "def", "unique_id", "(", "self", ")", "->", "Optional", "[", "str", "]", ":", "return", "f\"{self._meter}-{self._type}-{self._tariff}\"" ]
[ 96, 4 ]
[ 98, 59 ]
python
ca
['fr', 'ca', 'en']
False
EssentMeter.name
(self)
Return the name of the sensor.
Return the name of the sensor.
def name(self): """Return the name of the sensor.""" return f"Essent {self._type} ({self._tariff})"
[ "def", "name", "(", "self", ")", ":", "return", "f\"Essent {self._type} ({self._tariff})\"" ]
[ 101, 4 ]
[ 103, 54 ]
python
en
['en', 'mi', 'en']
True
EssentMeter.state
(self)
Return the state of the sensor.
Return the state of the sensor.
def state(self): """Return the state of the sensor.""" return self._state
[ "def", "state", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 106, 4 ]
[ 108, 26 ]
python
en
['en', 'en', 'en']
True
EssentMeter.unit_of_measurement
(self)
Return the unit of measurement.
Return the unit of measurement.
def unit_of_measurement(self): """Return the unit of measurement.""" if self._unit.lower() == "kwh": return ENERGY_KILO_WATT_HOUR return self._unit
[ "def", "unit_of_measurement", "(", "self", ")", ":", "if", "self", ".", "_unit", ".", "lower", "(", ")", "==", "\"kwh\"", ":", "return", "ENERGY_KILO_WATT_HOUR", "return", "self", ".", "_unit" ]
[ 111, 4 ]
[ 116, 25 ]
python
en
['en', 'la', 'en']
True
EssentMeter.update
(self)
Fetch the energy usage.
Fetch the energy usage.
def update(self): """Fetch the energy usage.""" # Ensure our data isn't too old self._essent_base.update() # Retrieve our meter data = self._essent_base.retrieve_meter_data(self._meter) # Set our value self._state = next( iter(data["values"]["LVR"][self._tariff]["records"].values()) )
[ "def", "update", "(", "self", ")", ":", "# Ensure our data isn't too old", "self", ".", "_essent_base", ".", "update", "(", ")", "# Retrieve our meter", "data", "=", "self", ".", "_essent_base", ".", "retrieve_meter_data", "(", "self", ".", "_meter", ")", "# Set our value", "self", ".", "_state", "=", "next", "(", "iter", "(", "data", "[", "\"values\"", "]", "[", "\"LVR\"", "]", "[", "self", ".", "_tariff", "]", "[", "\"records\"", "]", ".", "values", "(", ")", ")", ")" ]
[ 118, 4 ]
[ 129, 9 ]
python
en
['en', 'en', 'en']
True
turn_on
(hass, entity_id=None, **service_data)
Turn specified entity on if possible. This is a legacy helper method. Do not use it for new tests.
Turn specified entity on if possible.
def turn_on(hass, entity_id=None, **service_data): """Turn specified entity on if possible. This is a legacy helper method. Do not use it for new tests. """ if entity_id is not None: service_data[ATTR_ENTITY_ID] = entity_id hass.services.call(ha.DOMAIN, SERVICE_TURN_ON, service_data)
[ "def", "turn_on", "(", "hass", ",", "entity_id", "=", "None", ",", "*", "*", "service_data", ")", ":", "if", "entity_id", "is", "not", "None", ":", "service_data", "[", "ATTR_ENTITY_ID", "]", "=", "entity_id", "hass", ".", "services", ".", "call", "(", "ha", ".", "DOMAIN", ",", "SERVICE_TURN_ON", ",", "service_data", ")" ]
[ 44, 0 ]
[ 52, 64 ]
python
en
['en', 'en', 'en']
True
turn_off
(hass, entity_id=None, **service_data)
Turn specified entity off. This is a legacy helper method. Do not use it for new tests.
Turn specified entity off.
def turn_off(hass, entity_id=None, **service_data): """Turn specified entity off. This is a legacy helper method. Do not use it for new tests. """ if entity_id is not None: service_data[ATTR_ENTITY_ID] = entity_id hass.services.call(ha.DOMAIN, SERVICE_TURN_OFF, service_data)
[ "def", "turn_off", "(", "hass", ",", "entity_id", "=", "None", ",", "*", "*", "service_data", ")", ":", "if", "entity_id", "is", "not", "None", ":", "service_data", "[", "ATTR_ENTITY_ID", "]", "=", "entity_id", "hass", ".", "services", ".", "call", "(", "ha", ".", "DOMAIN", ",", "SERVICE_TURN_OFF", ",", "service_data", ")" ]
[ 55, 0 ]
[ 63, 65 ]
python
en
['en', 'en', 'en']
True
toggle
(hass, entity_id=None, **service_data)
Toggle specified entity. This is a legacy helper method. Do not use it for new tests.
Toggle specified entity.
def toggle(hass, entity_id=None, **service_data): """Toggle specified entity. This is a legacy helper method. Do not use it for new tests. """ if entity_id is not None: service_data[ATTR_ENTITY_ID] = entity_id hass.services.call(ha.DOMAIN, SERVICE_TOGGLE, service_data)
[ "def", "toggle", "(", "hass", ",", "entity_id", "=", "None", ",", "*", "*", "service_data", ")", ":", "if", "entity_id", "is", "not", "None", ":", "service_data", "[", "ATTR_ENTITY_ID", "]", "=", "entity_id", "hass", ".", "services", ".", "call", "(", "ha", ".", "DOMAIN", ",", "SERVICE_TOGGLE", ",", "service_data", ")" ]
[ 66, 0 ]
[ 74, 63 ]
python
en
['en', 'en', 'en']
True
stop
(hass)
Stop Home Assistant. This is a legacy helper method. Do not use it for new tests.
Stop Home Assistant.
def stop(hass): """Stop Home Assistant. This is a legacy helper method. Do not use it for new tests. """ hass.services.call(ha.DOMAIN, SERVICE_HOMEASSISTANT_STOP)
[ "def", "stop", "(", "hass", ")", ":", "hass", ".", "services", ".", "call", "(", "ha", ".", "DOMAIN", ",", "SERVICE_HOMEASSISTANT_STOP", ")" ]
[ 77, 0 ]
[ 82, 61 ]
python
en
['en', 'en', 'en']
True
restart
(hass)
Stop Home Assistant. This is a legacy helper method. Do not use it for new tests.
Stop Home Assistant.
def restart(hass): """Stop Home Assistant. This is a legacy helper method. Do not use it for new tests. """ hass.services.call(ha.DOMAIN, SERVICE_HOMEASSISTANT_RESTART)
[ "def", "restart", "(", "hass", ")", ":", "hass", ".", "services", ".", "call", "(", "ha", ".", "DOMAIN", ",", "SERVICE_HOMEASSISTANT_RESTART", ")" ]
[ 85, 0 ]
[ 90, 64 ]
python
en
['en', 'en', 'en']
True
check_config
(hass)
Check the config files. This is a legacy helper method. Do not use it for new tests.
Check the config files.
def check_config(hass): """Check the config files. This is a legacy helper method. Do not use it for new tests. """ hass.services.call(ha.DOMAIN, SERVICE_CHECK_CONFIG)
[ "def", "check_config", "(", "hass", ")", ":", "hass", ".", "services", ".", "call", "(", "ha", ".", "DOMAIN", ",", "SERVICE_CHECK_CONFIG", ")" ]
[ 93, 0 ]
[ 98, 55 ]
python
en
['en', 'en', 'en']
True
reload_core_config
(hass)
Reload the core config. This is a legacy helper method. Do not use it for new tests.
Reload the core config.
def reload_core_config(hass): """Reload the core config. This is a legacy helper method. Do not use it for new tests. """ hass.services.call(ha.DOMAIN, SERVICE_RELOAD_CORE_CONFIG)
[ "def", "reload_core_config", "(", "hass", ")", ":", "hass", ".", "services", ".", "call", "(", "ha", ".", "DOMAIN", ",", "SERVICE_RELOAD_CORE_CONFIG", ")" ]
[ 101, 0 ]
[ 106, 61 ]
python
en
['en', 'en', 'en']
True
test_turn_on_to_not_block_for_domains_without_service
(hass)
Test if turn_on is blocking domain with no service.
Test if turn_on is blocking domain with no service.
async def test_turn_on_to_not_block_for_domains_without_service(hass): """Test if turn_on is blocking domain with no service.""" await async_setup_component(hass, "homeassistant", {}) async_mock_service(hass, "light", SERVICE_TURN_ON) hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) # We can't test if our service call results in services being called # because by mocking out the call service method, we mock out all # So we mimic how the service registry calls services service_call = ha.ServiceCall( "homeassistant", "turn_on", {"entity_id": ["light.test", "sensor.bla", "light.bla"]}, ) service = hass.services._services["homeassistant"]["turn_on"] with patch( "homeassistant.core.ServiceRegistry.async_call", return_value=None, ) as mock_call: await service.job.target(service_call) assert mock_call.call_count == 2 assert mock_call.call_args_list[0][0] == ( "light", "turn_on", {"entity_id": ["light.bla", "light.test"]}, True, ) assert mock_call.call_args_list[1][0] == ( "sensor", "turn_on", {"entity_id": ["sensor.bla"]}, False, )
[ "async", "def", "test_turn_on_to_not_block_for_domains_without_service", "(", "hass", ")", ":", "await", "async_setup_component", "(", "hass", ",", "\"homeassistant\"", ",", "{", "}", ")", "async_mock_service", "(", "hass", ",", "\"light\"", ",", "SERVICE_TURN_ON", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.Bowl\"", ",", "STATE_ON", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.Ceiling\"", ",", "STATE_OFF", ")", "# We can't test if our service call results in services being called", "# because by mocking out the call service method, we mock out all", "# So we mimic how the service registry calls services", "service_call", "=", "ha", ".", "ServiceCall", "(", "\"homeassistant\"", ",", "\"turn_on\"", ",", "{", "\"entity_id\"", ":", "[", "\"light.test\"", ",", "\"sensor.bla\"", ",", "\"light.bla\"", "]", "}", ",", ")", "service", "=", "hass", ".", "services", ".", "_services", "[", "\"homeassistant\"", "]", "[", "\"turn_on\"", "]", "with", "patch", "(", "\"homeassistant.core.ServiceRegistry.async_call\"", ",", "return_value", "=", "None", ",", ")", "as", "mock_call", ":", "await", "service", ".", "job", ".", "target", "(", "service_call", ")", "assert", "mock_call", ".", "call_count", "==", "2", "assert", "mock_call", ".", "call_args_list", "[", "0", "]", "[", "0", "]", "==", "(", "\"light\"", ",", "\"turn_on\"", ",", "{", "\"entity_id\"", ":", "[", "\"light.bla\"", ",", "\"light.test\"", "]", "}", ",", "True", ",", ")", "assert", "mock_call", ".", "call_args_list", "[", "1", "]", "[", "0", "]", "==", "(", "\"sensor\"", ",", "\"turn_on\"", ",", "{", "\"entity_id\"", ":", "[", "\"sensor.bla\"", "]", "}", ",", "False", ",", ")" ]
[ 250, 0 ]
[ 285, 5 ]
python
en
['en', 'en', 'en']
True
test_entity_update
(hass)
Test being able to call entity update.
Test being able to call entity update.
async def test_entity_update(hass): """Test being able to call entity update.""" await async_setup_component(hass, "homeassistant", {}) with patch( "homeassistant.helpers.entity_component.async_update_entity", return_value=None, ) as mock_update: await hass.services.async_call( "homeassistant", "update_entity", {"entity_id": ["light.kitchen"]}, blocking=True, ) assert len(mock_update.mock_calls) == 1 assert mock_update.mock_calls[0][1][1] == "light.kitchen"
[ "async", "def", "test_entity_update", "(", "hass", ")", ":", "await", "async_setup_component", "(", "hass", ",", "\"homeassistant\"", ",", "{", "}", ")", "with", "patch", "(", "\"homeassistant.helpers.entity_component.async_update_entity\"", ",", "return_value", "=", "None", ",", ")", "as", "mock_update", ":", "await", "hass", ".", "services", ".", "async_call", "(", "\"homeassistant\"", ",", "\"update_entity\"", ",", "{", "\"entity_id\"", ":", "[", "\"light.kitchen\"", "]", "}", ",", "blocking", "=", "True", ",", ")", "assert", "len", "(", "mock_update", ".", "mock_calls", ")", "==", "1", "assert", "mock_update", ".", "mock_calls", "[", "0", "]", "[", "1", "]", "[", "1", "]", "==", "\"light.kitchen\"" ]
[ 288, 0 ]
[ 304, 61 ]
python
en
['en', 'en', 'en']
True
test_setting_location
(hass)
Test setting the location.
Test setting the location.
async def test_setting_location(hass): """Test setting the location.""" await async_setup_component(hass, "homeassistant", {}) events = async_capture_events(hass, EVENT_CORE_CONFIG_UPDATE) # Just to make sure that we are updating values. assert hass.config.latitude != 30 assert hass.config.longitude != 40 await hass.services.async_call( "homeassistant", "set_location", {"latitude": 30, "longitude": 40}, blocking=True, ) assert len(events) == 1 assert hass.config.latitude == 30 assert hass.config.longitude == 40
[ "async", "def", "test_setting_location", "(", "hass", ")", ":", "await", "async_setup_component", "(", "hass", ",", "\"homeassistant\"", ",", "{", "}", ")", "events", "=", "async_capture_events", "(", "hass", ",", "EVENT_CORE_CONFIG_UPDATE", ")", "# Just to make sure that we are updating values.", "assert", "hass", ".", "config", ".", "latitude", "!=", "30", "assert", "hass", ".", "config", ".", "longitude", "!=", "40", "await", "hass", ".", "services", ".", "async_call", "(", "\"homeassistant\"", ",", "\"set_location\"", ",", "{", "\"latitude\"", ":", "30", ",", "\"longitude\"", ":", "40", "}", ",", "blocking", "=", "True", ",", ")", "assert", "len", "(", "events", ")", "==", "1", "assert", "hass", ".", "config", ".", "latitude", "==", "30", "assert", "hass", ".", "config", ".", "longitude", "==", "40" ]
[ 307, 0 ]
[ 322, 38 ]
python
en
['en', 'en', 'en']
True
test_require_admin
(hass, hass_read_only_user)
Test services requiring admin.
Test services requiring admin.
async def test_require_admin(hass, hass_read_only_user): """Test services requiring admin.""" await async_setup_component(hass, "homeassistant", {}) for service in ( SERVICE_HOMEASSISTANT_RESTART, SERVICE_HOMEASSISTANT_STOP, SERVICE_CHECK_CONFIG, SERVICE_RELOAD_CORE_CONFIG, ): with pytest.raises(Unauthorized): await hass.services.async_call( ha.DOMAIN, service, {}, context=ha.Context(user_id=hass_read_only_user.id), blocking=True, ) assert False, f"Should have raises for {service}" with pytest.raises(Unauthorized): await hass.services.async_call( ha.DOMAIN, SERVICE_SET_LOCATION, {"latitude": 0, "longitude": 0}, context=ha.Context(user_id=hass_read_only_user.id), blocking=True, )
[ "async", "def", "test_require_admin", "(", "hass", ",", "hass_read_only_user", ")", ":", "await", "async_setup_component", "(", "hass", ",", "\"homeassistant\"", ",", "{", "}", ")", "for", "service", "in", "(", "SERVICE_HOMEASSISTANT_RESTART", ",", "SERVICE_HOMEASSISTANT_STOP", ",", "SERVICE_CHECK_CONFIG", ",", "SERVICE_RELOAD_CORE_CONFIG", ",", ")", ":", "with", "pytest", ".", "raises", "(", "Unauthorized", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "ha", ".", "DOMAIN", ",", "service", ",", "{", "}", ",", "context", "=", "ha", ".", "Context", "(", "user_id", "=", "hass_read_only_user", ".", "id", ")", ",", "blocking", "=", "True", ",", ")", "assert", "False", ",", "f\"Should have raises for {service}\"", "with", "pytest", ".", "raises", "(", "Unauthorized", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "ha", ".", "DOMAIN", ",", "SERVICE_SET_LOCATION", ",", "{", "\"latitude\"", ":", "0", ",", "\"longitude\"", ":", "0", "}", ",", "context", "=", "ha", ".", "Context", "(", "user_id", "=", "hass_read_only_user", ".", "id", ")", ",", "blocking", "=", "True", ",", ")" ]
[ 325, 0 ]
[ 352, 9 ]
python
en
['en', 'en', 'en']
True