function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
sequence
def testPathLineSentencesOneFile(self):
    """Does PathLineSentences work with a single file argument?"""
    test_file = os.path.join(datapath('PathLineSentences'), '1.txt')
    # Compare each yielded sentence against the corresponding raw line.
    with utils.open(test_file, 'rb') as expected:
        for sentence in word2vec.PathLineSentences(test_file):
            reference = utils.to_unicode(expected.readline()).split()
            self.assertEqual(sentence, reference)
gojomo/gensim
[ 14, 13, 14, 1, 1401498617 ]
def assertLess(self, a, b, msg=None):
    """Backport shim for unittest versions that lack assertLess.

    NOTE: the caller-supplied `msg` is ignored (the original behaved the
    same way); the generated message is always used.
    """
    message = "%s is not less than %s" % (a, b)
    self.assertTrue(a < b, msg=message)
gojomo/gensim
[ 14, 13, 14, 1, 1401498617 ]
def __init__(self, data_folder: str, prj: 'hyo2.soundspeed.soundspeed import SoundSpeedLibrary') -> None:
    """Initialize the abstract atlas with its data folder and owning project.

    NOTE(review): the `prj` annotation string is garbled (it embeds an
    `import` statement, not a type name) — presumably it should reference
    SoundSpeedLibrary; confirm before changing.
    """
    self.name = self.__class__.__name__
    self.desc = "Abstract atlas"  # a human-readable description
    self.data_folder = data_folder
    self.prj = prj
    self.g = Geodesy()
    self._has_data_loaded = False  # grids are "loaded" ? (netCDF files are opened)
    self._file = None      # open netCDF file handle (None until loaded)
    self._day_idx = 0      # index of the last loaded day
    self._timestamp = None
    # Grid/geometry caches, populated on load.
    self._zeta = None
    self._siglay = None
    self._h = None
    self._lats = None
    self._lons = None
    # Last queried location and the derived profile data.
    self._lat = None
    self._lon = None
    self._loc_idx = None
    self._d = None
    self._temp = None
    self._sal = None
hydroffice/hyo_soundspeed
[ 15, 18, 15, 3, 1459123919 ]
def clear_data(self) -> None:
    """Delete the data and reset the last loaded day"""
    logger.debug("clearing data")
    if self._has_data_loaded:
        if self._file:
            # Close the netCDF handle before dropping the reference.
            self._file.close()
    # Reset all cached state back to the __init__ defaults.
    self._has_data_loaded = False  # grids are "loaded" ? (netCDF files are opened)
    self._file = None
    self._day_idx = 0
    self._timestamp = None
    self._zeta = None
    self._siglay = None
    self._h = None
    self._lats = None
    self._lons = None
    self._lat = None
    self._lon = None
    self._loc_idx = None
    self._d = None
    self._temp = None
    self._sal = None
hydroffice/hyo_soundspeed
[ 15, 18, 15, 3, 1459123919 ]
def __init__(self):
    """<emiOcc> group: identification of the occurrence issuer.

    tamanho is [min, max(, fixed)] length; every field is rooted at '//emiOcc'.
    """
    super(EmiOcc, self).__init__()
    self.CNPJ = TagCaracter(nome='CNPJ', tamanho=[0, 14], raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.cInt = TagCaracter(nome='cInt', tamanho=[1, 10], raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.IE = TagCaracter(nome='IE', tamanho=[2, 14], raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    # NOTE(review): UF omits obrigatorio=False unlike its siblings — presumably
    # the state code is mandatory; confirm against the CT-e layout.
    self.UF = TagCaracter(nome='UF', tamanho=[2, 2], raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.fone = TagInteiro(nome='fone', tamanho=[6, 14], raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize the <emiOcc> group to an XML string."""
    parts = [XMLNFe.get_xml(self), '<emiOcc>']
    for tag in (self.CNPJ, self.cInt, self.IE, self.UF, self.fone):
        parts.append(tag.xml)
    parts.append('</emiOcc>')
    return ''.join(parts)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<occ> group: a transport occurrence (series, number, issue date, issuer)."""
    super(Occ, self).__init__()
    self.serie = TagCaracter(nome='serie', tamanho=[8, 8, 8], raiz='//occ', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.nOcc = TagInteiro(nome='nOcc', tamanho=[1, 6], raiz='//occ', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.dEmi = TagData(nome='dEmi', raiz='//occ', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.emiOcc = EmiOcc()  # nested issuer group
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize <occ>; intended to emit nothing when the group is empty.

    NOTE(review): `self.emiOcc is not None` is always true (set in __init__),
    so this guard can never return '' — presumably a child-value check was
    intended. This row also appears truncated: the XML-building body that
    sibling get_xml methods have is missing from the visible source.
    """
    if not (self.nOcc.valor or self.dEmi.valor or self.emiOcc is not None):
        return ''
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def set_xml(self, arquivo):
    """Populate every <occ> child tag from the parsed XML in *arquivo*."""
    if not self._le_xml(arquivo):
        return
    for tag in (self.serie, self.nOcc, self.dEmi, self.emiOcc):
        tag.xml = arquivo
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<rodo> group: road-mode information (RNTRC + occurrence list)."""
    super(Rodo, self).__init__()
    self.RNTRC = TagCaracter(nome='RNTRC', tamanho=[8, 8, 8], raiz='//rodo', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.occ = []  # list of Occ groups, filled by set_xml/le_grupo
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def set_xml(self, arquivo):
    """Populate the RNTRC tag and read the occurrence list from *arquivo*."""
    if not self._le_xml(arquivo):
        return
    self.RNTRC.xml = arquivo
    self.occ = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/rodo/occ', Occ, sigla_ns='cte')
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<infTotAP> group: total quantity and unit for air transport."""
    super(InfTotAP, self).__init__()
    self.qTotProd = TagCaracter(nome='qTotProd', tamanho=[1, 1, 1], raiz='//infTotAP', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.uniAP = TagCaracter(nome='uniAP', tamanho=[1, 4], raiz='//infTotAP', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize the <infTotAP> group to an XML string."""
    parts = [XMLNFe.get_xml(self), u'<infTotAP>', self.qTotProd.xml, self.uniAP.xml, '</infTotAP>']
    return ''.join(parts)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<peri> group: dangerous-goods info (UN number, quantity, totals)."""
    super(Peri, self).__init__()
    self.nONU = TagCaracter(nome='nONU', tamanho=[4, 4, 4], raiz='//peri', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.qTotEmb = TagCaracter(nome='qTotEmb', tamanho=[1, 20], raiz='//peri', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.infTotAP = InfTotAP()  # nested totals group
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize <peri>; meant to emit nothing when the group is empty.

    NOTE(review): `self.infTotAP is not None` is always true (set in
    __init__), so the whole `not (...)` guard is always false and '' is
    never returned — presumably a value check on infTotAP was intended.
    """
    if not (self.nONU.valor or self.qTotEmb.valor or self.infTotAP is not None):
        return ''
    xml = XMLNFe.get_xml(self)
    xml += u'<peri>'
    xml += self.nONU.xml
    xml += self.qTotEmb.xml
    xml += self.infTotAP.xml
    xml += '</peri>'
    return xml
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<tarifa> group: air freight tariff (class, code, value)."""
    super(Tarifa, self).__init__()
    self.CL = TagCaracter(nome='CL', tamanho=[1, 1, 1], raiz='//tarifa', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.cTar = TagCaracter(nome='cTar', tamanho=[1, 4], raiz='//tarifa', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    # decimais is [min, max, default] decimal places for the monetary value.
    self.vTar = TagDecimal(nome='vTar', tamanho=[1, 13, 1], decimais=[0, 2, 2], raiz='//tarifa', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize the <tarifa> group to an XML string."""
    parts = [XMLNFe.get_xml(self), u'<tarifa>', self.CL.xml, self.cTar.xml, self.vTar.xml, '</tarifa>']
    return ''.join(parts)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self, *args, **kwargs):
    """Tag for <cInfManu> (cargo handling info code) under <natCarga>."""
    super(TagCInfManu, self).__init__(*args, **kwargs)
    # Configure after super() so these override any base-class defaults.
    self.nome = 'cInfManu'
    self.tamanho = [2, 2]
    self.raiz = '//natCarga'
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<natCarga> group: cargo nature (dimensions + handling codes)."""
    super(NatCarga, self).__init__()
    self.xDime = TagCaracter(nome='xDime', tamanho=[5, 14], raiz='//natCarga', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.cInfManu = []  # list of TagCInfManu, filled by set_xml
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize <natCarga>.

    NOTE(review): this row appears truncated — the closing '</natCarga>'
    append and the `return xml` that every sibling get_xml has are missing
    from the visible source.
    """
    xml = XMLNFe.get_xml(self)
    xml += u'<natCarga>'
    xml += self.xDime.xml
    for c in self.cInfManu:
        xml += c.xml
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def set_xml(self, arquivo):
    """Populate <natCarga> tags and read the cInfManu list from *arquivo*."""
    if not self._le_xml(arquivo):
        return
    self.xDime.xml = arquivo
    self.cInfManu = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aereo/natCarga/cInfManu', TagCInfManu, sigla_ns='cte')
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<aereo> group: air-mode information."""
    super(Aereo, self).__init__()
    self.nMinu = TagInteiro(nome='nMinu', tamanho=[9, 9, 9], raiz='//aereo', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.nOCA = TagInteiro(nome='nOCA', tamanho=[11, 11, 11], raiz='//aereo', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.dPrevAereo = TagData(nome='dPrevAereo', raiz='//aereo', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.natCarga = NatCarga()
    self.tarifa = Tarifa()
    self.peri = []  # dangerous-goods groups, filled by set_xml
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def set_xml(self, arquivo):
    """Populate <aereo> tags and read the peri list from *arquivo*."""
    if not self._le_xml(arquivo):
        return
    for tag in (self.nMinu, self.nOCA, self.dPrevAereo, self.natCarga, self.tarifa):
        tag.xml = arquivo
    self.peri = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aereo/peri', Peri, sigla_ns='cte')
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<infNFe> entry under aquav/detCont/infDoc: NF-e access key (+ ratio)."""
    super(InfNFeAquav, self).__init__()
    self.chave = TagCaracter(nome='chave', tamanho=[44, 44], raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNFe', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    # NOTE(review): unidRat omits the namespace arguments its sibling passes —
    # confirm whether that is intentional.
    self.unidRat = TagDecimal(nome='unidRat', tamanho=[1, 3, 1], decimais=[0, 2, 2], raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNFe', obrigatorio=False)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize the <infNFe> entry; returns '' when no access key is set.

    Bug fix: the element tags were '<infNF>'/'</infNF>' (copied from the
    InfNF sibling class), mislabeling NF-e entries — this group's root
    element (see the tags' raiz) is infNFe.
    """
    if not (self.chave.valor):
        return ''
    xml = XMLNFe.get_xml(self)
    xml += u'<infNFe>'
    xml += self.chave.xml
    xml += self.unidRat.xml
    xml += '</infNFe>'
    return xml
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<infNF> entry under aquav/detCont/infDoc: paper NF (serie/number + ratio)."""
    super(InfNFAquav, self).__init__()
    self.serie = TagCaracter(nome='serie', tamanho=[1, 3], raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNF', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.nDoc = TagCaracter(nome='nDoc', tamanho=[1, 20], raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNF', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    # NOTE(review): unidRat omits the namespace arguments its siblings pass —
    # confirm whether that is intentional.
    self.unidRat = TagDecimal(nome='unidRat', tamanho=[1, 3, 1], decimais=[0, 2, 2], raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNF', obrigatorio=False)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize the <infNF> entry; returns '' when series and number are empty."""
    if not (self.serie.valor or self.nDoc.valor):
        return ''
    parts = [XMLNFe.get_xml(self), u'<infNF>', self.serie.xml, self.nDoc.xml, self.unidRat.xml, '</infNF>']
    return ''.join(parts)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """Start with empty document lists for the <infDoc> group."""
    super(InfDocAquav, self).__init__()
    # infNF: paper documents (serie/nDoc); infNFe: NF-e access keys.
    self.infNF, self.infNFe = [], []
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize <infDoc>; returns '' when both document lists are empty."""
    if not self.infNF and not self.infNFe:
        return ''
    parts = [XMLNFe.get_xml(self), u'<infDoc>']
    parts.extend(doc.xml for doc in self.infNF)
    parts.extend(doc.xml for doc in self.infNFe)
    parts.append('</infDoc>')
    return ''.join(parts)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<lacre> group: a single container seal number."""
    super(Lacre, self).__init__()
    self.nLacre = TagCaracter(nome='nLacre', tamanho=[1, 20], raiz='//lacre', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize <lacre>; returns '' when no seal number is set."""
    if not self.nLacre.valor:
        return ''
    parts = [XMLNFe.get_xml(self), u'<lacre>', self.nLacre.xml, '</lacre>']
    return ''.join(parts)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<detCont> group: a container (identifier, seals, documents).

    Bug fix: the nCont tag was declared with nome='xBalsa' — a copy-paste
    from the Balsa class — so the container id serialized and parsed under
    the wrong element name; it must be 'nCont' (matching the attribute and
    the CT-e aquav layout).
    """
    super(DetCont, self).__init__()
    self.nCont = TagCaracter(nome='nCont', tamanho=[1, 20], raiz='//detCont', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.lacre = []  # seal list, filled by set_xml
    # Named InfDocAquav to avoid a name clash with the generic InfDoc class.
    self.infDoc = InfDocAquav()
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize <detCont>; returns '' when no container id is set.

    NOTE(review): this row appears truncated — the infDoc serialization,
    the closing '</detCont>' and the `return xml` are missing from the
    visible source.
    """
    if not (self.nCont.valor):
        return ''
    xml = XMLNFe.get_xml(self)
    xml += u'<detCont>'
    xml += self.nCont.xml
    for l in self.lacre:
        xml += l.xml
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def set_xml(self, arquivo):
    """Populate <detCont> tags and read the seal list from *arquivo*."""
    if not self._le_xml(arquivo):
        return
    self.nCont.xml = arquivo
    self.infDoc.xml = arquivo
    self.lacre = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aquav/detCont/lacre', Lacre, sigla_ns='cte')
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<balsa> group: the name of a barge."""
    super(Balsa, self).__init__()
    self.xBalsa = TagCaracter(nome='xBalsa', tamanho=[1, 60], raiz='//balsa', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize <balsa>; returns '' when no barge name is set."""
    if not self.xBalsa.valor:
        return ''
    parts = [XMLNFe.get_xml(self), u'<balsa>', self.xBalsa.xml, '</balsa>']
    return ''.join(parts)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<aquav> group: waterway-mode information."""
    super(Aquav, self).__init__()
    self.vPrest = TagDecimal(nome='vPrest', tamanho=[1, 13, 1], decimais=[0, 2, 2], raiz='//aquav', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.vAFRMM = TagDecimal(nome='vAFRMM', tamanho=[1, 13, 1], decimais=[0, 2, 2], raiz='//aquav', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.xNavio = TagCaracter(nome='xNavio', tamanho=[1, 60], raiz='//aquav', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.balsa = []  # Balsa groups, filled by set_xml
    self.nViag = TagInteiro(nome='nViag', tamanho=[1, 10], raiz='//aquav', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.direc = TagCaracter(nome='direc', tamanho=[1, 1, 1], raiz='//aquav', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.irin = TagCaracter(nome='irin', tamanho=[1, 10], raiz='//aquav', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.detCont = []  # DetCont groups, filled by set_xml
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def set_xml(self, arquivo):
    """Populate <aquav> tags and read the balsa/detCont lists from *arquivo*."""
    if not self._le_xml(arquivo):
        return
    for tag in (self.vPrest, self.vAFRMM, self.xNavio, self.nViag, self.direc, self.irin):
        tag.xml = arquivo
    self.balsa = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aquav/balsa', Balsa, sigla_ns='cte')
    self.detCont = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aquav/detCont', DetCont, sigla_ns='cte')
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<enderFerro> group: railway participant address."""
    super(EnderFerro, self).__init__()
    self.xLgr = TagCaracter(nome='xLgr', tamanho=[2, 255], raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.nro = TagCaracter(nome='nro', tamanho=[1, 60], raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.xCpl = TagCaracter(nome='xCpl', tamanho=[1, 60], raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.xBairro = TagCaracter(nome='xBairro', tamanho=[2, 60], raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.cMun = TagInteiro(nome='cMun', tamanho=[7, 7, 7], raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.xMun = TagCaracter(nome='xMun', tamanho=[2, 60], raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.CEP = TagCaracter(nome='CEP', tamanho=[8, 8, 8], raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.UF = TagCaracter(nome='UF', tamanho=[2, 2], raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize the <enderFerro> address group to an XML string."""
    parts = [XMLNFe.get_xml(self), '<enderFerro>']
    for tag in (self.xLgr, self.nro, self.xCpl, self.xBairro, self.cMun, self.xMun, self.CEP, self.UF):
        parts.append(tag.xml)
    parts.append('</enderFerro>')
    return ''.join(parts)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<ferroEnv> group: a railway company involved in the transport."""
    super(FerroEnv, self).__init__()
    self.CNPJ = TagCaracter(nome='CNPJ', tamanho=[0, 14], raiz='//ferroEnv', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.cInt = TagCaracter(nome='cInt', tamanho=[1, 10], raiz='//ferroEnv', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.IE = TagCaracter(nome='IE', tamanho=[2, 14], raiz='//ferroEnv', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.xNome = TagCaracter(nome='xNome', tamanho=[2, 60], raiz='//ferroEnv', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.enderFerro = EnderFerro()  # nested address group
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize <ferroEnv>; returns '' when no CNPJ is set."""
    if not self.CNPJ.valor:
        return ''
    parts = [XMLNFe.get_xml(self), u'<ferroEnv>']
    for tag in (self.CNPJ, self.cInt, self.IE, self.xNome, self.enderFerro):
        parts.append(tag.xml)
    parts.append('</ferroEnv>')
    return ''.join(parts)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<trafMut> group: mutual rail traffic information."""
    super(TrafMut, self).__init__()
    self.respFat = TagInteiro(nome='respFat', tamanho=[1, 1, 1], raiz='//trafMut', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.ferrEmi = TagInteiro(nome='ferrEmi', tamanho=[1, 1, 1], raiz='//trafMut', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.vFrete = TagDecimal(nome='vFrete', tamanho=[1, 13, 1], decimais=[0, 2, 2], raiz='//trafMut', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.chCTeFerroOrigem = TagCaracter(nome='chCTeFerroOrigem', tamanho=[44, 44], raiz='//trafMut', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.ferroEnv = []  # FerroEnv groups, filled by set_xml
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def get_xml(self):
    """Serialize <trafMut>; returns '' when neither respFat nor ferrEmi is set.

    NOTE(review): this row appears truncated — the closing '</trafMut>'
    append and the `return xml` are missing from the visible source.
    """
    if not (self.respFat.valor or self.ferrEmi.valor):
        return ''
    xml = XMLNFe.get_xml(self)
    xml += u'<trafMut>'
    xml += self.respFat.xml
    xml += self.ferrEmi.xml
    xml += self.vFrete.xml
    xml += self.chCTeFerroOrigem.xml
    for f in self.ferroEnv:
        xml += f.xml
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def set_xml(self, arquivo):
    """Populate <trafMut> tags and read the ferroEnv list from *arquivo*."""
    if not self._le_xml(arquivo):
        return
    for tag in (self.respFat, self.ferrEmi, self.vFrete, self.chCTeFerroOrigem):
        tag.xml = arquivo
    self.ferroEnv = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/ferrov/trafMut/ferroEnv', FerroEnv, sigla_ns='cte')
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<ferrov> group: rail-mode information."""
    super(Ferrov, self).__init__()
    self.tpTraf = TagInteiro(nome='tpTraf', tamanho=[1, 1, 1], raiz='//ferrov', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.trafMut = TrafMut()  # nested mutual-traffic group
    self.fluxo = TagCaracter(nome='fluxo', tamanho=[1, 10], raiz='//ferrov', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def set_xml(self, arquivo):
    """Populate the rail-mode tags from the parsed XML in *arquivo*."""
    if not self._le_xml(arquivo):
        return
    for tag in (self.tpTraf, self.trafMut, self.fluxo):
        tag.xml = arquivo
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<duto> group: pipeline-mode information (tariff + service period)."""
    super(Duto, self).__init__()
    self.vTar = TagDecimal(nome='vTar', tamanho=[1, 9, 1], decimais=[0, 6, 6], raiz='//duto', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
    self.dIni = TagData(nome='dIni', raiz='//duto', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.dFim = TagData(nome='dFim', raiz='//duto', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def set_xml(self, arquivo):
    """Populate the pipeline-mode tags from the parsed XML in *arquivo*."""
    if not self._le_xml(arquivo):
        return
    for tag in (self.vTar, self.dIni, self.dFim):
        tag.xml = arquivo
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<infSeg> group: insurance company info (name + CNPJ).

    NOTE(review): super() is called with `Seg` although the fields are rooted
    at '//infSeg' and another __init__ in this module also names Seg — this
    is presumably the InfSeg class; confirm the enclosing class name.
    """
    super(Seg, self).__init__()
    self.xSeg = TagCaracter(nome='xSeg', tamanho=[1, 30], raiz='//infSeg', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.CNPJ = TagCaracter(nome='CNPJ', tamanho=[0, 14], raiz='//infSeg', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def set_xml(self, arquivo):
    """Populate the insurer tags from the parsed XML in *arquivo*."""
    if not self._le_xml(arquivo):
        return
    for tag in (self.xSeg, self.CNPJ):
        tag.xml = arquivo
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<seg> group: insurance data (company, policy, endorsement numbers)."""
    super(Seg, self).__init__()
    self.infSeg = InfSeg()  # nested insurer group
    self.nApol = TagCaracter(nome='nApol', tamanho=[1, 20], raiz='//seg', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.nAver = TagCaracter(nome='nAver', tamanho=[1, 20], raiz='//seg', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def set_xml(self, arquivo):
    """Populate the insurance tags from the parsed XML in *arquivo*."""
    if not self._le_xml(arquivo):
        return
    for tag in (self.infSeg, self.nApol, self.nAver):
        tag.xml = arquivo
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self):
    """<multimodal> group: multimodal transport operator information."""
    super(Multimodal, self).__init__()
    self.COTM = TagCaracter(nome='COTM', tamanho=[1, 20], raiz='//multimodal', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.indNegociavel = TagInteiro(nome='indNegociavel', tamanho=[1, 1, 1], raiz='//multimodal', namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
    self.seg = Seg()  # nested insurance group
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def set_xml(self, arquivo):
    """Populate the multimodal tags from the parsed XML in *arquivo*."""
    if not self._le_xml(arquivo):
        return
    for tag in (self.COTM, self.indNegociavel, self.seg):
        tag.xml = arquivo
thiagopena/PySIGNFe
[ 43, 35, 43, 4, 1480611950 ]
def __init__(self, job):
    """Set up accumulators for per-host memory-usage timeseries."""
    super(MemUsageTimeseries, self).__init__(job)
    # One slot per node; self._job is presumably set by the superclass —
    # note the mix of `job` and `self._job` here.
    self._data = TimeseriesAccumulator(job.nodecount, self._job.walltime)
    self._hostdata = {}      # hostidx -> per-device data array
    self._hostdevnames = {}  # hostidx -> {devid: device name}
ubccr/supremm
[ 8, 16, 8, 8, 1452097272 ]
def results(self):
    """Assemble the timeseries result dict.

    For jobs with more than 64 hosts only the min/median/max hosts keep
    full data; otherwise every host is included.

    NOTE(review): Python-2-only constructs — `.iterkeys()`, `uniqhosts.keys()`
    used as a snapshot, and `sortarr.shape[1] / 2` relying on integer
    division (float under Py3); `numpy.int` is removed in numpy >= 1.24.
    """
    values = self._data.get()

    if len(self._hostdata) > 64:
        # Compute min, max & median data and only save the host data
        # for these hosts
        memdata = values[:, :, 1]
        sortarr = numpy.argsort(memdata.T, axis=1)

        retdata = {
            "min": self.collatedata(sortarr[:, 0], memdata),
            "max": self.collatedata(sortarr[:, -1], memdata),
            "med": self.collatedata(sortarr[:, sortarr.shape[1] / 2], memdata),
            "times": values[0, :, 0].tolist(),
            "hosts": {}
        }

        # Union of all hosts that appear as min, max or median at any time.
        uniqhosts = Counter(sortarr[:, 0])
        uniqhosts.update(sortarr[:, -1])
        uniqhosts.update(sortarr[:, sortarr.shape[1] / 2])
        includelist = uniqhosts.keys()
    else:
        # Save data for all hosts
        retdata = {
            "times": values[0, :, 0].tolist(),
            "hosts": {}
        }
        includelist = self._hostdata.keys()

    for hostidx in includelist:
        retdata['hosts'][str(hostidx)] = {}
        retdata['hosts'][str(hostidx)]['all'] = values[hostidx, :, 1].tolist()
        retdata['hosts'][str(hostidx)]['dev'] = {}

        for devid in self._hostdevnames[hostidx].iterkeys():
            # Clip the device series to the number of collected datapoints.
            dpnts = len(values[hostidx, :, 0])
            retdata['hosts'][str(hostidx)]['dev'][devid] = self._hostdata[hostidx][:dpnts, numpy.int(devid)].tolist()

        retdata['hosts'][str(hostidx)]['names'] = self._hostdevnames[hostidx]

    return retdata
ubccr/supremm
[ 8, 16, 8, 8, 1452097272 ]
def __init__(self, args):
    '''
    Constructor
    '''
    logging.debug("UtlHttp object created")
    # Delegate connection/argument handling to the Http base class.
    Http.__init__(self, args)
quentinhardy/odat
[ 1302, 325, 1302, 8, 1393592151 ]
def sendGetRequest(self, url):
    '''
    send a HTTP get request to url
    Return False if the current user is not allowed to use the httpuritype lib, else return False or response data
    '''
    logging.info('Send a HTTP GET request to {0}'.format(url))
    # NOTE(review): `url` is interpolated straight into the SQL text; this
    # tool crafts its own URLs, but worth confirming no untrusted input
    # reaches here.
    query = "select utl_http.request('{0}') as data from dual".format(url)
    response = self.__execThisQuery__(query=query, ld=['data'])
    if isinstance(response, Exception):
        logging.info('Error with the SQL request {0}: {1}'.format(query, str(response)))
        # Errors are returned, not raised; callers test with isinstance().
        return ErrorSQLRequest(response)
    elif isinstance(response, list) and isinstance(response[0], dict):
        return response[0]['data']
    logging.info('Enough privileges')
    return ''
quentinhardy/odat
[ 1302, 325, 1302, 8, 1393592151 ]
def testAll(self):
    """Check whether the current user can use the UTL_HTTP library.

    Sends a request to http://0.0.0.0/ and inspects the resulting error,
    if any, for known missing-privilege messages.

    Returns:
        True when the library is usable, False otherwise.
    """
    self.args['print'].subtitle("UTL_HTTP library ?")
    logging.info('Try to make the server send a HTTP request to 0.0.0.0 with the UTL_HTTP library')
    response = self.sendGetRequest('http://0.0.0.0/')
    # Bug fix: `and` binds tighter than `or`, so the original condition
    # applied isinstance() to the first message check only — a plain string
    # response whose body contained one of the error texts would have been
    # misreported as "no privilege". Parenthesize the message checks.
    if isinstance(response, Exception) and (
            self.ERROR_NO_PRIVILEGE in str(response)
            or self.ERROR_NO_PRIVILEGE_INVALID_ID in str(response)  # Oracle 10g
            or self.ERROR_XML_DB_SECU_NOT_INST in str(response)):
        logging.info('Not enough privileges: {0}'.format(str(response)))
        self.args['print'].badNews("KO")
        return False
    else:
        self.args['print'].goodNews("OK")
        return True
quentinhardy/odat
[ 1302, 325, 1302, 8, 1393592151 ]
def get_name_and_doc(val):
    """Return the (name, doc) pair for a StatusCode value.

    Falls back to a generic severity name derived from the two top bits
    (bit 31 = Bad, bit 30 = Uncertain, otherwise Good) when the value is
    not a known code.
    """
    try:
        # EAFP: single dict lookup instead of `in` test plus a second lookup.
        return code_to_name_doc[val]
    except KeyError:
        doc = 'Unknown StatusCode value: {}'.format(val)
        if val & 1 << 31:
            return 'Bad', doc
        if val & 1 << 30:
            # NOTE(review): 'UncertainIn' looks like a truncated 'Uncertain';
            # kept byte-identical because callers may compare this exact string.
            return 'UncertainIn', doc
        return 'Good', doc
FreeOpcUa/python-opcua
[ 1191, 640, 1191, 412, 1424450813 ]
def from_service_account_info(cls, info: dict, *args, **kwargs):
    """Creates an instance of this client using the provided credentials
        info.

    Args:
        info (dict): The service account private key info.
        args: Additional arguments to pass to the constructor.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        IamCheckerAsyncClient: The constructed client.
    """
    # Reuse the sync client's classmethod implementation, rebinding it to the
    # async class via __func__ so it constructs an async client instance.
    return IamCheckerClient.from_service_account_info.__func__(IamCheckerAsyncClient, info, *args, **kwargs)  # type: ignore
googleapis/python-policy-troubleshooter
[ 2, 4, 2, 1, 1616607476 ]
def from_service_account_file(cls, filename: str, *args, **kwargs):
    """Creates an instance of this client using the provided credentials
        file.

    Args:
        filename (str): The path to the service account private key json
            file.
        args: Additional arguments to pass to the constructor.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        IamCheckerAsyncClient: The constructed client.
    """
    # Reuse the sync client's classmethod implementation, rebinding it to the
    # async class via __func__ so it constructs an async client instance.
    return IamCheckerClient.from_service_account_file.__func__(IamCheckerAsyncClient, filename, *args, **kwargs)  # type: ignore
googleapis/python-policy-troubleshooter
[ 2, 4, 2, 1, 1616607476 ]
def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[ClientOptions] = None
googleapis/python-policy-troubleshooter
[ 2, 4, 2, 1, 1616607476 ]
def transport(self) -> IamCheckerTransport:
    """Returns the transport used by the client instance.

    Returns:
        IamCheckerTransport: The transport used by the client instance.
    """
    # Property simply exposes the wrapped sync client's transport.
    return self._client.transport
googleapis/python-policy-troubleshooter
[ 2, 4, 2, 1, 1616607476 ]
def __init__( self, *, credentials: ga_credentials.Credentials = None, transport: Union[str, IamCheckerTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
googleapis/python-policy-troubleshooter
[ 2, 4, 2, 1, 1616607476 ]
def sample_troubleshoot_iam_policy():
    """Usage sample: call TroubleshootIamPolicy with an empty request and print the response."""
    # Create a client
    client = policytroubleshooter_v1.IamCheckerClient()

    # Initialize request argument(s)
    request = policytroubleshooter_v1.TroubleshootIamPolicyRequest(
    )

    # Make the request
    response = client.troubleshoot_iam_policy(request=request)

    # Handle the response
    print(response)
googleapis/python-policy-troubleshooter
[ 2, 4, 2, 1, 1616607476 ]
def get_model(model_name):
    """Get the corresponding model class based on the model string.

    API: model_builder, hps = get_model("fully_connected")
         ... modify/parse hparams
         model = model_builder(hps, num_classes)

    Args:
        model_name: (str) e.g. fully_connected.

    Returns:
        The model architecture (currently a flax Model) along with its default
        hparams.

    Raises:
        ValueError if model is unrecognized.
    """
    try:
        model_builder = _ALL_MODELS[model_name][0]
    except KeyError:
        # Suppress the KeyError context: the ValueError is the whole story.
        raise ValueError('Unrecognized model: {}'.format(model_name)) from None
    return model_builder
google/init2winit
[ 62, 9, 62, 37, 1621022129 ]
def dumps( self, story_steps: List[StoryStep], is_appendable: bool = False, is_test_story: bool = False,
RasaHQ/rasa_nlu
[ 15758, 4259, 15758, 111, 1476448069 ]
def dump( self, target: Union[Text, Path, yaml.StringIO], story_steps: List[StoryStep], is_appendable: bool = False, is_test_story: bool = False,
RasaHQ/rasa_nlu
[ 15758, 4259, 15758, 111, 1476448069 ]
def stories_to_yaml( self, story_steps: List[StoryStep], is_test_story: bool = False
RasaHQ/rasa_nlu
[ 15758, 4259, 15758, 111, 1476448069 ]
def process_story_step(self, story_step: StoryStep) -> OrderedDict:
    """Converts a single story step into an ordered dict.

    Args:
        story_step: A single story step to be converted to the dict.

    Returns:
        Dict with a story step.
    """
    result = OrderedDict()
    result[KEY_STORY_NAME] = story_step.block_name

    # Start checkpoints first, then the (filtered) events, then end checkpoints.
    steps = self.process_checkpoints(story_step.start_checkpoints)
    for event in story_step.events:
        if self._filter_event(event):
            converted = self.process_event(event)
            if converted:
                steps.append(converted)
    steps.extend(self.process_checkpoints(story_step.end_checkpoints))

    result[KEY_STEPS] = steps
    return result
RasaHQ/rasa_nlu
[ 15758, 4259, 15758, 111, 1476448069 ]
def stories_contain_loops(stories: List[StoryStep]) -> bool:
    """Checks if the stories contain at least one active loop.

    Args:
        stories: Stories steps.

    Returns:
        `True` if the `stories` contain at least one active loop.
        `False` otherwise.
    """
    # The original built a list of per-story event lists inside any(),
    # materializing every event; a flat generator of booleans yields the
    # same truth value, short-circuits, and allocates nothing.
    return any(
        isinstance(event, ActiveLoop)
        for story_step in stories
        for event in story_step.events
    )
RasaHQ/rasa_nlu
[ 15758, 4259, 15758, 111, 1476448069 ]
def process_user_utterance( user_utterance: UserUttered, is_test_story: bool = False
RasaHQ/rasa_nlu
[ 15758, 4259, 15758, 111, 1476448069 ]
def process_action(action: ActionExecuted) -> Optional[OrderedDict]:
    """Converts a single action into an ordered dict.

    Args:
        action: Original action object.

    Returns:
        Dict with an action.
    """
    # Rule-snippet pseudo-actions are an internal marker and never serialized.
    if action.action_name == rasa.shared.core.constants.RULE_SNIPPET_ACTION_NAME:
        return None

    # CommentedMap (ruamel.yaml) so an end-of-line comment can be attached.
    result = CommentedMap()
    if action.action_name:
        result[KEY_ACTION] = action.action_name
    elif action.action_text:
        # End-to-end stories store the literal bot utterance, not an action name.
        result[KEY_BOT_END_TO_END_MESSAGE] = action.action_text

    if hasattr(action, "inline_comment"):
        comment = action.inline_comment()
        # Attach the comment to whichever key was actually emitted above.
        if KEY_ACTION in result and comment:
            result.yaml_add_eol_comment(comment, KEY_ACTION)
        elif KEY_BOT_END_TO_END_MESSAGE in result and comment:
            result.yaml_add_eol_comment(comment, KEY_BOT_END_TO_END_MESSAGE)

    return result
RasaHQ/rasa_nlu
[ 15758, 4259, 15758, 111, 1476448069 ]
def process_slot(event: SlotSet) -> OrderedDict:
    """Converts a single `SlotSet` event into an ordered dict.

    Args:
        event: Original `SlotSet` event.

    Returns:
        OrderedDict with an `SlotSet` event.
    """
    slot_entry = {event.key: event.value}
    return OrderedDict([(KEY_SLOT_NAME, [slot_entry])])
RasaHQ/rasa_nlu
[ 15758, 4259, 15758, 111, 1476448069 ]
def process_checkpoints(checkpoints: List[Checkpoint]) -> List[OrderedDict]:
    """Converts checkpoints event into an ordered dict.

    Args:
        checkpoints: List of original checkpoint.

    Returns:
        List of converted checkpoints.
    """
    converted = []
    for checkpoint in checkpoints:
        # The implicit story-start checkpoint is never serialized.
        if checkpoint.name == STORY_START:
            continue
        entry = OrderedDict([(KEY_CHECKPOINT, checkpoint.name)])
        if checkpoint.conditions:
            entry[KEY_CHECKPOINT_SLOTS] = [
                {slot_name: slot_value}
                for slot_name, slot_value in checkpoint.conditions.items()
            ]
        converted.append(entry)
    return converted
RasaHQ/rasa_nlu
[ 15758, 4259, 15758, 111, 1476448069 ]
def process_active_loop(event: ActiveLoop) -> OrderedDict:
    """Converts ActiveLoop event into an ordered dict.

    Args:
        event: ActiveLoop event.

    Returns:
        Converted event.
    """
    entry = OrderedDict()
    entry[KEY_ACTIVE_LOOP] = event.name
    return entry
RasaHQ/rasa_nlu
[ 15758, 4259, 15758, 111, 1476448069 ]
def add(isamAppliance, service_name, name, value, check_mode=False, force=False):
    """Creates a service attribute"""
    check_value, warnings = _check(isamAppliance, service_name, name)
    # Guard clause: attribute already present and not forced -> nothing to do.
    if force is not True and check_value is not False:
        return isamAppliance.create_return_object(warnings=warnings)
    if check_mode is True:
        # Report the pending change without touching the appliance.
        return isamAppliance.create_return_object(changed=True, warnings=warnings)
    return isamAppliance.invoke_post("Creating a service attribute",
                                     "{0}{1}/attributes".format(module_uri, service_name),
                                     {
                                         "name": name,
                                         "value": value
                                     },
                                     requires_version=requires_versions,
                                     requires_modules=requires_modules,
                                     requires_model=requires_model)
IBM-Security/ibmsecurity
[ 45, 74, 45, 50, 1489855253 ]
def get(isamAppliance, service_name, attribute_name):
    """
    Retrieving a service attribute
    """
    # Plain GET on {module_uri}{service}/attributes/{attribute}; version,
    # module and model requirements are enforced by the appliance wrapper.
    return isamAppliance.invoke_get("Retrieving a service attribute",
                                    "{0}{1}/attributes/{2}".format(module_uri, service_name, attribute_name),
                                    requires_version=requires_versions,
                                    requires_modules=requires_modules,
                                    requires_model=requires_model)
IBM-Security/ibmsecurity
[ 45, 74, 45, 50, 1489855253 ]
def update(isamAppliance, service_name, attribute_name, attribute_value, check_mode=False, force=False):
    """Updating a service attribute"""
    check_value, warnings = _check_add(isamAppliance, service_name, attribute_name, attribute_value)
    # Guard clause: no update needed and not forced -> nothing to do.
    if force is not True and check_value is not True:
        return isamAppliance.create_return_object(warnings=warnings)
    if check_mode is True:
        # Report the pending change without touching the appliance.
        return isamAppliance.create_return_object(changed=True, warnings=warnings)
    return isamAppliance.invoke_put("Updating a service attribute",
                                    "{0}{1}/attributes/{2}".format(module_uri, service_name, attribute_name),
                                    {
                                        "value": attribute_value
                                    },
                                    requires_modules=requires_modules,
                                    requires_version=requires_versions,
                                    requires_model=requires_model)
IBM-Security/ibmsecurity
[ 45, 74, 45, 50, 1489855253 ]
def compare(isamAppliance1, service_name1, isamAppliance2, service_name2):
    """
    Compare configuration between two appliances
    """
    attrs1 = get_all(isamAppliance1, service_name1)
    attrs2 = get_all(isamAppliance2, service_name2)
    # No volatile keys need to be ignored for this endpoint.
    return ibmsecurity.utilities.tools.json_compare(attrs1, attrs2, deleted_keys=[])
IBM-Security/ibmsecurity
[ 45, 74, 45, 50, 1489855253 ]
def main():
    """Entry point: construct the backup CLI wrapper and dispatch to it."""
    cli = DerivaBackupCLI(
        DESC,
        INFO,
        hostname_required=True,
        config_file_required=False,
    )
    return cli.main()
informatics-isi-edu/deriva-py
[ 3, 4, 3, 25, 1484858931 ]
def _filepath_to_module(filepath: str):
    """Convert a ``.py`` file path into a dotted module path relative to ROOT_DIR."""
    rel = os.path.relpath(os.path.abspath(filepath), ROOT_DIR)
    suffix = ".py"
    if rel.endswith(suffix):
        rel = rel[: -len(suffix)]
    # Paths produced by glob use '/' separators here.
    return rel.replace("/", ".")
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def _load_package_data(package_paths: Iterable[str]):
    """
    Load and schema-validate every provider.yaml file.

    :param package_paths: iterable of paths to provider.yaml files
    :return: mapping of ROOT_DIR-relative path -> parsed provider dict
    :raises Exception: if a provider file fails schema validation
    """
    schema = _load_schema()
    result = {}
    for provider_yaml_path in package_paths:
        with open(provider_yaml_path) as yaml_file:
            provider = yaml.load(yaml_file, SafeLoader)
        rel_path = os.path.relpath(provider_yaml_path, ROOT_DIR)
        try:
            jsonschema.validate(provider, schema=schema)
        except jsonschema.ValidationError as ex:
            # Chain the original error so the schema-violation details
            # (failing path, message) are preserved in the traceback
            # instead of being replaced by a bare "Unable to parse".
            raise Exception(f"Unable to parse: {rel_path}.") from ex
        result[rel_path] = provider
    return result
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def check_integration_duplicates(yaml_files: Dict[str, Dict]):
    """Integration names must be globally unique."""
    print("Checking integration duplicates")
    name_counts = Counter(get_all_integration_names(yaml_files))
    duplicates = [(name, count) for name, count in name_counts.items() if count > 1]
    if duplicates:
        print(
            "Duplicate integration names found. Integration names must be globally unique. "
            "Please delete duplicates."
        )
        print(tabulate(duplicates, headers=["Integration name", "Number of occurrences"]))
        # Non-zero exit so CI fails on duplicates.
        sys.exit(3)
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def check_if_objects_belongs_to_package( object_names: List[str], provider_package: str, yaml_file_path: str, resource_type: str
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def parse_module_data(provider_data, resource_type, yaml_file_path):
    """
    Gather the expected python modules and declared resources for one provider.

    :param provider_data: parsed provider.yaml contents
    :param resource_type: resource kind to scan for (e.g. "hooks", "operators")
    :param yaml_file_path: ROOT_DIR-relative path of the provider.yaml file
    :return: (expected module names, dotted provider package, declared resource list)
    """
    package_dir = ROOT_DIR + "/" + os.path.dirname(yaml_file_path)
    provider_package = os.path.dirname(yaml_file_path).replace(os.sep, ".")
    # Resource modules may live directly under the package or one level deeper.
    py_files = chain(
        glob(f"{package_dir}/**/{resource_type}/*.py"),
        glob(f"{package_dir}/{resource_type}/*.py"),
    )
    expected_modules = {
        _filepath_to_module(path) for path in py_files if not path.endswith("/__init__.py")
    }
    resource_data = provider_data.get(resource_type, [])
    return expected_modules, provider_package, resource_data
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def check_duplicates_in_integrations_names_of_hooks_sensors_operators(yaml_files: Dict[str, Dict]):
    """
    Check that no integration name is repeated within a provider's
    sensors/operators/hooks lists, appending one error per duplicated name.

    :param yaml_files: mapping of provider.yaml path -> parsed provider dict
    """
    print("Checking for duplicates in list of {sensors, hooks, operators}")
    for (yaml_file_path, provider_data), resource_type in product(
        yaml_files.items(), ["sensors", "operators", "hooks"]
    ):
        resource_data = provider_data.get(resource_type, [])
        # Counter gives O(n) duplicate detection and, unlike the previous
        # per-element .count() scan, reports each duplicated name only once.
        name_counts = Counter(r.get("integration-name", "") for r in resource_data)
        for integration, count in name_counts.items():
            if count > 1:
                errors.append(
                    f"Duplicated content of '{resource_type}/integration-name/{integration}' "
                    f"in file: {yaml_file_path}"
                )
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def check_hook_classes(yaml_files: Dict[str, Dict]):
    """Verify declared hook class names belong to their provider package."""
    print("Checking connection classes belong to package")
    resource_type = 'hook-class-names'
    for yaml_file_path, provider_data in yaml_files.items():
        hook_class_names = provider_data.get(resource_type)
        if not hook_class_names:
            continue
        provider_package = os.path.dirname(yaml_file_path).replace(os.sep, ".")
        check_if_objects_belongs_to_package(
            hook_class_names, provider_package, yaml_file_path, resource_type
        )
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def check_invalid_integration(yaml_files: Dict[str, Dict]):
    """Every integration name referenced by resources/transfers must be registered."""
    print("Detect unregistered integrations")
    all_integration_names = set(get_all_integration_names(yaml_files))

    # Sensors/operators/hooks reference an integration via 'integration-name'.
    for (yaml_file_path, provider_data), resource_type in product(
        yaml_files.items(), ["sensors", "operators", "hooks"]
    ):
        used_names = {resource['integration-name'] for resource in provider_data.get(resource_type, [])}
        invalid_names = used_names - all_integration_names
        if invalid_names:
            errors.append(
                f"Incorrect content of key '{resource_type}/integration-name' in file: {yaml_file_path}. "
                f"Invalid values: {invalid_names}"
            )

    # Transfers reference integrations via source/target keys.
    for (yaml_file_path, provider_data), key in product(
        yaml_files.items(), ['source-integration-name', 'target-integration-name']
    ):
        used_names = {transfer[key] for transfer in provider_data.get('transfers', [])}
        invalid_names = used_names - all_integration_names
        if invalid_names:
            errors.append(
                f"Incorrect content of key 'transfers/{key}' in file: {yaml_file_path}. "
                f"Invalid values: {invalid_names}"
            )
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def check_unique_provider_name(yaml_files: Dict[str, Dict]):
    """
    Check that provider names are unique across all provider.yaml files,
    appending a single error listing any duplicates.

    :param yaml_files: mapping of provider.yaml path -> parsed provider dict
    """
    # Counter gives O(n) duplicate detection instead of an O(n^2)
    # list.count() scan per element.
    name_counts = Counter(d['name'] for d in yaml_files.values())
    duplicates = {name for name, count in name_counts.items() if count > 1}
    if duplicates:
        errors.append(f"Provider name must be unique. Duplicates: {duplicates}")
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def check_providers_have_all_documentation_files(yaml_files: Dict[str, Dict]):
    """Each provider package must ship the standard documentation pages under DOCS_DIR."""
    expected_files = ["commits.rst", "index.rst", "installing-providers-from-sources.rst"]
    for package_info in yaml_files.values():
        package_name = package_info['package-name']
        provider_dir = os.path.join(DOCS_DIR, package_name)
        for expected in expected_files:
            if os.path.isfile(os.path.join(provider_dir, expected)):
                continue
            errors.append(
                f"The provider {package_name} misses `{expected}` in documentation. "
                f"Please add the file to {provider_dir}"
            )
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def post_create(self):
    """Add this snippet to the dashboard index of every member of its organization."""
    # Imported locally to avoid a circular module dependency.
    from codebox.apps.organizations.models import Organization

    members = Organization.objects.get(self.org).get_all_members()
    for member in members:
        Snippet.objects.add_to_index(self.pk, dashboard=member.pk)
disqus/codebox
[ 13, 1, 13, 4, 1300643625 ]
def _process_request(self):
    """Log the request's headers and body, then reply with a plain-text 200 OK."""
    self.log_message("<<<HEADERS")
    self.log_message(self.headers.as_string().strip())
    self.log_message("HEADERS>>>")
    self.log_message("<<<BODY")
    content_length = self.headers['Content-Length']
    if content_length is None:
        # Without a Content-Length we cannot safely read the body.
        self.log_message("ERROR: missing Content-Length")
    else:
        body = self.rfile.read(int(content_length))
        self.log_message(body.decode("utf-8"))
    self.log_message("BODY>>>")
    self.send_response(200)
    self.send_header('Content-Type', 'text/plain;charset=utf-8')
    self.end_headers()
    self.wfile.write("OK\n".encode("utf-8"))
zentralopensource/zentral
[ 671, 87, 671, 23, 1445349783 ]
def do_POST(self):
    """Handle an HTTP POST by logging the request and replying 200 OK."""
    self._process_request()
zentralopensource/zentral
[ 671, 87, 671, 23, 1445349783 ]
def __init__(self, hyp=None):
    """
    :param hyp: (hyper)parameters of the likelihood model
    """
    # Store the hyperparameters after an inverse-softplus transform —
    # presumably an unconstrained parameterisation for optimisation
    # (softplus_inv is defined elsewhere; verify its exact contract there).
    self.hyp = softplus_inv(hyp)
AaltoML/kalman-jax
[ 85, 12, 85, 2, 1585896736 ]
def evaluate_log_likelihood(self, y, f, hyp=None):
    """
    Evaluate the log-likelihood log p(y|f) directly.

    Abstract hook: concrete likelihood subclasses must override this.

    :param y: observed data
    :param f: latent function value(s)
    :param hyp: likelihood hyperparameter(s)
    :raises NotImplementedError: always, in this base implementation
    """
    raise NotImplementedError('direct evaluation of this log-likelihood is not implemented')
AaltoML/kalman-jax
[ 85, 12, 85, 2, 1585896736 ]
def moment_match_quadrature(self, y, m, v, hyp=None, power=1.0, num_quad_points=20):
    """
    Perform moment matching via Gauss-Hermite quadrature.
    Moment matching involves computing the log partition function, logZₙ, and its derivatives w.r.t. the cavity mean
        logZₙ = log ∫ pᵃ(yₙ|fₙ) 𝓝(fₙ|mₙ,vₙ) dfₙ
    with EP power a.
    :param y: observed data (yₙ) [scalar]
    :param m: cavity mean (mₙ) [scalar]
    :param v: cavity variance (vₙ) [scalar]
    :param hyp: likelihood hyperparameter [scalar]
    :param power: EP power / fraction (a) [scalar]
    :param num_quad_points: the number of Gauss-Hermite sigma points to use during quadrature [scalar]
    :return:
        lZ: the log partition function, logZₙ  [scalar]
        site_mean: the approximate likelihood (site) mean, mₙ - (dlogZₙ/dmₙ)/(d²logZₙ/dmₙ²)  [scalar]
        site_var: the approximate likelihood (site) variance, -a(vₙ + 1/(d²logZₙ/dmₙ²))  [scalar]
    """
    x, w = hermgauss(num_quad_points)  # Gauss-Hermite sigma points and weights
    w = w / np.sqrt(pi)  # scale weights by 1/√π
    sigma_points = np.sqrt(2) * np.sqrt(v) * x + m  # scale locations according to cavity dist.
    # pre-compute wᵢ pᵃ(yₙ|xᵢ√(2vₙ) + mₙ)
    weighted_likelihood_eval = w * self.evaluate_likelihood(y, sigma_points, hyp) ** power

    # a different approach, based on the log-likelihood, which can be more stable:
    # ll = self.evaluate_log_likelihood(y, sigma_points)
    # lmax = np.max(ll)
    # weighted_likelihood_eval = np.exp(lmax * power) * w * np.exp(power * (ll - lmax))

    # Compute partition function via quadrature:
    # Zₙ = ∫ pᵃ(yₙ|fₙ) 𝓝(fₙ|mₙ,vₙ) dfₙ
    #    ≈ ∑ᵢ wᵢ pᵃ(yₙ|xᵢ√(2vₙ) + mₙ)
    Z = np.sum(
        weighted_likelihood_eval
    )
    lZ = np.log(Z)
    Zinv = 1.0 / Z

    # Compute derivative of partition function via quadrature:
    # dZₙ/dmₙ = ∫ (fₙ-mₙ) vₙ⁻¹ pᵃ(yₙ|fₙ) 𝓝(fₙ|mₙ,vₙ) dfₙ
    #         ≈ ∑ᵢ wᵢ (fₙ-mₙ) vₙ⁻¹ pᵃ(yₙ|xᵢ√(2vₙ) + mₙ)
    dZ = np.sum(
        (sigma_points - m) / v
        * weighted_likelihood_eval
    )
    # dlogZₙ/dmₙ = (dZₙ/dmₙ) / Zₙ
    dlZ = Zinv * dZ
    # Compute second derivative of partition function via quadrature:
    # d²Zₙ/dmₙ² = ∫ [(fₙ-mₙ)² vₙ⁻² - vₙ⁻¹] pᵃ(yₙ|fₙ) 𝓝(fₙ|mₙ,vₙ) dfₙ
    #           ≈ ∑ᵢ wᵢ [(fₙ-mₙ)² vₙ⁻² - vₙ⁻¹] pᵃ(yₙ|xᵢ√(2vₙ) + mₙ)
    d2Z = np.sum(
        ((sigma_points - m) ** 2 / v ** 2 - 1.0 / v)
        * weighted_likelihood_eval
    )
    # d²logZₙ/dmₙ² = d[(dZₙ/dmₙ) / Zₙ]/dmₙ
    #              = (d²Zₙ/dmₙ² * Zₙ - (dZₙ/dmₙ)²) / Zₙ²
    #              = d²Zₙ/dmₙ² / Zₙ - (dlogZₙ/dmₙ)²
    d2lZ = -dlZ ** 2 + Zinv * d2Z
    site_mean = m - dlZ / d2lZ  # approx. likelihood (site) mean (see Rasmussen & Williams p75)
    site_var = -power * (v + 1 / d2lZ)  # approx. likelihood (site) variance
    return lZ, site_mean, site_var
AaltoML/kalman-jax
[ 85, 12, 85, 2, 1585896736 ]
def moment_match(self, y, m, v, hyp=None, power=1.0):
    """
    Default moment-matching implementation: likelihoods without a custom
    closed-form method fall back to Gauss-Hermite quadrature.
    """
    return self.moment_match_quadrature(y, m, v, hyp=hyp, power=power)
AaltoML/kalman-jax
[ 85, 12, 85, 2, 1585896736 ]