Dataset schema (field name, type, observed min/max):

    repository_name              string    lengths 5 to 67
    func_path_in_repository      string    lengths 4 to 234
    func_name                    string    lengths 0 to 314
    whole_func_string            string    lengths 52 to 3.87M
    language                     string    6 distinct values
    func_code_string             string    lengths 39 to 1.84M
    func_code_tokens             list      lengths 15 to 672k
    func_documentation_string    string    lengths 1 to 47.2k
    func_documentation_tokens    list      lengths 1 to 3.92k
    split_name                   string    1 distinct value
    func_code_url                string    lengths 85 to 339
gem/oq-engine
openquake/hmtk/seismicity/gcmt_catalogue.py
GCMTCatalogue.select_catalogue_events
def select_catalogue_events(self, id0):
    '''
    Orders the events in the catalogue according to an indexing vector

    :param np.ndarray id0:
        Pointer array indicating the locations of selected events
    '''
    for key in self.data.keys():
        if isinstance(
                self.data[key], np.ndarray) and len(self.data[key]) > 0:
            # Dictionary element is numpy array - use logical indexing
            self.data[key] = self.data[key][id0]
        elif isinstance(
                self.data[key], list) and len(self.data[key]) > 0:
            # Dictionary element is list
            self.data[key] = [self.data[key][iloc] for iloc in id0]
        else:
            continue

    if len(self.gcmts) > 0:
        self.gcmts = [self.gcmts[iloc] for iloc in id0]
        self.number_gcmts = self.get_number_tensors()
python
def select_catalogue_events(self, id0):
    for key in self.data.keys():
        if isinstance(
                self.data[key], np.ndarray) and len(self.data[key]) > 0:
            self.data[key] = self.data[key][id0]
        elif isinstance(
                self.data[key], list) and len(self.data[key]) > 0:
            self.data[key] = [self.data[key][iloc] for iloc in id0]
        else:
            continue
    if len(self.gcmts) > 0:
        self.gcmts = [self.gcmts[iloc] for iloc in id0]
        self.number_gcmts = self.get_number_tensors()
[ "def", "select_catalogue_events", "(", "self", ",", "id0", ")", ":", "for", "key", "in", "self", ".", "data", ".", "keys", "(", ")", ":", "if", "isinstance", "(", "self", ".", "data", "[", "key", "]", ",", "np", ".", "ndarray", ")", "and", "len", "(", "self", ".", "data", "[", "key", "]", ")", ">", "0", ":", "# Dictionary element is numpy array - use logical indexing", "self", ".", "data", "[", "key", "]", "=", "self", ".", "data", "[", "key", "]", "[", "id0", "]", "elif", "isinstance", "(", "self", ".", "data", "[", "key", "]", ",", "list", ")", "and", "len", "(", "self", ".", "data", "[", "key", "]", ")", ">", "0", ":", "# Dictionary element is list", "self", ".", "data", "[", "key", "]", "=", "[", "self", ".", "data", "[", "key", "]", "[", "iloc", "]", "for", "iloc", "in", "id0", "]", "else", ":", "continue", "if", "len", "(", "self", ".", "gcmts", ")", ">", "0", ":", "self", ".", "gcmts", "=", "[", "self", ".", "gcmts", "[", "iloc", "]", "for", "iloc", "in", "id0", "]", "self", ".", "number_gcmts", "=", "self", ".", "get_number_tensors", "(", ")" ]
Orders the events in the catalogue according to an indexing vector :param np.ndarray id0: Pointer array indicating the locations of selected events
[ "Orders", "the", "events", "in", "the", "catalogue", "according", "to", "an", "indexing", "vector" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/gcmt_catalogue.py#L424-L445
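A minimal sketch of the indexing this method performs, on a hypothetical two-column catalogue (the real GCMTCatalogue.data dictionary carries many more keys; the names here are illustrative only):

    import numpy as np

    data = {'magnitude': np.array([5.1, 6.3, 4.8, 7.0]),
            'eventID': ['a', 'b', 'c', 'd']}
    id0 = np.array([1, 3])  # keep only the 2nd and 4th events

    data['magnitude'] = data['magnitude'][id0]           # numpy fancy indexing
    data['eventID'] = [data['eventID'][i] for i in id0]  # list selection
    print(data)  # {'magnitude': array([6.3, 7. ]), 'eventID': ['b', 'd']}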
gem/oq-engine
openquake/hmtk/seismicity/gcmt_catalogue.py
GCMTCatalogue.gcmt_to_simple_array
def gcmt_to_simple_array(self, centroid_location=True):
    """
    Converts the GCMT catalogue to a simple array of
    [ID, year, month, day, hour, minute, second, long., lat., depth,
    Mw, moment, f_clvd, erel, strike1, dip1, rake1, strike2, dip2,
    rake2, b-eigenvalue, b-azimuth, b-plunge, p-eigenvalue, p-azimuth,
    p-plunge, t-eigenvalue, t-azimuth, t-plunge]
    """
    catalogue = np.zeros([self.get_number_tensors(), 29], dtype=float)
    for iloc, tensor in enumerate(self.gcmts):
        catalogue[iloc, 0] = iloc
        if centroid_location:
            catalogue[iloc, 1] = float(tensor.centroid.date.year)
            catalogue[iloc, 2] = float(tensor.centroid.date.month)
            catalogue[iloc, 3] = float(tensor.centroid.date.day)
            catalogue[iloc, 4] = float(tensor.centroid.time.hour)
            catalogue[iloc, 5] = float(tensor.centroid.time.minute)
            # fractional seconds (builtin float: np.float was removed
            # from NumPy 1.24 onwards)
            catalogue[iloc, 6] = np.round(
                float(tensor.centroid.time.second) +
                float(tensor.centroid.time.microsecond) / 1000000., 2)
            catalogue[iloc, 7] = tensor.centroid.longitude
            catalogue[iloc, 8] = tensor.centroid.latitude
            catalogue[iloc, 9] = tensor.centroid.depth
        else:
            catalogue[iloc, 1] = float(tensor.hypocentre.date.year)
            catalogue[iloc, 2] = float(tensor.hypocentre.date.month)
            catalogue[iloc, 3] = float(tensor.hypocentre.date.day)
            catalogue[iloc, 4] = float(tensor.hypocentre.time.hour)
            catalogue[iloc, 5] = float(tensor.hypocentre.time.minute)
            # fractional seconds taken from the hypocentre time (not
            # the centroid time)
            catalogue[iloc, 6] = np.round(
                float(tensor.hypocentre.time.second) +
                float(tensor.hypocentre.time.microsecond) / 1000000., 2)
            catalogue[iloc, 7] = tensor.hypocentre.longitude
            catalogue[iloc, 8] = tensor.hypocentre.latitude
            catalogue[iloc, 9] = tensor.hypocentre.depth
        catalogue[iloc, 10] = tensor.magnitude
        catalogue[iloc, 11] = tensor.moment
        catalogue[iloc, 12] = tensor.f_clvd
        catalogue[iloc, 13] = tensor.e_rel
        # Nodal planes
        catalogue[iloc, 14] = tensor.nodal_planes.nodal_plane_1['strike']
        catalogue[iloc, 15] = tensor.nodal_planes.nodal_plane_1['dip']
        catalogue[iloc, 16] = tensor.nodal_planes.nodal_plane_1['rake']
        catalogue[iloc, 17] = tensor.nodal_planes.nodal_plane_2['strike']
        catalogue[iloc, 18] = tensor.nodal_planes.nodal_plane_2['dip']
        catalogue[iloc, 19] = tensor.nodal_planes.nodal_plane_2['rake']
        # Principal axes
        catalogue[iloc, 20] = tensor.principal_axes.b_axis['eigenvalue']
        catalogue[iloc, 21] = tensor.principal_axes.b_axis['azimuth']
        catalogue[iloc, 22] = tensor.principal_axes.b_axis['plunge']
        catalogue[iloc, 23] = tensor.principal_axes.p_axis['eigenvalue']
        catalogue[iloc, 24] = tensor.principal_axes.p_axis['azimuth']
        catalogue[iloc, 25] = tensor.principal_axes.p_axis['plunge']
        catalogue[iloc, 26] = tensor.principal_axes.t_axis['eigenvalue']
        catalogue[iloc, 27] = tensor.principal_axes.t_axis['azimuth']
        catalogue[iloc, 28] = tensor.principal_axes.t_axis['plunge']
    return catalogue
python
def gcmt_to_simple_array(self, centroid_location=True):
    catalogue = np.zeros([self.get_number_tensors(), 29], dtype=float)
    for iloc, tensor in enumerate(self.gcmts):
        catalogue[iloc, 0] = iloc
        if centroid_location:
            catalogue[iloc, 1] = float(tensor.centroid.date.year)
            catalogue[iloc, 2] = float(tensor.centroid.date.month)
            catalogue[iloc, 3] = float(tensor.centroid.date.day)
            catalogue[iloc, 4] = float(tensor.centroid.time.hour)
            catalogue[iloc, 5] = float(tensor.centroid.time.minute)
            catalogue[iloc, 6] = np.round(
                float(tensor.centroid.time.second) +
                float(tensor.centroid.time.microsecond) / 1000000., 2)
            catalogue[iloc, 7] = tensor.centroid.longitude
            catalogue[iloc, 8] = tensor.centroid.latitude
            catalogue[iloc, 9] = tensor.centroid.depth
        else:
            catalogue[iloc, 1] = float(tensor.hypocentre.date.year)
            catalogue[iloc, 2] = float(tensor.hypocentre.date.month)
            catalogue[iloc, 3] = float(tensor.hypocentre.date.day)
            catalogue[iloc, 4] = float(tensor.hypocentre.time.hour)
            catalogue[iloc, 5] = float(tensor.hypocentre.time.minute)
            catalogue[iloc, 6] = np.round(
                float(tensor.hypocentre.time.second) +
                float(tensor.hypocentre.time.microsecond) / 1000000., 2)
            catalogue[iloc, 7] = tensor.hypocentre.longitude
            catalogue[iloc, 8] = tensor.hypocentre.latitude
            catalogue[iloc, 9] = tensor.hypocentre.depth
        catalogue[iloc, 10] = tensor.magnitude
        catalogue[iloc, 11] = tensor.moment
        catalogue[iloc, 12] = tensor.f_clvd
        catalogue[iloc, 13] = tensor.e_rel
        catalogue[iloc, 14] = tensor.nodal_planes.nodal_plane_1['strike']
        catalogue[iloc, 15] = tensor.nodal_planes.nodal_plane_1['dip']
        catalogue[iloc, 16] = tensor.nodal_planes.nodal_plane_1['rake']
        catalogue[iloc, 17] = tensor.nodal_planes.nodal_plane_2['strike']
        catalogue[iloc, 18] = tensor.nodal_planes.nodal_plane_2['dip']
        catalogue[iloc, 19] = tensor.nodal_planes.nodal_plane_2['rake']
        catalogue[iloc, 20] = tensor.principal_axes.b_axis['eigenvalue']
        catalogue[iloc, 21] = tensor.principal_axes.b_axis['azimuth']
        catalogue[iloc, 22] = tensor.principal_axes.b_axis['plunge']
        catalogue[iloc, 23] = tensor.principal_axes.p_axis['eigenvalue']
        catalogue[iloc, 24] = tensor.principal_axes.p_axis['azimuth']
        catalogue[iloc, 25] = tensor.principal_axes.p_axis['plunge']
        catalogue[iloc, 26] = tensor.principal_axes.t_axis['eigenvalue']
        catalogue[iloc, 27] = tensor.principal_axes.t_axis['azimuth']
        catalogue[iloc, 28] = tensor.principal_axes.t_axis['plunge']
    return catalogue
[ "def", "gcmt_to_simple_array", "(", "self", ",", "centroid_location", "=", "True", ")", ":", "catalogue", "=", "np", ".", "zeros", "(", "[", "self", ".", "get_number_tensors", "(", ")", ",", "29", "]", ",", "dtype", "=", "float", ")", "for", "iloc", ",", "tensor", "in", "enumerate", "(", "self", ".", "gcmts", ")", ":", "catalogue", "[", "iloc", ",", "0", "]", "=", "iloc", "if", "centroid_location", ":", "catalogue", "[", "iloc", ",", "1", "]", "=", "float", "(", "tensor", ".", "centroid", ".", "date", ".", "year", ")", "catalogue", "[", "iloc", ",", "2", "]", "=", "float", "(", "tensor", ".", "centroid", ".", "date", ".", "month", ")", "catalogue", "[", "iloc", ",", "3", "]", "=", "float", "(", "tensor", ".", "centroid", ".", "date", ".", "day", ")", "catalogue", "[", "iloc", ",", "4", "]", "=", "float", "(", "tensor", ".", "centroid", ".", "time", ".", "hour", ")", "catalogue", "[", "iloc", ",", "5", "]", "=", "float", "(", "tensor", ".", "centroid", ".", "time", ".", "minute", ")", "catalogue", "[", "iloc", ",", "6", "]", "=", "np", ".", "round", "(", "np", ".", "float", "(", "tensor", ".", "centroid", ".", "time", ".", "second", ")", "+", "np", ".", "float", "(", "tensor", ".", "centroid", ".", "time", ".", "microsecond", ")", "/", "1000000.", ",", "2", ")", "catalogue", "[", "iloc", ",", "7", "]", "=", "tensor", ".", "centroid", ".", "longitude", "catalogue", "[", "iloc", ",", "8", "]", "=", "tensor", ".", "centroid", ".", "latitude", "catalogue", "[", "iloc", ",", "9", "]", "=", "tensor", ".", "centroid", ".", "depth", "else", ":", "catalogue", "[", "iloc", ",", "1", "]", "=", "float", "(", "tensor", ".", "hypocentre", ".", "date", ".", "year", ")", "catalogue", "[", "iloc", ",", "2", "]", "=", "float", "(", "tensor", ".", "hypocentre", ".", "date", ".", "month", ")", "catalogue", "[", "iloc", ",", "3", "]", "=", "float", "(", "tensor", ".", "hypocentre", ".", "date", ".", "day", ")", "catalogue", "[", "iloc", ",", "4", "]", "=", "float", "(", "tensor", ".", "hypocentre", ".", "time", ".", "hour", ")", "catalogue", "[", "iloc", ",", "5", "]", "=", "float", "(", "tensor", ".", "hypocentre", ".", "time", ".", "minute", ")", "catalogue", "[", "iloc", ",", "6", "]", "=", "np", ".", "round", "(", "np", ".", "float", "(", "tensor", ".", "centroid", ".", "time", ".", "second", ")", "+", "np", ".", "float", "(", "tensor", ".", "centroid", ".", "time", ".", "microsecond", ")", "/", "1000000.", ",", "2", ")", "catalogue", "[", "iloc", ",", "7", "]", "=", "tensor", ".", "hypocentre", ".", "longitude", "catalogue", "[", "iloc", ",", "8", "]", "=", "tensor", ".", "hypocentre", ".", "latitude", "catalogue", "[", "iloc", ",", "9", "]", "=", "tensor", ".", "hypocentre", ".", "depth", "catalogue", "[", "iloc", ",", "10", "]", "=", "tensor", ".", "magnitude", "catalogue", "[", "iloc", ",", "11", "]", "=", "tensor", ".", "moment", "catalogue", "[", "iloc", ",", "12", "]", "=", "tensor", ".", "f_clvd", "catalogue", "[", "iloc", ",", "13", "]", "=", "tensor", ".", "e_rel", "# Nodal planes", "catalogue", "[", "iloc", ",", "14", "]", "=", "tensor", ".", "nodal_planes", ".", "nodal_plane_1", "[", "'strike'", "]", "catalogue", "[", "iloc", ",", "15", "]", "=", "tensor", ".", "nodal_planes", ".", "nodal_plane_1", "[", "'dip'", "]", "catalogue", "[", "iloc", ",", "16", "]", "=", "tensor", ".", "nodal_planes", ".", "nodal_plane_1", "[", "'rake'", "]", "catalogue", "[", "iloc", ",", "17", "]", "=", "tensor", ".", "nodal_planes", ".", "nodal_plane_2", "[", "'strike'", "]", "catalogue", "[", "iloc", ",", "18", 
"]", "=", "tensor", ".", "nodal_planes", ".", "nodal_plane_2", "[", "'dip'", "]", "catalogue", "[", "iloc", ",", "19", "]", "=", "tensor", ".", "nodal_planes", ".", "nodal_plane_2", "[", "'rake'", "]", "# Principal axes", "catalogue", "[", "iloc", ",", "20", "]", "=", "tensor", ".", "principal_axes", ".", "b_axis", "[", "'eigenvalue'", "]", "catalogue", "[", "iloc", ",", "21", "]", "=", "tensor", ".", "principal_axes", ".", "b_axis", "[", "'azimuth'", "]", "catalogue", "[", "iloc", ",", "22", "]", "=", "tensor", ".", "principal_axes", ".", "b_axis", "[", "'plunge'", "]", "catalogue", "[", "iloc", ",", "23", "]", "=", "tensor", ".", "principal_axes", ".", "p_axis", "[", "'eigenvalue'", "]", "catalogue", "[", "iloc", ",", "24", "]", "=", "tensor", ".", "principal_axes", ".", "p_axis", "[", "'azimuth'", "]", "catalogue", "[", "iloc", ",", "25", "]", "=", "tensor", ".", "principal_axes", ".", "p_axis", "[", "'plunge'", "]", "catalogue", "[", "iloc", ",", "26", "]", "=", "tensor", ".", "principal_axes", ".", "t_axis", "[", "'eigenvalue'", "]", "catalogue", "[", "iloc", ",", "27", "]", "=", "tensor", ".", "principal_axes", ".", "t_axis", "[", "'azimuth'", "]", "catalogue", "[", "iloc", ",", "28", "]", "=", "tensor", ".", "principal_axes", ".", "t_axis", "[", "'plunge'", "]", "return", "catalogue" ]
Converts the GCMT catalogue to a simple array of [ID, year, month, day, hour, minute, second, long., lat., depth, Mw, moment, f_clvd, erel, strike1, dip1, rake1, strike2, dip2, rake2, b-eigenvalue, b-azimuth, b-plunge, p-eigenvalue, p-azimuth, p-plunge, t-eigenvalue, t-azimuth, t-plunge]
[ "Converts", "the", "GCMT", "catalogue", "to", "a", "simple", "array", "of", "[", "ID", "year", "month", "day", "hour", "minute", "second", "long", ".", "lat", ".", "depth", "Mw", "strike1", "dip1", "rake1", "strike2", "dip2", "rake2", "b", "-", "plunge", "b", "-", "azimuth", "b", "-", "eigenvalue", "p", "-", "plunge", "p", "-", "azimuth", "p", "-", "eigenvalue", "t", "-", "plunge", "t", "-", "azimuth", "t", "-", "eigenvalue", "moment", "f_clvd", "erel", "]" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/gcmt_catalogue.py#L447-L503
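Since the 29 columns are positional, downstream code indexes them by number. A sketch of typical column access, assuming `cat` is a populated GCMTCatalogue (the variable names are hypothetical; a zero array stands in for real output):

    # arr = cat.gcmt_to_simple_array(centroid_location=True)
    import numpy as np
    arr = np.zeros((3, 29))          # stand-in with the documented shape
    lon, lat, depth = arr[:, 7], arr[:, 8], arr[:, 9]
    mw = arr[:, 10]                  # moment magnitudes
    plane1 = arr[:, 14:17]           # strike1, dip1, rake1
    t_axis = arr[:, 26:29]           # t-eigenvalue, t-azimuth, t-plunge
    shallow = arr[arr[:, 9] < 50.0]  # events shallower than 50 km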
gem/oq-engine
openquake/hazardlib/gsim/akkar_cagnan_2010.py
AkkarCagnan2010.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """
    See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values.
    """
    # extracting dictionary of coefficients (for soil amplification)
    # specific to required intensity measure type
    C_SR = self.COEFFS_SOIL_RESPONSE[imt]

    # compute median PGA on rock (in g), needed to compute non-linear site
    # amplification
    C = self.COEFFS_AC10[PGA()]
    pga4nl = np.exp(
        self._compute_mean(C, rup.mag, dists.rjb, rup.rake)) * 1e-2 / g

    # compute full mean value by adding site amplification terms
    # (but avoiding recomputing mean on rock for PGA)
    if imt == PGA():
        mean = (np.log(pga4nl) +
                self._get_site_amplification_linear(sites.vs30, C_SR) +
                self._get_site_amplification_non_linear(sites.vs30,
                                                        pga4nl, C_SR))
    else:
        C = self.COEFFS_AC10[imt]
        mean = (self._compute_mean(C, rup.mag, dists.rjb, rup.rake) +
                self._get_site_amplification_linear(sites.vs30, C_SR) +
                self._get_site_amplification_non_linear(sites.vs30,
                                                        pga4nl, C_SR))

    # convert from cm/s**2 to g for SA (PGA is already computed in g)
    if imt.name == "SA":
        mean = np.log(np.exp(mean) * 1e-2 / g)

    stddevs = self._get_stddevs(C, stddev_types, num_sites=len(sites.vs30))

    return mean, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    C_SR = self.COEFFS_SOIL_RESPONSE[imt]
    C = self.COEFFS_AC10[PGA()]
    pga4nl = np.exp(
        self._compute_mean(C, rup.mag, dists.rjb, rup.rake)) * 1e-2 / g
    if imt == PGA():
        mean = (np.log(pga4nl) +
                self._get_site_amplification_linear(sites.vs30, C_SR) +
                self._get_site_amplification_non_linear(sites.vs30,
                                                        pga4nl, C_SR))
    else:
        C = self.COEFFS_AC10[imt]
        mean = (self._compute_mean(C, rup.mag, dists.rjb, rup.rake) +
                self._get_site_amplification_linear(sites.vs30, C_SR) +
                self._get_site_amplification_non_linear(sites.vs30,
                                                        pga4nl, C_SR))
    if imt.name == "SA":
        mean = np.log(np.exp(mean) * 1e-2 / g)
    stddevs = self._get_stddevs(C, stddev_types, num_sites=len(sites.vs30))
    return mean, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "# extracting dictionary of coefficients (for soil amplification)", "# specific to required intensity measure type", "C_SR", "=", "self", ".", "COEFFS_SOIL_RESPONSE", "[", "imt", "]", "# compute median PGA on rock (in g), needed to compute non-linear site", "# amplification", "C", "=", "self", ".", "COEFFS_AC10", "[", "PGA", "(", ")", "]", "pga4nl", "=", "np", ".", "exp", "(", "self", ".", "_compute_mean", "(", "C", ",", "rup", ".", "mag", ",", "dists", ".", "rjb", ",", "rup", ".", "rake", ")", ")", "*", "1e-2", "/", "g", "# compute full mean value by adding site amplification terms", "# (but avoiding recomputing mean on rock for PGA)", "if", "imt", "==", "PGA", "(", ")", ":", "mean", "=", "(", "np", ".", "log", "(", "pga4nl", ")", "+", "self", ".", "_get_site_amplification_linear", "(", "sites", ".", "vs30", ",", "C_SR", ")", "+", "self", ".", "_get_site_amplification_non_linear", "(", "sites", ".", "vs30", ",", "pga4nl", ",", "C_SR", ")", ")", "else", ":", "C", "=", "self", ".", "COEFFS_AC10", "[", "imt", "]", "mean", "=", "(", "self", ".", "_compute_mean", "(", "C", ",", "rup", ".", "mag", ",", "dists", ".", "rjb", ",", "rup", ".", "rake", ")", "+", "self", ".", "_get_site_amplification_linear", "(", "sites", ".", "vs30", ",", "C_SR", ")", "+", "self", ".", "_get_site_amplification_non_linear", "(", "sites", ".", "vs30", ",", "pga4nl", ",", "C_SR", ")", ")", "# convert from cm/s**2 to g for SA (PGA is already computed in g)", "if", "imt", ".", "name", "==", "\"SA\"", ":", "mean", "=", "np", ".", "log", "(", "np", ".", "exp", "(", "mean", ")", "*", "1e-2", "/", "g", ")", "stddevs", "=", "self", ".", "_get_stddevs", "(", "C", ",", "stddev_types", ",", "num_sites", "=", "len", "(", "sites", ".", "vs30", ")", ")", "return", "mean", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/akkar_cagnan_2010.py#L83-L119
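The `* 1e-2 / g` factor converts an exponentiated mean from cm/s**2 to g (hazardlib GSIM modules take `g` from scipy.constants); a quick numeric check of that conversion:

    import numpy as np
    from scipy.constants import g   # standard gravity, 9.80665 m/s**2

    ln_sa_cgs = np.log(981.0)             # a log-mean of 981 cm/s**2
    sa_g = np.exp(ln_sa_cgs) * 1e-2 / g   # cm/s**2 -> m/s**2 -> g
    print(round(sa_g, 4))                 # 1.0003, i.e. about 1 g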
gem/oq-engine
openquake/hazardlib/gsim/akkar_cagnan_2010.py
AkkarCagnan2010._compute_linear_magnitude_term
def _compute_linear_magnitude_term(self, C, mag):
    """
    Compute and return second term in equations (1a) and (1b), pages
    2981 and 2982, respectively.
    """
    if mag <= self.c1:
        # this is the second term in eq. (1a), p. 2981
        return C['a2'] * (mag - self.c1)
    else:
        # this is the second term in eq. (1b), p. 2982
        return C['a3'] * (mag - self.c1)
python
def _compute_linear_magnitude_term(self, C, mag):
    if mag <= self.c1:
        return C['a2'] * (mag - self.c1)
    else:
        return C['a3'] * (mag - self.c1)
[ "def", "_compute_linear_magnitude_term", "(", "self", ",", "C", ",", "mag", ")", ":", "if", "mag", "<=", "self", ".", "c1", ":", "# this is the second term in eq. (1a), p. 2981", "return", "C", "[", "'a2'", "]", "*", "(", "mag", "-", "self", ".", "c1", ")", "else", ":", "# this is the second term in eq. (1b), p. 2982", "return", "C", "[", "'a3'", "]", "*", "(", "mag", "-", "self", ".", "c1", ")" ]
Compute and return second term in equations (1a) and (1b), pages 2981 and 2982, respectively.
[ "Compute", "and", "return", "second", "term", "in", "equations", "(", "1a", ")", "and", "(", "1b", ")", "pages", "2981", "and", "2982", "respectively", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/akkar_cagnan_2010.py#L137-L147
gem/oq-engine
openquake/commands/from_shapefile.py
from_shapefile
def from_shapefile(output, input_shp_files, validate):
    """
    Convert multiple ESRI Shapefile(s) into a single NRML source model
    file.
    """
    input_parser = shapefileparser.ShapefileParser()
    source_model = input_parser.read(input_shp_files[0], validate)
    for f in input_shp_files[1:]:
        source_model.sources.extend(input_parser.read(f, validate).sources)
    if not output:
        output = os.path.splitext(input_shp_files[0])[0]
    shapefileparser.SourceModelParser().write(output + '.xml', source_model)
python
def from_shapefile(output, input_shp_files, validate):
    input_parser = shapefileparser.ShapefileParser()
    source_model = input_parser.read(input_shp_files[0], validate)
    for f in input_shp_files[1:]:
        source_model.sources.extend(input_parser.read(f, validate).sources)
    if not output:
        output = os.path.splitext(input_shp_files[0])[0]
    shapefileparser.SourceModelParser().write(output + '.xml', source_model)
[ "def", "from_shapefile", "(", "output", ",", "input_shp_files", ",", "validate", ")", ":", "input_parser", "=", "shapefileparser", ".", "ShapefileParser", "(", ")", "source_model", "=", "input_parser", ".", "read", "(", "input_shp_files", "[", "0", "]", ",", "validate", ")", "for", "f", "in", "input_shp_files", "[", "1", ":", "]", ":", "source_model", ".", "sources", ".", "extend", "(", "input_parser", ".", "read", "(", "f", ",", "validate", ")", ".", "sources", ")", "if", "not", "output", ":", "output", "=", "os", ".", "path", ".", "splitext", "(", "input_shp_files", "[", "0", "]", ")", "[", "0", "]", "shapefileparser", ".", "SourceModelParser", "(", ")", ".", "write", "(", "output", "+", "'.xml'", ",", "source_model", ")" ]
Convert multiple ESRI Shapefile(s) into a single NRML source model file.
[ "Convert", "multiple", "ESRI", "Shapefile", "(", "s", ")", "into", "a", "single", "NRML", "source", "model", "file", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/from_shapefile.py#L24-L34
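A sketch of calling the function directly; the shapefile names are hypothetical, and being in openquake/commands it is normally reached through the engine's command-line interface rather than imported by hand:

    # Merge two shapefile source models into a single NRML file
    from_shapefile(output='combined_model',
                   input_shp_files=['area_sources.shp', 'fault_sources.shp'],
                   validate=True)
    # -> writes combined_model.xml; with output=None the stem of the
    #    first input ('area_sources') would be reused as the base name.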
gem/oq-engine
openquake/hazardlib/gsim/akkar_bommer_2010.py
AkkarBommer2010._compute_magnitude
def _compute_magnitude(self, rup, C):
    """
    Compute the first term of the equation described on p. 199:

    ``b1 + b2 * M + b3 * M**2``
    """
    return C['b1'] + (C['b2'] * rup.mag) + (C['b3'] * (rup.mag ** 2))
python
def _compute_magnitude(self, rup, C):
    return C['b1'] + (C['b2'] * rup.mag) + (C['b3'] * (rup.mag ** 2))
[ "def", "_compute_magnitude", "(", "self", ",", "rup", ",", "C", ")", ":", "return", "C", "[", "'b1'", "]", "+", "(", "C", "[", "'b2'", "]", "*", "rup", ".", "mag", ")", "+", "(", "C", "[", "'b3'", "]", "*", "(", "rup", ".", "mag", "**", "2", ")", ")" ]
Compute the first term of the equation described on p. 199: ``b1 + b2 * M + b3 * M**2``
[ "Compute", "the", "first", "term", "of", "the", "equation", "described", "on", "p", ".", "199", ":" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/akkar_bommer_2010.py#L145-L151
gem/oq-engine
openquake/hazardlib/gsim/akkar_bommer_2010.py
AkkarBommer2010._compute_distance
def _compute_distance(self, rup, dists, imt, C):
    """
    Compute the second term of the equation described on p. 199
    (the logarithm is base 10, as in the code below):

    ``(b4 + b5 * M) * log10(sqrt(Rjb ** 2 + b6 ** 2))``
    """
    return ((C['b4'] + C['b5'] * rup.mag) *
            np.log10(np.sqrt(dists.rjb ** 2.0 + C['b6'] ** 2.0)))
python
def _compute_distance(self, rup, dists, imt, C):
    return ((C['b4'] + C['b5'] * rup.mag) *
            np.log10(np.sqrt(dists.rjb ** 2.0 + C['b6'] ** 2.0)))
[ "def", "_compute_distance", "(", "self", ",", "rup", ",", "dists", ",", "imt", ",", "C", ")", ":", "return", "(", "(", "(", "C", "[", "'b4'", "]", "+", "C", "[", "'b5'", "]", "*", "rup", ".", "mag", ")", "*", "np", ".", "log10", "(", "(", "np", ".", "sqrt", "(", "dists", ".", "rjb", "**", "2.0", "+", "C", "[", "'b6'", "]", "**", "2.0", ")", ")", ")", ")", ")" ]
Compute the second term of the equation described on p. 199 (the logarithm is base 10): ``(b4 + b5 * M) * log10(sqrt(Rjb ** 2 + b6 ** 2))``
[ "Compute", "the", "second", "term", "of", "the", "equation", "described", "on", "p", ".", "199", ":" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/akkar_bommer_2010.py#L153-L160
gem/oq-engine
openquake/hazardlib/gsim/akkar_bommer_2010.py
AkkarBommer2010._get_site_amplification
def _get_site_amplification(self, sites, imt, C):
    """
    Compute the third term of the equation described on p. 199:

    ``b7 * Ss + b8 * Sa``
    """
    Ss, Sa = self._get_site_type_dummy_variables(sites)
    return (C['b7'] * Ss) + (C['b8'] * Sa)
python
def _get_site_amplification(self, sites, imt, C):
    Ss, Sa = self._get_site_type_dummy_variables(sites)
    return (C['b7'] * Ss) + (C['b8'] * Sa)
[ "def", "_get_site_amplification", "(", "self", ",", "sites", ",", "imt", ",", "C", ")", ":", "Ss", ",", "Sa", "=", "self", ".", "_get_site_type_dummy_variables", "(", "sites", ")", "return", "(", "C", "[", "'b7'", "]", "*", "Ss", ")", "+", "(", "C", "[", "'b8'", "]", "*", "Sa", ")" ]
Compute the third term of the equation described on p. 199: ``b7 * Ss + b8 * Sa``
[ "Compute", "the", "third", "term", "of", "the", "equation", "described", "on", "p", ".", "199", ":" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/akkar_bommer_2010.py#L162-L169
gem/oq-engine
openquake/hazardlib/gsim/akkar_bommer_2010.py
AkkarBommer2010._get_site_type_dummy_variables
def _get_site_type_dummy_variables(self, sites):
    """
    Get site type dummy variables, ``Ss`` (for soft soil sites) and
    ``Sa`` (for stiff soil sites); rock sites (Vs30 > 750 m/s) take
    zero for both.
    """
    Ss = np.zeros((len(sites.vs30),))
    Sa = np.zeros((len(sites.vs30),))
    # Soft soil; Vs30 < 360 m/s. Page 199.
    idxSs = (sites.vs30 < 360.0)
    # Stiff soil Class A; 360 m/s <= Vs30 <= 750 m/s. Page 199.
    idxSa = (sites.vs30 >= 360.0) & (sites.vs30 <= 750.0)
    Ss[idxSs] = 1
    Sa[idxSa] = 1
    return Ss, Sa
python
def _get_site_type_dummy_variables(self, sites):
    Ss = np.zeros((len(sites.vs30),))
    Sa = np.zeros((len(sites.vs30),))
    idxSs = (sites.vs30 < 360.0)
    idxSa = (sites.vs30 >= 360.0) & (sites.vs30 <= 750.0)
    Ss[idxSs] = 1
    Sa[idxSa] = 1
    return Ss, Sa
[ "def", "_get_site_type_dummy_variables", "(", "self", ",", "sites", ")", ":", "Ss", "=", "np", ".", "zeros", "(", "(", "len", "(", "sites", ".", "vs30", ")", ",", ")", ")", "Sa", "=", "np", ".", "zeros", "(", "(", "len", "(", "sites", ".", "vs30", ")", ",", ")", ")", "# Soft soil; Vs30 < 360 m/s. Page 199.", "idxSs", "=", "(", "sites", ".", "vs30", "<", "360.0", ")", "# Stiff soil Class A; 360 m/s <= Vs30 <= 750 m/s. Page 199.", "idxSa", "=", "(", "sites", ".", "vs30", ">=", "360.0", ")", "&", "(", "sites", ".", "vs30", "<=", "750.0", ")", "Ss", "[", "idxSs", "]", "=", "1", "Sa", "[", "idxSa", "]", "=", "1", "return", "Ss", ",", "Sa" ]
Get site type dummy variables, ``Ss`` (for soft soil sites) and ``Sa`` (for stiff soil sites); rock sites (Vs30 > 750 m/s) take zero for both.
[ "Get", "site", "type", "dummy", "variables", "Ss", "(", "for", "soft", "and", "stiff", "soil", "sites", ")", "and", "Sa", "(", "for", "rock", "sites", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/akkar_bommer_2010.py#L171-L184
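The masking above is plain boolean indexing; a standalone sketch with made-up Vs30 values:

    import numpy as np

    vs30 = np.array([250.0, 400.0, 800.0])  # soft, stiff, rock
    Ss = (vs30 < 360.0).astype(float)                       # [1., 0., 0.]
    Sa = ((vs30 >= 360.0) & (vs30 <= 750.0)).astype(float)  # [0., 1., 0.]
    # rock sites (Vs30 > 750 m/s) keep Ss = Sa = 0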
gem/oq-engine
openquake/hazardlib/gsim/akkar_bommer_2010.py
AkkarBommer2010._get_mechanism
def _get_mechanism(self, sites, rup, imt, C):
    """
    Compute the fourth term of the equation described on p. 199:

    ``b9 * Fn + b10 * Fr``
    """
    Fn, Fr = self._get_fault_type_dummy_variables(sites, rup, imt)
    return (C['b9'] * Fn) + (C['b10'] * Fr)
python
def _get_mechanism(self, sites, rup, imt, C):
    Fn, Fr = self._get_fault_type_dummy_variables(sites, rup, imt)
    return (C['b9'] * Fn) + (C['b10'] * Fr)
[ "def", "_get_mechanism", "(", "self", ",", "sites", ",", "rup", ",", "imt", ",", "C", ")", ":", "Fn", ",", "Fr", "=", "self", ".", "_get_fault_type_dummy_variables", "(", "sites", ",", "rup", ",", "imt", ")", "return", "(", "C", "[", "'b9'", "]", "*", "Fn", ")", "+", "(", "C", "[", "'b10'", "]", "*", "Fr", ")" ]
Compute the fourth term of the equation described on p. 199: ``b9 * Fn + b10 * Fr``
[ "Compute", "the", "fourth", "term", "of", "the", "equation", "described", "on", "p", ".", "199", ":" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/akkar_bommer_2010.py#L186-L193
gem/oq-engine
openquake/hazardlib/gsim/akkar_bommer_2010.py
AkkarBommer2010._get_fault_type_dummy_variables
def _get_fault_type_dummy_variables(self, sites, rup, imt):
    """
    Same classification as SadighEtAl1997. Akkar and Bommer 2010 is
    based on Akkar and Bommer 2007b; see Strong-Motion Dataset and
    Record Processing on p. 514 (Akkar and Bommer 2007b).
    """
    Fn, Fr = 0, 0
    if rup.rake >= -135 and rup.rake <= -45:
        # normal
        Fn = 1
    elif rup.rake >= 45 and rup.rake <= 135:
        # reverse
        Fr = 1
    return Fn, Fr
python
def _get_fault_type_dummy_variables(self, sites, rup, imt):
    Fn, Fr = 0, 0
    if rup.rake >= -135 and rup.rake <= -45:
        Fn = 1
    elif rup.rake >= 45 and rup.rake <= 135:
        Fr = 1
    return Fn, Fr
[ "def", "_get_fault_type_dummy_variables", "(", "self", ",", "sites", ",", "rup", ",", "imt", ")", ":", "Fn", ",", "Fr", "=", "0", ",", "0", "if", "rup", ".", "rake", ">=", "-", "135", "and", "rup", ".", "rake", "<=", "-", "45", ":", "# normal", "Fn", "=", "1", "elif", "rup", ".", "rake", ">=", "45", "and", "rup", ".", "rake", "<=", "135", ":", "# reverse", "Fr", "=", "1", "return", "Fn", ",", "Fr" ]
Same classification as SadighEtAl1997. Akkar and Bommer 2010 is based on Akkar and Bommer 2007b; see Strong-Motion Dataset and Record Processing on p. 514 (Akkar and Bommer 2007b).
[ "Same", "classification", "of", "SadighEtAl1997", ".", "Akkar", "and", "Bommer", "2010", "is", "based", "on", "Akkar", "and", "Bommer", "2007b", ";", "read", "Strong", "-", "Motion", "Dataset", "and", "Record", "Processing", "on", "p", ".", "514", "(", "Akkar", "and", "Bommer", "2007b", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/akkar_bommer_2010.py#L195-L209
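The rake windows translate to dummy variables as below (rake follows the Aki and Richards convention: -90 is pure normal, +90 pure reverse); a standalone check:

    for rake in (-90, 90, 0, 180):
        Fn = 1 if -135 <= rake <= -45 else 0  # normal faulting
        Fr = 1 if 45 <= rake <= 135 else 0    # reverse faulting
        print(rake, Fn, Fr)
    # -90 -> Fn=1; 90 -> Fr=1; 0 and 180 (strike-slip) -> Fn=Fr=0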
gem/oq-engine
openquake/hazardlib/gsim/akkar_bommer_2010.py
AkkarBommer2010SWISS01.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """
    See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values.
    """
    sites.vs30 = 600 * np.ones(len(sites.vs30))
    mean, stddevs = super(AkkarBommer2010SWISS01, self).\
        get_mean_and_stddevs(sites, rup, dists, imt, stddev_types)
    tau_ss = 'tau'
    log_phi_ss = np.log(10)
    mean, stddevs = _apply_adjustments(
        AkkarBommer2010.COEFFS, self.COEFFS_FS_ROCK[imt], tau_ss,
        mean, stddevs, sites, rup, dists.rjb, imt, stddev_types,
        log_phi_ss)
    return mean, np.log(10 ** np.array(stddevs))
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    sites.vs30 = 600 * np.ones(len(sites.vs30))
    mean, stddevs = super(AkkarBommer2010SWISS01, self).\
        get_mean_and_stddevs(sites, rup, dists, imt, stddev_types)
    tau_ss = 'tau'
    log_phi_ss = np.log(10)
    mean, stddevs = _apply_adjustments(
        AkkarBommer2010.COEFFS, self.COEFFS_FS_ROCK[imt], tau_ss,
        mean, stddevs, sites, rup, dists.rjb, imt, stddev_types,
        log_phi_ss)
    return mean, np.log(10 ** np.array(stddevs))
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "sites", ".", "vs30", "=", "600", "*", "np", ".", "ones", "(", "len", "(", "sites", ".", "vs30", ")", ")", "mean", ",", "stddevs", "=", "super", "(", "AkkarBommer2010SWISS01", ",", "self", ")", ".", "get_mean_and_stddevs", "(", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", "tau_ss", "=", "'tau'", "log_phi_ss", "=", "np", ".", "log", "(", "10", ")", "mean", ",", "stddevs", "=", "_apply_adjustments", "(", "AkkarBommer2010", ".", "COEFFS", ",", "self", ".", "COEFFS_FS_ROCK", "[", "imt", "]", ",", "tau_ss", ",", "mean", ",", "stddevs", ",", "sites", ",", "rup", ",", "dists", ".", "rjb", ",", "imt", ",", "stddev_types", ",", "log_phi_ss", ")", "return", "mean", ",", "np", ".", "log", "(", "10", "**", "np", ".", "array", "(", "stddevs", ")", ")" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/akkar_bommer_2010.py#L317-L336
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface.mesh
def mesh(self):
    """
    :returns: mesh corresponding to the whole multi surface
    """
    meshes = [surface.mesh for surface in self.surfaces]
    lons = numpy.concatenate([m.lons for m in meshes])
    lats = numpy.concatenate([m.lats for m in meshes])
    depths = numpy.concatenate([m.depths for m in meshes])
    return Mesh(lons, lats, depths)
python
def mesh(self):
    meshes = [surface.mesh for surface in self.surfaces]
    lons = numpy.concatenate([m.lons for m in meshes])
    lats = numpy.concatenate([m.lats for m in meshes])
    depths = numpy.concatenate([m.depths for m in meshes])
    return Mesh(lons, lats, depths)
[ "def", "mesh", "(", "self", ")", ":", "meshes", "=", "[", "surface", ".", "mesh", "for", "surface", "in", "self", ".", "surfaces", "]", "lons", "=", "numpy", ".", "concatenate", "(", "[", "m", ".", "lons", "for", "m", "in", "meshes", "]", ")", "lats", "=", "numpy", ".", "concatenate", "(", "[", "m", ".", "lats", "for", "m", "in", "meshes", "]", ")", "depths", "=", "numpy", ".", "concatenate", "(", "[", "m", ".", "depths", "for", "m", "in", "meshes", "]", ")", "return", "Mesh", "(", "lons", ",", "lats", ",", "depths", ")" ]
:returns: mesh corresponding to the whole multi surface
[ ":", "returns", ":", "mesh", "corresponding", "to", "the", "whole", "multi", "surface" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L98-L106
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface._get_edge_set
def _get_edge_set(self, tol=0.1):
    """
    Retrieve set of top edges from all of the individual surfaces,
    downsampling the upper edge based on the specified tolerance
    """
    edges = []
    for surface in self.surfaces:
        if isinstance(surface, GriddedSurface):
            # Gridded surface: no regular top edge, so keep its whole
            # mesh (list.append returns None, hence the explicit return)
            edges.append(surface.mesh)
            return edges
        elif isinstance(surface, PlanarSurface):
            # Top edge determined from two end points
            edge = []
            for pnt in [surface.top_left, surface.top_right]:
                edge.append([pnt.longitude, pnt.latitude, pnt.depth])
            edges.append(numpy.array(edge))
        elif isinstance(surface,
                        (ComplexFaultSurface, SimpleFaultSurface)):
            # Rectangular meshes are downsampled to reduce their
            # overall size
            edges.append(downsample_trace(surface.mesh, tol))
        else:
            raise ValueError("Surface %s not recognised" % str(surface))
    return edges
python
def _get_edge_set(self, tol=0.1):
    edges = []
    for surface in self.surfaces:
        if isinstance(surface, GriddedSurface):
            edges.append(surface.mesh)
            return edges
        elif isinstance(surface, PlanarSurface):
            edge = []
            for pnt in [surface.top_left, surface.top_right]:
                edge.append([pnt.longitude, pnt.latitude, pnt.depth])
            edges.append(numpy.array(edge))
        elif isinstance(surface,
                        (ComplexFaultSurface, SimpleFaultSurface)):
            edges.append(downsample_trace(surface.mesh, tol))
        else:
            raise ValueError("Surface %s not recognised" % str(surface))
    return edges
[ "def", "_get_edge_set", "(", "self", ",", "tol", "=", "0.1", ")", ":", "edges", "=", "[", "]", "for", "surface", "in", "self", ".", "surfaces", ":", "if", "isinstance", "(", "surface", ",", "GriddedSurface", ")", ":", "return", "edges", ".", "append", "(", "surface", ".", "mesh", ")", "elif", "isinstance", "(", "surface", ",", "PlanarSurface", ")", ":", "# Top edge determined from two end points", "edge", "=", "[", "]", "for", "pnt", "in", "[", "surface", ".", "top_left", ",", "surface", ".", "top_right", "]", ":", "edge", ".", "append", "(", "[", "pnt", ".", "longitude", ",", "pnt", ".", "latitude", ",", "pnt", ".", "depth", "]", ")", "edges", ".", "append", "(", "numpy", ".", "array", "(", "edge", ")", ")", "elif", "isinstance", "(", "surface", ",", "(", "ComplexFaultSurface", ",", "SimpleFaultSurface", ")", ")", ":", "# Rectangular meshes are downsampled to reduce their", "# overall size", "edges", ".", "append", "(", "downsample_trace", "(", "surface", ".", "mesh", ",", "tol", ")", ")", "else", ":", "raise", "ValueError", "(", "\"Surface %s not recognised\"", "%", "str", "(", "surface", ")", ")", "return", "edges" ]
Retrieve set of top edges from all of the individual surfaces, downsampling the upper edge based on the specified tolerance
[ "Retrieve", "set", "of", "top", "edges", "from", "all", "of", "the", "individual", "surfaces", "downsampling", "the", "upper", "edge", "based", "on", "the", "specified", "tolerance" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L137-L159
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface.get_min_distance
def get_min_distance(self, mesh):
    """
    For each point in ``mesh`` compute the minimum distance to each
    surface element and return the smallest value.

    See :meth:`superclass method
    <.base.BaseSurface.get_min_distance>`
    for spec of input and result values.
    """
    dists = [surf.get_min_distance(mesh) for surf in self.surfaces]
    return numpy.min(dists, axis=0)
python
def get_min_distance(self, mesh):
    dists = [surf.get_min_distance(mesh) for surf in self.surfaces]
    return numpy.min(dists, axis=0)
[ "def", "get_min_distance", "(", "self", ",", "mesh", ")", ":", "dists", "=", "[", "surf", ".", "get_min_distance", "(", "mesh", ")", "for", "surf", "in", "self", ".", "surfaces", "]", "return", "numpy", ".", "min", "(", "dists", ",", "axis", "=", "0", ")" ]
For each point in ``mesh`` compute the minimum distance to each surface element and return the smallest value. See :meth:`superclass method <.base.BaseSurface.get_min_distance>` for spec of input and result values.
[ "For", "each", "point", "in", "mesh", "compute", "the", "minimum", "distance", "to", "each", "surface", "element", "and", "return", "the", "smallest", "value", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L161-L172
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface.get_closest_points
def get_closest_points(self, mesh):
    """
    For each point in ``mesh`` find the closest surface element, and
    return the corresponding closest point.

    See :meth:`superclass method
    <.base.BaseSurface.get_closest_points>`
    for spec of input and result values.
    """
    # first, for each point in mesh compute minimum distance to each
    # surface. The distance matrix is flattened, because mesh can be of
    # an arbitrary shape. By flattening we obtain a ``distances`` matrix
    # for which the first dimension represents the different surfaces
    # and the second dimension the mesh points.
    dists = numpy.array(
        [surf.get_min_distance(mesh).flatten() for surf in self.surfaces])

    # find for each point in mesh the index of closest surface
    idx = dists == numpy.min(dists, axis=0)

    # loop again over surfaces. For each surface compute the closest
    # points, and associate them to the mesh points for which the surface
    # is the closest. Note that if a surface is not the closest to any of
    # the mesh points then the calculation is skipped
    lons = numpy.empty_like(mesh.lons.flatten())
    lats = numpy.empty_like(mesh.lats.flatten())
    depths = None if mesh.depths is None else \
        numpy.empty_like(mesh.depths.flatten())
    for i, surf in enumerate(self.surfaces):
        if not idx[i, :].any():
            continue
        cps = surf.get_closest_points(mesh)
        lons[idx[i, :]] = cps.lons.flatten()[idx[i, :]]
        lats[idx[i, :]] = cps.lats.flatten()[idx[i, :]]
        if depths is not None:
            depths[idx[i, :]] = cps.depths.flatten()[idx[i, :]]
    lons = lons.reshape(mesh.lons.shape)
    lats = lats.reshape(mesh.lats.shape)
    if depths is not None:
        depths = depths.reshape(mesh.depths.shape)
    return Mesh(lons, lats, depths)
python
def get_closest_points(self, mesh):
    dists = numpy.array(
        [surf.get_min_distance(mesh).flatten() for surf in self.surfaces])
    idx = dists == numpy.min(dists, axis=0)
    lons = numpy.empty_like(mesh.lons.flatten())
    lats = numpy.empty_like(mesh.lats.flatten())
    depths = None if mesh.depths is None else \
        numpy.empty_like(mesh.depths.flatten())
    for i, surf in enumerate(self.surfaces):
        if not idx[i, :].any():
            continue
        cps = surf.get_closest_points(mesh)
        lons[idx[i, :]] = cps.lons.flatten()[idx[i, :]]
        lats[idx[i, :]] = cps.lats.flatten()[idx[i, :]]
        if depths is not None:
            depths[idx[i, :]] = cps.depths.flatten()[idx[i, :]]
    lons = lons.reshape(mesh.lons.shape)
    lats = lats.reshape(mesh.lats.shape)
    if depths is not None:
        depths = depths.reshape(mesh.depths.shape)
    return Mesh(lons, lats, depths)
[ "def", "get_closest_points", "(", "self", ",", "mesh", ")", ":", "# first, for each point in mesh compute minimum distance to each", "# surface. The distance matrix is flattend, because mesh can be of", "# an arbitrary shape. By flattening we obtain a ``distances`` matrix", "# for which the first dimension represents the different surfaces", "# and the second dimension the mesh points.", "dists", "=", "numpy", ".", "array", "(", "[", "surf", ".", "get_min_distance", "(", "mesh", ")", ".", "flatten", "(", ")", "for", "surf", "in", "self", ".", "surfaces", "]", ")", "# find for each point in mesh the index of closest surface", "idx", "=", "dists", "==", "numpy", ".", "min", "(", "dists", ",", "axis", "=", "0", ")", "# loop again over surfaces. For each surface compute the closest", "# points, and associate them to the mesh points for which the surface", "# is the closest. Note that if a surface is not the closest to any of", "# the mesh points then the calculation is skipped", "lons", "=", "numpy", ".", "empty_like", "(", "mesh", ".", "lons", ".", "flatten", "(", ")", ")", "lats", "=", "numpy", ".", "empty_like", "(", "mesh", ".", "lats", ".", "flatten", "(", ")", ")", "depths", "=", "None", "if", "mesh", ".", "depths", "is", "None", "else", "numpy", ".", "empty_like", "(", "mesh", ".", "depths", ".", "flatten", "(", ")", ")", "for", "i", ",", "surf", "in", "enumerate", "(", "self", ".", "surfaces", ")", ":", "if", "not", "idx", "[", "i", ",", ":", "]", ".", "any", "(", ")", ":", "continue", "cps", "=", "surf", ".", "get_closest_points", "(", "mesh", ")", "lons", "[", "idx", "[", "i", ",", ":", "]", "]", "=", "cps", ".", "lons", ".", "flatten", "(", ")", "[", "idx", "[", "i", ",", ":", "]", "]", "lats", "[", "idx", "[", "i", ",", ":", "]", "]", "=", "cps", ".", "lats", ".", "flatten", "(", ")", "[", "idx", "[", "i", ",", ":", "]", "]", "if", "depths", "is", "not", "None", ":", "depths", "[", "idx", "[", "i", ",", ":", "]", "]", "=", "cps", ".", "depths", ".", "flatten", "(", ")", "[", "idx", "[", "i", ",", ":", "]", "]", "lons", "=", "lons", ".", "reshape", "(", "mesh", ".", "lons", ".", "shape", ")", "lats", "=", "lats", ".", "reshape", "(", "mesh", ".", "lats", ".", "shape", ")", "if", "depths", "is", "not", "None", ":", "depths", "=", "depths", ".", "reshape", "(", "mesh", ".", "depths", ".", "shape", ")", "return", "Mesh", "(", "lons", ",", "lats", ",", "depths", ")" ]
For each point in ``mesh`` find the closest surface element, and return the corresponding closest point. See :meth:`superclass method <.base.BaseSurface.get_closest_points>` for spec of input and result values.
[ "For", "each", "point", "in", "mesh", "find", "the", "closest", "surface", "element", "and", "return", "the", "corresponding", "closest", "point", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L174-L216
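The `dists == numpy.min(dists, axis=0)` comparison yields one boolean row per surface, flagging the mesh points that surface is closest to; a small demo with made-up distances:

    import numpy as np

    dists = np.array([[3.0, 1.0, 7.0, 2.0],    # surface 0 -> 4 mesh points
                      [4.0, 0.5, 6.0, 2.0]])   # surface 1
    idx = dists == dists.min(axis=0)
    print(idx)
    # [[ True False False  True]
    #  [False  True  True  True]]
    # On ties (last column) both rows are flagged; the later surface in
    # the loop overwrites the earlier one's closest point.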
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface.get_top_edge_depth
def get_top_edge_depth(self):
    """
    Compute top edge depth of each surface element and return
    area-weighted average value (in km).
    """
    areas = self._get_areas()
    depths = numpy.array(
        [surf.get_top_edge_depth() for surf in self.surfaces])
    return numpy.sum(areas * depths) / numpy.sum(areas)
python
def get_top_edge_depth(self):
    areas = self._get_areas()
    depths = numpy.array(
        [surf.get_top_edge_depth() for surf in self.surfaces])
    return numpy.sum(areas * depths) / numpy.sum(areas)
[ "def", "get_top_edge_depth", "(", "self", ")", ":", "areas", "=", "self", ".", "_get_areas", "(", ")", "depths", "=", "numpy", ".", "array", "(", "[", "surf", ".", "get_top_edge_depth", "(", ")", "for", "surf", "in", "self", ".", "surfaces", "]", ")", "return", "numpy", ".", "sum", "(", "areas", "*", "depths", ")", "/", "numpy", ".", "sum", "(", "areas", ")" ]
Compute top edge depth of each surface element and return area-weighted average value (in km).
[ "Compute", "top", "edge", "depth", "of", "each", "surface", "element", "and", "return", "area", "-", "weighted", "average", "value", "(", "in", "km", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L233-L241
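This area-weighted average is the same pattern reused by get_dip and get_width below; worked through with made-up numbers:

    import numpy as np

    areas = np.array([10.0, 30.0])   # km**2 for two surface elements
    depths = np.array([0.0, 4.0])    # their top edge depths, km
    print(np.sum(areas * depths) / np.sum(areas))
    # 3.0 -- the larger surface dominates the average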
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface.get_strike
def get_strike(self):
    """
    Compute strike of each surface element and return area-weighted
    average value (in range ``[0, 360]``) using formula from:
    http://en.wikipedia.org/wiki/Mean_of_circular_quantities

    Note that the original formula has been adapted to compute a weighted
    rather than arithmetic mean.
    """
    areas = self._get_areas()
    strikes = numpy.array([surf.get_strike() for surf in self.surfaces])
    v1 = (numpy.sum(areas * numpy.sin(numpy.radians(strikes))) /
          numpy.sum(areas))
    v2 = (numpy.sum(areas * numpy.cos(numpy.radians(strikes))) /
          numpy.sum(areas))
    return numpy.degrees(numpy.arctan2(v1, v2)) % 360
python
def get_strike(self):
    areas = self._get_areas()
    strikes = numpy.array([surf.get_strike() for surf in self.surfaces])
    v1 = (numpy.sum(areas * numpy.sin(numpy.radians(strikes))) /
          numpy.sum(areas))
    v2 = (numpy.sum(areas * numpy.cos(numpy.radians(strikes))) /
          numpy.sum(areas))
    return numpy.degrees(numpy.arctan2(v1, v2)) % 360
[ "def", "get_strike", "(", "self", ")", ":", "areas", "=", "self", ".", "_get_areas", "(", ")", "strikes", "=", "numpy", ".", "array", "(", "[", "surf", ".", "get_strike", "(", ")", "for", "surf", "in", "self", ".", "surfaces", "]", ")", "v1", "=", "(", "numpy", ".", "sum", "(", "areas", "*", "numpy", ".", "sin", "(", "numpy", ".", "radians", "(", "strikes", ")", ")", ")", "/", "numpy", ".", "sum", "(", "areas", ")", ")", "v2", "=", "(", "numpy", ".", "sum", "(", "areas", "*", "numpy", ".", "cos", "(", "numpy", ".", "radians", "(", "strikes", ")", ")", ")", "/", "numpy", ".", "sum", "(", "areas", ")", ")", "return", "numpy", ".", "degrees", "(", "numpy", ".", "arctan2", "(", "v1", ",", "v2", ")", ")", "%", "360" ]
Compute strike of each surface element and return area-weighted average value (in range ``[0, 360]``) using formula from: http://en.wikipedia.org/wiki/Mean_of_circular_quantities Note that the original formula has been adapted to compute a weighted rather than arithmetic mean.
[ "Compute", "strike", "of", "each", "surface", "element", "and", "return", "area", "-", "weighted", "average", "value", "(", "in", "range", "[", "0", "360", "]", ")", "using", "formula", "from", ":", "http", ":", "//", "en", ".", "wikipedia", ".", "org", "/", "wiki", "/", "Mean_of_circular_quantities" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L243-L260
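The circular (vector) mean matters when strikes straddle north, where an arithmetic mean is badly wrong; a worked check with equal weights:

    import numpy as np

    areas = np.array([1.0, 1.0])
    strikes = np.array([350.0, 10.0])  # 20 degrees apart, across north
    v1 = np.sum(areas * np.sin(np.radians(strikes))) / np.sum(areas)
    v2 = np.sum(areas * np.cos(np.radians(strikes))) / np.sum(areas)
    print(np.degrees(np.arctan2(v1, v2)) % 360)  # 0.0, not the naive 180.0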
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface.get_dip
def get_dip(self):
    """
    Compute dip of each surface element and return area-weighted
    average value (in range ``(0, 90]``).

    Given that dip values are constrained in the range (0, 90], the
    simple formula for weighted mean is used.
    """
    areas = self._get_areas()
    dips = numpy.array([surf.get_dip() for surf in self.surfaces])
    return numpy.sum(areas * dips) / numpy.sum(areas)
python
def get_dip(self):
    areas = self._get_areas()
    dips = numpy.array([surf.get_dip() for surf in self.surfaces])
    return numpy.sum(areas * dips) / numpy.sum(areas)
[ "def", "get_dip", "(", "self", ")", ":", "areas", "=", "self", ".", "_get_areas", "(", ")", "dips", "=", "numpy", ".", "array", "(", "[", "surf", ".", "get_dip", "(", ")", "for", "surf", "in", "self", ".", "surfaces", "]", ")", "return", "numpy", ".", "sum", "(", "areas", "*", "dips", ")", "/", "numpy", ".", "sum", "(", "areas", ")" ]
Compute dip of each surface element and return area-weighted average value (in range ``(0, 90]``). Given that dip values are constrained in the range (0, 90], the simple formula for weighted mean is used.
[ "Compute", "dip", "of", "each", "surface", "element", "and", "return", "area", "-", "weighted", "average", "value", "(", "in", "range", "(", "0", "90", "]", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L262-L273
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface.get_width
def get_width(self):
    """
    Compute width of each surface element, and return area-weighted
    average value (in km).
    """
    areas = self._get_areas()
    widths = numpy.array([surf.get_width() for surf in self.surfaces])
    return numpy.sum(areas * widths) / numpy.sum(areas)
python
def get_width(self):
    areas = self._get_areas()
    widths = numpy.array([surf.get_width() for surf in self.surfaces])
    return numpy.sum(areas * widths) / numpy.sum(areas)
[ "def", "get_width", "(", "self", ")", ":", "areas", "=", "self", ".", "_get_areas", "(", ")", "widths", "=", "numpy", ".", "array", "(", "[", "surf", ".", "get_width", "(", ")", "for", "surf", "in", "self", ".", "surfaces", "]", ")", "return", "numpy", ".", "sum", "(", "areas", "*", "widths", ")", "/", "numpy", ".", "sum", "(", "areas", ")" ]
Compute width of each surface element, and return area-weighted average value (in km).
[ "Compute", "width", "of", "each", "surface", "element", "and", "return", "area", "-", "weighted", "average", "value", "(", "in", "km", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L275-L283
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface.get_bounding_box
def get_bounding_box(self):
    """
    Compute bounding box for each surface element, and then return
    the bounding box of all surface elements' bounding boxes.

    :return:
        A tuple of four items. These items represent western,
        eastern, northern and southern borders of the bounding box
        respectively. Values are floats in decimal degrees.
    """
    lons = []
    lats = []
    for surf in self.surfaces:
        west, east, north, south = surf.get_bounding_box()
        lons.extend([west, east])
        lats.extend([north, south])
    return utils.get_spherical_bounding_box(lons, lats)
python
def get_bounding_box(self):
    lons = []
    lats = []
    for surf in self.surfaces:
        west, east, north, south = surf.get_bounding_box()
        lons.extend([west, east])
        lats.extend([north, south])
    return utils.get_spherical_bounding_box(lons, lats)
[ "def", "get_bounding_box", "(", "self", ")", ":", "lons", "=", "[", "]", "lats", "=", "[", "]", "for", "surf", "in", "self", ".", "surfaces", ":", "west", ",", "east", ",", "north", ",", "south", "=", "surf", ".", "get_bounding_box", "(", ")", "lons", ".", "extend", "(", "[", "west", ",", "east", "]", ")", "lats", ".", "extend", "(", "[", "north", ",", "south", "]", ")", "return", "utils", ".", "get_spherical_bounding_box", "(", "lons", ",", "lats", ")" ]
Compute bounding box for each surface element, and then return the bounding box of all surface elements' bounding boxes. :return: A tuple of four items. These items represent western, eastern, northern and southern borders of the bounding box respectively. Values are floats in decimal degrees.
[ "Compute", "bounding", "box", "for", "each", "surface", "element", "and", "then", "return", "the", "bounding", "box", "of", "all", "surface", "elements", "bounding", "boxes", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L291-L307
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface.get_middle_point
def get_middle_point(self):
    """
    If :class:`MultiSurface` is defined by a single surface, simply
    returns surface's middle point, otherwise find surface element
    closest to the surface's bounding box centroid and return
    corresponding middle point.

    Note that the concept of middle point for a multi surface is
    ambiguous and alternative definitions may be possible. However,
    this method is mostly used to define the hypocenter location for
    ruptures described by a multi surface
    (see :meth:`openquake.hazardlib.source.characteristic.CharacteristicFaultSource.iter_ruptures`).
    This is needed because when creating fault based sources, the
    rupture's hypocenter locations are not explicitly defined, and
    therefore an automated way to define them is required.
    """
    if len(self.surfaces) == 1:
        return self.surfaces[0].get_middle_point()
    west, east, north, south = self.get_bounding_box()
    longitude, latitude = utils.get_middle_point(west, north, east, south)
    dists = []
    for surf in self.surfaces:
        dists.append(
            surf.get_min_distance(Mesh(numpy.array([longitude]),
                                       numpy.array([latitude]),
                                       None)))
    dists = numpy.array(dists).flatten()
    idx = dists == numpy.min(dists)
    return numpy.array(self.surfaces)[idx][0].get_middle_point()
python
def get_middle_point(self):
    if len(self.surfaces) == 1:
        return self.surfaces[0].get_middle_point()
    west, east, north, south = self.get_bounding_box()
    longitude, latitude = utils.get_middle_point(west, north, east, south)
    dists = []
    for surf in self.surfaces:
        dists.append(
            surf.get_min_distance(Mesh(numpy.array([longitude]),
                                       numpy.array([latitude]),
                                       None)))
    dists = numpy.array(dists).flatten()
    idx = dists == numpy.min(dists)
    return numpy.array(self.surfaces)[idx][0].get_middle_point()
[ "def", "get_middle_point", "(", "self", ")", ":", "if", "len", "(", "self", ".", "surfaces", ")", "==", "1", ":", "return", "self", ".", "surfaces", "[", "0", "]", ".", "get_middle_point", "(", ")", "west", ",", "east", ",", "north", ",", "south", "=", "self", ".", "get_bounding_box", "(", ")", "longitude", ",", "latitude", "=", "utils", ".", "get_middle_point", "(", "west", ",", "north", ",", "east", ",", "south", ")", "dists", "=", "[", "]", "for", "surf", "in", "self", ".", "surfaces", ":", "dists", ".", "append", "(", "surf", ".", "get_min_distance", "(", "Mesh", "(", "numpy", ".", "array", "(", "[", "longitude", "]", ")", ",", "numpy", ".", "array", "(", "[", "latitude", "]", ")", ",", "None", ")", ")", ")", "dists", "=", "numpy", ".", "array", "(", "dists", ")", ".", "flatten", "(", ")", "idx", "=", "dists", "==", "numpy", ".", "min", "(", "dists", ")", "return", "numpy", ".", "array", "(", "self", ".", "surfaces", ")", "[", "idx", "]", "[", "0", "]", ".", "get_middle_point", "(", ")" ]
If :class:`MultiSurface` is defined by a single surface, simply returns surface's middle point, otherwise find surface element closest to the surface's bounding box centroid and return corresponding middle point. Note that the concept of middle point for a multi surface is ambiguous and alternative definitions may be possible. However, this method is mostly used to define the hypocenter location for ruptures described by a multi surface (see :meth:`openquake.hazardlib.source.characteristic.CharacteristicFaultSource.iter_ruptures`). This is needed because when creating fault based sources, the rupture's hypocenter locations are not explicitly defined, and therefore an automated way to define them is required.
[ "If", ":", "class", ":", "MultiSurface", "is", "defined", "by", "a", "single", "surface", "simply", "returns", "surface", "s", "middle", "point", "otherwise", "find", "surface", "element", "closest", "to", "the", "surface", "s", "bounding", "box", "centroid", "and", "return", "corresponding", "middle", "point", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L309-L339
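The final selection is boolean masking over an object array of surfaces; a toy sketch with hypothetical distances and surface labels:

    import numpy as np

    dists = np.array([12.0, 3.5, 8.0])  # centroid-to-surface distances
    surfaces = np.array(['s0', 's1', 's2'], dtype=object)
    idx = dists == dists.min()
    print(surfaces[idx][0])  # 's1' -- its get_middle_point() is returned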
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface._get_areas
def _get_areas(self):
    """
    Return surface elements area values in a numpy array.
    """
    if self.areas is None:
        self.areas = []
        for surf in self.surfaces:
            self.areas.append(surf.get_area())
        self.areas = numpy.array(self.areas)
    return self.areas
python
def _get_areas(self):
    if self.areas is None:
        self.areas = []
        for surf in self.surfaces:
            self.areas.append(surf.get_area())
        self.areas = numpy.array(self.areas)
    return self.areas
[ "def", "_get_areas", "(", "self", ")", ":", "if", "self", ".", "areas", "is", "None", ":", "self", ".", "areas", "=", "[", "]", "for", "surf", "in", "self", ".", "surfaces", ":", "self", ".", "areas", ".", "append", "(", "surf", ".", "get_area", "(", ")", ")", "self", ".", "areas", "=", "numpy", ".", "array", "(", "self", ".", "areas", ")", "return", "self", ".", "areas" ]
Return surface elements area values in a numpy array.
[ "Return", "surface", "elements", "area", "values", "in", "a", "numpy", "array", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L350-L360
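The function above is a lazy cache: the per-surface areas are computed once, stored as a numpy array, and reused afterwards. A hedged, self-contained sketch of the same idiom using hypothetical stand-in classes:

import numpy

class _FakeSurface:                      # hypothetical stand-in for a surface
    def __init__(self, area):
        self._area = area
    def get_area(self):
        return self._area

class _Wrapper:
    def __init__(self, surfaces):
        self.surfaces = surfaces
        self.areas = None                # cache starts empty
    def _get_areas(self):
        if self.areas is None:           # first call pays the cost
            self.areas = numpy.array([s.get_area() for s in self.surfaces])
        return self.areas                # later calls reuse the cached array

w = _Wrapper([_FakeSurface(2.0), _FakeSurface(5.0)])
print(w._get_areas())                    # -> [2. 5.]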
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface._get_cartesian_edge_set
def _get_cartesian_edge_set(self): """ For the GC2 calculations a set of cartesian representations of the fault edges are needed. In this present case we use a common cartesian framework for all edges, as opposed to defining a separate orthographic projection per edge """ # Get projection space for cartesian projection edge_sets = numpy.vstack(self.edge_set) west, east, north, south = utils.get_spherical_bounding_box( edge_sets[:, 0], edge_sets[:, 1]) self.proj = utils.OrthographicProjection(west, east, north, south) for edges in self.edge_set: # Project edges into cartesian space px, py = self.proj(edges[:, 0], edges[:, 1]) # Store the two end-points of the trace self.cartesian_endpoints.append( numpy.array([[px[0], py[0], edges[0, 2]], [px[-1], py[-1], edges[-1, 2]]])) self.cartesian_edges.append(numpy.column_stack([px, py, edges[:, 2]])) # Get surface length vector for the trace - easier in cartesian lengths = numpy.sqrt((px[:-1] - px[1:]) ** 2. + (py[:-1] - py[1:]) ** 2.) self.length_set.append(lengths) # Get cumulative surface length vector self.cum_length_set.append( numpy.hstack([0., numpy.cumsum(lengths)])) return edge_sets
python
def _get_cartesian_edge_set(self): edge_sets = numpy.vstack(self.edge_set) west, east, north, south = utils.get_spherical_bounding_box( edge_sets[:, 0], edge_sets[:, 1]) self.proj = utils.OrthographicProjection(west, east, north, south) for edges in self.edge_set: px, py = self.proj(edges[:, 0], edges[:, 1]) self.cartesian_endpoints.append( numpy.array([[px[0], py[0], edges[0, 2]], [px[-1], py[-1], edges[-1, 2]]])) self.cartesian_edges.append(numpy.column_stack([px, py, edges[:, 2]])) lengths = numpy.sqrt((px[:-1] - px[1:]) ** 2. + (py[:-1] - py[1:]) ** 2.) self.length_set.append(lengths) self.cum_length_set.append( numpy.hstack([0., numpy.cumsum(lengths)])) return edge_sets
[ "def", "_get_cartesian_edge_set", "(", "self", ")", ":", "# Get projection space for cartesian projection", "edge_sets", "=", "numpy", ".", "vstack", "(", "self", ".", "edge_set", ")", "west", ",", "east", ",", "north", ",", "south", "=", "utils", ".", "get_spherical_bounding_box", "(", "edge_sets", "[", ":", ",", "0", "]", ",", "edge_sets", "[", ":", ",", "1", "]", ")", "self", ".", "proj", "=", "utils", ".", "OrthographicProjection", "(", "west", ",", "east", ",", "north", ",", "south", ")", "for", "edges", "in", "self", ".", "edge_set", ":", "# Project edges into cartesian space", "px", ",", "py", "=", "self", ".", "proj", "(", "edges", "[", ":", ",", "0", "]", ",", "edges", "[", ":", ",", "1", "]", ")", "# Store the two end-points of the trace", "self", ".", "cartesian_endpoints", ".", "append", "(", "numpy", ".", "array", "(", "[", "[", "px", "[", "0", "]", ",", "py", "[", "0", "]", ",", "edges", "[", "0", ",", "2", "]", "]", ",", "[", "px", "[", "-", "1", "]", ",", "py", "[", "-", "1", "]", ",", "edges", "[", "-", "1", ",", "2", "]", "]", "]", ")", ")", "self", ".", "cartesian_edges", ".", "append", "(", "numpy", ".", "column_stack", "(", "[", "px", ",", "py", ",", "edges", "[", ":", ",", "2", "]", "]", ")", ")", "# Get surface length vector for the trace - easier in cartesian", "lengths", "=", "numpy", ".", "sqrt", "(", "(", "px", "[", ":", "-", "1", "]", "-", "px", "[", "1", ":", "]", ")", "**", "2.", "+", "(", "py", "[", ":", "-", "1", "]", "-", "py", "[", "1", ":", "]", ")", "**", "2.", ")", "self", ".", "length_set", ".", "append", "(", "lengths", ")", "# Get cumulative surface length vector", "self", ".", "cum_length_set", ".", "append", "(", "numpy", ".", "hstack", "(", "[", "0.", ",", "numpy", ".", "cumsum", "(", "lengths", ")", "]", ")", ")", "return", "edge_sets" ]
For the GC2 calculations a set of cartesian representations of the fault edges are needed. In this present case we use a common cartesian framework for all edges, as opposed to defining a separate orthographic projection per edge
[ "For", "the", "GC2", "calculations", "a", "set", "of", "cartesian", "representations", "of", "the", "fault", "edges", "are", "needed", ".", "In", "this", "present", "case", "we", "use", "a", "common", "cartesian", "framework", "for", "all", "edges", "as", "opposed", "to", "defining", "a", "separate", "orthographic", "projection", "per", "edge" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L362-L392
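A short numpy sketch of the length bookkeeping done per edge above: segment lengths from consecutive projected points, then a cumulative vector that starts at 0. The coordinates are hypothetical:

import numpy

px = numpy.array([0.0, 3.0, 3.0])
py = numpy.array([0.0, 4.0, 10.0])
lengths = numpy.sqrt((px[:-1] - px[1:]) ** 2. + (py[:-1] - py[1:]) ** 2.)
cum_lengths = numpy.hstack([0., numpy.cumsum(lengths)])
print(lengths)        # -> [5. 6.]
print(cum_lengths)    # -> [ 0.  5. 11.]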
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface._setup_gc2_framework
def _setup_gc2_framework(self): """ This method establishes the GC2 framework for a multi-segment (and indeed multi-typology) case based on the description in Spudich & Chiou (2015) - see section on Generalized Coordinate System for Multiple Rupture Traces """ # Generate cartesian edge set edge_sets = self._get_cartesian_edge_set() self.gc2_config = {} # Determine furthest two points apart endpoint_set = numpy.vstack([cep for cep in self.cartesian_endpoints]) dmat = squareform(pdist(endpoint_set)) irow, icol = numpy.unravel_index(numpy.argmax(dmat), dmat.shape) # Join further points to form a vector (a_hat in Spudich & Chiou) # According to Spudich & Chiou, a_vec should be eastward trending if endpoint_set[irow, 0] > endpoint_set[icol, 0]: # Row point is to the east of column point beginning = endpoint_set[icol, :2] ending = endpoint_set[irow, :2] else: # Column point is to the east of row point beginning = endpoint_set[irow, :2] ending = endpoint_set[icol, :2] # Convert to unit vector a_vec = ending - beginning self.gc2_config["a_hat"] = a_vec / numpy.linalg.norm(a_vec) # Get e_j set self.gc2_config["ejs"] = [] for c_edges in self.cartesian_edges: self.gc2_config["ejs"].append( numpy.dot(c_edges[-1, :2] - c_edges[0, :2], self.gc2_config["a_hat"])) # A "total E" is defined as the sum of the e_j values self.gc2_config["e_tot"] = sum(self.gc2_config["ejs"]) sign_etot = numpy.sign(self.gc2_config["e_tot"]) b_vec = numpy.zeros(2) self.gc2_config["sign"] = [] for i, c_edges in enumerate(self.cartesian_edges): segment_sign = numpy.sign(self.gc2_config["ejs"][i]) * sign_etot self.gc2_config["sign"].append(segment_sign) if segment_sign < 0: # Segment is discordant - reverse the points c_edges = numpy.flipud(c_edges) self.cartesian_edges[i] = c_edges self.cartesian_endpoints[i] = numpy.flipud( self.cartesian_endpoints[i]) b_vec += (c_edges[-1, :2] - c_edges[0, :2]) # Get unit vector self.gc2_config["b_hat"] = b_vec / numpy.linalg.norm(b_vec) if numpy.dot(a_vec, self.gc2_config["b_hat"]) >= 0.0: self.p0 = beginning else: self.p0 = ending # To later calculate Ry0 it is necessary to determine the maximum # GC2-U coordinate for the fault self._get_gc2_coordinates_for_rupture(edge_sets)
python
def _setup_gc2_framework(self): edge_sets = self._get_cartesian_edge_set() self.gc2_config = {} endpoint_set = numpy.vstack([cep for cep in self.cartesian_endpoints]) dmat = squareform(pdist(endpoint_set)) irow, icol = numpy.unravel_index(numpy.argmax(dmat), dmat.shape) if endpoint_set[irow, 0] > endpoint_set[icol, 0]: beginning = endpoint_set[icol, :2] ending = endpoint_set[irow, :2] else: beginning = endpoint_set[irow, :2] ending = endpoint_set[icol, :2] a_vec = ending - beginning self.gc2_config["a_hat"] = a_vec / numpy.linalg.norm(a_vec) self.gc2_config["ejs"] = [] for c_edges in self.cartesian_edges: self.gc2_config["ejs"].append( numpy.dot(c_edges[-1, :2] - c_edges[0, :2], self.gc2_config["a_hat"])) self.gc2_config["e_tot"] = sum(self.gc2_config["ejs"]) sign_etot = numpy.sign(self.gc2_config["e_tot"]) b_vec = numpy.zeros(2) self.gc2_config["sign"] = [] for i, c_edges in enumerate(self.cartesian_edges): segment_sign = numpy.sign(self.gc2_config["ejs"][i]) * sign_etot self.gc2_config["sign"].append(segment_sign) if segment_sign < 0: c_edges = numpy.flipud(c_edges) self.cartesian_edges[i] = c_edges self.cartesian_endpoints[i] = numpy.flipud( self.cartesian_endpoints[i]) b_vec += (c_edges[-1, :2] - c_edges[0, :2]) self.gc2_config["b_hat"] = b_vec / numpy.linalg.norm(b_vec) if numpy.dot(a_vec, self.gc2_config["b_hat"]) >= 0.0: self.p0 = beginning else: self.p0 = ending self._get_gc2_coordinates_for_rupture(edge_sets)
[ "def", "_setup_gc2_framework", "(", "self", ")", ":", "# Generate cartesian edge set", "edge_sets", "=", "self", ".", "_get_cartesian_edge_set", "(", ")", "self", ".", "gc2_config", "=", "{", "}", "# Determine furthest two points apart", "endpoint_set", "=", "numpy", ".", "vstack", "(", "[", "cep", "for", "cep", "in", "self", ".", "cartesian_endpoints", "]", ")", "dmat", "=", "squareform", "(", "pdist", "(", "endpoint_set", ")", ")", "irow", ",", "icol", "=", "numpy", ".", "unravel_index", "(", "numpy", ".", "argmax", "(", "dmat", ")", ",", "dmat", ".", "shape", ")", "# Join further points to form a vector (a_hat in Spudich & Chiou)", "# According to Spudich & Chiou, a_vec should be eastward trending", "if", "endpoint_set", "[", "irow", ",", "0", "]", ">", "endpoint_set", "[", "icol", ",", "0", "]", ":", "# Row point is to the east of column point", "beginning", "=", "endpoint_set", "[", "icol", ",", ":", "2", "]", "ending", "=", "endpoint_set", "[", "irow", ",", ":", "2", "]", "else", ":", "# Column point is to the east of row point", "beginning", "=", "endpoint_set", "[", "irow", ",", ":", "2", "]", "ending", "=", "endpoint_set", "[", "icol", ",", ":", "2", "]", "# Convert to unit vector", "a_vec", "=", "ending", "-", "beginning", "self", ".", "gc2_config", "[", "\"a_hat\"", "]", "=", "a_vec", "/", "numpy", ".", "linalg", ".", "norm", "(", "a_vec", ")", "# Get e_j set", "self", ".", "gc2_config", "[", "\"ejs\"", "]", "=", "[", "]", "for", "c_edges", "in", "self", ".", "cartesian_edges", ":", "self", ".", "gc2_config", "[", "\"ejs\"", "]", ".", "append", "(", "numpy", ".", "dot", "(", "c_edges", "[", "-", "1", ",", ":", "2", "]", "-", "c_edges", "[", "0", ",", ":", "2", "]", ",", "self", ".", "gc2_config", "[", "\"a_hat\"", "]", ")", ")", "# A \"total E\" is defined as the sum of the e_j values", "self", ".", "gc2_config", "[", "\"e_tot\"", "]", "=", "sum", "(", "self", ".", "gc2_config", "[", "\"ejs\"", "]", ")", "sign_etot", "=", "numpy", ".", "sign", "(", "self", ".", "gc2_config", "[", "\"e_tot\"", "]", ")", "b_vec", "=", "numpy", ".", "zeros", "(", "2", ")", "self", ".", "gc2_config", "[", "\"sign\"", "]", "=", "[", "]", "for", "i", ",", "c_edges", "in", "enumerate", "(", "self", ".", "cartesian_edges", ")", ":", "segment_sign", "=", "numpy", ".", "sign", "(", "self", ".", "gc2_config", "[", "\"ejs\"", "]", "[", "i", "]", ")", "*", "sign_etot", "self", ".", "gc2_config", "[", "\"sign\"", "]", ".", "append", "(", "segment_sign", ")", "if", "segment_sign", "<", "0", ":", "# Segment is discordant - reverse the points", "c_edges", "=", "numpy", ".", "flipud", "(", "c_edges", ")", "self", ".", "cartesian_edges", "[", "i", "]", "=", "c_edges", "self", ".", "cartesian_endpoints", "[", "i", "]", "=", "numpy", ".", "flipud", "(", "self", ".", "cartesian_endpoints", "[", "i", "]", ")", "b_vec", "+=", "(", "c_edges", "[", "-", "1", ",", ":", "2", "]", "-", "c_edges", "[", "0", ",", ":", "2", "]", ")", "# Get unit vector", "self", ".", "gc2_config", "[", "\"b_hat\"", "]", "=", "b_vec", "/", "numpy", ".", "linalg", ".", "norm", "(", "b_vec", ")", "if", "numpy", ".", "dot", "(", "a_vec", ",", "self", ".", "gc2_config", "[", "\"b_hat\"", "]", ")", ">=", "0.0", ":", "self", ".", "p0", "=", "beginning", "else", ":", "self", ".", "p0", "=", "ending", "# To later calculate Ry0 it is necessary to determine the maximum", "# GC2-U coordinate for the fault", "self", ".", "_get_gc2_coordinates_for_rupture", "(", "edge_sets", ")" ]
This method establishes the GC2 framework for a multi-segment (and indeed multi-typology) case based on the description in Spudich & Chiou (2015) - see section on Generalized Coordinate System for Multiple Rupture Traces
[ "This", "method", "establishes", "the", "GC2", "framework", "for", "a", "multi", "-", "segment", "(", "and", "indeed", "multi", "-", "typology", ")", "case", "based", "on", "the", "description", "in", "Spudich", "&", "Chiou", "(", "2015", ")", "-", "see", "section", "on", "Generalized", "Coordinate", "System", "for", "Multiple", "Rupture", "Traces" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L394-L452
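A self-contained sketch of the "two endpoints furthest apart" step used above: pdist/squareform build the full pairwise distance matrix and unravel_index recovers the row/column pair of the maximum. The points are hypothetical:

import numpy
from scipy.spatial.distance import pdist, squareform

endpoint_set = numpy.array([[0., 0.], [1., 1.], [5., 0.], [2., -1.]])
dmat = squareform(pdist(endpoint_set))
irow, icol = numpy.unravel_index(numpy.argmax(dmat), dmat.shape)
print(irow, icol, dmat[irow, icol])    # -> 0 2 5.0 (indices of the furthest pair)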
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface._get_gc2_coordinates_for_rupture
def _get_gc2_coordinates_for_rupture(self, edge_sets): """ Calculates the GC2 coordinates for the nodes of the upper edge of the fault """ # Establish GC2 length - for use with Ry0 rup_gc2t, rup_gc2u = self.get_generalised_coordinates( edge_sets[:, 0], edge_sets[:, 1]) # GC2 length should be the largest positive GC2 value of the edges self.gc_length = numpy.max(rup_gc2u)
python
def _get_gc2_coordinates_for_rupture(self, edge_sets): rup_gc2t, rup_gc2u = self.get_generalised_coordinates( edge_sets[:, 0], edge_sets[:, 1]) self.gc_length = numpy.max(rup_gc2u)
[ "def", "_get_gc2_coordinates_for_rupture", "(", "self", ",", "edge_sets", ")", ":", "# Establish GC2 length - for use with Ry0", "rup_gc2t", ",", "rup_gc2u", "=", "self", ".", "get_generalised_coordinates", "(", "edge_sets", "[", ":", ",", "0", "]", ",", "edge_sets", "[", ":", ",", "1", "]", ")", "# GC2 length should be the largest positive GC2 value of the edges", "self", ".", "gc_length", "=", "numpy", ".", "max", "(", "rup_gc2u", ")" ]
Calculates the GC2 coordinates for the nodes of the upper edge of the fault
[ "Calculates", "the", "GC2", "coordinates", "for", "the", "nodes", "of", "the", "upper", "edge", "of", "the", "fault" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L454-L464
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface._get_ut_i
def _get_ut_i(self, seg, sx, sy): """ Returns the U and T coordinate for a specific trace segment :param seg: End points of the segment edge :param sx: Sites longitudes rendered into coordinate system :param sy: Sites latitudes rendered into coordinate system """ p0x, p0y, p1x, p1y = seg[0, 0], seg[0, 1], seg[1, 0], seg[1, 1] # Unit vector normal to strike t_i_vec = [p1y - p0y, -(p1x - p0x), 0.0] t_i_hat = t_i_vec / numpy.linalg.norm(t_i_vec) # Unit vector along strike u_i_vec = [p1x - p0x, p1y - p0y, 0.0] u_i_hat = u_i_vec / numpy.linalg.norm(u_i_vec) # Vectors from P0 to sites rsite = numpy.column_stack([sx - p0x, sy - p0y]) return numpy.sum(u_i_hat[:-1] * rsite, axis=1),\ numpy.sum(t_i_hat[:-1] * rsite, axis=1)
python
def _get_ut_i(self, seg, sx, sy): p0x, p0y, p1x, p1y = seg[0, 0], seg[0, 1], seg[1, 0], seg[1, 1] t_i_vec = [p1y - p0y, -(p1x - p0x), 0.0] t_i_hat = t_i_vec / numpy.linalg.norm(t_i_vec) u_i_vec = [p1x - p0x, p1y - p0y, 0.0] u_i_hat = u_i_vec / numpy.linalg.norm(u_i_vec) rsite = numpy.column_stack([sx - p0x, sy - p0y]) return numpy.sum(u_i_hat[:-1] * rsite, axis=1),\ numpy.sum(t_i_hat[:-1] * rsite, axis=1)
[ "def", "_get_ut_i", "(", "self", ",", "seg", ",", "sx", ",", "sy", ")", ":", "p0x", ",", "p0y", ",", "p1x", ",", "p1y", "=", "seg", "[", "0", ",", "0", "]", ",", "seg", "[", "0", ",", "1", "]", ",", "seg", "[", "1", ",", "0", "]", ",", "seg", "[", "1", ",", "1", "]", "# Unit vector normal to strike", "t_i_vec", "=", "[", "p1y", "-", "p0y", ",", "-", "(", "p1x", "-", "p0x", ")", ",", "0.0", "]", "t_i_hat", "=", "t_i_vec", "/", "numpy", ".", "linalg", ".", "norm", "(", "t_i_vec", ")", "# Unit vector along strike", "u_i_vec", "=", "[", "p1x", "-", "p0x", ",", "p1y", "-", "p0y", ",", "0.0", "]", "u_i_hat", "=", "u_i_vec", "/", "numpy", ".", "linalg", ".", "norm", "(", "u_i_vec", ")", "# Vectors from P0 to sites", "rsite", "=", "numpy", ".", "column_stack", "(", "[", "sx", "-", "p0x", ",", "sy", "-", "p0y", "]", ")", "return", "numpy", ".", "sum", "(", "u_i_hat", "[", ":", "-", "1", "]", "*", "rsite", ",", "axis", "=", "1", ")", ",", "numpy", ".", "sum", "(", "t_i_hat", "[", ":", "-", "1", "]", "*", "rsite", ",", "axis", "=", "1", ")" ]
Returns the U and T coordinate for a specific trace segment :param seg: End points of the segment edge :param sx: Sites longitudes rendered into coordinate system :param sy: Sites latitudes rendered into coordinate system
[ "Returns", "the", "U", "and", "T", "coordinate", "for", "a", "specific", "trace", "segment" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L466-L489
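A numpy sketch of the per-segment U/T computation above: site offsets from the segment origin are projected onto the along-strike unit vector (U) and the strike-normal unit vector (T). Segment and site coordinates are hypothetical:

import numpy

p0 = numpy.array([0.0, 0.0])
p1 = numpy.array([10.0, 0.0])                       # east-west trending segment
sx = numpy.array([2.0, 12.0])
sy = numpy.array([3.0, -4.0])

u_hat = (p1 - p0) / numpy.linalg.norm(p1 - p0)      # along strike
t_hat = numpy.array([p1[1] - p0[1], -(p1[0] - p0[0])])
t_hat = t_hat / numpy.linalg.norm(t_hat)            # normal to strike

rsite = numpy.column_stack([sx - p0[0], sy - p0[1]])
print(numpy.sum(u_hat * rsite, axis=1))             # U -> [ 2. 12.]
print(numpy.sum(t_hat * rsite, axis=1))             # T -> [-3.  4.]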
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface.get_generalised_coordinates
def get_generalised_coordinates(self, lons, lats): """ Transforms the site positions into the generalised coordinate form described by Spudich and Chiou (2015) for the multi-rupture and/or discordant case Spudich, Paul and Chiou, Brian (2015) Strike-parallel and strike-normal coordinate system around geometrically complicated rupture traces — Use by NGA-West2 and further improvements: U.S. Geological Survey Open-File Report 2015-1028 """ # If the GC2 configuration has not been setup already - do it! if not self.gc2_config: self._setup_gc2_framework() # Initially the weights are set to zero sx, sy = self.proj(lons, lats) sum_w_i = numpy.zeros_like(lons) sum_w_i_t_i = numpy.zeros_like(lons) sum_wi_ui_si = numpy.zeros_like(lons) # Find the cumulative length of the fault up until the given segment # Essentially calculating s_i general_t = numpy.zeros_like(lons) general_u = numpy.zeros_like(lons) on_segment = numpy.zeros_like(lons, dtype=bool) # Loop over the traces for j, edges in enumerate(self.cartesian_edges): # Loop over segments in trace # s_ij_total = 0.0 for i in range(edges.shape[0] - 1): # Get u_i and t_i u_i, t_i = self._get_ut_i(edges[i:(i + 2), :], sx, sy) # If t_i is 0 and u_i is within the section length then site is # directly on the edge - therefore general_t is 0 w_i = numpy.zeros_like(lons) ti0_check = numpy.fabs(t_i) < 1.0E-3 # < 1 m precision on_segment_range = numpy.logical_and( u_i >= 0.0, u_i <= self.length_set[j][i]) # Deal with the case in which t_i is 0 and the site is inside # of the segment idx0 = numpy.logical_and(ti0_check, on_segment_range) # In this null case w_i is ignored - however, null sites on # previous segments would not be null sites on this segment, # so we update the list of null sites on_segment[numpy.logical_or(on_segment, idx0)] = True # Also take care of the U case this time using # equation 12 of Spudich and Chiou s_ij = self.cum_length_set[j][i] + numpy.dot( (edges[0, :2] - self.p0), self.gc2_config["b_hat"]) general_u[idx0] = u_i[idx0] + s_ij # In the first case, ti = 0, u_i is outside of the segment # this implements equation 5 idx1 = numpy.logical_and(ti0_check, numpy.logical_not(on_segment_range)) w_i[idx1] = ((1.0 / (u_i[idx1] - self.length_set[j][i])) - (1.0 / u_i[idx1])) # In the last case the site is not on the edge (t != 0) # implements equation 4 idx2 = numpy.logical_not(ti0_check) w_i[idx2] = ((1. / t_i[idx2]) * (numpy.arctan( (self.length_set[j][i] - u_i[idx2]) / t_i[idx2]) - numpy.arctan(-u_i[idx2] / t_i[idx2]))) idx = numpy.logical_or(idx1, idx2) # Equation 3 sum_w_i[idx] += w_i[idx] # Part of equation 2 sum_w_i_t_i[idx] += (w_i[idx] * t_i[idx]) # Part of equation 9 sum_wi_ui_si[idx] += (w_i[idx] * (u_i[idx] + s_ij)) # For those sites not on the segment edge itself idx_t = numpy.logical_not(on_segment) general_t[idx_t] = (1.0 / sum_w_i[idx_t]) * sum_w_i_t_i[idx_t] general_u[idx_t] = (1.0 / sum_w_i[idx_t]) * sum_wi_ui_si[idx_t] return general_t, general_u
python
def get_generalised_coordinates(self, lons, lats): if not self.gc2_config: self._setup_gc2_framework() sx, sy = self.proj(lons, lats) sum_w_i = numpy.zeros_like(lons) sum_w_i_t_i = numpy.zeros_like(lons) sum_wi_ui_si = numpy.zeros_like(lons) general_t = numpy.zeros_like(lons) general_u = numpy.zeros_like(lons) on_segment = numpy.zeros_like(lons, dtype=bool) for j, edges in enumerate(self.cartesian_edges): for i in range(edges.shape[0] - 1): u_i, t_i = self._get_ut_i(edges[i:(i + 2), :], sx, sy) w_i = numpy.zeros_like(lons) ti0_check = numpy.fabs(t_i) < 1.0E-3 on_segment_range = numpy.logical_and( u_i >= 0.0, u_i <= self.length_set[j][i]) idx0 = numpy.logical_and(ti0_check, on_segment_range) on_segment[numpy.logical_or(on_segment, idx0)] = True s_ij = self.cum_length_set[j][i] + numpy.dot( (edges[0, :2] - self.p0), self.gc2_config["b_hat"]) general_u[idx0] = u_i[idx0] + s_ij idx1 = numpy.logical_and(ti0_check, numpy.logical_not(on_segment_range)) w_i[idx1] = ((1.0 / (u_i[idx1] - self.length_set[j][i])) - (1.0 / u_i[idx1])) idx2 = numpy.logical_not(ti0_check) w_i[idx2] = ((1. / t_i[idx2]) * (numpy.arctan( (self.length_set[j][i] - u_i[idx2]) / t_i[idx2]) - numpy.arctan(-u_i[idx2] / t_i[idx2]))) idx = numpy.logical_or(idx1, idx2) sum_w_i[idx] += w_i[idx] sum_w_i_t_i[idx] += (w_i[idx] * t_i[idx]) sum_wi_ui_si[idx] += (w_i[idx] * (u_i[idx] + s_ij)) idx_t = numpy.logical_not(on_segment) general_t[idx_t] = (1.0 / sum_w_i[idx_t]) * sum_w_i_t_i[idx_t] general_u[idx_t] = (1.0 / sum_w_i[idx_t]) * sum_wi_ui_si[idx_t] return general_t, general_u
[ "def", "get_generalised_coordinates", "(", "self", ",", "lons", ",", "lats", ")", ":", "# If the GC2 configuration has not been setup already - do it!", "if", "not", "self", ".", "gc2_config", ":", "self", ".", "_setup_gc2_framework", "(", ")", "# Initially the weights are set to zero", "sx", ",", "sy", "=", "self", ".", "proj", "(", "lons", ",", "lats", ")", "sum_w_i", "=", "numpy", ".", "zeros_like", "(", "lons", ")", "sum_w_i_t_i", "=", "numpy", ".", "zeros_like", "(", "lons", ")", "sum_wi_ui_si", "=", "numpy", ".", "zeros_like", "(", "lons", ")", "# Find the cumulative length of the fault up until the given segment", "# Essentially calculating s_i", "general_t", "=", "numpy", ".", "zeros_like", "(", "lons", ")", "general_u", "=", "numpy", ".", "zeros_like", "(", "lons", ")", "on_segment", "=", "numpy", ".", "zeros_like", "(", "lons", ",", "dtype", "=", "bool", ")", "# Loop over the traces", "for", "j", ",", "edges", "in", "enumerate", "(", "self", ".", "cartesian_edges", ")", ":", "# Loop over segments in trace", "# s_ij_total = 0.0", "for", "i", "in", "range", "(", "edges", ".", "shape", "[", "0", "]", "-", "1", ")", ":", "# Get u_i and t_i", "u_i", ",", "t_i", "=", "self", ".", "_get_ut_i", "(", "edges", "[", "i", ":", "(", "i", "+", "2", ")", ",", ":", "]", ",", "sx", ",", "sy", ")", "# If t_i is 0 and u_i is within the section length then site is", "# directly on the edge - therefore general_t is 0", "w_i", "=", "numpy", ".", "zeros_like", "(", "lons", ")", "ti0_check", "=", "numpy", ".", "fabs", "(", "t_i", ")", "<", "1.0E-3", "# < 1 m precision", "on_segment_range", "=", "numpy", ".", "logical_and", "(", "u_i", ">=", "0.0", ",", "u_i", "<=", "self", ".", "length_set", "[", "j", "]", "[", "i", "]", ")", "# Deal with the case in which t_i is 0 and the site is inside", "# of the segment", "idx0", "=", "numpy", ".", "logical_and", "(", "ti0_check", ",", "on_segment_range", ")", "# In this null case w_i is ignored - however, null sites on", "# previous segments would not be null sites on this segment,", "# so we update the list of null sites", "on_segment", "[", "numpy", ".", "logical_or", "(", "on_segment", ",", "idx0", ")", "]", "=", "True", "# Also take care of the U case this time using", "# equation 12 of Spudich and Chiou", "s_ij", "=", "self", ".", "cum_length_set", "[", "j", "]", "[", "i", "]", "+", "numpy", ".", "dot", "(", "(", "edges", "[", "0", ",", ":", "2", "]", "-", "self", ".", "p0", ")", ",", "self", ".", "gc2_config", "[", "\"b_hat\"", "]", ")", "general_u", "[", "idx0", "]", "=", "u_i", "[", "idx0", "]", "+", "s_ij", "# In the first case, ti = 0, u_i is outside of the segment", "# this implements equation 5", "idx1", "=", "numpy", ".", "logical_and", "(", "ti0_check", ",", "numpy", ".", "logical_not", "(", "on_segment_range", ")", ")", "w_i", "[", "idx1", "]", "=", "(", "(", "1.0", "/", "(", "u_i", "[", "idx1", "]", "-", "self", ".", "length_set", "[", "j", "]", "[", "i", "]", ")", ")", "-", "(", "1.0", "/", "u_i", "[", "idx1", "]", ")", ")", "# In the last case the site is not on the edge (t != 0)", "# implements equation 4", "idx2", "=", "numpy", ".", "logical_not", "(", "ti0_check", ")", "w_i", "[", "idx2", "]", "=", "(", "(", "1.", "/", "t_i", "[", "idx2", "]", ")", "*", "(", "numpy", ".", "arctan", "(", "(", "self", ".", "length_set", "[", "j", "]", "[", "i", "]", "-", "u_i", "[", "idx2", "]", ")", "/", "t_i", "[", "idx2", "]", ")", "-", "numpy", ".", "arctan", "(", "-", "u_i", "[", "idx2", "]", "/", "t_i", "[", "idx2", "]", ")", ")", ")", "idx", "=", 
"numpy", ".", "logical_or", "(", "idx1", ",", "idx2", ")", "# Equation 3", "sum_w_i", "[", "idx", "]", "+=", "w_i", "[", "idx", "]", "# Part of equation 2", "sum_w_i_t_i", "[", "idx", "]", "+=", "(", "w_i", "[", "idx", "]", "*", "t_i", "[", "idx", "]", ")", "# Part of equation 9", "sum_wi_ui_si", "[", "idx", "]", "+=", "(", "w_i", "[", "idx", "]", "*", "(", "u_i", "[", "idx", "]", "+", "s_ij", ")", ")", "# For those sites not on the segment edge itself", "idx_t", "=", "numpy", ".", "logical_not", "(", "on_segment", ")", "general_t", "[", "idx_t", "]", "=", "(", "1.0", "/", "sum_w_i", "[", "idx_t", "]", ")", "*", "sum_w_i_t_i", "[", "idx_t", "]", "general_u", "[", "idx_t", "]", "=", "(", "1.0", "/", "sum_w_i", "[", "idx_t", "]", ")", "*", "sum_wi_ui_si", "[", "idx_t", "]", "return", "general_t", ",", "general_u" ]
Transforms the site positions into the generalised coordinate form described by Spudich and Chiou (2015) for the multi-rupture and/or discordant case Spudich, Paul and Chiou, Brian (2015) Strike-parallel and strike-normal coordinate system around geometrically complicated rupture traces — Use by NGA-West2 and further improvements: U.S. Geological Survey Open-File Report 2015-1028
[ "Transforms", "the", "site", "positions", "into", "the", "generalised", "coordinate", "form", "described", "by", "Spudich", "and", "Chiou", "(", "2015", ")", "for", "the", "multi", "-", "rupture", "and", "/", "or", "discordant", "case" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L491-L568
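A small sketch of the off-edge weight term applied above for sites with t != 0 (the arctan expression, equation 4 of Spudich & Chiou, 2015). The segment length l_i and the u/t values below are hypothetical:

import numpy

l_i = 10.0
u_i = numpy.array([-5.0, 5.0, 15.0])    # before, beside and beyond the segment
t_i = numpy.array([2.0, 2.0, 2.0])      # all sites off the edge itself
w_i = (1. / t_i) * (numpy.arctan((l_i - u_i) / t_i)
                    - numpy.arctan(-u_i / t_i))
print(w_i)                              # weights fall off away from the segment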
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface.get_rx_distance
def get_rx_distance(self, mesh): """ For each point determine the corresponding rx distance using the GC2 configuration. See :meth:`superclass method <.base.BaseSurface.get_rx_distance>` for spec of input and result values. """ # If the GC2 calculations have already been computed (by invoking Ry0 # first) and the mesh is identical then class has GC2 attributes # already pre-calculated if not self.tmp_mesh or (self.tmp_mesh == mesh): self.gc2t, self.gc2u = self.get_generalised_coordinates(mesh.lons, mesh.lats) # Update mesh self.tmp_mesh = deepcopy(mesh) # Rx coordinate is taken directly from gc2t return self.gc2t
python
def get_rx_distance(self, mesh): if not self.tmp_mesh or (self.tmp_mesh == mesh): self.gc2t, self.gc2u = self.get_generalised_coordinates(mesh.lons, mesh.lats) self.tmp_mesh = deepcopy(mesh) return self.gc2t
[ "def", "get_rx_distance", "(", "self", ",", "mesh", ")", ":", "# If the GC2 calculations have already been computed (by invoking Ry0", "# first) and the mesh is identical then class has GC2 attributes", "# already pre-calculated", "if", "not", "self", ".", "tmp_mesh", "or", "(", "self", ".", "tmp_mesh", "==", "mesh", ")", ":", "self", ".", "gc2t", ",", "self", ".", "gc2u", "=", "self", ".", "get_generalised_coordinates", "(", "mesh", ".", "lons", ",", "mesh", ".", "lats", ")", "# Update mesh", "self", ".", "tmp_mesh", "=", "deepcopy", "(", "mesh", ")", "# Rx coordinate is taken directly from gc2t", "return", "self", ".", "gc2t" ]
For each point determine the corresponding rx distance using the GC2 configuration. See :meth:`superclass method <.base.BaseSurface.get_rx_distance>` for spec of input and result values.
[ "For", "each", "point", "determine", "the", "corresponding", "rx", "distance", "using", "the", "GC2", "configuration", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L570-L588
gem/oq-engine
openquake/hazardlib/geo/surface/multi.py
MultiSurface.get_ry0_distance
def get_ry0_distance(self, mesh): """ For each point determine the corresponding Ry0 distance using the GC2 configuration. See :meth:`superclass method <.base.BaseSurface.get_ry0_distance>` for spec of input and result values. """ # If the GC2 calculations have already been computed (by invoking Ry0 # first) and the mesh is identical then class has GC2 attributes # already pre-calculated if not self.tmp_mesh or (self.tmp_mesh == mesh): # If that's not the case, or the mesh is different then # re-compute GC2 configuration self.gc2t, self.gc2u = self.get_generalised_coordinates(mesh.lons, mesh.lats) # Update mesh self.tmp_mesh = deepcopy(mesh) # Default value ry0 (for sites within fault length) is 0.0 ry0 = numpy.zeros_like(self.gc2u, dtype=float) # For sites with negative gc2u (off the initial point of the fault) # take the absolute value of gc2u neg_gc2u = self.gc2u < 0.0 ry0[neg_gc2u] = numpy.fabs(self.gc2u[neg_gc2u]) # Sites off the end of the fault have values shifted by the # GC2 length of the fault pos_gc2u = self.gc2u >= self.gc_length ry0[pos_gc2u] = self.gc2u[pos_gc2u] - self.gc_length return ry0
python
def get_ry0_distance(self, mesh): if not self.tmp_mesh or (self.tmp_mesh == mesh): self.gc2t, self.gc2u = self.get_generalised_coordinates(mesh.lons, mesh.lats) self.tmp_mesh = deepcopy(mesh) ry0 = numpy.zeros_like(self.gc2u, dtype=float) neg_gc2u = self.gc2u < 0.0 ry0[neg_gc2u] = numpy.fabs(self.gc2u[neg_gc2u]) pos_gc2u = self.gc2u >= self.gc_length ry0[pos_gc2u] = self.gc2u[pos_gc2u] - self.gc_length return ry0
[ "def", "get_ry0_distance", "(", "self", ",", "mesh", ")", ":", "# If the GC2 calculations have already been computed (by invoking Ry0", "# first) and the mesh is identical then class has GC2 attributes", "# already pre-calculated", "if", "not", "self", ".", "tmp_mesh", "or", "(", "self", ".", "tmp_mesh", "==", "mesh", ")", ":", "# If that's not the case, or the mesh is different then", "# re-compute GC2 configuration", "self", ".", "gc2t", ",", "self", ".", "gc2u", "=", "self", ".", "get_generalised_coordinates", "(", "mesh", ".", "lons", ",", "mesh", ".", "lats", ")", "# Update mesh", "self", ".", "tmp_mesh", "=", "deepcopy", "(", "mesh", ")", "# Default value ry0 (for sites within fault length) is 0.0", "ry0", "=", "numpy", ".", "zeros_like", "(", "self", ".", "gc2u", ",", "dtype", "=", "float", ")", "# For sites with negative gc2u (off the initial point of the fault)", "# take the absolute value of gc2u", "neg_gc2u", "=", "self", ".", "gc2u", "<", "0.0", "ry0", "[", "neg_gc2u", "]", "=", "numpy", ".", "fabs", "(", "self", ".", "gc2u", "[", "neg_gc2u", "]", ")", "# Sites off the end of the fault have values shifted by the", "# GC2 length of the fault", "pos_gc2u", "=", "self", ".", "gc2u", ">=", "self", ".", "gc_length", "ry0", "[", "pos_gc2u", "]", "=", "self", ".", "gc2u", "[", "pos_gc2u", "]", "-", "self", ".", "gc_length", "return", "ry0" ]
For each point determine the corresponding Ry0 distance using the GC2 configuration. See :meth:`superclass method <.base.BaseSurface.get_ry0_distance>` for spec of input and result values.
[ "For", "each", "point", "determine", "the", "corresponding", "Ry0", "distance", "using", "the", "GC2", "configuration", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L590-L622
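A numpy sketch of the Ry0 folding above: zero for sites whose GC2-U falls inside the rupture span, |u| before the start, u - L beyond the end. The values are hypothetical:

import numpy

gc_length = 20.0
gc2u = numpy.array([-7.0, 5.0, 26.0])
ry0 = numpy.zeros_like(gc2u)
neg = gc2u < 0.0
ry0[neg] = numpy.fabs(gc2u[neg])        # off the initial point
pos = gc2u >= gc_length
ry0[pos] = gc2u[pos] - gc_length        # off the far end
print(ry0)                              # -> [7. 0. 6.]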
gem/oq-engine
openquake/hmtk/comparison/rate_grids.py
RateGrid.from_model_files
def from_model_files(cls, limits, input_model, investigation_time=1.0, simple_mesh_spacing=1.0, complex_mesh_spacing=5.0, mfd_width=0.1, area_discretisation=10.0): """ Reads the hazard model from a file :param list limits: Grid configuration [west, east, xspc, south, north, yspc, upper, lower, zspc] :param str input_model: Path to input source model :param float investigation_time: Investigation time of Poisson model :param float simple_mesh_spacing: Rupture mesh spacing of simple fault (km) :param float complex_mesh_spacing: Rupture mesh spacing of complex fault (km) :param float mfd_width: Spacing (in magnitude units) of MFD :param float area_discretisation: Spacing of discretisation of area source (km) """ converter = SourceConverter(investigation_time, simple_mesh_spacing, complex_mesh_spacing, mfd_width, area_discretisation) sources = [] for grp in nrml.to_python(input_model, converter): sources.extend(grp.sources) return cls(limits, sources, area_discretisation)
python
def from_model_files(cls, limits, input_model, investigation_time=1.0, simple_mesh_spacing=1.0, complex_mesh_spacing=5.0, mfd_width=0.1, area_discretisation=10.0): converter = SourceConverter(investigation_time, simple_mesh_spacing, complex_mesh_spacing, mfd_width, area_discretisation) sources = [] for grp in nrml.to_python(input_model, converter): sources.extend(grp.sources) return cls(limits, sources, area_discretisation)
[ "def", "from_model_files", "(", "cls", ",", "limits", ",", "input_model", ",", "investigation_time", "=", "1.0", ",", "simple_mesh_spacing", "=", "1.0", ",", "complex_mesh_spacing", "=", "5.0", ",", "mfd_width", "=", "0.1", ",", "area_discretisation", "=", "10.0", ")", ":", "converter", "=", "SourceConverter", "(", "investigation_time", ",", "simple_mesh_spacing", ",", "complex_mesh_spacing", ",", "mfd_width", ",", "area_discretisation", ")", "sources", "=", "[", "]", "for", "grp", "in", "nrml", ".", "to_python", "(", "input_model", ",", "converter", ")", ":", "sources", ".", "extend", "(", "grp", ".", "sources", ")", "return", "cls", "(", "limits", ",", "sources", ",", "area_discretisation", ")" ]
Reads the hazard model from a file :param list limits: Grid configuration [west, east, xspc, south, north, yspc, upper, lower, zspc] :param str input_model: Path to input source model :param float investigation_time: Investigation time of Poisson model :param float simple_mesh_spacing: Rupture mesh spacing of simple fault (km) :param float complex_mesh_spacing: Rupture mesh spacing of complex fault (km) :param float mfd_width: Spacing (in magnitude units) of MFD :param float area_discretisation: Spacing of discretisation of area source (km)
[ "Reads", "the", "hazard", "model", "from", "a", "file" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/comparison/rate_grids.py#L114-L144
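A hedged usage sketch of the classmethod above; the model path is hypothetical, and the limits follow the documented layout [west, east, xspc, south, north, yspc, upper, lower, zspc]:

from openquake.hmtk.comparison.rate_grids import RateGrid

limits = [10.0, 12.0, 0.1, 40.0, 42.0, 0.1, 0.0, 30.0, 5.0]
rate_grid = RateGrid.from_model_files(
    limits, "path/to/source_model.xml",      # hypothetical NRML source model
    investigation_time=1.0, mfd_width=0.1)
rate_grid.get_rates(mmin=5.0)                # accumulate rates for M >= 5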
gem/oq-engine
openquake/hmtk/comparison/rate_grids.py
RateGrid.get_rates
def get_rates(self, mmin, mmax=np.inf): """ Returns the cumulative rates greater than Mmin :param float mmin: Minimum magnitude """ nsrcs = self.number_sources() for iloc, source in enumerate(self.source_model): print("Source Number %s of %s, Name = %s, Typology = %s" % ( iloc + 1, nsrcs, source.name, source.__class__.__name__)) if isinstance(source, CharacteristicFaultSource): self._get_fault_rates(source, mmin, mmax) elif isinstance(source, ComplexFaultSource): self._get_fault_rates(source, mmin, mmax) elif isinstance(source, SimpleFaultSource): self._get_fault_rates(source, mmin, mmax) elif isinstance(source, AreaSource): self._get_area_rates(source, mmin, mmax) elif isinstance(source, PointSource): self._get_point_rates(source, mmin, mmax) else: print("Source type %s not recognised - skipping!" % source) continue
python
def get_rates(self, mmin, mmax=np.inf): nsrcs = self.number_sources() for iloc, source in enumerate(self.source_model): print("Source Number %s of %s, Name = %s, Typology = %s" % ( iloc + 1, nsrcs, source.name, source.__class__.__name__)) if isinstance(source, CharacteristicFaultSource): self._get_fault_rates(source, mmin, mmax) elif isinstance(source, ComplexFaultSource): self._get_fault_rates(source, mmin, mmax) elif isinstance(source, SimpleFaultSource): self._get_fault_rates(source, mmin, mmax) elif isinstance(source, AreaSource): self._get_area_rates(source, mmin, mmax) elif isinstance(source, PointSource): self._get_point_rates(source, mmin, mmax) else: print("Source type %s not recognised - skipping!" % source) continue
[ "def", "get_rates", "(", "self", ",", "mmin", ",", "mmax", "=", "np", ".", "inf", ")", ":", "nsrcs", "=", "self", ".", "number_sources", "(", ")", "for", "iloc", ",", "source", "in", "enumerate", "(", "self", ".", "source_model", ")", ":", "print", "(", "\"Source Number %s of %s, Name = %s, Typology = %s\"", "%", "(", "iloc", "+", "1", ",", "nsrcs", ",", "source", ".", "name", ",", "source", ".", "__class__", ".", "__name__", ")", ")", "if", "isinstance", "(", "source", ",", "CharacteristicFaultSource", ")", ":", "self", ".", "_get_fault_rates", "(", "source", ",", "mmin", ",", "mmax", ")", "elif", "isinstance", "(", "source", ",", "ComplexFaultSource", ")", ":", "self", ".", "_get_fault_rates", "(", "source", ",", "mmin", ",", "mmax", ")", "elif", "isinstance", "(", "source", ",", "SimpleFaultSource", ")", ":", "self", ".", "_get_fault_rates", "(", "source", ",", "mmin", ",", "mmax", ")", "elif", "isinstance", "(", "source", ",", "AreaSource", ")", ":", "self", ".", "_get_area_rates", "(", "source", ",", "mmin", ",", "mmax", ")", "elif", "isinstance", "(", "source", ",", "PointSource", ")", ":", "self", ".", "_get_point_rates", "(", "source", ",", "mmin", ",", "mmax", ")", "else", ":", "print", "(", "\"Source type %s not recognised - skipping!\"", "%", "source", ")", "continue" ]
Returns the cumulative rates greater than Mmin :param float mmin: Minimum magnitude
[ "Returns", "the", "cumulative", "rates", "greater", "than", "Mmin" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/comparison/rate_grids.py#L152-L178
gem/oq-engine
openquake/hmtk/comparison/rate_grids.py
RateGrid._get_point_location
def _get_point_location(self, location): """ Returns the location in the output grid corresponding to the cell in which the epicentre lies :param location: Source hypocentre as instance of :class: openquake.hazardlib.geo.point.Point :returns: xloc - Location of longitude cell yloc - Location of latitude cell """ if (location.longitude < self.xlim[0]) or\ (location.longitude > self.xlim[-1]): return None, None xloc = int(((location.longitude - self.xlim[0]) / self.xspc) + 1E-7) if (location.latitude < self.ylim[0]) or\ (location.latitude > self.ylim[-1]): return None, None yloc = int(((location.latitude - self.ylim[0]) / self.yspc) + 1E-7) return xloc, yloc
python
def _get_point_location(self, location): if (location.longitude < self.xlim[0]) or\ (location.longitude > self.xlim[-1]): return None, None xloc = int(((location.longitude - self.xlim[0]) / self.xspc) + 1E-7) if (location.latitude < self.ylim[0]) or\ (location.latitude > self.ylim[-1]): return None, None yloc = int(((location.latitude - self.ylim[0]) / self.yspc) + 1E-7) return xloc, yloc
[ "def", "_get_point_location", "(", "self", ",", "location", ")", ":", "if", "(", "location", ".", "longitude", "<", "self", ".", "xlim", "[", "0", "]", ")", "or", "(", "location", ".", "longitude", ">", "self", ".", "xlim", "[", "-", "1", "]", ")", ":", "return", "None", ",", "None", "xloc", "=", "int", "(", "(", "(", "location", ".", "longitude", "-", "self", ".", "xlim", "[", "0", "]", ")", "/", "self", ".", "xspc", ")", "+", "1E-7", ")", "if", "(", "location", ".", "latitude", "<", "self", ".", "ylim", "[", "0", "]", ")", "or", "(", "location", ".", "latitude", ">", "self", ".", "ylim", "[", "-", "1", "]", ")", ":", "return", "None", ",", "None", "yloc", "=", "int", "(", "(", "(", "location", ".", "latitude", "-", "self", ".", "ylim", "[", "0", "]", ")", "/", "self", ".", "yspc", ")", "+", "1E-7", ")", "return", "xloc", ",", "yloc" ]
Returns the location in the output grid corresponding to the cell in which the epicentre lies :param location: Source hypocentre as instance of :class: openquake.hazardlib.geo.point.Point :returns: xloc - Location of longitude cell yloc - Location of latitude cell
[ "Returns", "the", "location", "in", "the", "output", "grid", "corresponding", "to", "the", "cell", "in", "which", "the", "epicentre", "lays" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/comparison/rate_grids.py#L180-L200
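A tiny sketch of why the 1E-7 epsilon appears in the cell-index arithmetic above: int() truncates toward zero, and binary floating point can land a boundary value just below the intended integer. The numbers are hypothetical:

west, xspc = 10.0, 0.1
lon = 10.3                                 # (10.3 - 10.0) / 0.1 evaluates to 2.999...
print(int((lon - west) / xspc))            # -> 2 (one cell too low)
print(int((lon - west) / xspc + 1E-7))     # -> 3 (the intended cell)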
gem/oq-engine
openquake/hmtk/comparison/rate_grids.py
RateGrid._get_point_rates
def _get_point_rates(self, source, mmin, mmax=np.inf): """ Adds the rates for a point source :param source: Point source as instance of :class: openquake.hazardlib.source.point.PointSource :param float mmin: Minimum Magnitude :param float mmax: Maximum Magnitude """ xloc, yloc = self._get_point_location(source.location) if (xloc is None) or (yloc is None): return # Get annual rates annual_rate = source.get_annual_occurrence_rates() mags = np.array([val[0] for val in annual_rate]) annual_rate = np.array([val[1] for val in annual_rate]) idx = np.logical_and(mags >= mmin, mags < mmax) annual_rate = np.sum(annual_rate[idx]) for hypo_depth in source.hypocenter_distribution.data: zloc = int((hypo_depth[1] - self.zlim[0]) / self.zspc) if (zloc < 0) or (zloc >= (self.nz - 1)): continue else: self.rates[xloc, yloc, zloc] += float(hypo_depth[0]) * \ annual_rate
python
def _get_point_rates(self, source, mmin, mmax=np.inf): xloc, yloc = self._get_point_location(source.location) if (xloc is None) or (yloc is None): return annual_rate = source.get_annual_occurrence_rates() mags = np.array([val[0] for val in annual_rate]) annual_rate = np.array([val[1] for val in annual_rate]) idx = np.logical_and(mags >= mmin, mags < mmax) annual_rate = np.sum(annual_rate[idx]) for hypo_depth in source.hypocenter_distribution.data: zloc = int((hypo_depth[1] - self.zlim[0]) / self.zspc) if (zloc < 0) or (zloc >= (self.nz - 1)): continue else: self.rates[xloc, yloc, zloc] += float(hypo_depth[0]) * \ annual_rate
[ "def", "_get_point_rates", "(", "self", ",", "source", ",", "mmin", ",", "mmax", "=", "np", ".", "inf", ")", ":", "xloc", ",", "yloc", "=", "self", ".", "_get_point_location", "(", "source", ".", "location", ")", "if", "(", "xloc", "is", "None", ")", "or", "(", "yloc", "is", "None", ")", ":", "return", "# Get annual rates", "annual_rate", "=", "source", ".", "get_annual_occurrence_rates", "(", ")", "mags", "=", "np", ".", "array", "(", "[", "val", "[", "0", "]", "for", "val", "in", "annual_rate", "]", ")", "annual_rate", "=", "np", ".", "array", "(", "[", "val", "[", "1", "]", "for", "val", "in", "annual_rate", "]", ")", "idx", "=", "np", ".", "logical_and", "(", "mags", ">=", "mmin", ",", "mags", "<", "mmax", ")", "annual_rate", "=", "np", ".", "sum", "(", "annual_rate", "[", "idx", "]", ")", "for", "hypo_depth", "in", "source", ".", "hypocenter_distribution", ".", "data", ":", "zloc", "=", "int", "(", "(", "hypo_depth", "[", "1", "]", "-", "self", ".", "zlim", "[", "0", "]", ")", "/", "self", ".", "zspc", ")", "if", "(", "zloc", "<", "0", ")", "or", "(", "zloc", ">=", "(", "self", ".", "nz", "-", "1", ")", ")", ":", "continue", "else", ":", "self", ".", "rates", "[", "xloc", ",", "yloc", ",", "zloc", "]", "+=", "float", "(", "hypo_depth", "[", "0", "]", ")", "*", "annual_rate" ]
Adds the rates for a point source :param source: Point source as instance of :class: openquake.hazardlib.source.point.PointSource :param float mmin: Minimum Magnitude :param float mmax: Maximum Magnitude
[ "Adds", "the", "rates", "for", "a", "point", "source" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/comparison/rate_grids.py#L202-L229
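A numpy sketch of the magnitude filtering step above: the (magnitude, rate) pairs are split into arrays and the rates inside [mmin, mmax) are summed. The values are hypothetical:

import numpy as np

annual_rate = [(4.5, 0.20), (5.5, 0.05), (6.5, 0.01)]
mags = np.array([val[0] for val in annual_rate])
rates = np.array([val[1] for val in annual_rate])
idx = np.logical_and(mags >= 5.0, mags < np.inf)
print(np.sum(rates[idx]))    # -> ~0.06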
gem/oq-engine
openquake/hmtk/comparison/rate_grids.py
RateGrid._get_area_rates
def _get_area_rates(self, source, mmin, mmax=np.inf): """ Adds the rates from the area source by discretising the source to a set of point sources :param source: Area source as instance of :class: openquake.hazardlib.source.area.AreaSource """ points = list(source) for point in points: self._get_point_rates(point, mmin, mmax)
python
def _get_area_rates(self, source, mmin, mmax=np.inf): points = list(source) for point in points: self._get_point_rates(point, mmin, mmax)
[ "def", "_get_area_rates", "(", "self", ",", "source", ",", "mmin", ",", "mmax", "=", "np", ".", "inf", ")", ":", "points", "=", "list", "(", "source", ")", "for", "point", "in", "points", ":", "self", ".", "_get_point_rates", "(", "point", ",", "mmin", ",", "mmax", ")" ]
Adds the rates from the area source by discretising the source to a set of point sources :param source: Area source as instance of :class: openquake.hazardlib.source.area.AreaSource
[ "Adds", "the", "rates", "from", "the", "area", "source", "by", "discretising", "the", "source", "to", "a", "set", "of", "point", "sources" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/comparison/rate_grids.py#L231-L242
gem/oq-engine
openquake/hmtk/comparison/rate_grids.py
RateGrid._get_fault_rates
def _get_fault_rates(self, source, mmin, mmax=np.inf): """ Adds the rates for a simple or complex fault source :param source: Fault source as instance of :class: openquake.hazardlib.source.simple_fault.SimpleFaultSource or openquake.hazardlib.source.complex_fault.ComplexFaultSource """ for rupt in list(source.iter_ruptures()): valid_rupt = (rupt.mag >= mmin) and (rupt.mag < mmax) if not valid_rupt: continue grd = np.column_stack([rupt.surface.mesh.lons.flatten(), rupt.surface.mesh.lats.flatten(), rupt.surface.mesh.depths.flatten()]) npts = np.shape(grd)[0] counter = np.histogramdd(grd, bins=[self.xlim, self.ylim, self.zlim] )[0] point_rate = rupt.occurrence_rate / float(npts) self.rates += (point_rate * counter)
python
def _get_fault_rates(self, source, mmin, mmax=np.inf): for rupt in list(source.iter_ruptures()): valid_rupt = (rupt.mag >= mmin) and (rupt.mag < mmax) if not valid_rupt: continue grd = np.column_stack([rupt.surface.mesh.lons.flatten(), rupt.surface.mesh.lats.flatten(), rupt.surface.mesh.depths.flatten()]) npts = np.shape(grd)[0] counter = np.histogramdd(grd, bins=[self.xlim, self.ylim, self.zlim] )[0] point_rate = rupt.occurrence_rate / float(npts) self.rates += (point_rate * counter)
[ "def", "_get_fault_rates", "(", "self", ",", "source", ",", "mmin", ",", "mmax", "=", "np", ".", "inf", ")", ":", "for", "rupt", "in", "list", "(", "source", ".", "iter_ruptures", "(", ")", ")", ":", "valid_rupt", "=", "(", "rupt", ".", "mag", ">=", "mmin", ")", "and", "(", "rupt", ".", "mag", "<", "mmax", ")", "if", "not", "valid_rupt", ":", "continue", "grd", "=", "np", ".", "column_stack", "(", "[", "rupt", ".", "surface", ".", "mesh", ".", "lons", ".", "flatten", "(", ")", ",", "rupt", ".", "surface", ".", "mesh", ".", "lats", ".", "flatten", "(", ")", ",", "rupt", ".", "surface", ".", "mesh", ".", "depths", ".", "flatten", "(", ")", "]", ")", "npts", "=", "np", ".", "shape", "(", "grd", ")", "[", "0", "]", "counter", "=", "np", ".", "histogramdd", "(", "grd", ",", "bins", "=", "[", "self", ".", "xlim", ",", "self", ".", "ylim", ",", "self", ".", "zlim", "]", ")", "[", "0", "]", "point_rate", "=", "rupt", ".", "occurrence_rate", "/", "float", "(", "npts", ")", "self", ".", "rates", "+=", "(", "point_rate", "*", "counter", ")" ]
Adds the rates for a simple or complex fault source :param source: Fault source as instance of :class: openquake.hazardlib.source.simple_fault.SimpleFaultSource or openquake.hazardlib.source.complex_fault.ComplexFaultSource
[ "Adds", "the", "rates", "for", "a", "simple", "or", "complex", "fault", "source" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/comparison/rate_grids.py#L244-L265
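A self-contained sketch of the 3-D binning idiom above: np.histogramdd counts rupture mesh nodes per grid cell, so each rupture's occurrence rate can be shared uniformly across its nodes. Points and bin edges are hypothetical:

import numpy as np

grd = np.array([[0.5, 0.5, 5.0],       # lon, lat, depth of mesh nodes
                [1.5, 0.5, 5.0],
                [1.5, 1.5, 15.0]])
xlim = np.array([0., 1., 2.])
ylim = np.array([0., 1., 2.])
zlim = np.array([0., 10., 20.])
counter = np.histogramdd(grd, bins=[xlim, ylim, zlim])[0]
point_rate = 0.03 / grd.shape[0]       # hypothetical rate spread over 3 nodes
print(point_rate * counter)            # per-cell rate contributions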
gem/oq-engine
openquake/hmtk/comparison/rate_grids.py
RatePolygon._get_point_rates
def _get_point_rates(self, source, mmin, mmax=np.inf): """ Adds the rates for a point source :param source: Point source as instance of :class: openquake.hazardlib.source.point.PointSource :param float mmin: Minimum Magnitude :param float mmax: Maximum Magnitude """ src_mesh = Mesh.from_points_list([source.location]) in_poly = self.limits.intersects(src_mesh)[0] if not in_poly: return else: for (mag, rate) in source.get_annual_occurrence_rates(): if (mag < mmin) or (mag > mmax): return else: for (prob, depth) in source.hypocenter_distribution.data: if (depth < self.upper_depth) or\ (depth > self.lower_depth): continue else: self.rates += (prob * rate)
python
def _get_point_rates(self, source, mmin, mmax=np.inf): src_mesh = Mesh.from_points_list([source.location]) in_poly = self.limits.intersects(src_mesh)[0] if not in_poly: return else: for (mag, rate) in source.get_annual_occurrence_rates(): if (mag < mmin) or (mag > mmax): return else: for (prob, depth) in source.hypocenter_distribution.data: if (depth < self.upper_depth) or\ (depth > self.lower_depth): continue else: self.rates += (prob * rate)
[ "def", "_get_point_rates", "(", "self", ",", "source", ",", "mmin", ",", "mmax", "=", "np", ".", "inf", ")", ":", "src_mesh", "=", "Mesh", ".", "from_points_list", "(", "[", "source", ".", "location", "]", ")", "in_poly", "=", "self", ".", "limits", ".", "intersects", "(", "src_mesh", ")", "[", "0", "]", "if", "not", "in_poly", ":", "return", "else", ":", "for", "(", "mag", ",", "rate", ")", "in", "source", ".", "get_annual_occurrence_rates", "(", ")", ":", "if", "(", "mag", "<", "mmin", ")", "or", "(", "mag", ">", "mmax", ")", ":", "return", "else", ":", "for", "(", "prob", ",", "depth", ")", "in", "source", ".", "hypocenter_distribution", ".", "data", ":", "if", "(", "depth", "<", "self", ".", "upper_depth", ")", "or", "(", "depth", ">", "self", ".", "lower_depth", ")", ":", "continue", "else", ":", "self", ".", "rates", "+=", "(", "prob", "*", "rate", ")" ]
Adds the rates for a point source :param source: Point source as instance of :class: openquake.hazardlib.source.point.PointSource :param float mmin: Minimum Magnitude :param float mmax: Maximum Magnitude
[ "Adds", "the", "rates", "for", "a", "point", "source" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/comparison/rate_grids.py#L303-L329
gem/oq-engine
openquake/hmtk/comparison/rate_grids.py
RatePolygon._get_fault_rates
def _get_fault_rates(self, source, mmin, mmax=np.inf): """ Adds the rates for a simple or complex fault source :param source: Fault source as instance of :class: openquake.hazardlib.source.simple_fault.SimpleFaultSource or openquake.hazardlib.source.complex_fault.ComplexFaultSource """ for rup in list(source.iter_ruptures()): if (rup.mag < mmin) or (rup.mag > mmax): # Magnitude outside search range continue depths = rup.surface.mesh.depths.flatten() # Generate simple mesh from surface rupt_mesh = Mesh(rup.surface.mesh.lons.flatten(), rup.surface.mesh.lats.flatten(), depths) # Mesh points in polygon in_poly = self.limits.intersects(rupt_mesh) in_depth = np.logical_and(depths >= self.upper_depth, depths <= self.lower_depth) idx = np.logical_and(in_poly, in_depth) if np.any(idx): node_rate = rup.occurrence_rate / float(len(depths)) self.rates += (node_rate * np.sum(idx))
python
def _get_fault_rates(self, source, mmin, mmax=np.inf): for rup in list(source.iter_ruptures()): if (rup.mag < mmin) or (rup.mag > mmax): continue depths = rup.surface.mesh.depths.flatten() rupt_mesh = Mesh(rup.surface.mesh.lons.flatten(), rup.surface.mesh.lats.flatten(), depths) in_poly = self.limits.intersects(rupt_mesh) in_depth = np.logical_and(depths >= self.upper_depth, depths <= self.lower_depth) idx = np.logical_and(in_poly, in_depth) if np.any(idx): node_rate = rup.occurrence_rate / float(len(depths)) self.rates += (node_rate * np.sum(idx))
[ "def", "_get_fault_rates", "(", "self", ",", "source", ",", "mmin", ",", "mmax", "=", "np", ".", "inf", ")", ":", "for", "rup", "in", "list", "(", "source", ".", "iter_ruptures", "(", ")", ")", ":", "if", "(", "rup", ".", "mag", "<", "mmin", ")", "or", "(", "rup", ".", "mag", ">", "mmax", ")", ":", "# Magnitude outside search range", "continue", "depths", "=", "rup", ".", "surface", ".", "mesh", ".", "depths", ".", "flatten", "(", ")", "# Generate simple mesh from surface", "rupt_mesh", "=", "Mesh", "(", "rup", ".", "surface", ".", "mesh", ".", "lons", ".", "flatten", "(", ")", ",", "rup", ".", "surface", ".", "mesh", ".", "lats", ".", "flatten", "(", ")", ",", "depths", ")", "# Mesh points in polygon", "in_poly", "=", "self", ".", "limits", ".", "intersects", "(", "rupt_mesh", ")", "in_depth", "=", "np", ".", "logical_and", "(", "depths", ">=", "self", ".", "upper_depth", ",", "depths", "<=", "self", ".", "lower_depth", ")", "idx", "=", "np", ".", "logical_and", "(", "in_poly", ",", "in_depth", ")", "if", "np", ".", "any", "(", "idx", ")", ":", "node_rate", "=", "rup", ".", "occurrence_rate", "/", "float", "(", "len", "(", "depths", ")", ")", "self", ".", "rates", "+=", "(", "node_rate", "*", "np", ".", "sum", "(", "idx", ")", ")" ]
Adds the rates for a simple or complex fault source :param source: Fault source as instance of :class: openquake.hazardlib.source.simple_fault.SimpleFaultSource or openquake.hazardlib.source.complex_fault.ComplexFaultSource
[ "Adds", "the", "rates", "for", "a", "simple", "or", "complex", "fault", "source" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/comparison/rate_grids.py#L331-L356
gem/oq-engine
openquake/hazardlib/gsim/megawati_pan_2010.py
MegawatiPan2010.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types) C = self.COEFFS[imt] mean = (self._get_magnitude_scaling(C, rup.mag) + self._get_distance_scaling(C, rup.mag, dists.rhypo)) if imt.name in "SA PGA": mean = np.log(np.exp(mean) / (100.0 * g)) stddevs = self._compute_std(C, stddev_types, len(dists.rhypo)) return mean, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types) C = self.COEFFS[imt] mean = (self._get_magnitude_scaling(C, rup.mag) + self._get_distance_scaling(C, rup.mag, dists.rhypo)) if imt.name in "SA PGA": mean = np.log(np.exp(mean) / (100.0 * g)) stddevs = self._compute_std(C, stddev_types, len(dists.rhypo)) return mean, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "assert", "all", "(", "stddev_type", "in", "self", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", "for", "stddev_type", "in", "stddev_types", ")", "C", "=", "self", ".", "COEFFS", "[", "imt", "]", "mean", "=", "(", "self", ".", "_get_magnitude_scaling", "(", "C", ",", "rup", ".", "mag", ")", "+", "self", ".", "_get_distance_scaling", "(", "C", ",", "rup", ".", "mag", ",", "dists", ".", "rhypo", ")", ")", "if", "imt", ".", "name", "in", "\"SA PGA\"", ":", "mean", "=", "np", ".", "log", "(", "np", ".", "exp", "(", "mean", ")", "/", "(", "100.0", "*", "g", ")", ")", "stddevs", "=", "self", ".", "_compute_std", "(", "C", ",", "stddev_types", ",", "len", "(", "dists", ".", "rhypo", ")", ")", "return", "mean", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/megawati_pan_2010.py#L75-L90
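A short sketch of the unit conversion applied above for SA and PGA: a mean held as ln(cm/s^2) is moved to ln(g) by dividing the exponentiated value by 100*g, since g from scipy.constants is in m/s^2 and 100*g is the same constant in cm/s^2. The input value is hypothetical:

import numpy as np
from scipy.constants import g

mean_cms2 = np.log(981.0)                         # roughly 1 g, in ln(cm/s^2)
mean_g = np.log(np.exp(mean_cms2) / (100.0 * g))
print(np.exp(mean_g))                             # -> ~1.0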
gem/oq-engine
openquake/hazardlib/gsim/megawati_pan_2010.py
MegawatiPan2010._get_distance_scaling
def _get_distance_scaling(self, C, mag, rhypo): """ Returns the distance scaling term """ return (C["a3"] * np.log(rhypo)) + (C["a4"] + C["a5"] * mag) * rhypo
python
def _get_distance_scaling(self, C, mag, rhypo): return (C["a3"] * np.log(rhypo)) + (C["a4"] + C["a5"] * mag) * rhypo
[ "def", "_get_distance_scaling", "(", "self", ",", "C", ",", "mag", ",", "rhypo", ")", ":", "return", "(", "C", "[", "\"a3\"", "]", "*", "np", ".", "log", "(", "rhypo", ")", ")", "+", "(", "C", "[", "\"a4\"", "]", "+", "C", "[", "\"a5\"", "]", "*", "mag", ")", "*", "rhypo" ]
Returns the distance scaling term
[ "Returns", "the", "distance", "scalig", "term" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/megawati_pan_2010.py#L98-L102
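The distance-scaling term above combines geometric spreading (the logarithmic term) with a magnitude-dependent anelastic attenuation term. A minimal standalone sketch of the functional form; the coefficient values below are invented placeholders, not the published Megawati & Pan (2010) coefficients:

```python
import numpy as np

# Hypothetical coefficients, for illustration only
C = {"a3": -1.0, "a4": -0.005, "a5": 0.0005}
mag = 6.5
rhypo = np.array([10.0, 50.0, 100.0])  # hypocentral distance in km

# a3 * ln(R) + (a4 + a5 * M) * R, as in _get_distance_scaling above
term = (C["a3"] * np.log(rhypo)) + (C["a4"] + C["a5"] * mag) * rhypo
print(term)  # grows more negative with distance (stronger attenuation)
```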
gem/oq-engine
openquake/hazardlib/gsim/rietbrock_2013.py
RietbrockEtAl2013SelfSimilar.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ # extract dictionaries of coefficients specific to required # intensity measure type C = self.COEFFS[imt] imean = (self._get_magnitude_scaling_term(C, rup.mag) + self._get_distance_scaling_term(C, dists.rjb, rup.mag)) # convert from cm/s**2 to g for SA and from cm/s**2 to g for PGA (PGV # is already in cm/s) and also convert from base 10 to base e. if imt.name in "SA PGA": mean = np.log((10.0 ** (imean - 2.0)) / g) else: mean = np.log(10 ** imean) stddevs = self._get_stddevs(C, stddev_types, dists.rjb.shape[0]) return mean, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): C = self.COEFFS[imt] imean = (self._get_magnitude_scaling_term(C, rup.mag) + self._get_distance_scaling_term(C, dists.rjb, rup.mag)) if imt.name in "SA PGA": mean = np.log((10.0 ** (imean - 2.0)) / g) else: mean = np.log(10 ** imean) stddevs = self._get_stddevs(C, stddev_types, dists.rjb.shape[0]) return mean, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "# extract dictionaries of coefficients specific to required", "# intensity measure type", "C", "=", "self", ".", "COEFFS", "[", "imt", "]", "imean", "=", "(", "self", ".", "_get_magnitude_scaling_term", "(", "C", ",", "rup", ".", "mag", ")", "+", "self", ".", "_get_distance_scaling_term", "(", "C", ",", "dists", ".", "rjb", ",", "rup", ".", "mag", ")", ")", "# convert from cm/s**2 to g for SA and from cm/s**2 to g for PGA (PGV", "# is already in cm/s) and also convert from base 10 to base e.", "if", "imt", ".", "name", "in", "\"SA PGA\"", ":", "mean", "=", "np", ".", "log", "(", "(", "10.0", "**", "(", "imean", "-", "2.0", ")", ")", "/", "g", ")", "else", ":", "mean", "=", "np", ".", "log", "(", "10", "**", "imean", ")", "stddevs", "=", "self", ".", "_get_stddevs", "(", "C", ",", "stddev_types", ",", "dists", ".", "rjb", ".", "shape", "[", "0", "]", ")", "return", "mean", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/rietbrock_2013.py#L80-L100
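The unit handling in the branch above is easy to misread: the model predicts log10 of acceleration in cm/s**2, and the -2.0 inside the exponent is the factor-of-100 conversion to m/s**2 before dividing by g. A standalone sketch of just that conversion, with an invented imean value:

```python
import numpy as np
from scipy.constants import g  # standard gravity, 9.80665 m/s**2

imean = 2.5  # hypothetical log10 of acceleration in cm/s**2
mean = np.log((10.0 ** (imean - 2.0)) / g)

# 10**(imean - 2) == 10**imean / 100, i.e. cm/s**2 -> m/s**2
assert np.isclose(mean, np.log(10.0 ** imean / (100.0 * g)))
print(mean)  # natural log of the acceleration expressed in g
```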
gem/oq-engine
openquake/hazardlib/gsim/rietbrock_2013.py
RietbrockEtAl2013SelfSimilar._get_distance_scaling_term
def _get_distance_scaling_term(self, C, rjb, mag): """ Returns the distance scaling component of the model Equation 10, Page 63 """ # Depth adjusted distance, equation 11 (Page 63) rval = np.sqrt(rjb ** 2.0 + C["c11"] ** 2.0) f_0, f_1, f_2 = self._get_distance_segment_coefficients(rval) return ((C["c4"] + C["c5"] * mag) * f_0 + (C["c6"] + C["c7"] * mag) * f_1 + (C["c8"] + C["c9"] * mag) * f_2 + (C["c10"] * rval))
python
def _get_distance_scaling_term(self, C, rjb, mag): rval = np.sqrt(rjb ** 2.0 + C["c11"] ** 2.0) f_0, f_1, f_2 = self._get_distance_segment_coefficients(rval) return ((C["c4"] + C["c5"] * mag) * f_0 + (C["c6"] + C["c7"] * mag) * f_1 + (C["c8"] + C["c9"] * mag) * f_2 + (C["c10"] * rval))
[ "def", "_get_distance_scaling_term", "(", "self", ",", "C", ",", "rjb", ",", "mag", ")", ":", "# Depth adjusted distance, equation 11 (Page 63)", "rval", "=", "np", ".", "sqrt", "(", "rjb", "**", "2.0", "+", "C", "[", "\"c11\"", "]", "**", "2.0", ")", "f_0", ",", "f_1", ",", "f_2", "=", "self", ".", "_get_distance_segment_coefficients", "(", "rval", ")", "return", "(", "(", "C", "[", "\"c4\"", "]", "+", "C", "[", "\"c5\"", "]", "*", "mag", ")", "*", "f_0", "+", "(", "C", "[", "\"c6\"", "]", "+", "C", "[", "\"c7\"", "]", "*", "mag", ")", "*", "f_1", "+", "(", "C", "[", "\"c8\"", "]", "+", "C", "[", "\"c9\"", "]", "*", "mag", ")", "*", "f_2", "+", "(", "C", "[", "\"c10\"", "]", "*", "rval", ")", ")" ]
Returns the distance scaling component of the model Equation 10, Page 63
[ "Returns", "the", "distance", "scaling", "component", "of", "the", "model", "Equation", "10", "Page", "63" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/rietbrock_2013.py#L109-L120
gem/oq-engine
openquake/hazardlib/gsim/rietbrock_2013.py
RietbrockEtAl2013SelfSimilar._get_distance_segment_coefficients
def _get_distance_segment_coefficients(self, rval): """ Returns the coefficients describing the distance attenuation shape for three different distance bins, equations 12a - 12c """ # Get distance segment ends nsites = len(rval) # Equation 12a f_0 = np.log10(self.CONSTS["r0"] / rval) f_0[rval > self.CONSTS["r0"]] = 0.0 # Equation 12b f_1 = np.log10(rval) f_1[rval > self.CONSTS["r1"]] = np.log10(self.CONSTS["r1"]) # Equation 12c f_2 = np.log10(rval / self.CONSTS["r2"]) f_2[rval <= self.CONSTS["r2"]] = 0.0 return f_0, f_1, f_2
python
def _get_distance_segment_coefficients(self, rval): nsites = len(rval) f_0 = np.log10(self.CONSTS["r0"] / rval) f_0[rval > self.CONSTS["r0"]] = 0.0 f_1 = np.log10(rval) f_1[rval > self.CONSTS["r1"]] = np.log10(self.CONSTS["r1"]) f_2 = np.log10(rval / self.CONSTS["r2"]) f_2[rval <= self.CONSTS["r2"]] = 0.0 return f_0, f_1, f_2
[ "def", "_get_distance_segment_coefficients", "(", "self", ",", "rval", ")", ":", "# Get distance segment ends", "nsites", "=", "len", "(", "rval", ")", "# Equation 12a", "f_0", "=", "np", ".", "log10", "(", "self", ".", "CONSTS", "[", "\"r0\"", "]", "/", "rval", ")", "f_0", "[", "rval", ">", "self", ".", "CONSTS", "[", "\"r0\"", "]", "]", "=", "0.0", "# Equation 12b", "f_1", "=", "np", ".", "log10", "(", "rval", ")", "f_1", "[", "rval", ">", "self", ".", "CONSTS", "[", "\"r1\"", "]", "]", "=", "np", ".", "log10", "(", "self", ".", "CONSTS", "[", "\"r1\"", "]", ")", "# Equation 12c", "f_2", "=", "np", ".", "log10", "(", "rval", "/", "self", ".", "CONSTS", "[", "\"r2\"", "]", ")", "f_2", "[", "rval", "<=", "self", ".", "CONSTS", "[", "\"r2\"", "]", "]", "=", "0.0", "return", "f_0", ",", "f_1", ",", "f_2" ]
Returns the coefficients describing the distance attenuation shape for three different distance bins, equations 12a - 12c
[ "Returns", "the", "coefficients", "describing", "the", "distance", "attenuation", "shape", "for", "three", "different", "distance", "bins", "equations", "12a", "-", "12c" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/rietbrock_2013.py#L122-L139
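The three arrays implement a trilinear attenuation shape: f_0 is non-zero only below r0, f_1 saturates at r1, and f_2 switches on only beyond r2 (the nsites variable computed at the top is unused in the shown code). A standalone sketch with assumed segment ends; the real r0 < r1 < r2 live in the class-level CONSTS dictionary, which this record does not show:

```python
import numpy as np

CONSTS = {"r0": 10.0, "r1": 50.0, "r2": 100.0}  # assumed segment ends in km
rval = np.array([5.0, 30.0, 80.0, 200.0])

f_0 = np.log10(CONSTS["r0"] / rval)
f_0[rval > CONSTS["r0"]] = 0.0                     # active only below r0
f_1 = np.log10(rval)
f_1[rval > CONSTS["r1"]] = np.log10(CONSTS["r1"])  # saturates at r1
f_2 = np.log10(rval / CONSTS["r2"])
f_2[rval <= CONSTS["r2"]] = 0.0                    # active only beyond r2
print(f_0, f_1, f_2)
```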
gem/oq-engine
openquake/commonlib/readinput.py
collect_files
def collect_files(dirpath, cond=lambda fullname: True): """ Recursively collect the files contained inside dirpath. :param dirpath: path to a readable directory :param cond: condition on the path to collect the file """ files = [] for fname in os.listdir(dirpath): fullname = os.path.join(dirpath, fname) if os.path.isdir(fullname): # navigate inside files.extend(collect_files(fullname)) else: # collect files if cond(fullname): files.append(fullname) return files
python
def collect_files(dirpath, cond=lambda fullname: True): files = [] for fname in os.listdir(dirpath): fullname = os.path.join(dirpath, fname) if os.path.isdir(fullname): files.extend(collect_files(fullname)) else: if cond(fullname): files.append(fullname) return files
[ "def", "collect_files", "(", "dirpath", ",", "cond", "=", "lambda", "fullname", ":", "True", ")", ":", "files", "=", "[", "]", "for", "fname", "in", "os", ".", "listdir", "(", "dirpath", ")", ":", "fullname", "=", "os", ".", "path", ".", "join", "(", "dirpath", ",", "fname", ")", "if", "os", ".", "path", ".", "isdir", "(", "fullname", ")", ":", "# navigate inside", "files", ".", "extend", "(", "collect_files", "(", "fullname", ")", ")", "else", ":", "# collect files", "if", "cond", "(", "fullname", ")", ":", "files", ".", "append", "(", "fullname", ")", "return", "files" ]
Recursively collect the files contained inside dirpath. :param dirpath: path to a readable directory :param cond: condition on the path to collect the file
[ "Recursively", "collect", "the", "files", "contained", "inside", "dirpath", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L69-L84
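A runnable usage sketch against a throwaway directory tree, assuming the function is importable as in this record. Note that the recursive call in the body drops the cond argument, so the filter applies only at the level where it is passed; the files below are chosen so the result is the same either way:

```python
import os
import tempfile
from openquake.commonlib.readinput import collect_files

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, 'sub'))
for name in ('job.ini', 'notes.txt', os.path.join('sub', 'job_risk.ini')):
    open(os.path.join(root, name), 'w').close()

inis = collect_files(root, cond=lambda f: f.endswith('.ini'))
print(sorted(os.path.basename(f) for f in inis))  # ['job.ini', 'job_risk.ini']
```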
gem/oq-engine
openquake/commonlib/readinput.py
extract_from_zip
def extract_from_zip(path, candidates): """ Given a zip archive and a list of candidate filenames, unzip the archive into a temporary directory and return the full path of the file. Raise an IOError if the file cannot be found within the archive. :param path: pathname of the archive :param candidates: list of names to search for """ temp_dir = tempfile.mkdtemp() with zipfile.ZipFile(path) as archive: archive.extractall(temp_dir) return [f for f in collect_files(temp_dir) if os.path.basename(f) in candidates]
python
def extract_from_zip(path, candidates): temp_dir = tempfile.mkdtemp() with zipfile.ZipFile(path) as archive: archive.extractall(temp_dir) return [f for f in collect_files(temp_dir) if os.path.basename(f) in candidates]
[ "def", "extract_from_zip", "(", "path", ",", "candidates", ")", ":", "temp_dir", "=", "tempfile", ".", "mkdtemp", "(", ")", "with", "zipfile", ".", "ZipFile", "(", "path", ")", "as", "archive", ":", "archive", ".", "extractall", "(", "temp_dir", ")", "return", "[", "f", "for", "f", "in", "collect_files", "(", "temp_dir", ")", "if", "os", ".", "path", ".", "basename", "(", "f", ")", "in", "candidates", "]" ]
Given a zip archive and a list of candidate filenames, unzip the archive into a temporary directory and return the full path of the file. Raise an IOError if the file cannot be found within the archive. :param path: pathname of the archive :param candidates: list of names to search for
[ "Given", "a", "zip", "archive", "and", "a", "function", "to", "detect", "the", "presence", "of", "a", "given", "filename", "unzip", "the", "archive", "into", "a", "temporary", "directory", "and", "return", "the", "full", "path", "of", "the", "file", ".", "Raise", "an", "IOError", "if", "the", "file", "cannot", "be", "found", "within", "the", "archive", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L87-L101
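A runnable usage sketch: build a small archive, then extract the single candidate. Note that, contrary to the docstring, no IOError is raised here when nothing matches; the function simply returns an empty list:

```python
import os
import tempfile
import zipfile
from openquake.commonlib.readinput import extract_from_zip

tmp = tempfile.mkdtemp()
archive = os.path.join(tmp, 'inputs.zip')
with zipfile.ZipFile(archive, 'w') as z:
    z.writestr('job.ini', '[general]\n')         # a candidate
    z.writestr('readme.txt', 'not a candidate\n')

[ini] = extract_from_zip(archive, ['job.ini'])
print(os.path.basename(ini))  # job.ini
```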
gem/oq-engine
openquake/commonlib/readinput.py
get_params
def get_params(job_inis, **kw): """ Parse one or more INI-style config files. :param job_inis: List of configuration files (or list containing a single zip archive) :param kw: Optionally override some parameters :returns: A dictionary of parameters """ input_zip = None if len(job_inis) == 1 and job_inis[0].endswith('.zip'): input_zip = job_inis[0] job_inis = extract_from_zip( job_inis[0], ['job_hazard.ini', 'job_haz.ini', 'job.ini', 'job_risk.ini']) not_found = [ini for ini in job_inis if not os.path.exists(ini)] if not_found: # something was not found raise IOError('File not found: %s' % not_found[0]) cp = configparser.ConfigParser() cp.read(job_inis) # directory containing the config files we're parsing job_ini = os.path.abspath(job_inis[0]) base_path = decode(os.path.dirname(job_ini)) params = dict(base_path=base_path, inputs={'job_ini': job_ini}) if input_zip: params['inputs']['input_zip'] = os.path.abspath(input_zip) for sect in cp.sections(): _update(params, cp.items(sect), base_path) _update(params, kw.items(), base_path) # override on demand if params['inputs'].get('reqv'): # using pointsource_distance=0 because of the reqv approximation params['pointsource_distance'] = '0' return params
python
def get_params(job_inis, **kw): input_zip = None if len(job_inis) == 1 and job_inis[0].endswith('.zip'): input_zip = job_inis[0] job_inis = extract_from_zip( job_inis[0], ['job_hazard.ini', 'job_haz.ini', 'job.ini', 'job_risk.ini']) not_found = [ini for ini in job_inis if not os.path.exists(ini)] if not_found: raise IOError('File not found: %s' % not_found[0]) cp = configparser.ConfigParser() cp.read(job_inis) job_ini = os.path.abspath(job_inis[0]) base_path = decode(os.path.dirname(job_ini)) params = dict(base_path=base_path, inputs={'job_ini': job_ini}) if input_zip: params['inputs']['input_zip'] = os.path.abspath(input_zip) for sect in cp.sections(): _update(params, cp.items(sect), base_path) _update(params, kw.items(), base_path) if params['inputs'].get('reqv'): params['pointsource_distance'] = '0' return params
[ "def", "get_params", "(", "job_inis", ",", "*", "*", "kw", ")", ":", "input_zip", "=", "None", "if", "len", "(", "job_inis", ")", "==", "1", "and", "job_inis", "[", "0", "]", ".", "endswith", "(", "'.zip'", ")", ":", "input_zip", "=", "job_inis", "[", "0", "]", "job_inis", "=", "extract_from_zip", "(", "job_inis", "[", "0", "]", ",", "[", "'job_hazard.ini'", ",", "'job_haz.ini'", ",", "'job.ini'", ",", "'job_risk.ini'", "]", ")", "not_found", "=", "[", "ini", "for", "ini", "in", "job_inis", "if", "not", "os", ".", "path", ".", "exists", "(", "ini", ")", "]", "if", "not_found", ":", "# something was not found", "raise", "IOError", "(", "'File not found: %s'", "%", "not_found", "[", "0", "]", ")", "cp", "=", "configparser", ".", "ConfigParser", "(", ")", "cp", ".", "read", "(", "job_inis", ")", "# directory containing the config files we're parsing", "job_ini", "=", "os", ".", "path", ".", "abspath", "(", "job_inis", "[", "0", "]", ")", "base_path", "=", "decode", "(", "os", ".", "path", ".", "dirname", "(", "job_ini", ")", ")", "params", "=", "dict", "(", "base_path", "=", "base_path", ",", "inputs", "=", "{", "'job_ini'", ":", "job_ini", "}", ")", "if", "input_zip", ":", "params", "[", "'inputs'", "]", "[", "'input_zip'", "]", "=", "os", ".", "path", ".", "abspath", "(", "input_zip", ")", "for", "sect", "in", "cp", ".", "sections", "(", ")", ":", "_update", "(", "params", ",", "cp", ".", "items", "(", "sect", ")", ",", "base_path", ")", "_update", "(", "params", ",", "kw", ".", "items", "(", ")", ",", "base_path", ")", "# override on demand", "if", "params", "[", "'inputs'", "]", ".", "get", "(", "'reqv'", ")", ":", "# using pointsource_distance=0 because of the reqv approximation", "params", "[", "'pointsource_distance'", "]", "=", "'0'", "return", "params" ]
Parse one or more INI-style config files. :param job_inis: List of configuration files (or list containing a single zip archive) :param kw: Optionally override some parameters :returns: A dictionary of parameters
[ "Parse", "one", "or", "more", "INI", "-", "style", "config", "files", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L152-L191
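A usage sketch, assuming an installed engine and a job.ini at the hypothetical path below; keyword arguments override values read from the file:

```python
from openquake.commonlib.readinput import get_params

# /tmp/demo/job.ini is a hypothetical input file
params = get_params(['/tmp/demo/job.ini'])
print(params['base_path'])          # '/tmp/demo'
print(params['inputs']['job_ini'])  # absolute path to the ini file
```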
gem/oq-engine
openquake/commonlib/readinput.py
get_oqparam
def get_oqparam(job_ini, pkg=None, calculators=None, hc_id=None, validate=1, **kw): """ Parse a dictionary of parameters from an INI-style config file. :param job_ini: Path to configuration file/archive or dictionary of parameters :param pkg: Python package where to find the configuration file (optional) :param calculators: Sequence of calculator names (optional) used to restrict the valid choices for `calculation_mode` :param hc_id: Not None only when called from a post calculation :param validate: Flag. By default it is true and the parameters are validated :param kw: String-valued keyword arguments used to override the job.ini parameters :returns: An :class:`openquake.commonlib.oqvalidation.OqParam` instance containing the validated and cast parameters/values parsed from the job.ini file as well as a subdictionary 'inputs' containing absolute paths to all of the files referenced in the job.ini, keyed by the parameter name. """ # UGLY: this is here to avoid circular imports from openquake.calculators import base OqParam.calculation_mode.validator.choices = tuple( calculators or base.calculators) if not isinstance(job_ini, dict): basedir = os.path.dirname(pkg.__file__) if pkg else '' job_ini = get_params([os.path.join(basedir, job_ini)]) if hc_id: job_ini.update(hazard_calculation_id=str(hc_id)) job_ini.update(kw) oqparam = OqParam(**job_ini) if validate: oqparam.validate() return oqparam
python
def get_oqparam(job_ini, pkg=None, calculators=None, hc_id=None, validate=1, **kw): from openquake.calculators import base OqParam.calculation_mode.validator.choices = tuple( calculators or base.calculators) if not isinstance(job_ini, dict): basedir = os.path.dirname(pkg.__file__) if pkg else '' job_ini = get_params([os.path.join(basedir, job_ini)]) if hc_id: job_ini.update(hazard_calculation_id=str(hc_id)) job_ini.update(kw) oqparam = OqParam(**job_ini) if validate: oqparam.validate() return oqparam
[ "def", "get_oqparam", "(", "job_ini", ",", "pkg", "=", "None", ",", "calculators", "=", "None", ",", "hc_id", "=", "None", ",", "validate", "=", "1", ",", "*", "*", "kw", ")", ":", "# UGLY: this is here to avoid circular imports", "from", "openquake", ".", "calculators", "import", "base", "OqParam", ".", "calculation_mode", ".", "validator", ".", "choices", "=", "tuple", "(", "calculators", "or", "base", ".", "calculators", ")", "if", "not", "isinstance", "(", "job_ini", ",", "dict", ")", ":", "basedir", "=", "os", ".", "path", ".", "dirname", "(", "pkg", ".", "__file__", ")", "if", "pkg", "else", "''", "job_ini", "=", "get_params", "(", "[", "os", ".", "path", ".", "join", "(", "basedir", ",", "job_ini", ")", "]", ")", "if", "hc_id", ":", "job_ini", ".", "update", "(", "hazard_calculation_id", "=", "str", "(", "hc_id", ")", ")", "job_ini", ".", "update", "(", "kw", ")", "oqparam", "=", "OqParam", "(", "*", "*", "job_ini", ")", "if", "validate", ":", "oqparam", ".", "validate", "(", ")", "return", "oqparam" ]
Parse a dictionary of parameters from an INI-style config file. :param job_ini: Path to configuration file/archive or dictionary of parameters :param pkg: Python package where to find the configuration file (optional) :param calculators: Sequence of calculator names (optional) used to restrict the valid choices for `calculation_mode` :param hc_id: Not None only when called from a post calculation :param validate: Flag. By default it is true and the parameters are validated :param kw: String-valued keyword arguments used to override the job.ini parameters :returns: An :class:`openquake.commonlib.oqvalidation.OqParam` instance containing the validated and cast parameters/values parsed from the job.ini file as well as a subdictionary 'inputs' containing absolute paths to all of the files referenced in the job.ini, keyed by the parameter name.
[ "Parse", "a", "dictionary", "of", "parameters", "from", "an", "INI", "-", "style", "config", "file", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L194-L233
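The higher-level entry point returns a validated OqParam object rather than a raw dict. A usage sketch under the same assumptions (installed engine, hypothetical job.ini path):

```python
from openquake.commonlib.readinput import get_oqparam

oq = get_oqparam('/tmp/demo/job.ini')  # hypothetical path
print(oq.calculation_mode)             # validated, typed attribute
print(oq.inputs['job_ini'])            # absolute path, as in get_params
```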
gem/oq-engine
openquake/commonlib/readinput.py
get_csv_header
def get_csv_header(fname, sep=','): """ :param fname: a CSV file :param sep: the separator (default comma) :returns: the first line of fname """ with open(fname, encoding='utf-8-sig') as f: return next(f).split(sep)
python
def get_csv_header(fname, sep=','): with open(fname, encoding='utf-8-sig') as f: return next(f).split(sep)
[ "def", "get_csv_header", "(", "fname", ",", "sep", "=", "','", ")", ":", "with", "open", "(", "fname", ",", "encoding", "=", "'utf-8-sig'", ")", "as", "f", ":", "return", "next", "(", "f", ")", ".", "split", "(", "sep", ")" ]
:param fname: a CSV file :param sep: the separator (default comma) :returns: the first line of fname
[ ":", "param", "fname", ":", "a", "CSV", "file", ":", "param", "sep", ":", "the", "separator", "(", "default", "comma", ")", ":", "returns", ":", "the", "first", "line", "of", "fname" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L248-L255
gem/oq-engine
openquake/commonlib/readinput.py
read_csv
def read_csv(fname, sep=','): """ :param fname: a CSV file with a header and float fields :param sep: separator (default the comma) :return: a structured array of floats """ with open(fname, encoding='utf-8-sig') as f: header = next(f).strip().split(sep) dt = numpy.dtype([(h, numpy.bool if h == 'vs30measured' else float) for h in header]) return numpy.loadtxt(f, dt, delimiter=sep)
python
def read_csv(fname, sep=','): with open(fname, encoding='utf-8-sig') as f: header = next(f).strip().split(sep) dt = numpy.dtype([(h, numpy.bool if h == 'vs30measured' else float) for h in header]) return numpy.loadtxt(f, dt, delimiter=sep)
[ "def", "read_csv", "(", "fname", ",", "sep", "=", "','", ")", ":", "with", "open", "(", "fname", ",", "encoding", "=", "'utf-8-sig'", ")", "as", "f", ":", "header", "=", "next", "(", "f", ")", ".", "strip", "(", ")", ".", "split", "(", "sep", ")", "dt", "=", "numpy", ".", "dtype", "(", "[", "(", "h", ",", "numpy", ".", "bool", "if", "h", "==", "'vs30measured'", "else", "float", ")", "for", "h", "in", "header", "]", ")", "return", "numpy", ".", "loadtxt", "(", "f", ",", "dt", ",", "delimiter", "=", "sep", ")" ]
:param fname: a CSV file with a header and float fields :param sep: separator (default the comma) :return: a structured array of floats
[ ":", "param", "fname", ":", "a", "CSV", "file", "with", "an", "header", "and", "float", "fields", ":", "param", "sep", ":", "separato", "(", "default", "the", "comma", ")", ":", "return", ":", "a", "structured", "array", "of", "floats" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L258-L268
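The header-to-dtype pattern in read_csv can be demonstrated without the engine; the CSV content below is invented. (The numpy.bool alias used for the vs30measured column in the original is deprecated in modern numpy, so the sketch sticks to float fields.)

```python
import io
import numpy

text = io.StringIO("lon,lat,vs30\n9.15,45.16,760\n9.20,45.20,400\n")
header = next(text).strip().split(',')          # consume the header line
dt = numpy.dtype([(h, float) for h in header])  # one float field per column
arr = numpy.loadtxt(text, dt, delimiter=',')
print(arr['vs30'])                              # [760. 400.]
```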
gem/oq-engine
openquake/commonlib/readinput.py
get_mesh
def get_mesh(oqparam): """ Extract the mesh of points to compute from the sites, the sites_csv, or the region. :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance """ global pmap, exposure, gmfs, eids if 'exposure' in oqparam.inputs and exposure is None: # read it only once exposure = get_exposure(oqparam) if oqparam.sites: return geo.Mesh.from_coords(oqparam.sites) elif 'sites' in oqparam.inputs: fname = oqparam.inputs['sites'] header = get_csv_header(fname) if 'lon' in header: data = [] for i, row in enumerate( csv.DictReader(open(fname, encoding='utf-8-sig'))): if header[0] == 'site_id' and row['site_id'] != str(i): raise InvalidFile('%s: expected site_id=%d, got %s' % ( fname, i, row['site_id'])) data.append(' '.join([row['lon'], row['lat']])) elif 'gmfs' in oqparam.inputs: raise InvalidFile('Missing header in %(sites)s' % oqparam.inputs) else: data = [line.replace(',', ' ') for line in open(fname, encoding='utf-8-sig')] coords = valid.coordinates(','.join(data)) start, stop = oqparam.sites_slice c = (coords[start:stop] if header[0] == 'site_id' else sorted(coords[start:stop])) return geo.Mesh.from_coords(c) elif 'hazard_curves' in oqparam.inputs: fname = oqparam.inputs['hazard_curves'] if isinstance(fname, list): # for csv mesh, pmap = get_pmap_from_csv(oqparam, fname) elif fname.endswith('.xml'): mesh, pmap = get_pmap_from_nrml(oqparam, fname) else: raise NotImplementedError('Reading from %s' % fname) return mesh elif 'gmfs' in oqparam.inputs: eids, gmfs = _get_gmfs(oqparam) # sets oqparam.sites return geo.Mesh.from_coords(oqparam.sites) elif oqparam.region_grid_spacing: if oqparam.region: poly = geo.Polygon.from_wkt(oqparam.region) elif 'site_model' in oqparam.inputs: sm = get_site_model(oqparam) poly = geo.Mesh(sm['lon'], sm['lat']).get_convex_hull() elif exposure: poly = exposure.mesh.get_convex_hull() else: raise InvalidFile('There is a grid spacing but not a region, ' 'nor a site model, nor an exposure in %s' % oqparam.inputs['job_ini']) try: mesh = poly.dilate(oqparam.region_grid_spacing).discretize( oqparam.region_grid_spacing) return geo.Mesh.from_coords(zip(mesh.lons, mesh.lats)) except Exception: raise ValueError( 'Could not discretize region with grid spacing ' '%(region_grid_spacing)s' % vars(oqparam)) elif 'exposure' in oqparam.inputs: return exposure.mesh
python
def get_mesh(oqparam): global pmap, exposure, gmfs, eids if 'exposure' in oqparam.inputs and exposure is None: exposure = get_exposure(oqparam) if oqparam.sites: return geo.Mesh.from_coords(oqparam.sites) elif 'sites' in oqparam.inputs: fname = oqparam.inputs['sites'] header = get_csv_header(fname) if 'lon' in header: data = [] for i, row in enumerate( csv.DictReader(open(fname, encoding='utf-8-sig'))): if header[0] == 'site_id' and row['site_id'] != str(i): raise InvalidFile('%s: expected site_id=%d, got %s' % ( fname, i, row['site_id'])) data.append(' '.join([row['lon'], row['lat']])) elif 'gmfs' in oqparam.inputs: raise InvalidFile('Missing header in %(sites)s' % oqparam.inputs) else: data = [line.replace(',', ' ') for line in open(fname, encoding='utf-8-sig')] coords = valid.coordinates(','.join(data)) start, stop = oqparam.sites_slice c = (coords[start:stop] if header[0] == 'site_id' else sorted(coords[start:stop])) return geo.Mesh.from_coords(c) elif 'hazard_curves' in oqparam.inputs: fname = oqparam.inputs['hazard_curves'] if isinstance(fname, list): mesh, pmap = get_pmap_from_csv(oqparam, fname) elif fname.endswith('.xml'): mesh, pmap = get_pmap_from_nrml(oqparam, fname) else: raise NotImplementedError('Reading from %s' % fname) return mesh elif 'gmfs' in oqparam.inputs: eids, gmfs = _get_gmfs(oqparam) return geo.Mesh.from_coords(oqparam.sites) elif oqparam.region_grid_spacing: if oqparam.region: poly = geo.Polygon.from_wkt(oqparam.region) elif 'site_model' in oqparam.inputs: sm = get_site_model(oqparam) poly = geo.Mesh(sm['lon'], sm['lat']).get_convex_hull() elif exposure: poly = exposure.mesh.get_convex_hull() else: raise InvalidFile('There is a grid spacing but not a region, ' 'nor a site model, nor an exposure in %s' % oqparam.inputs['job_ini']) try: mesh = poly.dilate(oqparam.region_grid_spacing).discretize( oqparam.region_grid_spacing) return geo.Mesh.from_coords(zip(mesh.lons, mesh.lats)) except Exception: raise ValueError( 'Could not discretize region with grid spacing ' '%(region_grid_spacing)s' % vars(oqparam)) elif 'exposure' in oqparam.inputs: return exposure.mesh
[ "def", "get_mesh", "(", "oqparam", ")", ":", "global", "pmap", ",", "exposure", ",", "gmfs", ",", "eids", "if", "'exposure'", "in", "oqparam", ".", "inputs", "and", "exposure", "is", "None", ":", "# read it only once", "exposure", "=", "get_exposure", "(", "oqparam", ")", "if", "oqparam", ".", "sites", ":", "return", "geo", ".", "Mesh", ".", "from_coords", "(", "oqparam", ".", "sites", ")", "elif", "'sites'", "in", "oqparam", ".", "inputs", ":", "fname", "=", "oqparam", ".", "inputs", "[", "'sites'", "]", "header", "=", "get_csv_header", "(", "fname", ")", "if", "'lon'", "in", "header", ":", "data", "=", "[", "]", "for", "i", ",", "row", "in", "enumerate", "(", "csv", ".", "DictReader", "(", "open", "(", "fname", ",", "encoding", "=", "'utf-8-sig'", ")", ")", ")", ":", "if", "header", "[", "0", "]", "==", "'site_id'", "and", "row", "[", "'site_id'", "]", "!=", "str", "(", "i", ")", ":", "raise", "InvalidFile", "(", "'%s: expected site_id=%d, got %s'", "%", "(", "fname", ",", "i", ",", "row", "[", "'site_id'", "]", ")", ")", "data", ".", "append", "(", "' '", ".", "join", "(", "[", "row", "[", "'lon'", "]", ",", "row", "[", "'lat'", "]", "]", ")", ")", "elif", "'gmfs'", "in", "oqparam", ".", "inputs", ":", "raise", "InvalidFile", "(", "'Missing header in %(sites)s'", "%", "oqparam", ".", "inputs", ")", "else", ":", "data", "=", "[", "line", ".", "replace", "(", "','", ",", "' '", ")", "for", "line", "in", "open", "(", "fname", ",", "encoding", "=", "'utf-8-sig'", ")", "]", "coords", "=", "valid", ".", "coordinates", "(", "','", ".", "join", "(", "data", ")", ")", "start", ",", "stop", "=", "oqparam", ".", "sites_slice", "c", "=", "(", "coords", "[", "start", ":", "stop", "]", "if", "header", "[", "0", "]", "==", "'site_id'", "else", "sorted", "(", "coords", "[", "start", ":", "stop", "]", ")", ")", "return", "geo", ".", "Mesh", ".", "from_coords", "(", "c", ")", "elif", "'hazard_curves'", "in", "oqparam", ".", "inputs", ":", "fname", "=", "oqparam", ".", "inputs", "[", "'hazard_curves'", "]", "if", "isinstance", "(", "fname", ",", "list", ")", ":", "# for csv", "mesh", ",", "pmap", "=", "get_pmap_from_csv", "(", "oqparam", ",", "fname", ")", "elif", "fname", ".", "endswith", "(", "'.xml'", ")", ":", "mesh", ",", "pmap", "=", "get_pmap_from_nrml", "(", "oqparam", ",", "fname", ")", "else", ":", "raise", "NotImplementedError", "(", "'Reading from %s'", "%", "fname", ")", "return", "mesh", "elif", "'gmfs'", "in", "oqparam", ".", "inputs", ":", "eids", ",", "gmfs", "=", "_get_gmfs", "(", "oqparam", ")", "# sets oqparam.sites", "return", "geo", ".", "Mesh", ".", "from_coords", "(", "oqparam", ".", "sites", ")", "elif", "oqparam", ".", "region_grid_spacing", ":", "if", "oqparam", ".", "region", ":", "poly", "=", "geo", ".", "Polygon", ".", "from_wkt", "(", "oqparam", ".", "region", ")", "elif", "'site_model'", "in", "oqparam", ".", "inputs", ":", "sm", "=", "get_site_model", "(", "oqparam", ")", "poly", "=", "geo", ".", "Mesh", "(", "sm", "[", "'lon'", "]", ",", "sm", "[", "'lat'", "]", ")", ".", "get_convex_hull", "(", ")", "elif", "exposure", ":", "poly", "=", "exposure", ".", "mesh", ".", "get_convex_hull", "(", ")", "else", ":", "raise", "InvalidFile", "(", "'There is a grid spacing but not a region, '", "'nor a site model, nor an exposure in %s'", "%", "oqparam", ".", "inputs", "[", "'job_ini'", "]", ")", "try", ":", "mesh", "=", "poly", ".", "dilate", "(", "oqparam", ".", "region_grid_spacing", ")", ".", "discretize", "(", "oqparam", ".", "region_grid_spacing", ")", "return", "geo", ".", 
"Mesh", ".", "from_coords", "(", "zip", "(", "mesh", ".", "lons", ",", "mesh", ".", "lats", ")", ")", "except", "Exception", ":", "raise", "ValueError", "(", "'Could not discretize region with grid spacing '", "'%(region_grid_spacing)s'", "%", "vars", "(", "oqparam", ")", ")", "elif", "'exposure'", "in", "oqparam", ".", "inputs", ":", "return", "exposure", ".", "mesh" ]
Extract the mesh of points to compute from the sites, the sites_csv, or the region. :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance
[ "Extract", "the", "mesh", "of", "points", "to", "compute", "from", "the", "sites", "the", "sites_csv", "or", "the", "region", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L271-L339
gem/oq-engine
openquake/commonlib/readinput.py
get_site_model
def get_site_model(oqparam): """ Convert the NRML file into an array of site parameters. :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :returns: an array with fields lon, lat, vs30, ... """ req_site_params = get_gsim_lt(oqparam).req_site_params arrays = [] for fname in oqparam.inputs['site_model']: if isinstance(fname, str) and fname.endswith('.csv'): sm = read_csv(fname) if 'site_id' in sm.dtype.names: raise InvalidFile('%s: you passed a sites.csv file instead of ' 'a site_model.csv file!' % fname) z = numpy.zeros(len(sm), sorted(sm.dtype.descr)) for name in z.dtype.names: # reorder the fields z[name] = sm[name] arrays.append(z) continue nodes = nrml.read(fname).siteModel params = [valid.site_param(node.attrib) for node in nodes] missing = req_site_params - set(params[0]) if 'vs30measured' in missing: # use a default of False missing -= {'vs30measured'} for param in params: param['vs30measured'] = False if 'backarc' in missing: # use a default of False missing -= {'backarc'} for param in params: param['backarc'] = False if missing: raise InvalidFile('%s: missing parameter %s' % (oqparam.inputs['site_model'], ', '.join(missing))) # NB: the sorted in sorted(params[0]) is essential, otherwise there is # an heisenbug in scenario/test_case_4 site_model_dt = numpy.dtype([(p, site.site_param_dt[p]) for p in sorted(params[0])]) sm = numpy.array([tuple(param[name] for name in site_model_dt.names) for param in params], site_model_dt) arrays.append(sm) return numpy.concatenate(arrays)
python
def get_site_model(oqparam): req_site_params = get_gsim_lt(oqparam).req_site_params arrays = [] for fname in oqparam.inputs['site_model']: if isinstance(fname, str) and fname.endswith('.csv'): sm = read_csv(fname) if 'site_id' in sm.dtype.names: raise InvalidFile('%s: you passed a sites.csv file instead of ' 'a site_model.csv file!' % fname) z = numpy.zeros(len(sm), sorted(sm.dtype.descr)) for name in z.dtype.names: z[name] = sm[name] arrays.append(z) continue nodes = nrml.read(fname).siteModel params = [valid.site_param(node.attrib) for node in nodes] missing = req_site_params - set(params[0]) if 'vs30measured' in missing: missing -= {'vs30measured'} for param in params: param['vs30measured'] = False if 'backarc' in missing: missing -= {'backarc'} for param in params: param['backarc'] = False if missing: raise InvalidFile('%s: missing parameter %s' % (oqparam.inputs['site_model'], ', '.join(missing))) site_model_dt = numpy.dtype([(p, site.site_param_dt[p]) for p in sorted(params[0])]) sm = numpy.array([tuple(param[name] for name in site_model_dt.names) for param in params], site_model_dt) arrays.append(sm) return numpy.concatenate(arrays)
[ "def", "get_site_model", "(", "oqparam", ")", ":", "req_site_params", "=", "get_gsim_lt", "(", "oqparam", ")", ".", "req_site_params", "arrays", "=", "[", "]", "for", "fname", "in", "oqparam", ".", "inputs", "[", "'site_model'", "]", ":", "if", "isinstance", "(", "fname", ",", "str", ")", "and", "fname", ".", "endswith", "(", "'.csv'", ")", ":", "sm", "=", "read_csv", "(", "fname", ")", "if", "'site_id'", "in", "sm", ".", "dtype", ".", "names", ":", "raise", "InvalidFile", "(", "'%s: you passed a sites.csv file instead of '", "'a site_model.csv file!'", "%", "fname", ")", "z", "=", "numpy", ".", "zeros", "(", "len", "(", "sm", ")", ",", "sorted", "(", "sm", ".", "dtype", ".", "descr", ")", ")", "for", "name", "in", "z", ".", "dtype", ".", "names", ":", "# reorder the fields", "z", "[", "name", "]", "=", "sm", "[", "name", "]", "arrays", ".", "append", "(", "z", ")", "continue", "nodes", "=", "nrml", ".", "read", "(", "fname", ")", ".", "siteModel", "params", "=", "[", "valid", ".", "site_param", "(", "node", ".", "attrib", ")", "for", "node", "in", "nodes", "]", "missing", "=", "req_site_params", "-", "set", "(", "params", "[", "0", "]", ")", "if", "'vs30measured'", "in", "missing", ":", "# use a default of False", "missing", "-=", "{", "'vs30measured'", "}", "for", "param", "in", "params", ":", "param", "[", "'vs30measured'", "]", "=", "False", "if", "'backarc'", "in", "missing", ":", "# use a default of False", "missing", "-=", "{", "'backarc'", "}", "for", "param", "in", "params", ":", "param", "[", "'backarc'", "]", "=", "False", "if", "missing", ":", "raise", "InvalidFile", "(", "'%s: missing parameter %s'", "%", "(", "oqparam", ".", "inputs", "[", "'site_model'", "]", ",", "', '", ".", "join", "(", "missing", ")", ")", ")", "# NB: the sorted in sorted(params[0]) is essential, otherwise there is", "# an heisenbug in scenario/test_case_4", "site_model_dt", "=", "numpy", ".", "dtype", "(", "[", "(", "p", ",", "site", ".", "site_param_dt", "[", "p", "]", ")", "for", "p", "in", "sorted", "(", "params", "[", "0", "]", ")", "]", ")", "sm", "=", "numpy", ".", "array", "(", "[", "tuple", "(", "param", "[", "name", "]", "for", "name", "in", "site_model_dt", ".", "names", ")", "for", "param", "in", "params", "]", ",", "site_model_dt", ")", "arrays", ".", "append", "(", "sm", ")", "return", "numpy", ".", "concatenate", "(", "arrays", ")" ]
Convert the NRML file into an array of site parameters. :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :returns: an array with fields lon, lat, vs30, ...
[ "Convert", "the", "NRML", "file", "into", "an", "array", "of", "site", "parameters", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L342-L386
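The field-reordering loop in the CSV branch is worth isolating: sorting the dtype descriptors gives every input file the same field order, which is what allows the per-file arrays to be concatenated at the end. A standalone sketch with invented data:

```python
import numpy

sm = numpy.array([(9.1, 45.2, 760.0)],
                 dtype=[('lon', float), ('lat', float), ('vs30', float)])
z = numpy.zeros(len(sm), sorted(sm.dtype.descr))  # alphabetical field order
for name in z.dtype.names:
    z[name] = sm[name]
print(z.dtype.names)  # ('lat', 'lon', 'vs30')
```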
gem/oq-engine
openquake/commonlib/readinput.py
get_site_collection
def get_site_collection(oqparam): """ Returns a SiteCollection instance by looking at the points and the site model defined by the configuration parameters. :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance """ mesh = get_mesh(oqparam) req_site_params = get_gsim_lt(oqparam).req_site_params if oqparam.inputs.get('site_model'): sm = get_site_model(oqparam) try: # in the future we could have elevation in the site model depth = sm['depth'] except ValueError: # this is the normal case depth = None sitecol = site.SiteCollection.from_points( sm['lon'], sm['lat'], depth, sm, req_site_params) if oqparam.region_grid_spacing: logging.info('Reducing the grid sites to the site ' 'parameters within the grid spacing') sitecol, params, _ = geo.utils.assoc( sm, sitecol, oqparam.region_grid_spacing * 1.414, 'filter') sitecol.make_complete() else: params = sm for name in req_site_params: if name in ('vs30measured', 'backarc') \ and name not in params.dtype.names: sitecol._set(name, 0) # the default else: sitecol._set(name, params[name]) elif mesh is None and oqparam.ground_motion_fields: raise InvalidFile('You are missing sites.csv or site_model.csv in %s' % oqparam.inputs['job_ini']) elif mesh is None: # a None sitecol is okay when computing the ruptures only return else: # use the default site params sitecol = site.SiteCollection.from_points( mesh.lons, mesh.lats, mesh.depths, oqparam, req_site_params) ss = os.environ.get('OQ_SAMPLE_SITES') if ss: # debugging tip to reduce the size of a calculation # OQ_SAMPLE_SITES=.1 oq engine --run job.ini # will run a computation with 10 times less sites sitecol.array = numpy.array(random_filter(sitecol.array, float(ss))) sitecol.make_complete() return sitecol
python
def get_site_collection(oqparam): mesh = get_mesh(oqparam) req_site_params = get_gsim_lt(oqparam).req_site_params if oqparam.inputs.get('site_model'): sm = get_site_model(oqparam) try: depth = sm['depth'] except ValueError: depth = None sitecol = site.SiteCollection.from_points( sm['lon'], sm['lat'], depth, sm, req_site_params) if oqparam.region_grid_spacing: logging.info('Reducing the grid sites to the site ' 'parameters within the grid spacing') sitecol, params, _ = geo.utils.assoc( sm, sitecol, oqparam.region_grid_spacing * 1.414, 'filter') sitecol.make_complete() else: params = sm for name in req_site_params: if name in ('vs30measured', 'backarc') \ and name not in params.dtype.names: sitecol._set(name, 0) else: sitecol._set(name, params[name]) elif mesh is None and oqparam.ground_motion_fields: raise InvalidFile('You are missing sites.csv or site_model.csv in %s' % oqparam.inputs['job_ini']) elif mesh is None: return else: sitecol = site.SiteCollection.from_points( mesh.lons, mesh.lats, mesh.depths, oqparam, req_site_params) ss = os.environ.get('OQ_SAMPLE_SITES') if ss: sitecol.array = numpy.array(random_filter(sitecol.array, float(ss))) sitecol.make_complete() return sitecol
[ "def", "get_site_collection", "(", "oqparam", ")", ":", "mesh", "=", "get_mesh", "(", "oqparam", ")", "req_site_params", "=", "get_gsim_lt", "(", "oqparam", ")", ".", "req_site_params", "if", "oqparam", ".", "inputs", ".", "get", "(", "'site_model'", ")", ":", "sm", "=", "get_site_model", "(", "oqparam", ")", "try", ":", "# in the future we could have elevation in the site model", "depth", "=", "sm", "[", "'depth'", "]", "except", "ValueError", ":", "# this is the normal case", "depth", "=", "None", "sitecol", "=", "site", ".", "SiteCollection", ".", "from_points", "(", "sm", "[", "'lon'", "]", ",", "sm", "[", "'lat'", "]", ",", "depth", ",", "sm", ",", "req_site_params", ")", "if", "oqparam", ".", "region_grid_spacing", ":", "logging", ".", "info", "(", "'Reducing the grid sites to the site '", "'parameters within the grid spacing'", ")", "sitecol", ",", "params", ",", "_", "=", "geo", ".", "utils", ".", "assoc", "(", "sm", ",", "sitecol", ",", "oqparam", ".", "region_grid_spacing", "*", "1.414", ",", "'filter'", ")", "sitecol", ".", "make_complete", "(", ")", "else", ":", "params", "=", "sm", "for", "name", "in", "req_site_params", ":", "if", "name", "in", "(", "'vs30measured'", ",", "'backarc'", ")", "and", "name", "not", "in", "params", ".", "dtype", ".", "names", ":", "sitecol", ".", "_set", "(", "name", ",", "0", ")", "# the default", "else", ":", "sitecol", ".", "_set", "(", "name", ",", "params", "[", "name", "]", ")", "elif", "mesh", "is", "None", "and", "oqparam", ".", "ground_motion_fields", ":", "raise", "InvalidFile", "(", "'You are missing sites.csv or site_model.csv in %s'", "%", "oqparam", ".", "inputs", "[", "'job_ini'", "]", ")", "elif", "mesh", "is", "None", ":", "# a None sitecol is okay when computing the ruptures only", "return", "else", ":", "# use the default site params", "sitecol", "=", "site", ".", "SiteCollection", ".", "from_points", "(", "mesh", ".", "lons", ",", "mesh", ".", "lats", ",", "mesh", ".", "depths", ",", "oqparam", ",", "req_site_params", ")", "ss", "=", "os", ".", "environ", ".", "get", "(", "'OQ_SAMPLE_SITES'", ")", "if", "ss", ":", "# debugging tip to reduce the size of a calculation", "# OQ_SAMPLE_SITES=.1 oq engine --run job.ini", "# will run a computation with 10 times less sites", "sitecol", ".", "array", "=", "numpy", ".", "array", "(", "random_filter", "(", "sitecol", ".", "array", ",", "float", "(", "ss", ")", ")", ")", "sitecol", ".", "make_complete", "(", ")", "return", "sitecol" ]
Returns a SiteCollection instance by looking at the points and the site model defined by the configuration parameters. :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance
[ "Returns", "a", "SiteCollection", "instance", "by", "looking", "at", "the", "points", "and", "the", "site", "model", "defined", "by", "the", "configuration", "parameters", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L389-L439
gem/oq-engine
openquake/commonlib/readinput.py
get_gsim_lt
def get_gsim_lt(oqparam, trts=['*']): """ :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :param trts: a sequence of tectonic region types as strings; trts=['*'] means that there is no filtering :returns: a GsimLogicTree instance obtained by filtering on the provided tectonic region types. """ if 'gsim_logic_tree' not in oqparam.inputs: return logictree.GsimLogicTree.from_(oqparam.gsim) gsim_file = os.path.join( oqparam.base_path, oqparam.inputs['gsim_logic_tree']) gsim_lt = logictree.GsimLogicTree(gsim_file, trts) gmfcorr = oqparam.correl_model for trt, gsims in gsim_lt.values.items(): for gsim in gsims: if gmfcorr and (gsim.DEFINED_FOR_STANDARD_DEVIATION_TYPES == {StdDev.TOTAL}): raise CorrelationButNoInterIntraStdDevs(gmfcorr, gsim) trts = set(oqparam.minimum_magnitude) - {'default'} expected_trts = set(gsim_lt.values) assert trts <= expected_trts, (trts, expected_trts) imt_dep_w = any(len(branch.weight.dic) > 1 for branch in gsim_lt.branches) if oqparam.number_of_logic_tree_samples and imt_dep_w: raise NotImplementedError('IMT-dependent weights in the logic tree ' 'do not work with sampling!') return gsim_lt
python
def get_gsim_lt(oqparam, trts=['*']): if 'gsim_logic_tree' not in oqparam.inputs: return logictree.GsimLogicTree.from_(oqparam.gsim) gsim_file = os.path.join( oqparam.base_path, oqparam.inputs['gsim_logic_tree']) gsim_lt = logictree.GsimLogicTree(gsim_file, trts) gmfcorr = oqparam.correl_model for trt, gsims in gsim_lt.values.items(): for gsim in gsims: if gmfcorr and (gsim.DEFINED_FOR_STANDARD_DEVIATION_TYPES == {StdDev.TOTAL}): raise CorrelationButNoInterIntraStdDevs(gmfcorr, gsim) trts = set(oqparam.minimum_magnitude) - {'default'} expected_trts = set(gsim_lt.values) assert trts <= expected_trts, (trts, expected_trts) imt_dep_w = any(len(branch.weight.dic) > 1 for branch in gsim_lt.branches) if oqparam.number_of_logic_tree_samples and imt_dep_w: raise NotImplementedError('IMT-dependent weights in the logic tree ' 'do not work with sampling!') return gsim_lt
[ "def", "get_gsim_lt", "(", "oqparam", ",", "trts", "=", "[", "'*'", "]", ")", ":", "if", "'gsim_logic_tree'", "not", "in", "oqparam", ".", "inputs", ":", "return", "logictree", ".", "GsimLogicTree", ".", "from_", "(", "oqparam", ".", "gsim", ")", "gsim_file", "=", "os", ".", "path", ".", "join", "(", "oqparam", ".", "base_path", ",", "oqparam", ".", "inputs", "[", "'gsim_logic_tree'", "]", ")", "gsim_lt", "=", "logictree", ".", "GsimLogicTree", "(", "gsim_file", ",", "trts", ")", "gmfcorr", "=", "oqparam", ".", "correl_model", "for", "trt", ",", "gsims", "in", "gsim_lt", ".", "values", ".", "items", "(", ")", ":", "for", "gsim", "in", "gsims", ":", "if", "gmfcorr", "and", "(", "gsim", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", "==", "{", "StdDev", ".", "TOTAL", "}", ")", ":", "raise", "CorrelationButNoInterIntraStdDevs", "(", "gmfcorr", ",", "gsim", ")", "trts", "=", "set", "(", "oqparam", ".", "minimum_magnitude", ")", "-", "{", "'default'", "}", "expected_trts", "=", "set", "(", "gsim_lt", ".", "values", ")", "assert", "trts", "<=", "expected_trts", ",", "(", "trts", ",", "expected_trts", ")", "imt_dep_w", "=", "any", "(", "len", "(", "branch", ".", "weight", ".", "dic", ")", ">", "1", "for", "branch", "in", "gsim_lt", ".", "branches", ")", "if", "oqparam", ".", "number_of_logic_tree_samples", "and", "imt_dep_w", ":", "raise", "NotImplementedError", "(", "'IMT-dependent weights in the logic tree '", "'do not work with sampling!'", ")", "return", "gsim_lt" ]
:param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :param trts: a sequence of tectonic region types as strings; trts=['*'] means that there is no filtering :returns: a GsimLogicTree instance obtained by filtering on the provided tectonic region types.
[ ":", "param", "oqparam", ":", "an", ":", "class", ":", "openquake", ".", "commonlib", ".", "oqvalidation", ".", "OqParam", "instance", ":", "param", "trts", ":", "a", "sequence", "of", "tectonic", "region", "types", "as", "strings", ";", "trts", "=", "[", "*", "]", "means", "that", "there", "is", "no", "filtering", ":", "returns", ":", "a", "GsimLogicTree", "instance", "obtained", "by", "filtering", "on", "the", "provided", "tectonic", "region", "types", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L442-L471
gem/oq-engine
openquake/commonlib/readinput.py
get_rlzs_by_gsim
def get_rlzs_by_gsim(oqparam): """ Return an ordered dictionary gsim -> [realization index]. Works for gsim logic trees with a single tectonic region type. """ cinfo = source.CompositionInfo.fake(get_gsim_lt(oqparam)) ra = cinfo.get_rlzs_assoc() dic = {} for rlzi, gsim_by_trt in enumerate(ra.gsim_by_trt): dic[gsim_by_trt['*']] = [rlzi] return dic
python
def get_rlzs_by_gsim(oqparam): cinfo = source.CompositionInfo.fake(get_gsim_lt(oqparam)) ra = cinfo.get_rlzs_assoc() dic = {} for rlzi, gsim_by_trt in enumerate(ra.gsim_by_trt): dic[gsim_by_trt['*']] = [rlzi] return dic
[ "def", "get_rlzs_by_gsim", "(", "oqparam", ")", ":", "cinfo", "=", "source", ".", "CompositionInfo", ".", "fake", "(", "get_gsim_lt", "(", "oqparam", ")", ")", "ra", "=", "cinfo", ".", "get_rlzs_assoc", "(", ")", "dic", "=", "{", "}", "for", "rlzi", ",", "gsim_by_trt", "in", "enumerate", "(", "ra", ".", "gsim_by_trt", ")", ":", "dic", "[", "gsim_by_trt", "[", "'*'", "]", "]", "=", "[", "rlzi", "]", "return", "dic" ]
Return an ordered dictionary gsim -> [realization index]. Works for gsim logic trees with a single tectonic region type.
[ "Return", "an", "ordered", "dictionary", "gsim", "-", ">", "[", "realization", "index", "]", ".", "Work", "for", "gsim", "logic", "trees", "with", "a", "single", "tectonic", "region", "type", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L485-L495
gem/oq-engine
openquake/commonlib/readinput.py
get_rupture
def get_rupture(oqparam): """ Read the `rupture_model` file and by filter the site collection :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :returns: an hazardlib rupture """ rup_model = oqparam.inputs['rupture_model'] [rup_node] = nrml.read(rup_model) conv = sourceconverter.RuptureConverter( oqparam.rupture_mesh_spacing, oqparam.complex_fault_mesh_spacing) rup = conv.convert_node(rup_node) rup.tectonic_region_type = '*' # there is not TRT for scenario ruptures rup.serial = oqparam.random_seed return rup
python
def get_rupture(oqparam): rup_model = oqparam.inputs['rupture_model'] [rup_node] = nrml.read(rup_model) conv = sourceconverter.RuptureConverter( oqparam.rupture_mesh_spacing, oqparam.complex_fault_mesh_spacing) rup = conv.convert_node(rup_node) rup.tectonic_region_type = '*' # there is no TRT for scenario ruptures rup.serial = oqparam.random_seed return rup
[ "def", "get_rupture", "(", "oqparam", ")", ":", "rup_model", "=", "oqparam", ".", "inputs", "[", "'rupture_model'", "]", "[", "rup_node", "]", "=", "nrml", ".", "read", "(", "rup_model", ")", "conv", "=", "sourceconverter", ".", "RuptureConverter", "(", "oqparam", ".", "rupture_mesh_spacing", ",", "oqparam", ".", "complex_fault_mesh_spacing", ")", "rup", "=", "conv", ".", "convert_node", "(", "rup_node", ")", "rup", ".", "tectonic_region_type", "=", "'*'", "# there is no TRT for scenario ruptures", "rup", ".", "serial", "=", "oqparam", ".", "random_seed", "return", "rup" ]
Read the `rupture_model` file and filter the site collection :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :returns: a hazardlib rupture
[ "Read", "the", "rupture_model", "file", "and", "by", "filter", "the", "site", "collection" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L498-L514
gem/oq-engine
openquake/commonlib/readinput.py
get_source_model_lt
def get_source_model_lt(oqparam, validate=True): """ :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :returns: a :class:`openquake.commonlib.logictree.SourceModelLogicTree` instance """ fname = oqparam.inputs.get('source_model_logic_tree') if fname: # NB: converting the random_seed into an integer is needed on Windows return logictree.SourceModelLogicTree( fname, validate, seed=int(oqparam.random_seed), num_samples=oqparam.number_of_logic_tree_samples) return logictree.FakeSmlt(oqparam.inputs['source_model'], int(oqparam.random_seed), oqparam.number_of_logic_tree_samples)
python
def get_source_model_lt(oqparam, validate=True): fname = oqparam.inputs.get('source_model_logic_tree') if fname: return logictree.SourceModelLogicTree( fname, validate, seed=int(oqparam.random_seed), num_samples=oqparam.number_of_logic_tree_samples) return logictree.FakeSmlt(oqparam.inputs['source_model'], int(oqparam.random_seed), oqparam.number_of_logic_tree_samples)
[ "def", "get_source_model_lt", "(", "oqparam", ",", "validate", "=", "True", ")", ":", "fname", "=", "oqparam", ".", "inputs", ".", "get", "(", "'source_model_logic_tree'", ")", "if", "fname", ":", "# NB: converting the random_seed into an integer is needed on Windows", "return", "logictree", ".", "SourceModelLogicTree", "(", "fname", ",", "validate", ",", "seed", "=", "int", "(", "oqparam", ".", "random_seed", ")", ",", "num_samples", "=", "oqparam", ".", "number_of_logic_tree_samples", ")", "return", "logictree", ".", "FakeSmlt", "(", "oqparam", ".", "inputs", "[", "'source_model'", "]", ",", "int", "(", "oqparam", ".", "random_seed", ")", ",", "oqparam", ".", "number_of_logic_tree_samples", ")" ]
:param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :returns: a :class:`openquake.commonlib.logictree.SourceModelLogicTree` instance
[ ":", "param", "oqparam", ":", "an", ":", "class", ":", "openquake", ".", "commonlib", ".", "oqvalidation", ".", "OqParam", "instance", ":", "returns", ":", "a", ":", "class", ":", "openquake", ".", "commonlib", ".", "logictree", ".", "SourceModelLogicTree", "instance" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L517-L533
gem/oq-engine
openquake/commonlib/readinput.py
check_nonparametric_sources
def check_nonparametric_sources(fname, smodel, investigation_time): """ :param fname: full path to a source model file :param smodel: source model object :param investigation_time: investigation_time to compare with in the case of nonparametric sources :returns: the nonparametric sources in the model :raises: a ValueError if the investigation_time is different from the expected """ # NonParametricSeismicSources np = [src for sg in smodel.src_groups for src in sg if hasattr(src, 'data')] if np and smodel.investigation_time != investigation_time: raise ValueError( 'The source model %s contains an investigation_time ' 'of %s, while the job.ini has %s' % ( fname, smodel.investigation_time, investigation_time)) return np
python
def check_nonparametric_sources(fname, smodel, investigation_time): np = [src for sg in smodel.src_groups for src in sg if hasattr(src, 'data')] if np and smodel.investigation_time != investigation_time: raise ValueError( 'The source model %s contains an investigation_time ' 'of %s, while the job.ini has %s' % ( fname, smodel.investigation_time, investigation_time)) return np
[ "def", "check_nonparametric_sources", "(", "fname", ",", "smodel", ",", "investigation_time", ")", ":", "# NonParametricSeismicSources", "np", "=", "[", "src", "for", "sg", "in", "smodel", ".", "src_groups", "for", "src", "in", "sg", "if", "hasattr", "(", "src", ",", "'data'", ")", "]", "if", "np", "and", "smodel", ".", "investigation_time", "!=", "investigation_time", ":", "raise", "ValueError", "(", "'The source model %s contains an investigation_time '", "'of %s, while the job.ini has %s'", "%", "(", "fname", ",", "smodel", ".", "investigation_time", ",", "investigation_time", ")", ")", "return", "np" ]
:param fname: full path to a source model file :param smodel: source model object :param investigation_time: investigation_time to compare with in the case of nonparametric sources :returns: the nonparametric sources in the model :raises: a ValueError if the investigation_time is different from the expected
[ ":", "param", "fname", ":", "full", "path", "to", "a", "source", "model", "file", ":", "param", "smodel", ":", "source", "model", "object", ":", "param", "investigation_time", ":", "investigation_time", "to", "compare", "with", "in", "the", "case", "of", "nonparametric", "sources", ":", "returns", ":", "the", "nonparametric", "sources", "in", "the", "model", ":", "raises", ":", "a", "ValueError", "if", "the", "investigation_time", "is", "different", "from", "the", "expected" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L536-L558
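A standalone sketch of the check using throwaway stand-ins: a source counts as nonparametric when it has a data attribute, and a mismatching investigation time raises ValueError. The Fake* classes are invented for illustration; only the function comes from this record:

```python
from openquake.commonlib.readinput import check_nonparametric_sources

class FakeSource:                # invented stand-in
    data = [('rup', 'pmf')]      # having .data marks it nonparametric

class FakeModel:                 # invented stand-in
    src_groups = [[FakeSource()]]
    investigation_time = 50

nps = check_nonparametric_sources('sm.xml', FakeModel(), 50)
print(len(nps))  # 1
# check_nonparametric_sources('sm.xml', FakeModel(), 1) raises ValueError
```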
gem/oq-engine
openquake/commonlib/readinput.py
store_sm
def store_sm(smodel, filename, monitor): """ :param smodel: a :class:`openquake.hazardlib.nrml.SourceModel` instance :param filename: path to an hdf5 file (cache_XXX.hdf5) :param monitor: a Monitor instance with an .hdf5 attribute """ h5 = monitor.hdf5 with monitor('store source model'): sources = h5['source_info'] source_geom = h5['source_geom'] gid = len(source_geom) for sg in smodel: if filename: with hdf5.File(filename, 'r+') as hdf5cache: hdf5cache['grp-%02d' % sg.id] = sg srcs = [] geoms = [] for src in sg: srcgeom = src.geom() n = len(srcgeom) geom = numpy.zeros(n, point3d) geom['lon'], geom['lat'], geom['depth'] = srcgeom.T srcs.append((sg.id, src.source_id, src.code, gid, gid + n, src.num_ruptures, 0, 0, 0)) geoms.append(geom) gid += n if geoms: hdf5.extend(source_geom, numpy.concatenate(geoms)) if sources: hdf5.extend(sources, numpy.array(srcs, source_info_dt))
python
def store_sm(smodel, filename, monitor): h5 = monitor.hdf5 with monitor('store source model'): sources = h5['source_info'] source_geom = h5['source_geom'] gid = len(source_geom) for sg in smodel: if filename: with hdf5.File(filename, 'r+') as hdf5cache: hdf5cache['grp-%02d' % sg.id] = sg srcs = [] geoms = [] for src in sg: srcgeom = src.geom() n = len(srcgeom) geom = numpy.zeros(n, point3d) geom['lon'], geom['lat'], geom['depth'] = srcgeom.T srcs.append((sg.id, src.source_id, src.code, gid, gid + n, src.num_ruptures, 0, 0, 0)) geoms.append(geom) gid += n if geoms: hdf5.extend(source_geom, numpy.concatenate(geoms)) if sources: hdf5.extend(sources, numpy.array(srcs, source_info_dt))
[ "def", "store_sm", "(", "smodel", ",", "filename", ",", "monitor", ")", ":", "h5", "=", "monitor", ".", "hdf5", "with", "monitor", "(", "'store source model'", ")", ":", "sources", "=", "h5", "[", "'source_info'", "]", "source_geom", "=", "h5", "[", "'source_geom'", "]", "gid", "=", "len", "(", "source_geom", ")", "for", "sg", "in", "smodel", ":", "if", "filename", ":", "with", "hdf5", ".", "File", "(", "filename", ",", "'r+'", ")", "as", "hdf5cache", ":", "hdf5cache", "[", "'grp-%02d'", "%", "sg", ".", "id", "]", "=", "sg", "srcs", "=", "[", "]", "geoms", "=", "[", "]", "for", "src", "in", "sg", ":", "srcgeom", "=", "src", ".", "geom", "(", ")", "n", "=", "len", "(", "srcgeom", ")", "geom", "=", "numpy", ".", "zeros", "(", "n", ",", "point3d", ")", "geom", "[", "'lon'", "]", ",", "geom", "[", "'lat'", "]", ",", "geom", "[", "'depth'", "]", "=", "srcgeom", ".", "T", "srcs", ".", "append", "(", "(", "sg", ".", "id", ",", "src", ".", "source_id", ",", "src", ".", "code", ",", "gid", ",", "gid", "+", "n", ",", "src", ".", "num_ruptures", ",", "0", ",", "0", ",", "0", ")", ")", "geoms", ".", "append", "(", "geom", ")", "gid", "+=", "n", "if", "geoms", ":", "hdf5", ".", "extend", "(", "source_geom", ",", "numpy", ".", "concatenate", "(", "geoms", ")", ")", "if", "sources", ":", "hdf5", ".", "extend", "(", "sources", ",", "numpy", ".", "array", "(", "srcs", ",", "source_info_dt", ")", ")" ]
:param smodel: a :class:`openquake.hazardlib.nrml.SourceModel` instance :param filename: path to an hdf5 file (cache_XXX.hdf5) :param monitor: a Monitor instance with an .hdf5 attribute
[ ":", "param", "smodel", ":", "a", ":", "class", ":", "openquake", ".", "hazardlib", ".", "nrml", ".", "SourceModel", "instance", ":", "param", "filename", ":", "path", "to", "an", "hdf5", "file", "(", "cache_XXX", ".", "hdf5", ")", ":", "param", "monitor", ":", "a", "Monitor", "instance", "with", "an", ".", "hdf5", "attribute" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L608-L637
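A sketch of how this helper might be driven, per its docstring: the monitor needs an `.hdf5` handle that already contains the `source_info` and `source_geom` datasets created by the caller (as `get_source_models` below does); the names `h5`, `smodel` and the cache file are assumptions:

```python
from openquake.baselib import performance

mon = performance.Monitor('store source model')
mon.hdf5 = h5  # an open hdf5 handle prepared by the caller (assumed)
# a falsy filename skips writing the per-group 'grp-XX' cache datasets
store_sm(smodel, 'cache_001.hdf5', mon)
```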
gem/oq-engine
openquake/commonlib/readinput.py
get_source_models
def get_source_models(oqparam, gsim_lt, source_model_lt, monitor, in_memory=True, srcfilter=None): """ Build all the source models generated by the logic tree. :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :param gsim_lt: a :class:`openquake.commonlib.logictree.GsimLogicTree` instance :param source_model_lt: a :class:`openquake.commonlib.logictree.SourceModelLogicTree` instance :param monitor: a `openquake.baselib.performance.Monitor` instance :param in_memory: if True, keep in memory the sources, else just collect the TRTs :param srcfilter: a SourceFilter instance with an .filename pointing to the cache file :returns: an iterator over :class:`openquake.commonlib.logictree.LtSourceModel` tuples """ make_sm = SourceModelFactory() spinning_off = oqparam.pointsource_distance == {'default': 0.0} if spinning_off: logging.info('Removing nodal plane and hypocenter distributions') dist = 'no' if os.environ.get('OQ_DISTRIBUTE') == 'no' else 'processpool' smlt_dir = os.path.dirname(source_model_lt.filename) converter = sourceconverter.SourceConverter( oqparam.investigation_time, oqparam.rupture_mesh_spacing, oqparam.complex_fault_mesh_spacing, oqparam.width_of_mfd_bin, oqparam.area_source_discretization, oqparam.minimum_magnitude, not spinning_off, oqparam.source_id) if oqparam.calculation_mode.startswith('ucerf'): [grp] = nrml.to_python(oqparam.inputs["source_model"], converter) elif in_memory: logging.info('Reading the source model(s) in parallel') smap = parallel.Starmap( nrml.read_source_models, monitor=monitor, distribute=dist) for sm in source_model_lt.gen_source_models(gsim_lt): for name in sm.names.split(): fname = os.path.abspath(os.path.join(smlt_dir, name)) smap.submit([fname], converter) dic = {sm.fname: sm for sm in smap} # consider only the effective realizations nr = 0 idx = 0 grp_id = 0 if monitor.hdf5: sources = hdf5.create(monitor.hdf5, 'source_info', source_info_dt) hdf5.create(monitor.hdf5, 'source_geom', point3d) filename = (getattr(srcfilter, 'filename', None) if oqparam.prefilter_sources == 'no' else None) source_ids = set() for sm in source_model_lt.gen_source_models(gsim_lt): apply_unc = functools.partial( source_model_lt.apply_uncertainties, sm.path) src_groups = [] for name in sm.names.split(): fname = os.path.abspath(os.path.join(smlt_dir, name)) if oqparam.calculation_mode.startswith('ucerf'): sg = copy.copy(grp) sg.id = grp_id src = sg[0].new(sm.ordinal, sm.names) # one source source_ids.add(src.source_id) src.src_group_id = grp_id src.id = idx if oqparam.number_of_logic_tree_samples: src.samples = sm.samples sg.sources = [src] src_groups.append(sg) idx += 1 grp_id += 1 data = [((sg.id, src.source_id, src.code, 0, 0, src.num_ruptures, 0, 0, 0))] hdf5.extend(sources, numpy.array(data, source_info_dt)) elif in_memory: newsm = make_sm(fname, dic[fname], apply_unc, oqparam.investigation_time) for sg in newsm: nr += sum(src.num_ruptures for src in sg) # sample a source for each group if os.environ.get('OQ_SAMPLE_SOURCES'): sg.sources = random_filtered_sources( sg.sources, srcfilter, sg.id + oqparam.random_seed) for src in sg: source_ids.add(src.source_id) src.src_group_id = grp_id src.id = idx idx += 1 sg.id = grp_id grp_id += 1 src_groups.append(sg) if monitor.hdf5: store_sm(newsm, filename, monitor) else: # just collect the TRT models groups = logictree.read_source_groups(fname) for group in groups: source_ids.update(src['id'] for src in group) src_groups.extend(groups) if grp_id >= TWO16: # the limit is really needed only for event based calculations raise ValueError('There is a limit of %d src groups!' % TWO16) for brid, srcids in source_model_lt.info.applytosources.items(): for srcid in srcids: if srcid not in source_ids: raise ValueError( 'The source %s is not in the source model, please fix ' 'applyToSources in %s or the source model' % (srcid, source_model_lt.filename)) num_sources = sum(len(sg.sources) for sg in src_groups) sm.src_groups = src_groups trts = [mod.trt for mod in src_groups] source_model_lt.tectonic_region_types.update(trts) logging.info( 'Processed source model %d with %d gsim path(s) and %d ' 'sources', sm.ordinal + 1, sm.num_gsim_paths, num_sources) gsim_file = oqparam.inputs.get('gsim_logic_tree') if gsim_file: # check TRTs for src_group in src_groups: if src_group.trt not in gsim_lt.values: raise ValueError( "Found in %r a tectonic region type %r inconsistent " "with the ones in %r" % (sm, src_group.trt, gsim_file)) yield sm logging.info('The composite source model has {:,d} ruptures'.format(nr)) # log if some source file is being used more than once dupl = 0 for fname, hits in make_sm.fname_hits.items(): if hits > 1: logging.info('%s has been considered %d times', fname, hits) if not make_sm.changes: dupl += hits if (dupl and not oqparam.optimize_same_id_sources and not oqparam.is_event_based()): logging.warning( 'You are doing redundant calculations: please make sure ' 'that different sources have different IDs and set ' 'optimize_same_id_sources=true in your .ini file') if make_sm.changes: logging.info('Applied %d changes to the composite source model', make_sm.changes)
python
def get_source_models(oqparam, gsim_lt, source_model_lt, monitor, in_memory=True, srcfilter=None): make_sm = SourceModelFactory() spinning_off = oqparam.pointsource_distance == {'default': 0.0} if spinning_off: logging.info('Removing nodal plane and hypocenter distributions') dist = 'no' if os.environ.get('OQ_DISTRIBUTE') == 'no' else 'processpool' smlt_dir = os.path.dirname(source_model_lt.filename) converter = sourceconverter.SourceConverter( oqparam.investigation_time, oqparam.rupture_mesh_spacing, oqparam.complex_fault_mesh_spacing, oqparam.width_of_mfd_bin, oqparam.area_source_discretization, oqparam.minimum_magnitude, not spinning_off, oqparam.source_id) if oqparam.calculation_mode.startswith('ucerf'): [grp] = nrml.to_python(oqparam.inputs["source_model"], converter) elif in_memory: logging.info('Reading the source model(s) in parallel') smap = parallel.Starmap( nrml.read_source_models, monitor=monitor, distribute=dist) for sm in source_model_lt.gen_source_models(gsim_lt): for name in sm.names.split(): fname = os.path.abspath(os.path.join(smlt_dir, name)) smap.submit([fname], converter) dic = {sm.fname: sm for sm in smap} nr = 0 idx = 0 grp_id = 0 if monitor.hdf5: sources = hdf5.create(monitor.hdf5, 'source_info', source_info_dt) hdf5.create(monitor.hdf5, 'source_geom', point3d) filename = (getattr(srcfilter, 'filename', None) if oqparam.prefilter_sources == 'no' else None) source_ids = set() for sm in source_model_lt.gen_source_models(gsim_lt): apply_unc = functools.partial( source_model_lt.apply_uncertainties, sm.path) src_groups = [] for name in sm.names.split(): fname = os.path.abspath(os.path.join(smlt_dir, name)) if oqparam.calculation_mode.startswith('ucerf'): sg = copy.copy(grp) sg.id = grp_id src = sg[0].new(sm.ordinal, sm.names) source_ids.add(src.source_id) src.src_group_id = grp_id src.id = idx if oqparam.number_of_logic_tree_samples: src.samples = sm.samples sg.sources = [src] src_groups.append(sg) idx += 1 grp_id += 1 data = [((sg.id, src.source_id, src.code, 0, 0, src.num_ruptures, 0, 0, 0))] hdf5.extend(sources, numpy.array(data, source_info_dt)) elif in_memory: newsm = make_sm(fname, dic[fname], apply_unc, oqparam.investigation_time) for sg in newsm: nr += sum(src.num_ruptures for src in sg) if os.environ.get('OQ_SAMPLE_SOURCES'): sg.sources = random_filtered_sources( sg.sources, srcfilter, sg.id + oqparam.random_seed) for src in sg: source_ids.add(src.source_id) src.src_group_id = grp_id src.id = idx idx += 1 sg.id = grp_id grp_id += 1 src_groups.append(sg) if monitor.hdf5: store_sm(newsm, filename, monitor) else: groups = logictree.read_source_groups(fname) for group in groups: source_ids.update(src['id'] for src in group) src_groups.extend(groups) if grp_id >= TWO16: raise ValueError('There is a limit of %d src groups!' % TWO16) for brid, srcids in source_model_lt.info.applytosources.items(): for srcid in srcids: if srcid not in source_ids: raise ValueError( 'The source %s is not in the source model, please fix ' 'applyToSources in %s or the source model' % (srcid, source_model_lt.filename)) num_sources = sum(len(sg.sources) for sg in src_groups) sm.src_groups = src_groups trts = [mod.trt for mod in src_groups] source_model_lt.tectonic_region_types.update(trts) logging.info( 'Processed source model %d with %d gsim path(s) and %d ' 'sources', sm.ordinal + 1, sm.num_gsim_paths, num_sources) gsim_file = oqparam.inputs.get('gsim_logic_tree') if gsim_file: for src_group in src_groups: if src_group.trt not in gsim_lt.values: raise ValueError( "Found in %r a tectonic region type %r inconsistent " "with the ones in %r" % (sm, src_group.trt, gsim_file)) yield sm logging.info('The composite source model has {:,d} ruptures'.format(nr)) dupl = 0 for fname, hits in make_sm.fname_hits.items(): if hits > 1: logging.info('%s has been considered %d times', fname, hits) if not make_sm.changes: dupl += hits if (dupl and not oqparam.optimize_same_id_sources and not oqparam.is_event_based()): logging.warning( 'You are doing redundant calculations: please make sure ' 'that different sources have different IDs and set ' 'optimize_same_id_sources=true in your .ini file') if make_sm.changes: logging.info('Applied %d changes to the composite source model', make_sm.changes)
[ "def", "get_source_models", "(", "oqparam", ",", "gsim_lt", ",", "source_model_lt", ",", "monitor", ",", "in_memory", "=", "True", ",", "srcfilter", "=", "None", ")", ":", "make_sm", "=", "SourceModelFactory", "(", ")", "spinning_off", "=", "oqparam", ".", "pointsource_distance", "==", "{", "'default'", ":", "0.0", "}", "if", "spinning_off", ":", "logging", ".", "info", "(", "'Removing nodal plane and hypocenter distributions'", ")", "dist", "=", "'no'", "if", "os", ".", "environ", ".", "get", "(", "'OQ_DISTRIBUTE'", ")", "==", "'no'", "else", "'processpool'", "smlt_dir", "=", "os", ".", "path", ".", "dirname", "(", "source_model_lt", ".", "filename", ")", "converter", "=", "sourceconverter", ".", "SourceConverter", "(", "oqparam", ".", "investigation_time", ",", "oqparam", ".", "rupture_mesh_spacing", ",", "oqparam", ".", "complex_fault_mesh_spacing", ",", "oqparam", ".", "width_of_mfd_bin", ",", "oqparam", ".", "area_source_discretization", ",", "oqparam", ".", "minimum_magnitude", ",", "not", "spinning_off", ",", "oqparam", ".", "source_id", ")", "if", "oqparam", ".", "calculation_mode", ".", "startswith", "(", "'ucerf'", ")", ":", "[", "grp", "]", "=", "nrml", ".", "to_python", "(", "oqparam", ".", "inputs", "[", "\"source_model\"", "]", ",", "converter", ")", "elif", "in_memory", ":", "logging", ".", "info", "(", "'Reading the source model(s) in parallel'", ")", "smap", "=", "parallel", ".", "Starmap", "(", "nrml", ".", "read_source_models", ",", "monitor", "=", "monitor", ",", "distribute", "=", "dist", ")", "for", "sm", "in", "source_model_lt", ".", "gen_source_models", "(", "gsim_lt", ")", ":", "for", "name", "in", "sm", ".", "names", ".", "split", "(", ")", ":", "fname", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "smlt_dir", ",", "name", ")", ")", "smap", ".", "submit", "(", "[", "fname", "]", ",", "converter", ")", "dic", "=", "{", "sm", ".", "fname", ":", "sm", "for", "sm", "in", "smap", "}", "# consider only the effective realizations", "nr", "=", "0", "idx", "=", "0", "grp_id", "=", "0", "if", "monitor", ".", "hdf5", ":", "sources", "=", "hdf5", ".", "create", "(", "monitor", ".", "hdf5", ",", "'source_info'", ",", "source_info_dt", ")", "hdf5", ".", "create", "(", "monitor", ".", "hdf5", ",", "'source_geom'", ",", "point3d", ")", "filename", "=", "(", "getattr", "(", "srcfilter", ",", "'filename'", ",", "None", ")", "if", "oqparam", ".", "prefilter_sources", "==", "'no'", "else", "None", ")", "source_ids", "=", "set", "(", ")", "for", "sm", "in", "source_model_lt", ".", "gen_source_models", "(", "gsim_lt", ")", ":", "apply_unc", "=", "functools", ".", "partial", "(", "source_model_lt", ".", "apply_uncertainties", ",", "sm", ".", "path", ")", "src_groups", "=", "[", "]", "for", "name", "in", "sm", ".", "names", ".", "split", "(", ")", ":", "fname", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "smlt_dir", ",", "name", ")", ")", "if", "oqparam", ".", "calculation_mode", ".", "startswith", "(", "'ucerf'", ")", ":", "sg", "=", "copy", ".", "copy", "(", "grp", ")", "sg", ".", "id", "=", "grp_id", "src", "=", "sg", "[", "0", "]", ".", "new", "(", "sm", ".", "ordinal", ",", "sm", ".", "names", ")", "# one source", "source_ids", ".", "add", "(", "src", ".", "source_id", ")", "src", ".", "src_group_id", "=", "grp_id", "src", ".", "id", "=", "idx", "if", "oqparam", ".", "number_of_logic_tree_samples", ":", "src", ".", "samples", "=", "sm", ".", "samples", "sg", ".", "sources", "=", "[", "src", "]", 
"src_groups", ".", "append", "(", "sg", ")", "idx", "+=", "1", "grp_id", "+=", "1", "data", "=", "[", "(", "(", "sg", ".", "id", ",", "src", ".", "source_id", ",", "src", ".", "code", ",", "0", ",", "0", ",", "src", ".", "num_ruptures", ",", "0", ",", "0", ",", "0", ")", ")", "]", "hdf5", ".", "extend", "(", "sources", ",", "numpy", ".", "array", "(", "data", ",", "source_info_dt", ")", ")", "elif", "in_memory", ":", "newsm", "=", "make_sm", "(", "fname", ",", "dic", "[", "fname", "]", ",", "apply_unc", ",", "oqparam", ".", "investigation_time", ")", "for", "sg", "in", "newsm", ":", "nr", "+=", "sum", "(", "src", ".", "num_ruptures", "for", "src", "in", "sg", ")", "# sample a source for each group", "if", "os", ".", "environ", ".", "get", "(", "'OQ_SAMPLE_SOURCES'", ")", ":", "sg", ".", "sources", "=", "random_filtered_sources", "(", "sg", ".", "sources", ",", "srcfilter", ",", "sg", ".", "id", "+", "oqparam", ".", "random_seed", ")", "for", "src", "in", "sg", ":", "source_ids", ".", "add", "(", "src", ".", "source_id", ")", "src", ".", "src_group_id", "=", "grp_id", "src", ".", "id", "=", "idx", "idx", "+=", "1", "sg", ".", "id", "=", "grp_id", "grp_id", "+=", "1", "src_groups", ".", "append", "(", "sg", ")", "if", "monitor", ".", "hdf5", ":", "store_sm", "(", "newsm", ",", "filename", ",", "monitor", ")", "else", ":", "# just collect the TRT models", "groups", "=", "logictree", ".", "read_source_groups", "(", "fname", ")", "for", "group", "in", "groups", ":", "source_ids", ".", "update", "(", "src", "[", "'id'", "]", "for", "src", "in", "group", ")", "src_groups", ".", "extend", "(", "groups", ")", "if", "grp_id", ">=", "TWO16", ":", "# the limit is really needed only for event based calculations", "raise", "ValueError", "(", "'There is a limit of %d src groups!'", "%", "TWO16", ")", "for", "brid", ",", "srcids", "in", "source_model_lt", ".", "info", ".", "applytosources", ".", "items", "(", ")", ":", "for", "srcid", "in", "srcids", ":", "if", "srcid", "not", "in", "source_ids", ":", "raise", "ValueError", "(", "'The source %s is not in the source model, please fix '", "'applyToSources in %s or the source model'", "%", "(", "srcid", ",", "source_model_lt", ".", "filename", ")", ")", "num_sources", "=", "sum", "(", "len", "(", "sg", ".", "sources", ")", "for", "sg", "in", "src_groups", ")", "sm", ".", "src_groups", "=", "src_groups", "trts", "=", "[", "mod", ".", "trt", "for", "mod", "in", "src_groups", "]", "source_model_lt", ".", "tectonic_region_types", ".", "update", "(", "trts", ")", "logging", ".", "info", "(", "'Processed source model %d with %d gsim path(s) and %d '", "'sources'", ",", "sm", ".", "ordinal", "+", "1", ",", "sm", ".", "num_gsim_paths", ",", "num_sources", ")", "gsim_file", "=", "oqparam", ".", "inputs", ".", "get", "(", "'gsim_logic_tree'", ")", "if", "gsim_file", ":", "# check TRTs", "for", "src_group", "in", "src_groups", ":", "if", "src_group", ".", "trt", "not", "in", "gsim_lt", ".", "values", ":", "raise", "ValueError", "(", "\"Found in %r a tectonic region type %r inconsistent \"", "\"with the ones in %r\"", "%", "(", "sm", ",", "src_group", ".", "trt", ",", "gsim_file", ")", ")", "yield", "sm", "logging", ".", "info", "(", "'The composite source model has {:,d} ruptures'", ".", "format", "(", "nr", ")", ")", "# log if some source file is being used more than once", "dupl", "=", "0", "for", "fname", ",", "hits", "in", "make_sm", ".", "fname_hits", ".", "items", "(", ")", ":", "if", "hits", ">", "1", ":", "logging", ".", "info", "(", "'%s has been considered %d 
times'", ",", "fname", ",", "hits", ")", "if", "not", "make_sm", ".", "changes", ":", "dupl", "+=", "hits", "if", "(", "dupl", "and", "not", "oqparam", ".", "optimize_same_id_sources", "and", "not", "oqparam", ".", "is_event_based", "(", ")", ")", ":", "logging", ".", "warning", "(", "'You are doing redundant calculations: please make sure '", "'that different sources have different IDs and set '", "'optimize_same_id_sources=true in your .ini file'", ")", "if", "make_sm", ".", "changes", ":", "logging", ".", "info", "(", "'Applied %d changes to the composite source model'", ",", "make_sm", ".", "changes", ")" ]
Build all the source models generated by the logic tree. :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :param gsim_lt: a :class:`openquake.commonlib.logictree.GsimLogicTree` instance :param source_model_lt: a :class:`openquake.commonlib.logictree.SourceModelLogicTree` instance :param monitor: a `openquake.baselib.performance.Monitor` instance :param in_memory: if True, keep in memory the sources, else just collect the TRTs :param srcfilter: a SourceFilter instance with an .filename pointing to the cache file :returns: an iterator over :class:`openquake.commonlib.logictree.LtSourceModel` tuples
[ "Build", "all", "the", "source", "models", "generated", "by", "the", "logic", "tree", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L640-L790
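Since `get_source_models` is a generator, callers must drain it; a sketch assuming `oq`, `gsim_lt`, `smlt` and `mon` were built beforehand, as `get_composite_source_model` below does:

```python
for sm in get_source_models(oq, gsim_lt, smlt, mon, in_memory=True):
    # each sm is an LtSourceModel with its src_groups already numbered
    print('model %d: %d group(s)' % (sm.ordinal + 1, len(sm.src_groups)))
```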
gem/oq-engine
openquake/commonlib/readinput.py
random_filtered_sources
def random_filtered_sources(sources, srcfilter, seed): """ :param sources: a list of sources :param srcfilter: a SourceFilter instance :param seed: a random seed :returns: an empty list or a list with a single filtered source """ random.seed(seed) while sources: src = random.choice(sources) if srcfilter.get_close_sites(src) is not None: return [src] sources.remove(src) return []
python
def random_filtered_sources(sources, srcfilter, seed): random.seed(seed) while sources: src = random.choice(sources) if srcfilter.get_close_sites(src) is not None: return [src] sources.remove(src) return []
[ "def", "random_filtered_sources", "(", "sources", ",", "srcfilter", ",", "seed", ")", ":", "random", ".", "seed", "(", "seed", ")", "while", "sources", ":", "src", "=", "random", ".", "choice", "(", "sources", ")", "if", "srcfilter", ".", "get_close_sites", "(", "src", ")", "is", "not", "None", ":", "return", "[", "src", "]", "sources", ".", "remove", "(", "src", ")", "return", "[", "]" ]
:param sources: a list of sources :param srcfilter: a SourceFilter instance :param seed: a random seed :returns: an empty list or a list with a single filtered source
[ ":", "param", "sources", ":", "a", "list", "of", "sources", ":", "param", "srcfilte", ":", "a", "SourceFilter", "instance", ":", "param", "seed", ":", "a", "random", "seed", ":", "returns", ":", "an", "empty", "list", "or", "a", "list", "with", "a", "single", "filtered", "source" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L800-L813
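Note that the function removes unfiltered sources from the list it receives, so passing a copy is prudent; a sketch assuming a `SourceFilter` instance named `srcfilter` and a source group `sg`:

```python
# keep at most one randomly chosen source of the group that is close
# to some site; the copy protects sg.sources from in-place removal
picked = random_filtered_sources(list(sg.sources), srcfilter, seed=42)
```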
gem/oq-engine
openquake/commonlib/readinput.py
get_composite_source_model
def get_composite_source_model(oqparam, monitor=None, in_memory=True, srcfilter=SourceFilter(None, {})): """ Parse the XML and build a complete composite source model in memory. :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :param monitor: a `openquake.baselib.performance.Monitor` instance :param in_memory: if False, just parse the XML without instantiating the sources :param srcfilter: if not None, use it to prefilter the sources """ ucerf = oqparam.calculation_mode.startswith('ucerf') source_model_lt = get_source_model_lt(oqparam, validate=not ucerf) trts = source_model_lt.tectonic_region_types trts_lower = {trt.lower() for trt in trts} reqv = oqparam.inputs.get('reqv', {}) for trt in reqv: # these are lowercase because they come from the job.ini if trt not in trts_lower: raise ValueError('Unknown TRT=%s in %s [reqv]' % (trt, oqparam.inputs['job_ini'])) gsim_lt = get_gsim_lt(oqparam, trts or ['*']) p = source_model_lt.num_paths * gsim_lt.get_num_paths() if oqparam.number_of_logic_tree_samples: logging.info('Considering {:,d} logic tree paths out of {:,d}'.format( oqparam.number_of_logic_tree_samples, p)) else: # full enumeration if oqparam.is_event_based() and p > oqparam.max_potential_paths: raise ValueError( 'There are too many potential logic tree paths (%d) ' 'use sampling instead of full enumeration' % p) logging.info('Potential number of logic tree paths = {:,d}'.format(p)) if source_model_lt.on_each_source: logging.info('There is a logic tree on each source') if monitor is None: monitor = performance.Monitor() smodels = [] for source_model in get_source_models( oqparam, gsim_lt, source_model_lt, monitor, in_memory, srcfilter): for src_group in source_model.src_groups: src_group.sources = sorted(src_group, key=getid) for src in src_group: # there are two cases depending on the flag in_memory: # 1) src is a hazardlib source and has a src_group_id # attribute; in that case the source has to be numbered # 2) src is a Node object, then nothing must be done if isinstance(src, Node): continue smodels.append(source_model) csm = source.CompositeSourceModel(gsim_lt, source_model_lt, smodels, oqparam.optimize_same_id_sources) for sm in csm.source_models: counter = collections.Counter() for sg in sm.src_groups: for srcid in map(getid, sg): counter[srcid] += 1 dupl = [srcid for srcid in counter if counter[srcid] > 1] if dupl: raise nrml.DuplicatedID('Found duplicated source IDs in %s: %s' % (sm, dupl)) if not in_memory: return csm if oqparam.is_event_based(): # initialize the rupture serial numbers before splitting/filtering; in # this way the serials are independent from the site collection csm.init_serials(oqparam.ses_seed) if oqparam.disagg_by_src: csm = csm.grp_by_src() # one group per source csm.info.gsim_lt.check_imts(oqparam.imtls) parallel.Starmap.shutdown() # save memory return csm
python
def get_composite_source_model(oqparam, monitor=None, in_memory=True, srcfilter=SourceFilter(None, {})): ucerf = oqparam.calculation_mode.startswith('ucerf') source_model_lt = get_source_model_lt(oqparam, validate=not ucerf) trts = source_model_lt.tectonic_region_types trts_lower = {trt.lower() for trt in trts} reqv = oqparam.inputs.get('reqv', {}) for trt in reqv: if trt not in trts_lower: raise ValueError('Unknown TRT=%s in %s [reqv]' % (trt, oqparam.inputs['job_ini'])) gsim_lt = get_gsim_lt(oqparam, trts or ['*']) p = source_model_lt.num_paths * gsim_lt.get_num_paths() if oqparam.number_of_logic_tree_samples: logging.info('Considering {:,d} logic tree paths out of {:,d}'.format( oqparam.number_of_logic_tree_samples, p)) else: if oqparam.is_event_based() and p > oqparam.max_potential_paths: raise ValueError( 'There are too many potential logic tree paths (%d) ' 'use sampling instead of full enumeration' % p) logging.info('Potential number of logic tree paths = {:,d}'.format(p)) if source_model_lt.on_each_source: logging.info('There is a logic tree on each source') if monitor is None: monitor = performance.Monitor() smodels = [] for source_model in get_source_models( oqparam, gsim_lt, source_model_lt, monitor, in_memory, srcfilter): for src_group in source_model.src_groups: src_group.sources = sorted(src_group, key=getid) for src in src_group: if isinstance(src, Node): continue smodels.append(source_model) csm = source.CompositeSourceModel(gsim_lt, source_model_lt, smodels, oqparam.optimize_same_id_sources) for sm in csm.source_models: counter = collections.Counter() for sg in sm.src_groups: for srcid in map(getid, sg): counter[srcid] += 1 dupl = [srcid for srcid in counter if counter[srcid] > 1] if dupl: raise nrml.DuplicatedID('Found duplicated source IDs in %s: %s' % (sm, dupl)) if not in_memory: return csm if oqparam.is_event_based(): csm.init_serials(oqparam.ses_seed) if oqparam.disagg_by_src: csm = csm.grp_by_src() csm.info.gsim_lt.check_imts(oqparam.imtls) parallel.Starmap.shutdown() return csm
[ "def", "get_composite_source_model", "(", "oqparam", ",", "monitor", "=", "None", ",", "in_memory", "=", "True", ",", "srcfilter", "=", "SourceFilter", "(", "None", ",", "{", "}", ")", ")", ":", "ucerf", "=", "oqparam", ".", "calculation_mode", ".", "startswith", "(", "'ucerf'", ")", "source_model_lt", "=", "get_source_model_lt", "(", "oqparam", ",", "validate", "=", "not", "ucerf", ")", "trts", "=", "source_model_lt", ".", "tectonic_region_types", "trts_lower", "=", "{", "trt", ".", "lower", "(", ")", "for", "trt", "in", "trts", "}", "reqv", "=", "oqparam", ".", "inputs", ".", "get", "(", "'reqv'", ",", "{", "}", ")", "for", "trt", "in", "reqv", ":", "# these are lowercase because they come from the job.ini", "if", "trt", "not", "in", "trts_lower", ":", "raise", "ValueError", "(", "'Unknown TRT=%s in %s [reqv]'", "%", "(", "trt", ",", "oqparam", ".", "inputs", "[", "'job_ini'", "]", ")", ")", "gsim_lt", "=", "get_gsim_lt", "(", "oqparam", ",", "trts", "or", "[", "'*'", "]", ")", "p", "=", "source_model_lt", ".", "num_paths", "*", "gsim_lt", ".", "get_num_paths", "(", ")", "if", "oqparam", ".", "number_of_logic_tree_samples", ":", "logging", ".", "info", "(", "'Considering {:,d} logic tree paths out of {:,d}'", ".", "format", "(", "oqparam", ".", "number_of_logic_tree_samples", ",", "p", ")", ")", "else", ":", "# full enumeration", "if", "oqparam", ".", "is_event_based", "(", ")", "and", "p", ">", "oqparam", ".", "max_potential_paths", ":", "raise", "ValueError", "(", "'There are too many potential logic tree paths (%d) '", "'use sampling instead of full enumeration'", "%", "p", ")", "logging", ".", "info", "(", "'Potential number of logic tree paths = {:,d}'", ".", "format", "(", "p", ")", ")", "if", "source_model_lt", ".", "on_each_source", ":", "logging", ".", "info", "(", "'There is a logic tree on each source'", ")", "if", "monitor", "is", "None", ":", "monitor", "=", "performance", ".", "Monitor", "(", ")", "smodels", "=", "[", "]", "for", "source_model", "in", "get_source_models", "(", "oqparam", ",", "gsim_lt", ",", "source_model_lt", ",", "monitor", ",", "in_memory", ",", "srcfilter", ")", ":", "for", "src_group", "in", "source_model", ".", "src_groups", ":", "src_group", ".", "sources", "=", "sorted", "(", "src_group", ",", "key", "=", "getid", ")", "for", "src", "in", "src_group", ":", "# there are two cases depending on the flag in_memory:", "# 1) src is a hazardlib source and has a src_group_id", "# attribute; in that case the source has to be numbered", "# 2) src is a Node object, then nothing must be done", "if", "isinstance", "(", "src", ",", "Node", ")", ":", "continue", "smodels", ".", "append", "(", "source_model", ")", "csm", "=", "source", ".", "CompositeSourceModel", "(", "gsim_lt", ",", "source_model_lt", ",", "smodels", ",", "oqparam", ".", "optimize_same_id_sources", ")", "for", "sm", "in", "csm", ".", "source_models", ":", "counter", "=", "collections", ".", "Counter", "(", ")", "for", "sg", "in", "sm", ".", "src_groups", ":", "for", "srcid", "in", "map", "(", "getid", ",", "sg", ")", ":", "counter", "[", "srcid", "]", "+=", "1", "dupl", "=", "[", "srcid", "for", "srcid", "in", "counter", "if", "counter", "[", "srcid", "]", ">", "1", "]", "if", "dupl", ":", "raise", "nrml", ".", "DuplicatedID", "(", "'Found duplicated source IDs in %s: %s'", "%", "(", "sm", ",", "dupl", ")", ")", "if", "not", "in_memory", ":", "return", "csm", "if", "oqparam", ".", "is_event_based", "(", ")", ":", "# initialize the rupture serial numbers before splitting/filtering; in", "# this 
way the serials are independent from the site collection", "csm", ".", "init_serials", "(", "oqparam", ".", "ses_seed", ")", "if", "oqparam", ".", "disagg_by_src", ":", "csm", "=", "csm", ".", "grp_by_src", "(", ")", "# one group per source", "csm", ".", "info", ".", "gsim_lt", ".", "check_imts", "(", "oqparam", ".", "imtls", ")", "parallel", ".", "Starmap", ".", "shutdown", "(", ")", "# save memory", "return", "csm" ]
Parse the XML and build a complete composite source model in memory. :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :param monitor: a `openquake.baselib.performance.Monitor` instance :param in_memory: if False, just parse the XML without instantiating the sources :param srcfilter: if not None, use it to prefilter the sources
[ "Parse", "the", "XML", "and", "build", "a", "complete", "composite", "source", "model", "in", "memory", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L816-L892
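The usual entry point, sketched under the assumption that the job configuration is read with `readinput.get_oqparam` from this same module; `job.ini` is a placeholder path:

```python
from openquake.commonlib import readinput

oq = readinput.get_oqparam('job.ini')  # placeholder job file
csm = readinput.get_composite_source_model(oq)
print(csm.info.gsim_lt)  # the checked gsim logic tree
```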
gem/oq-engine
openquake/commonlib/readinput.py
get_risk_model
def get_risk_model(oqparam): """ Return a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance """ tmap = _get_taxonomy_mapping(oqparam.inputs) fragdict = get_risk_models(oqparam, 'fragility') vulndict = get_risk_models(oqparam, 'vulnerability') consdict = get_risk_models(oqparam, 'consequence') if not tmap: # the risk ids are the taxonomies already d = dict(ids=['?'], weights=[1.0]) for risk_id in set(fragdict) | set(vulndict) | set(consdict): tmap[risk_id] = dict( fragility=d, consequence=d, vulnerability=d) for risk_id in consdict: cdict, fdict = consdict[risk_id], fragdict[risk_id] for loss_type, _ in cdict: c = cdict[loss_type, 'consequence'] f = fdict[loss_type, 'fragility'] csq_dmg_states = len(c.params) if csq_dmg_states != len(f): raise ValueError( 'The damage states in %s are different from the ' 'damage states in the fragility functions, %s' % (c, fragdict.limit_states)) dic = {} dic.update(fragdict) dic.update(vulndict) oqparam.set_risk_imtls(dic) if oqparam.calculation_mode.endswith('_bcr'): retro = get_risk_models(oqparam, 'vulnerability_retrofitted') else: retro = {} return riskinput.CompositeRiskModel( oqparam, tmap, fragdict, vulndict, consdict, retro)
python
def get_risk_model(oqparam): tmap = _get_taxonomy_mapping(oqparam.inputs) fragdict = get_risk_models(oqparam, 'fragility') vulndict = get_risk_models(oqparam, 'vulnerability') consdict = get_risk_models(oqparam, 'consequence') if not tmap: d = dict(ids=['?'], weights=[1.0]) for risk_id in set(fragdict) | set(vulndict) | set(consdict): tmap[risk_id] = dict( fragility=d, consequence=d, vulnerability=d) for risk_id in consdict: cdict, fdict = consdict[risk_id], fragdict[risk_id] for loss_type, _ in cdict: c = cdict[loss_type, 'consequence'] f = fdict[loss_type, 'fragility'] csq_dmg_states = len(c.params) if csq_dmg_states != len(f): raise ValueError( 'The damage states in %s are different from the ' 'damage states in the fragility functions, %s' % (c, fragdict.limit_states)) dic = {} dic.update(fragdict) dic.update(vulndict) oqparam.set_risk_imtls(dic) if oqparam.calculation_mode.endswith('_bcr'): retro = get_risk_models(oqparam, 'vulnerability_retrofitted') else: retro = {} return riskinput.CompositeRiskModel( oqparam, tmap, fragdict, vulndict, consdict, retro)
[ "def", "get_risk_model", "(", "oqparam", ")", ":", "tmap", "=", "_get_taxonomy_mapping", "(", "oqparam", ".", "inputs", ")", "fragdict", "=", "get_risk_models", "(", "oqparam", ",", "'fragility'", ")", "vulndict", "=", "get_risk_models", "(", "oqparam", ",", "'vulnerability'", ")", "consdict", "=", "get_risk_models", "(", "oqparam", ",", "'consequence'", ")", "if", "not", "tmap", ":", "# the risk ids are the taxonomies already", "d", "=", "dict", "(", "ids", "=", "[", "'?'", "]", ",", "weights", "=", "[", "1.0", "]", ")", "for", "risk_id", "in", "set", "(", "fragdict", ")", "|", "set", "(", "vulndict", ")", "|", "set", "(", "consdict", ")", ":", "tmap", "[", "risk_id", "]", "=", "dict", "(", "fragility", "=", "d", ",", "consequence", "=", "d", ",", "vulnerability", "=", "d", ")", "for", "risk_id", "in", "consdict", ":", "cdict", ",", "fdict", "=", "consdict", "[", "risk_id", "]", ",", "fragdict", "[", "risk_id", "]", "for", "loss_type", ",", "_", "in", "cdict", ":", "c", "=", "cdict", "[", "loss_type", ",", "'consequence'", "]", "f", "=", "fdict", "[", "loss_type", ",", "'fragility'", "]", "csq_dmg_states", "=", "len", "(", "c", ".", "params", ")", "if", "csq_dmg_states", "!=", "len", "(", "f", ")", ":", "raise", "ValueError", "(", "'The damage states in %s are different from the '", "'damage states in the fragility functions, %s'", "%", "(", "c", ",", "fragdict", ".", "limit_states", ")", ")", "dic", "=", "{", "}", "dic", ".", "update", "(", "fragdict", ")", "dic", ".", "update", "(", "vulndict", ")", "oqparam", ".", "set_risk_imtls", "(", "dic", ")", "if", "oqparam", ".", "calculation_mode", ".", "endswith", "(", "'_bcr'", ")", ":", "retro", "=", "get_risk_models", "(", "oqparam", ",", "'vulnerability_retrofitted'", ")", "else", ":", "retro", "=", "{", "}", "return", "riskinput", ".", "CompositeRiskModel", "(", "oqparam", ",", "tmap", ",", "fragdict", ",", "vulndict", ",", "consdict", ",", "retro", ")" ]
Return a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance
[ "Return", "a", ":", "class", ":", "openquake", ".", "risklib", ".", "riskinput", ".", "CompositeRiskModel", "instance" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L910-L946
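A sketch reusing the `oq` object from the previous example; the fragility, vulnerability and consequence files are taken from `oq.inputs`:

```python
crm = get_risk_model(oq)  # a riskinput.CompositeRiskModel instance
```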
gem/oq-engine
openquake/commonlib/readinput.py
get_exposure
def get_exposure(oqparam): """ Read the full exposure in memory and build a list of :class:`openquake.risklib.asset.Asset` instances. :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :returns: an :class:`Exposure` instance or a compatible AssetCollection """ exposure = asset.Exposure.read( oqparam.inputs['exposure'], oqparam.calculation_mode, oqparam.region, oqparam.ignore_missing_costs, by_country='country' in oqparam.aggregate_by) exposure.mesh, exposure.assets_by_site = exposure.get_mesh_assets_by_site() return exposure
python
def get_exposure(oqparam): exposure = asset.Exposure.read( oqparam.inputs['exposure'], oqparam.calculation_mode, oqparam.region, oqparam.ignore_missing_costs, by_country='country' in oqparam.aggregate_by) exposure.mesh, exposure.assets_by_site = exposure.get_mesh_assets_by_site() return exposure
[ "def", "get_exposure", "(", "oqparam", ")", ":", "exposure", "=", "asset", ".", "Exposure", ".", "read", "(", "oqparam", ".", "inputs", "[", "'exposure'", "]", ",", "oqparam", ".", "calculation_mode", ",", "oqparam", ".", "region", ",", "oqparam", ".", "ignore_missing_costs", ",", "by_country", "=", "'country'", "in", "oqparam", ".", "aggregate_by", ")", "exposure", ".", "mesh", ",", "exposure", ".", "assets_by_site", "=", "exposure", ".", "get_mesh_assets_by_site", "(", ")", "return", "exposure" ]
Read the full exposure in memory and build a list of :class:`openquake.risklib.asset.Asset` instances. :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :returns: an :class:`Exposure` instance or a compatible AssetCollection
[ "Read", "the", "full", "exposure", "in", "memory", "and", "build", "a", "list", "of", ":", "class", ":", "openquake", ".", "risklib", ".", "asset", ".", "Asset", "instances", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L949-L964
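Same assumption on `oq`; a sketch reading the exposure declared in the job file:

```python
exp = get_exposure(oq)  # reads oq.inputs['exposure']
print('%d sites carry assets' % len(exp.assets_by_site))
```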
gem/oq-engine
openquake/commonlib/readinput.py
get_sitecol_assetcol
def get_sitecol_assetcol(oqparam, haz_sitecol=None, cost_types=()): """ :param oqparam: calculation parameters :param haz_sitecol: the hazard site collection :param cost_types: the expected cost types :returns: (site collection, asset collection, discarded) """ global exposure asset_hazard_distance = oqparam.asset_hazard_distance['default'] if exposure is None: # haz_sitecol not extracted from the exposure exposure = get_exposure(oqparam) if haz_sitecol is None: haz_sitecol = get_site_collection(oqparam) if oqparam.region_grid_spacing: haz_distance = oqparam.region_grid_spacing * 1.414 if haz_distance != asset_hazard_distance: logging.info('Using asset_hazard_distance=%d km instead of %d km', haz_distance, asset_hazard_distance) else: haz_distance = asset_hazard_distance if haz_sitecol.mesh != exposure.mesh: # associate the assets to the hazard sites sitecol, assets_by, discarded = geo.utils.assoc( exposure.assets_by_site, haz_sitecol, haz_distance, 'filter', exposure.asset_refs) assets_by_site = [[] for _ in sitecol.complete.sids] num_assets = 0 for sid, assets in zip(sitecol.sids, assets_by): assets_by_site[sid] = assets num_assets += len(assets) logging.info( 'Associated %d assets to %d sites', num_assets, len(sitecol)) else: # asset sites and hazard sites are the same sitecol = haz_sitecol assets_by_site = exposure.assets_by_site discarded = [] logging.info('Read %d sites and %d assets from the exposure', len(sitecol), sum(len(a) for a in assets_by_site)) assetcol = asset.AssetCollection( exposure, assets_by_site, oqparam.time_event) if assetcol.occupancy_periods: missing = set(cost_types) - set(exposure.cost_types['name']) - set( ['occupants']) else: missing = set(cost_types) - set(exposure.cost_types['name']) if missing and not oqparam.calculation_mode.endswith('damage'): raise InvalidFile('The exposure %s is missing %s' % (oqparam.inputs['exposure'], missing)) if (not oqparam.hazard_calculation_id and 'gmfs' not in oqparam.inputs and 'hazard_curves' not in oqparam.inputs and sitecol is not sitecol.complete): assetcol = assetcol.reduce_also(sitecol) return sitecol, assetcol, discarded
python
def get_sitecol_assetcol(oqparam, haz_sitecol=None, cost_types=()): global exposure asset_hazard_distance = oqparam.asset_hazard_distance['default'] if exposure is None: exposure = get_exposure(oqparam) if haz_sitecol is None: haz_sitecol = get_site_collection(oqparam) if oqparam.region_grid_spacing: haz_distance = oqparam.region_grid_spacing * 1.414 if haz_distance != asset_hazard_distance: logging.info('Using asset_hazard_distance=%d km instead of %d km', haz_distance, asset_hazard_distance) else: haz_distance = asset_hazard_distance if haz_sitecol.mesh != exposure.mesh: sitecol, assets_by, discarded = geo.utils.assoc( exposure.assets_by_site, haz_sitecol, haz_distance, 'filter', exposure.asset_refs) assets_by_site = [[] for _ in sitecol.complete.sids] num_assets = 0 for sid, assets in zip(sitecol.sids, assets_by): assets_by_site[sid] = assets num_assets += len(assets) logging.info( 'Associated %d assets to %d sites', num_assets, len(sitecol)) else: sitecol = haz_sitecol assets_by_site = exposure.assets_by_site discarded = [] logging.info('Read %d sites and %d assets from the exposure', len(sitecol), sum(len(a) for a in assets_by_site)) assetcol = asset.AssetCollection( exposure, assets_by_site, oqparam.time_event) if assetcol.occupancy_periods: missing = set(cost_types) - set(exposure.cost_types['name']) - set( ['occupants']) else: missing = set(cost_types) - set(exposure.cost_types['name']) if missing and not oqparam.calculation_mode.endswith('damage'): raise InvalidFile('The exposure %s is missing %s' % (oqparam.inputs['exposure'], missing)) if (not oqparam.hazard_calculation_id and 'gmfs' not in oqparam.inputs and 'hazard_curves' not in oqparam.inputs and sitecol is not sitecol.complete): assetcol = assetcol.reduce_also(sitecol) return sitecol, assetcol, discarded
[ "def", "get_sitecol_assetcol", "(", "oqparam", ",", "haz_sitecol", "=", "None", ",", "cost_types", "=", "(", ")", ")", ":", "global", "exposure", "asset_hazard_distance", "=", "oqparam", ".", "asset_hazard_distance", "[", "'default'", "]", "if", "exposure", "is", "None", ":", "# haz_sitecol not extracted from the exposure", "exposure", "=", "get_exposure", "(", "oqparam", ")", "if", "haz_sitecol", "is", "None", ":", "haz_sitecol", "=", "get_site_collection", "(", "oqparam", ")", "if", "oqparam", ".", "region_grid_spacing", ":", "haz_distance", "=", "oqparam", ".", "region_grid_spacing", "*", "1.414", "if", "haz_distance", "!=", "asset_hazard_distance", ":", "logging", ".", "info", "(", "'Using asset_hazard_distance=%d km instead of %d km'", ",", "haz_distance", ",", "asset_hazard_distance", ")", "else", ":", "haz_distance", "=", "asset_hazard_distance", "if", "haz_sitecol", ".", "mesh", "!=", "exposure", ".", "mesh", ":", "# associate the assets to the hazard sites", "sitecol", ",", "assets_by", ",", "discarded", "=", "geo", ".", "utils", ".", "assoc", "(", "exposure", ".", "assets_by_site", ",", "haz_sitecol", ",", "haz_distance", ",", "'filter'", ",", "exposure", ".", "asset_refs", ")", "assets_by_site", "=", "[", "[", "]", "for", "_", "in", "sitecol", ".", "complete", ".", "sids", "]", "num_assets", "=", "0", "for", "sid", ",", "assets", "in", "zip", "(", "sitecol", ".", "sids", ",", "assets_by", ")", ":", "assets_by_site", "[", "sid", "]", "=", "assets", "num_assets", "+=", "len", "(", "assets", ")", "logging", ".", "info", "(", "'Associated %d assets to %d sites'", ",", "num_assets", ",", "len", "(", "sitecol", ")", ")", "else", ":", "# asset sites and hazard sites are the same", "sitecol", "=", "haz_sitecol", "assets_by_site", "=", "exposure", ".", "assets_by_site", "discarded", "=", "[", "]", "logging", ".", "info", "(", "'Read %d sites and %d assets from the exposure'", ",", "len", "(", "sitecol", ")", ",", "sum", "(", "len", "(", "a", ")", "for", "a", "in", "assets_by_site", ")", ")", "assetcol", "=", "asset", ".", "AssetCollection", "(", "exposure", ",", "assets_by_site", ",", "oqparam", ".", "time_event", ")", "if", "assetcol", ".", "occupancy_periods", ":", "missing", "=", "set", "(", "cost_types", ")", "-", "set", "(", "exposure", ".", "cost_types", "[", "'name'", "]", ")", "-", "set", "(", "[", "'occupants'", "]", ")", "else", ":", "missing", "=", "set", "(", "cost_types", ")", "-", "set", "(", "exposure", ".", "cost_types", "[", "'name'", "]", ")", "if", "missing", "and", "not", "oqparam", ".", "calculation_mode", ".", "endswith", "(", "'damage'", ")", ":", "raise", "InvalidFile", "(", "'The exposure %s is missing %s'", "%", "(", "oqparam", ".", "inputs", "[", "'exposure'", "]", ",", "missing", ")", ")", "if", "(", "not", "oqparam", ".", "hazard_calculation_id", "and", "'gmfs'", "not", "in", "oqparam", ".", "inputs", "and", "'hazard_curves'", "not", "in", "oqparam", ".", "inputs", "and", "sitecol", "is", "not", "sitecol", ".", "complete", ")", ":", "assetcol", "=", "assetcol", ".", "reduce_also", "(", "sitecol", ")", "return", "sitecol", ",", "assetcol", ",", "discarded" ]
:param oqparam: calculation parameters :param haz_sitecol: the hazard site collection :param cost_types: the expected cost types :returns: (site collection, asset collection, discarded)
[ ":", "param", "oqparam", ":", "calculation", "parameters", ":", "param", "haz_sitecol", ":", "the", "hazard", "site", "collection", ":", "param", "cost_types", ":", "the", "expected", "cost", "types", ":", "returns", ":", "(", "site", "collection", "asset", "collection", "discarded", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L967-L1022
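The 1.414 factor in the code above is √2, i.e. the diagonal of a grid cell of side `region_grid_spacing`. A usage sketch, again assuming `oq` and an example cost type the exposure is expected to provide:

```python
sitecol, assetcol, discarded = get_sitecol_assetcol(
    oq, cost_types=['structural'])  # 'structural' is an example
if len(discarded):
    print('%d asset(s) beyond the association distance' % len(discarded))
```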
gem/oq-engine
openquake/commonlib/readinput.py
get_pmap_from_csv
def get_pmap_from_csv(oqparam, fnames): """ :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :param fnames: a space-separated list of .csv relative filenames :returns: the site mesh and the hazard curves read by the .csv files """ if not oqparam.imtls: oqparam.set_risk_imtls(get_risk_models(oqparam)) if not oqparam.imtls: raise ValueError('Missing intensity_measure_types_and_levels in %s' % oqparam.inputs['job_ini']) dic = {wrapper.imt: wrapper.array for wrapper in map(writers.read_composite_array, fnames)} array = dic[next(iter(dic))] mesh = geo.Mesh(array['lon'], array['lat']) num_levels = sum(len(imls) for imls in oqparam.imtls.values()) data = numpy.zeros((len(mesh), num_levels)) level = 0 for im in oqparam.imtls: arr = dic[im] for poe in arr.dtype.names[3:]: data[:, level] = arr[poe] level += 1 for field in ('lon', 'lat', 'depth'): # sanity check numpy.testing.assert_equal(arr[field], array[field]) return mesh, ProbabilityMap.from_array(data, range(len(mesh)))
python
def get_pmap_from_csv(oqparam, fnames): if not oqparam.imtls: oqparam.set_risk_imtls(get_risk_models(oqparam)) if not oqparam.imtls: raise ValueError('Missing intensity_measure_types_and_levels in %s' % oqparam.inputs['job_ini']) dic = {wrapper.imt: wrapper.array for wrapper in map(writers.read_composite_array, fnames)} array = dic[next(iter(dic))] mesh = geo.Mesh(array['lon'], array['lat']) num_levels = sum(len(imls) for imls in oqparam.imtls.values()) data = numpy.zeros((len(mesh), num_levels)) level = 0 for im in oqparam.imtls: arr = dic[im] for poe in arr.dtype.names[3:]: data[:, level] = arr[poe] level += 1 for field in ('lon', 'lat', 'depth'): numpy.testing.assert_equal(arr[field], array[field]) return mesh, ProbabilityMap.from_array(data, range(len(mesh)))
[ "def", "get_pmap_from_csv", "(", "oqparam", ",", "fnames", ")", ":", "if", "not", "oqparam", ".", "imtls", ":", "oqparam", ".", "set_risk_imtls", "(", "get_risk_models", "(", "oqparam", ")", ")", "if", "not", "oqparam", ".", "imtls", ":", "raise", "ValueError", "(", "'Missing intensity_measure_types_and_levels in %s'", "%", "oqparam", ".", "inputs", "[", "'job_ini'", "]", ")", "dic", "=", "{", "wrapper", ".", "imt", ":", "wrapper", ".", "array", "for", "wrapper", "in", "map", "(", "writers", ".", "read_composite_array", ",", "fnames", ")", "}", "array", "=", "dic", "[", "next", "(", "iter", "(", "dic", ")", ")", "]", "mesh", "=", "geo", ".", "Mesh", "(", "array", "[", "'lon'", "]", ",", "array", "[", "'lat'", "]", ")", "num_levels", "=", "sum", "(", "len", "(", "imls", ")", "for", "imls", "in", "oqparam", ".", "imtls", ".", "values", "(", ")", ")", "data", "=", "numpy", ".", "zeros", "(", "(", "len", "(", "mesh", ")", ",", "num_levels", ")", ")", "level", "=", "0", "for", "im", "in", "oqparam", ".", "imtls", ":", "arr", "=", "dic", "[", "im", "]", "for", "poe", "in", "arr", ".", "dtype", ".", "names", "[", "3", ":", "]", ":", "data", "[", ":", ",", "level", "]", "=", "arr", "[", "poe", "]", "level", "+=", "1", "for", "field", "in", "(", "'lon'", ",", "'lat'", ",", "'depth'", ")", ":", "# sanity check", "numpy", ".", "testing", ".", "assert_equal", "(", "arr", "[", "field", "]", ",", "array", "[", "field", "]", ")", "return", "mesh", ",", "ProbabilityMap", ".", "from_array", "(", "data", ",", "range", "(", "len", "(", "mesh", ")", ")", ")" ]
:param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :param fnames: a space-separated list of .csv relative filenames :returns: the site mesh and the hazard curves read by the .csv files
[ ":", "param", "oqparam", ":", "an", ":", "class", ":", "openquake", ".", "commonlib", ".", "oqvalidation", ".", "OqParam", "instance", ":", "param", "fnames", ":", "a", "space", "-", "separated", "list", "of", ".", "csv", "relative", "filenames", ":", "returns", ":", "the", "site", "mesh", "and", "the", "hazard", "curves", "read", "by", "the", ".", "csv", "files" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L1080-L1109
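A sketch assuming one exported CSV per IMT, consistent with `oq.imtls`; the file names are placeholders:

```python
mesh, pmap = get_pmap_from_csv(
    oq, ['hazard_curve-PGA.csv', 'hazard_curve-SA(0.1).csv'])
print('%d sites, %d probability curves' % (len(mesh), len(pmap)))
```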
gem/oq-engine
openquake/commonlib/readinput.py
get_pmap_from_nrml
def get_pmap_from_nrml(oqparam, fname): """ :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :param fname: an XML file containing hazard curves :returns: site mesh, curve array """ hcurves_by_imt = {} oqparam.hazard_imtls = imtls = {} for hcurves in nrml.read(fname): imt = hcurves['IMT'] oqparam.investigation_time = hcurves['investigationTime'] if imt == 'SA': imt += '(%s)' % hcurves['saPeriod'] imtls[imt] = ~hcurves.IMLs data = sorted((~node.Point.pos, ~node.poEs) for node in hcurves[1:]) hcurves_by_imt[imt] = numpy.array([d[1] for d in data]) lons, lats = [], [] for xy, poes in data: lons.append(xy[0]) lats.append(xy[1]) mesh = geo.Mesh(numpy.array(lons), numpy.array(lats)) num_levels = sum(len(v) for v in imtls.values()) array = numpy.zeros((len(mesh), num_levels)) imtls = DictArray(imtls) for imt_ in hcurves_by_imt: array[:, imtls(imt_)] = hcurves_by_imt[imt_] return mesh, ProbabilityMap.from_array(array, range(len(mesh)))
python
def get_pmap_from_nrml(oqparam, fname): hcurves_by_imt = {} oqparam.hazard_imtls = imtls = {} for hcurves in nrml.read(fname): imt = hcurves['IMT'] oqparam.investigation_time = hcurves['investigationTime'] if imt == 'SA': imt += '(%s)' % hcurves['saPeriod'] imtls[imt] = ~hcurves.IMLs data = sorted((~node.Point.pos, ~node.poEs) for node in hcurves[1:]) hcurves_by_imt[imt] = numpy.array([d[1] for d in data]) lons, lats = [], [] for xy, poes in data: lons.append(xy[0]) lats.append(xy[1]) mesh = geo.Mesh(numpy.array(lons), numpy.array(lats)) num_levels = sum(len(v) for v in imtls.values()) array = numpy.zeros((len(mesh), num_levels)) imtls = DictArray(imtls) for imt_ in hcurves_by_imt: array[:, imtls(imt_)] = hcurves_by_imt[imt_] return mesh, ProbabilityMap.from_array(array, range(len(mesh)))
[ "def", "get_pmap_from_nrml", "(", "oqparam", ",", "fname", ")", ":", "hcurves_by_imt", "=", "{", "}", "oqparam", ".", "hazard_imtls", "=", "imtls", "=", "{", "}", "for", "hcurves", "in", "nrml", ".", "read", "(", "fname", ")", ":", "imt", "=", "hcurves", "[", "'IMT'", "]", "oqparam", ".", "investigation_time", "=", "hcurves", "[", "'investigationTime'", "]", "if", "imt", "==", "'SA'", ":", "imt", "+=", "'(%s)'", "%", "hcurves", "[", "'saPeriod'", "]", "imtls", "[", "imt", "]", "=", "~", "hcurves", ".", "IMLs", "data", "=", "sorted", "(", "(", "~", "node", ".", "Point", ".", "pos", ",", "~", "node", ".", "poEs", ")", "for", "node", "in", "hcurves", "[", "1", ":", "]", ")", "hcurves_by_imt", "[", "imt", "]", "=", "numpy", ".", "array", "(", "[", "d", "[", "1", "]", "for", "d", "in", "data", "]", ")", "lons", ",", "lats", "=", "[", "]", ",", "[", "]", "for", "xy", ",", "poes", "in", "data", ":", "lons", ".", "append", "(", "xy", "[", "0", "]", ")", "lats", ".", "append", "(", "xy", "[", "1", "]", ")", "mesh", "=", "geo", ".", "Mesh", "(", "numpy", ".", "array", "(", "lons", ")", ",", "numpy", ".", "array", "(", "lats", ")", ")", "num_levels", "=", "sum", "(", "len", "(", "v", ")", "for", "v", "in", "imtls", ".", "values", "(", ")", ")", "array", "=", "numpy", ".", "zeros", "(", "(", "len", "(", "mesh", ")", ",", "num_levels", ")", ")", "imtls", "=", "DictArray", "(", "imtls", ")", "for", "imt_", "in", "hcurves_by_imt", ":", "array", "[", ":", ",", "imtls", "(", "imt_", ")", "]", "=", "hcurves_by_imt", "[", "imt_", "]", "return", "mesh", ",", "ProbabilityMap", ".", "from_array", "(", "array", ",", "range", "(", "len", "(", "mesh", ")", ")", ")" ]
:param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :param fname: an XML file containing hazard curves :returns: site mesh, curve array
[ ":", "param", "oqparam", ":", "an", ":", "class", ":", "openquake", ".", "commonlib", ".", "oqvalidation", ".", "OqParam", "instance", ":", "param", "fname", ":", "an", "XML", "file", "containing", "hazard", "curves", ":", "returns", ":", "site", "mesh", "curve", "array" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L1113-L1142
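The NRML variant sets `oq.hazard_imtls` and `oq.investigation_time` as side effects; a sketch with a placeholder path:

```python
mesh, pmap = get_pmap_from_nrml(oq, 'hazard_curves.xml')
```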
gem/oq-engine
openquake/commonlib/readinput.py
get_scenario_from_nrml
def get_scenario_from_nrml(oqparam, fname): """ :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :param fname: the NRML files containing the GMFs :returns: a pair (eids, gmf array) """ if not oqparam.imtls: oqparam.set_risk_imtls(get_risk_models(oqparam)) imts = sorted(oqparam.imtls) num_imts = len(imts) imt_dt = numpy.dtype([(imt, F32) for imt in imts]) gmfset = nrml.read(fname).gmfCollection.gmfSet eids, sitecounts = _extract_eids_sitecounts(gmfset) coords = sorted(sitecounts) oqparam.sites = [(lon, lat, 0) for lon, lat in coords] site_idx = {lonlat: i for i, lonlat in enumerate(coords)} oqparam.number_of_ground_motion_fields = num_events = len(eids) num_sites = len(oqparam.sites) gmf_by_imt = numpy.zeros((num_events, num_sites), imt_dt) counts = collections.Counter() for i, gmf in enumerate(gmfset): if len(gmf) != num_sites: # there must be one node per site raise InvalidFile('Expected %d sites, got %d nodes in %s, line %d' % (num_sites, len(gmf), fname, gmf.lineno)) counts[gmf['ruptureId']] += 1 imt = gmf['IMT'] if imt == 'SA': imt = 'SA(%s)' % gmf['saPeriod'] for node in gmf: sid = site_idx[node['lon'], node['lat']] gmf_by_imt[imt][i % num_events, sid] = node['gmv'] for rupid, count in sorted(counts.items()): if count < num_imts: raise InvalidFile("Found a missing ruptureId %d in %s" % (rupid, fname)) elif count > num_imts: raise InvalidFile("Found a duplicated ruptureId '%s' in %s" % (rupid, fname)) expected_gmvs_per_site = num_imts * len(eids) for lonlat, counts in sitecounts.items(): if counts != expected_gmvs_per_site: raise InvalidFile( '%s: expected %d gmvs at location %s, found %d' % (fname, expected_gmvs_per_site, lonlat, counts)) return eids, gmf_by_imt.T
python
def get_scenario_from_nrml(oqparam, fname): if not oqparam.imtls: oqparam.set_risk_imtls(get_risk_models(oqparam)) imts = sorted(oqparam.imtls) num_imts = len(imts) imt_dt = numpy.dtype([(imt, F32) for imt in imts]) gmfset = nrml.read(fname).gmfCollection.gmfSet eids, sitecounts = _extract_eids_sitecounts(gmfset) coords = sorted(sitecounts) oqparam.sites = [(lon, lat, 0) for lon, lat in coords] site_idx = {lonlat: i for i, lonlat in enumerate(coords)} oqparam.number_of_ground_motion_fields = num_events = len(eids) num_sites = len(oqparam.sites) gmf_by_imt = numpy.zeros((num_events, num_sites), imt_dt) counts = collections.Counter() for i, gmf in enumerate(gmfset): if len(gmf) != num_sites: raise InvalidFile('Expected %d sites, got %d nodes in %s, line %d' % (num_sites, len(gmf), fname, gmf.lineno)) counts[gmf['ruptureId']] += 1 imt = gmf['IMT'] if imt == 'SA': imt = 'SA(%s)' % gmf['saPeriod'] for node in gmf: sid = site_idx[node['lon'], node['lat']] gmf_by_imt[imt][i % num_events, sid] = node['gmv'] for rupid, count in sorted(counts.items()): if count < num_imts: raise InvalidFile("Found a missing ruptureId %d in %s" % (rupid, fname)) elif count > num_imts: raise InvalidFile("Found a duplicated ruptureId '%s' in %s" % (rupid, fname)) expected_gmvs_per_site = num_imts * len(eids) for lonlat, counts in sitecounts.items(): if counts != expected_gmvs_per_site: raise InvalidFile( '%s: expected %d gmvs at location %s, found %d' % (fname, expected_gmvs_per_site, lonlat, counts)) return eids, gmf_by_imt.T
[ "def", "get_scenario_from_nrml", "(", "oqparam", ",", "fname", ")", ":", "if", "not", "oqparam", ".", "imtls", ":", "oqparam", ".", "set_risk_imtls", "(", "get_risk_models", "(", "oqparam", ")", ")", "imts", "=", "sorted", "(", "oqparam", ".", "imtls", ")", "num_imts", "=", "len", "(", "imts", ")", "imt_dt", "=", "numpy", ".", "dtype", "(", "[", "(", "imt", ",", "F32", ")", "for", "imt", "in", "imts", "]", ")", "gmfset", "=", "nrml", ".", "read", "(", "fname", ")", ".", "gmfCollection", ".", "gmfSet", "eids", ",", "sitecounts", "=", "_extract_eids_sitecounts", "(", "gmfset", ")", "coords", "=", "sorted", "(", "sitecounts", ")", "oqparam", ".", "sites", "=", "[", "(", "lon", ",", "lat", ",", "0", ")", "for", "lon", ",", "lat", "in", "coords", "]", "site_idx", "=", "{", "lonlat", ":", "i", "for", "i", ",", "lonlat", "in", "enumerate", "(", "coords", ")", "}", "oqparam", ".", "number_of_ground_motion_fields", "=", "num_events", "=", "len", "(", "eids", ")", "num_sites", "=", "len", "(", "oqparam", ".", "sites", ")", "gmf_by_imt", "=", "numpy", ".", "zeros", "(", "(", "num_events", ",", "num_sites", ")", ",", "imt_dt", ")", "counts", "=", "collections", ".", "Counter", "(", ")", "for", "i", ",", "gmf", "in", "enumerate", "(", "gmfset", ")", ":", "if", "len", "(", "gmf", ")", "!=", "num_sites", ":", "# there must be one node per site", "raise", "InvalidFile", "(", "'Expected %d sites, got %d nodes in %s, line %d'", "%", "(", "num_sites", ",", "len", "(", "gmf", ")", ",", "fname", ",", "gmf", ".", "lineno", ")", ")", "counts", "[", "gmf", "[", "'ruptureId'", "]", "]", "+=", "1", "imt", "=", "gmf", "[", "'IMT'", "]", "if", "imt", "==", "'SA'", ":", "imt", "=", "'SA(%s)'", "%", "gmf", "[", "'saPeriod'", "]", "for", "node", "in", "gmf", ":", "sid", "=", "site_idx", "[", "node", "[", "'lon'", "]", ",", "node", "[", "'lat'", "]", "]", "gmf_by_imt", "[", "imt", "]", "[", "i", "%", "num_events", ",", "sid", "]", "=", "node", "[", "'gmv'", "]", "for", "rupid", ",", "count", "in", "sorted", "(", "counts", ".", "items", "(", ")", ")", ":", "if", "count", "<", "num_imts", ":", "raise", "InvalidFile", "(", "\"Found a missing ruptureId %d in %s\"", "%", "(", "rupid", ",", "fname", ")", ")", "elif", "count", ">", "num_imts", ":", "raise", "InvalidFile", "(", "\"Found a duplicated ruptureId '%s' in %s\"", "%", "(", "rupid", ",", "fname", ")", ")", "expected_gmvs_per_site", "=", "num_imts", "*", "len", "(", "eids", ")", "for", "lonlat", ",", "counts", "in", "sitecounts", ".", "items", "(", ")", ":", "if", "counts", "!=", "expected_gmvs_per_site", ":", "raise", "InvalidFile", "(", "'%s: expected %d gmvs at location %s, found %d'", "%", "(", "fname", ",", "expected_gmvs_per_site", ",", "lonlat", ",", "counts", ")", ")", "return", "eids", ",", "gmf_by_imt", ".", "T" ]
:param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :param fname: the NRML files containing the GMFs :returns: a pair (eids, gmf array)
[ ":", "param", "oqparam", ":", "an", ":", "class", ":", "openquake", ".", "commonlib", ".", "oqvalidation", ".", "OqParam", "instance", ":", "param", "fname", ":", "the", "NRML", "files", "containing", "the", "GMFs", ":", "returns", ":", "a", "pair", "(", "eids", "gmf", "array", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L1161-L1209
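A standalone sketch of the per-rupture validation pattern used in get_scenario_from_nrml above: every ruptureId must appear exactly once per IMT, which a collections.Counter makes cheap to check. The sample records below are invented for illustration, not taken from a real NRML file.

import collections

num_imts = 2  # e.g. PGA and SA(0.1)
# (ruptureId, IMT) pairs; 'rup-2' is deliberately missing one IMT
records = [('rup-1', 'PGA'), ('rup-1', 'SA(0.1)'), ('rup-2', 'PGA')]

counts = collections.Counter(rupid for rupid, imt in records)
for rupid, count in sorted(counts.items()):
    if count < num_imts:
        print('missing IMT for %s' % rupid)    # reported as InvalidFile above
    elif count > num_imts:
        print('duplicated IMT for %s' % rupid)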
gem/oq-engine
openquake/commonlib/readinput.py
get_mesh_hcurves
def get_mesh_hcurves(oqparam): """ Read CSV data in the format `lon lat, v1-vN, w1-wN, ...`. :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :returns: the mesh of points and the data as a dictionary imt -> array of curves for each site """ imtls = oqparam.imtls lon_lats = set() data = AccumDict() # imt -> list of arrays ncols = len(imtls) + 1 # lon_lat + curve_per_imt ... csvfile = oqparam.inputs['hazard_curves'] for line, row in enumerate(csv.reader(csvfile), 1): try: if len(row) != ncols: raise ValueError('Expected %d columns, found %d' % (ncols, len(row))) x, y = row[0].split() lon_lat = valid.longitude(x), valid.latitude(y) if lon_lat in lon_lats: raise DuplicatedPoint(lon_lat) lon_lats.add(lon_lat) for i, imt_ in enumerate(imtls, 1): values = valid.decreasing_probabilities(row[i]) if len(values) != len(imtls[imt_]): raise ValueError('Found %d values, expected %d' % (len(values), len(imtls[imt_]))) data += {imt_: [numpy.array(values)]} except (ValueError, DuplicatedPoint) as err: raise err.__class__('%s: file %s, line %d' % (err, csvfile, line)) lons, lats = zip(*sorted(lon_lats)) mesh = geo.Mesh(numpy.array(lons), numpy.array(lats)) return mesh, {imt: numpy.array(lst) for imt, lst in data.items()}
python
def get_mesh_hcurves(oqparam): imtls = oqparam.imtls lon_lats = set() data = AccumDict() ncols = len(imtls) + 1 csvfile = oqparam.inputs['hazard_curves'] for line, row in enumerate(csv.reader(csvfile), 1): try: if len(row) != ncols: raise ValueError('Expected %d columns, found %d' % (ncols, len(row))) x, y = row[0].split() lon_lat = valid.longitude(x), valid.latitude(y) if lon_lat in lon_lats: raise DuplicatedPoint(lon_lat) lon_lats.add(lon_lat) for i, imt_ in enumerate(imtls, 1): values = valid.decreasing_probabilities(row[i]) if len(values) != len(imtls[imt_]): raise ValueError('Found %d values, expected %d' % (len(values), len(imtls[imt_]))) data += {imt_: [numpy.array(values)]} except (ValueError, DuplicatedPoint) as err: raise err.__class__('%s: file %s, line %d' % (err, csvfile, line)) lons, lats = zip(*sorted(lon_lats)) mesh = geo.Mesh(numpy.array(lons), numpy.array(lats)) return mesh, {imt: numpy.array(lst) for imt, lst in data.items()}
[ "def", "get_mesh_hcurves", "(", "oqparam", ")", ":", "imtls", "=", "oqparam", ".", "imtls", "lon_lats", "=", "set", "(", ")", "data", "=", "AccumDict", "(", ")", "# imt -> list of arrays", "ncols", "=", "len", "(", "imtls", ")", "+", "1", "# lon_lat + curve_per_imt ...", "csvfile", "=", "oqparam", ".", "inputs", "[", "'hazard_curves'", "]", "for", "line", ",", "row", "in", "enumerate", "(", "csv", ".", "reader", "(", "csvfile", ")", ",", "1", ")", ":", "try", ":", "if", "len", "(", "row", ")", "!=", "ncols", ":", "raise", "ValueError", "(", "'Expected %d columns, found %d'", "%", "ncols", ",", "len", "(", "row", ")", ")", "x", ",", "y", "=", "row", "[", "0", "]", ".", "split", "(", ")", "lon_lat", "=", "valid", ".", "longitude", "(", "x", ")", ",", "valid", ".", "latitude", "(", "y", ")", "if", "lon_lat", "in", "lon_lats", ":", "raise", "DuplicatedPoint", "(", "lon_lat", ")", "lon_lats", ".", "add", "(", "lon_lat", ")", "for", "i", ",", "imt_", "in", "enumerate", "(", "imtls", ",", "1", ")", ":", "values", "=", "valid", ".", "decreasing_probabilities", "(", "row", "[", "i", "]", ")", "if", "len", "(", "values", ")", "!=", "len", "(", "imtls", "[", "imt_", "]", ")", ":", "raise", "ValueError", "(", "'Found %d values, expected %d'", "%", "(", "len", "(", "values", ")", ",", "len", "(", "imtls", "(", "[", "imt_", "]", ")", ")", ")", ")", "data", "+=", "{", "imt_", ":", "[", "numpy", ".", "array", "(", "values", ")", "]", "}", "except", "(", "ValueError", ",", "DuplicatedPoint", ")", "as", "err", ":", "raise", "err", ".", "__class__", "(", "'%s: file %s, line %d'", "%", "(", "err", ",", "csvfile", ",", "line", ")", ")", "lons", ",", "lats", "=", "zip", "(", "*", "sorted", "(", "lon_lats", ")", ")", "mesh", "=", "geo", ".", "Mesh", "(", "numpy", ".", "array", "(", "lons", ")", ",", "numpy", ".", "array", "(", "lats", ")", ")", "return", "mesh", ",", "{", "imt", ":", "numpy", ".", "array", "(", "lst", ")", "for", "imt", ",", "lst", "in", "data", ".", "items", "(", ")", "}" ]
Read CSV data in the format `lon lat, v1-vN, w1-wN, ...`. :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :returns: the mesh of points and the data as a dictionary imt -> array of curves for each site
[ "Read", "CSV", "data", "in", "the", "format", "lon", "lat", "v1", "-", "vN", "w1", "-", "wN", "...", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L1212-L1247
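To make the `lon lat, v1-vN, ...` layout concrete, here is a hedged sketch of parsing a single row the way get_mesh_hcurves does; the coordinates and probability values are made up.

import csv
import io

# one site, two IMTs, three probability levels each (invented values)
csvfile = io.StringIO('10.0 45.0,0.9 0.5 0.1,0.8 0.4 0.05\n')
for row in csv.reader(csvfile):
    x, y = row[0].split()                      # 'lon lat' in the first column
    pga = [float(v) for v in row[1].split()]   # must be decreasing
    sa = [float(v) for v in row[2].split()]
    print((float(x), float(y)), pga, sa)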
gem/oq-engine
openquake/commonlib/readinput.py
reduce_source_model
def reduce_source_model(smlt_file, source_ids, remove=True): """ Extract sources from the composite source model """ found = 0 to_remove = [] for paths in logictree.collect_info(smlt_file).smpaths.values(): for path in paths: logging.info('Reading %s', path) root = nrml.read(path) model = Node('sourceModel', root[0].attrib) origmodel = root[0] if root['xmlns'] == 'http://openquake.org/xmlns/nrml/0.4': for src_node in origmodel: if src_node['id'] in source_ids: model.nodes.append(src_node) else: # nrml/0.5 for src_group in origmodel: sg = copy.copy(src_group) sg.nodes = [] weights = src_group.get('srcs_weights') if weights: assert len(weights) == len(src_group.nodes) else: weights = [1] * len(src_group.nodes) src_group['srcs_weights'] = reduced_weigths = [] for src_node, weight in zip(src_group, weights): if src_node['id'] in source_ids: found += 1 sg.nodes.append(src_node) reduced_weigths.append(weight) if sg.nodes: model.nodes.append(sg) shutil.copy(path, path + '.bak') if model: with open(path, 'wb') as f: nrml.write([model], f, xmlns=root['xmlns']) elif remove: # remove the files completely reduced to_remove.append(path) if found: for path in to_remove: os.remove(path)
python
def reduce_source_model(smlt_file, source_ids, remove=True): found = 0 to_remove = [] for paths in logictree.collect_info(smlt_file).smpaths.values(): for path in paths: logging.info('Reading %s', path) root = nrml.read(path) model = Node('sourceModel', root[0].attrib) origmodel = root[0] if root['xmlns'] == 'http://openquake.org/xmlns/nrml/0.4': for src_node in origmodel: if src_node['id'] in source_ids: model.nodes.append(src_node) else: for src_group in origmodel: sg = copy.copy(src_group) sg.nodes = [] weights = src_group.get('srcs_weights') if weights: assert len(weights) == len(src_group.nodes) else: weights = [1] * len(src_group.nodes) src_group['srcs_weights'] = reduced_weigths = [] for src_node, weight in zip(src_group, weights): if src_node['id'] in source_ids: found += 1 sg.nodes.append(src_node) reduced_weigths.append(weight) if sg.nodes: model.nodes.append(sg) shutil.copy(path, path + '.bak') if model: with open(path, 'wb') as f: nrml.write([model], f, xmlns=root['xmlns']) elif remove: to_remove.append(path) if found: for path in to_remove: os.remove(path)
[ "def", "reduce_source_model", "(", "smlt_file", ",", "source_ids", ",", "remove", "=", "True", ")", ":", "found", "=", "0", "to_remove", "=", "[", "]", "for", "paths", "in", "logictree", ".", "collect_info", "(", "smlt_file", ")", ".", "smpaths", ".", "values", "(", ")", ":", "for", "path", "in", "paths", ":", "logging", ".", "info", "(", "'Reading %s'", ",", "path", ")", "root", "=", "nrml", ".", "read", "(", "path", ")", "model", "=", "Node", "(", "'sourceModel'", ",", "root", "[", "0", "]", ".", "attrib", ")", "origmodel", "=", "root", "[", "0", "]", "if", "root", "[", "'xmlns'", "]", "==", "'http://openquake.org/xmlns/nrml/0.4'", ":", "for", "src_node", "in", "origmodel", ":", "if", "src_node", "[", "'id'", "]", "in", "source_ids", ":", "model", ".", "nodes", ".", "append", "(", "src_node", ")", "else", ":", "# nrml/0.5", "for", "src_group", "in", "origmodel", ":", "sg", "=", "copy", ".", "copy", "(", "src_group", ")", "sg", ".", "nodes", "=", "[", "]", "weights", "=", "src_group", ".", "get", "(", "'srcs_weights'", ")", "if", "weights", ":", "assert", "len", "(", "weights", ")", "==", "len", "(", "src_group", ".", "nodes", ")", "else", ":", "weights", "=", "[", "1", "]", "*", "len", "(", "src_group", ".", "nodes", ")", "src_group", "[", "'srcs_weights'", "]", "=", "reduced_weigths", "=", "[", "]", "for", "src_node", ",", "weight", "in", "zip", "(", "src_group", ",", "weights", ")", ":", "if", "src_node", "[", "'id'", "]", "in", "source_ids", ":", "found", "+=", "1", "sg", ".", "nodes", ".", "append", "(", "src_node", ")", "reduced_weigths", ".", "append", "(", "weight", ")", "if", "sg", ".", "nodes", ":", "model", ".", "nodes", ".", "append", "(", "sg", ")", "shutil", ".", "copy", "(", "path", ",", "path", "+", "'.bak'", ")", "if", "model", ":", "with", "open", "(", "path", ",", "'wb'", ")", "as", "f", ":", "nrml", ".", "write", "(", "[", "model", "]", ",", "f", ",", "xmlns", "=", "root", "[", "'xmlns'", "]", ")", "elif", "remove", ":", "# remove the files completely reduced", "to_remove", ".", "append", "(", "path", ")", "if", "found", ":", "for", "path", "in", "to_remove", ":", "os", ".", "remove", "(", "path", ")" ]
Extract sources from the composite source model
[ "Extract", "sources", "from", "the", "composite", "source", "model" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L1251-L1292
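The nrml/0.5 branch of reduce_source_model keeps srcs_weights aligned with the surviving sources; reduced to plain lists, the bookkeeping looks like this (the ids and weights are hypothetical).

source_ids = {'src-A', 'src-C'}                # ids to keep, hypothetical
nodes = [('src-A', 0.5), ('src-B', 0.3), ('src-C', 0.2)]

kept, reduced_weights = [], []
for src_id, weight in nodes:
    if src_id in source_ids:
        kept.append(src_id)
        reduced_weights.append(weight)         # weight survives with its source
print(kept, reduced_weights)                   # ['src-A', 'src-C'] [0.5, 0.2]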
gem/oq-engine
openquake/commonlib/readinput.py
get_input_files
def get_input_files(oqparam, hazard=False): """ :param oqparam: an OqParam instance :param hazard: if True, consider only the hazard files :returns: input path names in a specific order """ fnames = [] # files entering in the checksum for key in oqparam.inputs: fname = oqparam.inputs[key] if hazard and key not in ('site_model', 'source_model_logic_tree', 'gsim_logic_tree', 'source'): continue # collect .hdf5 tables for the GSIMs, if any elif key == 'gsim_logic_tree': gsim_lt = get_gsim_lt(oqparam) for gsims in gsim_lt.values.values(): for gsim in gsims: table = getattr(gsim, 'GMPE_TABLE', None) if table: fnames.append(table) fnames.append(fname) elif key == 'source_model': # UCERF f = oqparam.inputs['source_model'] fnames.append(f) fname = nrml.read(f).sourceModel.UCERFSource['filename'] fnames.append(os.path.join(os.path.dirname(f), fname)) elif key == 'exposure': # fname is a list for exp in asset.Exposure.read_headers(fname): fnames.extend(exp.datafiles) fnames.extend(fname) elif isinstance(fname, dict): fnames.extend(fname.values()) elif isinstance(fname, list): for f in fname: if f == oqparam.input_dir: raise InvalidFile('%s there is an empty path in %s' % (oqparam.inputs['job_ini'], key)) fnames.extend(fname) elif key == 'source_model_logic_tree': for smpaths in logictree.collect_info(fname).smpaths.values(): fnames.extend(smpaths) fnames.append(fname) else: fnames.append(fname) return sorted(fnames)
python
def get_input_files(oqparam, hazard=False): fnames = [] for key in oqparam.inputs: fname = oqparam.inputs[key] if hazard and key not in ('site_model', 'source_model_logic_tree', 'gsim_logic_tree', 'source'): continue elif key == 'gsim_logic_tree': gsim_lt = get_gsim_lt(oqparam) for gsims in gsim_lt.values.values(): for gsim in gsims: table = getattr(gsim, 'GMPE_TABLE', None) if table: fnames.append(table) fnames.append(fname) elif key == 'source_model': f = oqparam.inputs['source_model'] fnames.append(f) fname = nrml.read(f).sourceModel.UCERFSource['filename'] fnames.append(os.path.join(os.path.dirname(f), fname)) elif key == 'exposure': for exp in asset.Exposure.read_headers(fname): fnames.extend(exp.datafiles) fnames.extend(fname) elif isinstance(fname, dict): fnames.extend(fname.values()) elif isinstance(fname, list): for f in fname: if f == oqparam.input_dir: raise InvalidFile('%s there is an empty path in %s' % (oqparam.inputs['job_ini'], key)) fnames.extend(fname) elif key == 'source_model_logic_tree': for smpaths in logictree.collect_info(fname).smpaths.values(): fnames.extend(smpaths) fnames.append(fname) else: fnames.append(fname) return sorted(fnames)
[ "def", "get_input_files", "(", "oqparam", ",", "hazard", "=", "False", ")", ":", "fnames", "=", "[", "]", "# files entering in the checksum", "for", "key", "in", "oqparam", ".", "inputs", ":", "fname", "=", "oqparam", ".", "inputs", "[", "key", "]", "if", "hazard", "and", "key", "not", "in", "(", "'site_model'", ",", "'source_model_logic_tree'", ",", "'gsim_logic_tree'", ",", "'source'", ")", ":", "continue", "# collect .hdf5 tables for the GSIMs, if any", "elif", "key", "==", "'gsim_logic_tree'", ":", "gsim_lt", "=", "get_gsim_lt", "(", "oqparam", ")", "for", "gsims", "in", "gsim_lt", ".", "values", ".", "values", "(", ")", ":", "for", "gsim", "in", "gsims", ":", "table", "=", "getattr", "(", "gsim", ",", "'GMPE_TABLE'", ",", "None", ")", "if", "table", ":", "fnames", ".", "append", "(", "table", ")", "fnames", ".", "append", "(", "fname", ")", "elif", "key", "==", "'source_model'", ":", "# UCERF", "f", "=", "oqparam", ".", "inputs", "[", "'source_model'", "]", "fnames", ".", "append", "(", "f", ")", "fname", "=", "nrml", ".", "read", "(", "f", ")", ".", "sourceModel", ".", "UCERFSource", "[", "'filename'", "]", "fnames", ".", "append", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "f", ")", ",", "fname", ")", ")", "elif", "key", "==", "'exposure'", ":", "# fname is a list", "for", "exp", "in", "asset", ".", "Exposure", ".", "read_headers", "(", "fname", ")", ":", "fnames", ".", "extend", "(", "exp", ".", "datafiles", ")", "fnames", ".", "extend", "(", "fname", ")", "elif", "isinstance", "(", "fname", ",", "dict", ")", ":", "fnames", ".", "extend", "(", "fname", ".", "values", "(", ")", ")", "elif", "isinstance", "(", "fname", ",", "list", ")", ":", "for", "f", "in", "fname", ":", "if", "f", "==", "oqparam", ".", "input_dir", ":", "raise", "InvalidFile", "(", "'%s there is an empty path in %s'", "%", "(", "oqparam", ".", "inputs", "[", "'job_ini'", "]", ",", "key", ")", ")", "fnames", ".", "extend", "(", "fname", ")", "elif", "key", "==", "'source_model_logic_tree'", ":", "for", "smpaths", "in", "logictree", ".", "collect_info", "(", "fname", ")", ".", "smpaths", ".", "values", "(", ")", ":", "fnames", ".", "extend", "(", "smpaths", ")", "fnames", ".", "append", "(", "fname", ")", "else", ":", "fnames", ".", "append", "(", "fname", ")", "return", "sorted", "(", "fnames", ")" ]
:param oqparam: an OqParam instance :param hazard: if True, consider only the hazard files :returns: input path names in a specific order
[ ":", "param", "oqparam", ":", "an", "OqParam", "instance", ":", "param", "hazard", ":", "if", "True", "consider", "only", "the", "hazard", "files", ":", "returns", ":", "input", "path", "names", "in", "a", "specific", "order" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L1328-L1372
gem/oq-engine
openquake/commonlib/readinput.py
get_checksum32
def get_checksum32(oqparam, hazard=False): """ Build an unsigned 32 bit integer from the input files of a calculation. :param oqparam: an OqParam instance :param hazard: if True, consider only the hazard files :returns: the checksum """ # NB: using adler32 & 0xffffffff is the documented way to get a checksum # which is the same between Python 2 and Python 3 checksum = 0 for fname in get_input_files(oqparam, hazard): checksum = _checksum(fname, checksum) if hazard: hazard_params = [] for key, val in vars(oqparam).items(): if key in ('rupture_mesh_spacing', 'complex_fault_mesh_spacing', 'width_of_mfd_bin', 'area_source_discretization', 'random_seed', 'ses_seed', 'truncation_level', 'maximum_distance', 'investigation_time', 'number_of_logic_tree_samples', 'imtls', 'ses_per_logic_tree_path', 'minimum_magnitude', 'prefilter_sources', 'sites', 'pointsource_distance', 'filter_distance'): hazard_params.append('%s = %s' % (key, val)) data = '\n'.join(hazard_params).encode('utf8') checksum = zlib.adler32(data, checksum) & 0xffffffff return checksum
python
def get_checksum32(oqparam, hazard=False): checksum = 0 for fname in get_input_files(oqparam, hazard): checksum = _checksum(fname, checksum) if hazard: hazard_params = [] for key, val in vars(oqparam).items(): if key in ('rupture_mesh_spacing', 'complex_fault_mesh_spacing', 'width_of_mfd_bin', 'area_source_discretization', 'random_seed', 'ses_seed', 'truncation_level', 'maximum_distance', 'investigation_time', 'number_of_logic_tree_samples', 'imtls', 'ses_per_logic_tree_path', 'minimum_magnitude', 'prefilter_sources', 'sites', 'pointsource_distance', 'filter_distance'): hazard_params.append('%s = %s' % (key, val)) data = '\n'.join(hazard_params).encode('utf8') checksum = zlib.adler32(data, checksum) & 0xffffffff return checksum
[ "def", "get_checksum32", "(", "oqparam", ",", "hazard", "=", "False", ")", ":", "# NB: using adler32 & 0xffffffff is the documented way to get a checksum", "# which is the same between Python 2 and Python 3", "checksum", "=", "0", "for", "fname", "in", "get_input_files", "(", "oqparam", ",", "hazard", ")", ":", "checksum", "=", "_checksum", "(", "fname", ",", "checksum", ")", "if", "hazard", ":", "hazard_params", "=", "[", "]", "for", "key", ",", "val", "in", "vars", "(", "oqparam", ")", ".", "items", "(", ")", ":", "if", "key", "in", "(", "'rupture_mesh_spacing'", ",", "'complex_fault_mesh_spacing'", ",", "'width_of_mfd_bin'", ",", "'area_source_discretization'", ",", "'random_seed'", ",", "'ses_seed'", ",", "'truncation_level'", ",", "'maximum_distance'", ",", "'investigation_time'", ",", "'number_of_logic_tree_samples'", ",", "'imtls'", ",", "'ses_per_logic_tree_path'", ",", "'minimum_magnitude'", ",", "'prefilter_sources'", ",", "'sites'", ",", "'pointsource_distance'", ",", "'filter_distance'", ")", ":", "hazard_params", ".", "append", "(", "'%s = %s'", "%", "(", "key", ",", "val", ")", ")", "data", "=", "'\\n'", ".", "join", "(", "hazard_params", ")", ".", "encode", "(", "'utf8'", ")", "checksum", "=", "zlib", ".", "adler32", "(", "data", ",", "checksum", ")", "&", "0xffffffff", "return", "checksum" ]
Build an unsigned 32 bit integer from the input files of a calculation. :param oqparam: an OqParam instance :param hazard: if True, consider only the hazard files :returns: the checksum
[ "Build", "an", "unsigned", "32", "bit", "integer", "from", "the", "input", "files", "of", "a", "calculation", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L1388-L1415
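The adler32-and-mask idiom in get_checksum32, isolated on toy data: passing the previous value back in chains the checksum across inputs, and the 0xffffffff mask pins it to an unsigned 32-bit integer on both Python 2 and Python 3.

import zlib

checksum = 0
for chunk in (b'first file contents', b'second file contents'):
    checksum = zlib.adler32(chunk, checksum) & 0xffffffff
print(checksum)   # stable across runs and Python versions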
gem/oq-engine
openquake/commands/dump.py
smart_save
def smart_save(dbpath, archive, calc_id): """ Make a copy of the db, remove the incomplete jobs and add the copy to the archive """ tmpdir = tempfile.mkdtemp() newdb = os.path.join(tmpdir, os.path.basename(dbpath)) shutil.copy(dbpath, newdb) try: with sqlite3.connect(newdb) as conn: conn.execute('DELETE FROM job WHERE status != "complete"') if calc_id: conn.execute('DELETE FROM job WHERE id != %d' % calc_id) except: safeprint('Please check the copy of the db in %s' % newdb) raise zipfiles([newdb], archive, 'a', safeprint) shutil.rmtree(tmpdir)
python
def smart_save(dbpath, archive, calc_id): tmpdir = tempfile.mkdtemp() newdb = os.path.join(tmpdir, os.path.basename(dbpath)) shutil.copy(dbpath, newdb) try: with sqlite3.connect(newdb) as conn: conn.execute('DELETE FROM job WHERE status != "complete"') if calc_id: conn.execute('DELETE FROM job WHERE id != %d' % calc_id) except: safeprint('Please check the copy of the db in %s' % newdb) raise zipfiles([newdb], archive, 'a', safeprint) shutil.rmtree(tmpdir)
[ "def", "smart_save", "(", "dbpath", ",", "archive", ",", "calc_id", ")", ":", "tmpdir", "=", "tempfile", ".", "mkdtemp", "(", ")", "newdb", "=", "os", ".", "path", ".", "join", "(", "tmpdir", ",", "os", ".", "path", ".", "basename", "(", "dbpath", ")", ")", "shutil", ".", "copy", "(", "dbpath", ",", "newdb", ")", "try", ":", "with", "sqlite3", ".", "connect", "(", "newdb", ")", "as", "conn", ":", "conn", ".", "execute", "(", "'DELETE FROM job WHERE status != \"complete\"'", ")", "if", "calc_id", ":", "conn", ".", "execute", "(", "'DELETE FROM job WHERE id != %d'", "%", "calc_id", ")", "except", ":", "safeprint", "(", "'Please check the copy of the db in %s'", "%", "newdb", ")", "raise", "zipfiles", "(", "[", "newdb", "]", ",", "archive", ",", "'a'", ",", "safeprint", ")", "shutil", ".", "rmtree", "(", "tmpdir", ")" ]
Make a copy of the db, remove the incomplete jobs and add the copy to the archive
[ "Make", "a", "copy", "of", "the", "db", "remove", "the", "incomplete", "jobs", "and", "add", "the", "copy", "to", "the", "archive" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/dump.py#L28-L45
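A minimal sketch of the copy-then-prune pattern behind smart_save, run against a throwaway database; the two-column job table here is an illustrative stand-in for the real schema.

import os
import shutil
import sqlite3
import tempfile

tmpdir = tempfile.mkdtemp()
src = os.path.join(tmpdir, 'db.sqlite3')
with sqlite3.connect(src) as conn:
    conn.execute('CREATE TABLE job (id INTEGER, status TEXT)')
    conn.executemany('INSERT INTO job VALUES (?, ?)',
                     [(1, 'complete'), (2, 'executing')])

dst = os.path.join(tmpdir, 'copy.sqlite3')
shutil.copy(src, dst)                  # work on a copy, never the live db
with sqlite3.connect(dst) as conn:
    conn.execute('DELETE FROM job WHERE status != "complete"')
    print(conn.execute('SELECT * FROM job').fetchall())   # [(1, 'complete')]
shutil.rmtree(tmpdir)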
gem/oq-engine
openquake/commands/dump.py
dump
def dump(archive, calc_id=0, user=None): """ Dump the openquake database and all the complete calculations into a zip file. In a multiuser installation it must be run as administrator. """ t0 = time.time() assert archive.endswith('.zip'), archive getfnames = 'select ds_calc_dir || ".hdf5" from job where ?A' param = dict(status='complete') if calc_id: param['id'] = calc_id if user: param['user_name'] = user fnames = [f for f, in db(getfnames, param) if os.path.exists(f)] zipfiles(fnames, archive, 'w', safeprint) pending_jobs = db('select id, status, description from job ' 'where status="executing"') if pending_jobs: safeprint('WARNING: there were calculations executing during the dump,' ' they have not been copied') for job_id, status, descr in pending_jobs: safeprint('%d %s %s' % (job_id, status, descr)) # this also checks that the copied db is not corrupted smart_save(db.path, archive, calc_id) dt = time.time() - t0 safeprint('Archived %d calculations into %s in %d seconds' % (len(fnames), archive, dt))
python
def dump(archive, calc_id=0, user=None): t0 = time.time() assert archive.endswith('.zip'), archive getfnames = 'select ds_calc_dir || ".hdf5" from job where ?A' param = dict(status='complete') if calc_id: param['id'] = calc_id if user: param['user_name'] = user fnames = [f for f, in db(getfnames, param) if os.path.exists(f)] zipfiles(fnames, archive, 'w', safeprint) pending_jobs = db('select id, status, description from job ' 'where status="executing"') if pending_jobs: safeprint('WARNING: there were calculations executing during the dump,' ' they have not been copied') for job_id, status, descr in pending_jobs: safeprint('%d %s %s' % (job_id, status, descr)) smart_save(db.path, archive, calc_id) dt = time.time() - t0 safeprint('Archived %d calculations into %s in %d seconds' % (len(fnames), archive, dt))
[ "def", "dump", "(", "archive", ",", "calc_id", "=", "0", ",", "user", "=", "None", ")", ":", "t0", "=", "time", ".", "time", "(", ")", "assert", "archive", ".", "endswith", "(", "'.zip'", ")", ",", "archive", "getfnames", "=", "'select ds_calc_dir || \".hdf5\" from job where ?A'", "param", "=", "dict", "(", "status", "=", "'complete'", ")", "if", "calc_id", ":", "param", "[", "'id'", "]", "=", "calc_id", "if", "user", ":", "param", "[", "'user_name'", "]", "=", "user", "fnames", "=", "[", "f", "for", "f", ",", "in", "db", "(", "getfnames", ",", "param", ")", "if", "os", ".", "path", ".", "exists", "(", "f", ")", "]", "zipfiles", "(", "fnames", ",", "archive", ",", "'w'", ",", "safeprint", ")", "pending_jobs", "=", "db", "(", "'select id, status, description from job '", "'where status=\"executing\"'", ")", "if", "pending_jobs", ":", "safeprint", "(", "'WARNING: there were calculations executing during the dump,'", "' they have been not copied'", ")", "for", "job_id", ",", "status", ",", "descr", "in", "pending_jobs", ":", "safeprint", "(", "'%d %s %s'", "%", "(", "job_id", ",", "status", ",", "descr", ")", ")", "# this also checks that the copied db is not corrupted", "smart_save", "(", "db", ".", "path", ",", "archive", ",", "calc_id", ")", "dt", "=", "time", ".", "time", "(", ")", "-", "t0", "safeprint", "(", "'Archived %d calculations into %s in %d seconds'", "%", "(", "len", "(", "fnames", ")", ",", "archive", ",", "dt", ")", ")" ]
Dump the openquake database and all the complete calculations into a zip file. In a multiuser installation it must be run as administrator.
[ "Dump", "the", "openquake", "database", "and", "all", "the", "complete", "calculations", "into", "a", "zip", "file", ".", "In", "a", "multiuser", "installation", "must", "be", "run", "as", "administrator", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/dump.py#L49-L77
gem/oq-engine
openquake/hmtk/faults/mfd/anderson_luco_area_mmax.py
Type1RecurrenceModel.cumulative_value
def cumulative_value(self, slip, mmax, mag_value, bbar, dbar, beta): ''' Returns the rate of events with M > mag_value :param float slip: Slip rate in mm/yr :param float mmax: Maximum magnitude :param float mag_value: Magnitude value :param float bbar: \bar{b} parameter (effectively = b * log(10.)) :param float dbar: \bar{d} parameter :param float beta: Beta value of formula defined in Eq. 20 of Anderson & Luco (1983) ''' delta_m = (mmax - mag_value) a_1 = self._get_a1_value(bbar, dbar, slip / 10., beta, mmax) return a_1 * np.exp(bbar * delta_m) * (delta_m > 0.0)
python
def cumulative_value(self, slip, mmax, mag_value, bbar, dbar, beta): delta_m = (mmax - mag_value) a_1 = self._get_a1_value(bbar, dbar, slip / 10., beta, mmax) return a_1 * np.exp(bbar * delta_m) * (delta_m > 0.0)
[ "def", "cumulative_value", "(", "self", ",", "slip", ",", "mmax", ",", "mag_value", ",", "bbar", ",", "dbar", ",", "beta", ")", ":", "delta_m", "=", "(", "mmax", "-", "mag_value", ")", "a_1", "=", "self", ".", "_get_a1_value", "(", "bbar", ",", "dbar", ",", "slip", "/", "10.", ",", "beta", ",", "mmax", ")", "return", "a_1", "*", "np", ".", "exp", "(", "bbar", "*", "delta_m", ")", "*", "(", "delta_m", ">", "0.0", ")" ]
Returns the rate of events with M > mag_value :param float slip: Slip rate in mm/yr :param float mmax: Maximum magnitude :param float mag_value: Magnitude value :param float bbar: \bar{b} parameter (effectively = b * log(10.)) :param float dbar: \bar{d} parameter :param float beta: Beta value of formula defined in Eq. 20 of Anderson & Luco (1983)
[ "Returns", "the", "rate", "of", "events", "with", "M", ">", "mag_value" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_area_mmax.py#L83-L102
gem/oq-engine
openquake/hmtk/faults/mfd/anderson_luco_area_mmax.py
Type1RecurrenceModel._get_a1_value
def _get_a1_value(bbar, dbar, slip, beta, mmax): """ Returns the A1 value defined in I.9 (Table 2) """ return ((dbar - bbar) / dbar) * (slip / beta) *\ np.exp(-(dbar / 2.) * mmax)
python
def _get_a1_value(bbar, dbar, slip, beta, mmax): return ((dbar - bbar) / dbar) * (slip / beta) *\ np.exp(-(dbar / 2.) * mmax)
[ "def", "_get_a1_value", "(", "bbar", ",", "dbar", ",", "slip", ",", "beta", ",", "mmax", ")", ":", "return", "(", "(", "dbar", "-", "bbar", ")", "/", "dbar", ")", "*", "(", "slip", "/", "beta", ")", "*", "np", ".", "exp", "(", "-", "(", "dbar", "/", "2.", ")", "*", "mmax", ")" ]
Returns the A1 value defined in I.9 (Table 2)
[ "Returns", "the", "A1", "value", "defined", "in", "I", ".", "9", "(", "Table", "2", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_area_mmax.py#L105-L110
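Written out as math, the Type 1 recurrence above evaluates to the following (a reading of the two methods, with symbols matching the docstrings; cross-check against Anderson & Luco, 1983, Table 2 before relying on it — here \dot{s} is the slip rate in cm/yr, hence the slip/10 conversion in the caller):

N(M > m) = A_1 \, e^{\bar{b}\,(m_{\max} - m)} \quad \text{for } m \le m_{\max},
\qquad
A_1 = \frac{\bar{d} - \bar{b}}{\bar{d}} \cdot \frac{\dot{s}}{\beta} \, e^{-\bar{d}\, m_{\max}/2}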
gem/oq-engine
openquake/hmtk/faults/mfd/anderson_luco_area_mmax.py
Type2RecurrenceModel.cumulative_value
def cumulative_value(self, slip, mmax, mag_value, bbar, dbar, beta): ''' Returns the rate of events with M > mag_value :param float slip: Slip rate in mm/yr :param float mmax: Maximum magnitude :param float mag_value: Magnitude value :param float bbar: \bar{b} parameter (effectively = b * log(10.)) :param float dbar: \bar{d} parameter :param float beta: Beta value of formula defined in Eq. 20 of Anderson & Luco (1983) ''' delta_m = mmax - mag_value a_2 = self._get_a2_value(bbar, dbar, slip / 10., beta, mmax) return a_2 * (np.exp(bbar * delta_m) - 1.0) * (delta_m > 0.0)
python
def cumulative_value(self, slip, mmax, mag_value, bbar, dbar, beta): delta_m = mmax - mag_value a_2 = self._get_a2_value(bbar, dbar, slip / 10., beta, mmax) return a_2 * (np.exp(bbar * delta_m) - 1.0) * (delta_m > 0.0)
[ "def", "cumulative_value", "(", "self", ",", "slip", ",", "mmax", ",", "mag_value", ",", "bbar", ",", "dbar", ",", "beta", ")", ":", "delta_m", "=", "mmax", "-", "mag_value", "a_2", "=", "self", ".", "_get_a2_value", "(", "bbar", ",", "dbar", ",", "slip", "/", "10.", ",", "beta", ",", "mmax", ")", "return", "a_2", "*", "(", "np", ".", "exp", "(", "bbar", "*", "delta_m", ")", "-", "1.0", ")", "*", "(", "delta_m", ">", "0.0", ")" ]
Returns the rate of events with M > mag_value :param float slip: Slip rate in mm/yr :param float mmax: Maximum magnitude :param float mag_value: Magnitude value :param float bbar: \bar{b} parameter (effectively = b * log(10.)) :param float dbar: \bar{d} parameter :param float beta: Beta value of formula defined in Eq. 20 of Anderson & Luco (1983)
[ "Returns", "the", "rate", "of", "events", "with", "M", ">", "mag_value" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_area_mmax.py#L119-L138
gem/oq-engine
openquake/hmtk/faults/mfd/anderson_luco_area_mmax.py
Type2RecurrenceModel._get_a2_value
def _get_a2_value(bbar, dbar, slip, beta, mmax): """ Returns the A2 value defined in II.8 (Table 3) """ return ((dbar - bbar) / bbar) * (slip / beta) *\ np.exp(-(dbar / 2.) * mmax)
python
def _get_a2_value(bbar, dbar, slip, beta, mmax): return ((dbar - bbar) / bbar) * (slip / beta) *\ np.exp(-(dbar / 2.) * mmax)
[ "def", "_get_a2_value", "(", "bbar", ",", "dbar", ",", "slip", ",", "beta", ",", "mmax", ")", ":", "return", "(", "(", "dbar", "-", "bbar", ")", "/", "bbar", ")", "*", "(", "slip", "/", "beta", ")", "*", "np", ".", "exp", "(", "-", "(", "dbar", "/", "2.", ")", "*", "mmax", ")" ]
Returns the A2 value defined in II.8 (Table 3)
[ "Returns", "the", "A2", "value", "defined", "in", "II", ".", "8", "(", "Table", "3", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_area_mmax.py#L141-L146
gem/oq-engine
openquake/hmtk/faults/mfd/anderson_luco_area_mmax.py
Type3RecurrenceModel.cumulative_value
def cumulative_value(self, slip, mmax, mag_value, bbar, dbar, beta): ''' Returns the rate of events with M > mag_value :param float slip: Slip rate in mm/yr :param float mmax: Maximum magnitude :param float mag_value: Magnitude value :param float bbar: \bar{b} parameter (effectively = b * log(10.)) :param float dbar: \bar{d} parameter :param float beta: Beta value of formula defined in Eq. 20 of Anderson & Luco (1983) ''' delta_m = mmax - mag_value a_3 = self._get_a3_value(bbar, dbar, slip / 10., beta, mmax) central_term = np.exp(bbar * delta_m) - 1.0 - (bbar * delta_m) return a_3 * central_term * (delta_m > 0.0)
python
def cumulative_value(self, slip, mmax, mag_value, bbar, dbar, beta): delta_m = mmax - mag_value a_3 = self._get_a3_value(bbar, dbar, slip / 10., beta, mmax) central_term = np.exp(bbar * delta_m) - 1.0 - (bbar * delta_m) return a_3 * central_term * (delta_m > 0.0)
[ "def", "cumulative_value", "(", "self", ",", "slip", ",", "mmax", ",", "mag_value", ",", "bbar", ",", "dbar", ",", "beta", ")", ":", "delta_m", "=", "mmax", "-", "mag_value", "a_3", "=", "self", ".", "_get_a3_value", "(", "bbar", ",", "dbar", ",", "slip", "/", "10.", ",", "beta", ",", "mmax", ")", "central_term", "=", "np", ".", "exp", "(", "bbar", "*", "delta_m", ")", "-", "1.0", "-", "(", "bbar", "*", "delta_m", ")", "return", "a_3", "*", "central_term", "*", "(", "delta_m", ">", "0.0", ")" ]
Returns the rate of events with M > mag_value :param float slip: Slip rate in mm/yr :param float mmax: Maximum magnitude :param float mag_value: Magnitude value :param float bbar: \bar{b} parameter (effectively = b * log(10.)) :param float dbar: \bar{d} parameter :param float beta: Beta value of formula defined in Eq. 20 of Anderson & Luco (1983)
[ "Returns", "the", "rate", "of", "events", "with", "M", ">", "mag_value" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_area_mmax.py#L155-L175
gem/oq-engine
openquake/hmtk/faults/mfd/anderson_luco_area_mmax.py
Type3RecurrenceModel._get_a3_value
def _get_a3_value(bbar, dbar, slip, beta, mmax): """ Returns the A3 value defined in III.4 (Table 4) """ return (dbar * (dbar - bbar) / (bbar ** 2.)) * (slip / beta) *\ np.exp(-(dbar / 2.) * mmax)
python
def _get_a3_value(bbar, dbar, slip, beta, mmax): return (dbar * (dbar - bbar) / (bbar ** 2.)) * (slip / beta) *\ np.exp(-(dbar / 2.) * mmax)
[ "def", "_get_a3_value", "(", "bbar", ",", "dbar", ",", "slip", ",", "beta", ",", "mmax", ")", ":", "return", "(", "dbar", "*", "(", "dbar", "-", "bbar", ")", "/", "(", "bbar", "**", "2.", ")", ")", "*", "(", "slip", "/", "beta", ")", "*", "np", ".", "exp", "(", "-", "(", "dbar", "/", "2.", ")", "*", "mmax", ")" ]
Returns the A3 value defined in III.4 (Table 4)
[ "Returns", "the", "A3", "value", "defined", "in", "III", ".", "4", "(", "Table", "4", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_area_mmax.py#L178-L183
gem/oq-engine
openquake/hmtk/faults/mfd/anderson_luco_area_mmax.py
AndersonLucoAreaMmax.get_mfd
def get_mfd(self, slip, fault_width, shear_modulus=30.0, disp_length_ratio=1.25E-5): ''' Calculates activity rate on the fault :param float slip: Slip rate in mm/yr :param fault_width: Width of the fault (km) :param float shear_modulus: Shear modulus of the fault (GPa) :param float disp_length_ratio: Displacement to length ratio (dimensionless) :returns: * Minimum Magnitude (float) * Bin width (float) * Occurrence Rates (numpy.ndarray) ''' beta = np.sqrt((disp_length_ratio * (10.0 ** C_VALUE)) / ((shear_modulus * 1.0E10) * (fault_width * 1E5))) dbar = D_VALUE * np.log(10.0) bbar = self.b_value * np.log(10.0) mag = np.arange(self.mmin - (self.bin_width / 2.), self.mmax + self.bin_width, self.bin_width) if bbar > dbar: print('b-value larger than 1.5 will produce invalid results in ' 'Anderson & Luco models') self.occurrence_rate = np.nan * np.ones(len(mag) - 1) return self.mmin, self.bin_width, self.occurrence_rate self.occurrence_rate = np.zeros(len(mag) - 1, dtype=float) for ival in range(0, len(mag) - 1): self.occurrence_rate[ival] = \ RECURRENCE_MAP[self.mfd_type].cumulative_value( slip, self.mmax, mag[ival], bbar, dbar, beta) - \ RECURRENCE_MAP[self.mfd_type].cumulative_value( slip, self.mmax, mag[ival + 1], bbar, dbar, beta) if self.occurrence_rate[ival] < 0.: self.occurrence_rate[ival] = 0. return self.mmin, self.bin_width, self.occurrence_rate
python
def get_mfd(self, slip, fault_width, shear_modulus=30.0, disp_length_ratio=1.25E-5): beta = np.sqrt((disp_length_ratio * (10.0 ** C_VALUE)) / ((shear_modulus * 1.0E10) * (fault_width * 1E5))) dbar = D_VALUE * np.log(10.0) bbar = self.b_value * np.log(10.0) mag = np.arange(self.mmin - (self.bin_width / 2.), self.mmax + self.bin_width, self.bin_width) if bbar > dbar: print('b-value larger than 1.5 will produce invalid results in ' 'Anderson & Luco models') self.occurrence_rate = np.nan * np.ones(len(mag) - 1) return self.mmin, self.bin_width, self.occurrence_rate self.occurrence_rate = np.zeros(len(mag) - 1, dtype=float) for ival in range(0, len(mag) - 1): self.occurrence_rate[ival] = \ RECURRENCE_MAP[self.mfd_type].cumulative_value( slip, self.mmax, mag[ival], bbar, dbar, beta) - \ RECURRENCE_MAP[self.mfd_type].cumulative_value( slip, self.mmax, mag[ival + 1], bbar, dbar, beta) if self.occurrence_rate[ival] < 0.: self.occurrence_rate[ival] = 0. return self.mmin, self.bin_width, self.occurrence_rate
[ "def", "get_mfd", "(", "self", ",", "slip", ",", "fault_width", ",", "shear_modulus", "=", "30.0", ",", "disp_length_ratio", "=", "1.25E-5", ")", ":", "beta", "=", "np", ".", "sqrt", "(", "(", "disp_length_ratio", "*", "(", "10.0", "**", "C_VALUE", ")", ")", "/", "(", "(", "shear_modulus", "*", "1.0E10", ")", "*", "(", "fault_width", "*", "1E5", ")", ")", ")", "dbar", "=", "D_VALUE", "*", "np", ".", "log", "(", "10.0", ")", "bbar", "=", "self", ".", "b_value", "*", "np", ".", "log", "(", "10.0", ")", "mag", "=", "np", ".", "arange", "(", "self", ".", "mmin", "-", "(", "self", ".", "bin_width", "/", "2.", ")", ",", "self", ".", "mmax", "+", "self", ".", "bin_width", ",", "self", ".", "bin_width", ")", "if", "bbar", ">", "dbar", ":", "print", "(", "'b-value larger than 1.5 will produce invalid results in '", "'Anderson & Luco models'", ")", "self", ".", "occurrence_rate", "=", "np", ".", "nan", "*", "np", ".", "ones", "(", "len", "(", "mag", ")", "-", "1", ")", "return", "self", ".", "mmin", ",", "self", ".", "bin_width", ",", "self", ".", "occurrence_rate", "self", ".", "occurrence_rate", "=", "np", ".", "zeros", "(", "len", "(", "mag", ")", "-", "1", ",", "dtype", "=", "float", ")", "for", "ival", "in", "range", "(", "0", ",", "len", "(", "mag", ")", "-", "1", ")", ":", "self", ".", "occurrence_rate", "[", "ival", "]", "=", "RECURRENCE_MAP", "[", "self", ".", "mfd_type", "]", ".", "cumulative_value", "(", "slip", ",", "self", ".", "mmax", ",", "mag", "[", "ival", "]", ",", "bbar", ",", "dbar", ",", "beta", ")", "-", "RECURRENCE_MAP", "[", "self", ".", "mfd_type", "]", ".", "cumulative_value", "(", "slip", ",", "self", ".", "mmax", ",", "mag", "[", "ival", "+", "1", "]", ",", "bbar", ",", "dbar", ",", "beta", ")", "if", "self", ".", "occurrence_rate", "[", "ival", "]", "<", "0.", ":", "self", ".", "occurrence_rate", "[", "ival", "]", "=", "0.", "return", "self", ".", "mmin", ",", "self", ".", "bin_width", ",", "self", ".", "occurrence_rate" ]
Calculates activity rate on the fault :param float slip: Slip rate in mm/yr :param fault_width: Width of the fault (km) :param float shear_modulus: Shear modulus of the fault (GPa) :param float disp_length_ratio: Displacement to length ratio (dimensionless) :returns: * Minimum Magnitude (float) * Bin width (float) * Occurrence Rates (numpy.ndarray)
[ "Calculates", "activity", "rate", "on", "the", "fault" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_area_mmax.py#L275-L320
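The discretization step inside get_mfd, isolated: incremental occurrence rates are differences of a cumulative rate evaluated at consecutive bin edges, clipped at zero. The cumulative function below is a made-up stand-in, not one of the Anderson & Luco forms.

import numpy as np

mmin, mmax, bin_width = 5.0, 7.0, 0.1
mag = np.arange(mmin - bin_width / 2., mmax + bin_width, bin_width)

def cumulative(m):
    # hypothetical N(M > m), for illustration only
    return np.exp(-2.0 * m) * (m < mmax)

rates = cumulative(mag[:-1]) - cumulative(mag[1:])
rates[rates < 0.] = 0.        # same clipping as get_mfd
print(len(mag) - 1, rates[:3])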
conan-io/conan-package-tools
setup.py
load_version
def load_version(): """Loads a file's content""" filename = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "cpt", "__init__.py")) with open(filename, "rt") as version_file: conan_init = version_file.read() version = re.search("__version__ = '([0-9a-z.-]+)'", conan_init).group(1) return version
python
def load_version(): filename = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "cpt", "__init__.py")) with open(filename, "rt") as version_file: conan_init = version_file.read() version = re.search("__version__ = '([0-9a-z.-]+)'", conan_init).group(1) return version
[ "def", "load_version", "(", ")", ":", "filename", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", ",", "\"cpt\"", ",", "\"__init__.py\"", ")", ")", "with", "open", "(", "filename", ",", "\"rt\"", ")", "as", "version_file", ":", "conan_init", "=", "version_file", ".", "read", "(", ")", "version", "=", "re", ".", "search", "(", "\"__version__ = '([0-9a-z.-]+)'\"", ",", "conan_init", ")", ".", "group", "(", "1", ")", "return", "version" ]
Loads a file's content
[ "Loads", "a", "file", "content" ]
train
https://github.com/conan-io/conan-package-tools/blob/3d0f5f4dc5d9dc899a57626e8d8a125fc28b8324/setup.py#L25-L32
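The version-extraction regex in isolation, applied to an invented __init__ body:

import re

conan_init = "__version__ = '0.29.3'\n"   # made-up file contents
version = re.search("__version__ = '([0-9a-z.-]+)'", conan_init).group(1)
assert version == '0.29.3'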
conan-io/conan-package-tools
cpt/packager.py
ConanMultiPackager.builds
def builds(self, confs): """For retro compatibility directly assigning builds""" self._named_builds = {} self._builds = [] for values in confs: if len(values) == 2: self._builds.append(BuildConf(values[0], values[1], {}, {}, self.reference)) elif len(values) == 4: self._builds.append(BuildConf(values[0], values[1], values[2], values[3], self.reference)) elif len(values) != 5: raise Exception("Invalid build configuration, has to be a tuple of " "(settings, options, env_vars, build_requires, reference)") else: self._builds.append(BuildConf(*values))
python
def builds(self, confs): self._named_builds = {} self._builds = [] for values in confs: if len(values) == 2: self._builds.append(BuildConf(values[0], values[1], {}, {}, self.reference)) elif len(values) == 4: self._builds.append(BuildConf(values[0], values[1], values[2], values[3], self.reference)) elif len(values) != 5: raise Exception("Invalid build configuration, has to be a tuple of " "(settings, options, env_vars, build_requires, reference)") else: self._builds.append(BuildConf(*values))
[ "def", "builds", "(", "self", ",", "confs", ")", ":", "self", ".", "_named_builds", "=", "{", "}", "self", ".", "_builds", "=", "[", "]", "for", "values", "in", "confs", ":", "if", "len", "(", "values", ")", "==", "2", ":", "self", ".", "_builds", ".", "append", "(", "BuildConf", "(", "values", "[", "0", "]", ",", "values", "[", "1", "]", ",", "{", "}", ",", "{", "}", ",", "self", ".", "reference", ")", ")", "elif", "len", "(", "values", ")", "==", "4", ":", "self", ".", "_builds", ".", "append", "(", "BuildConf", "(", "values", "[", "0", "]", ",", "values", "[", "1", "]", ",", "values", "[", "2", "]", ",", "values", "[", "3", "]", ",", "self", ".", "reference", ")", ")", "elif", "len", "(", "values", ")", "!=", "5", ":", "raise", "Exception", "(", "\"Invalid build configuration, has to be a tuple of \"", "\"(settings, options, env_vars, build_requires, reference)\"", ")", "else", ":", "self", ".", "_builds", ".", "append", "(", "BuildConf", "(", "*", "values", ")", ")" ]
For retro compatibility directly assigning builds
[ "For", "retro", "compatibility", "directly", "assigning", "builds" ]
train
https://github.com/conan-io/conan-package-tools/blob/3d0f5f4dc5d9dc899a57626e8d8a125fc28b8324/cpt/packager.py#L357-L371
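A hedged illustration of the tuple shapes the builds setter accepts: 2-tuples get empty env_vars/build_requires filled in, 5-tuples pass through unchanged. The settings, options and reference below are invented.

confs = [
    ({'arch': 'x86_64', 'build_type': 'Release'}, {'pkg:shared': True}),
    ({'arch': 'x86_64', 'build_type': 'Debug'}, {'pkg:shared': False},
     {'CC': 'gcc-8'}, {}, 'pkg/1.0@user/stable'),
]
for values in confs:
    if len(values) == 2:
        settings, options = values
        env_vars, build_requires, reference = {}, {}, None
    else:
        settings, options, env_vars, build_requires, reference = values
    print(settings['build_type'], options, reference)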
conan-io/conan-package-tools
cpt/profiles.py
patch_default_base_profile
def patch_default_base_profile(conan_api, profile_abs_path): """If we have a profile that includes 'default', but the user's default profile in the config has a different name, we have to change the include""" text = tools.load(profile_abs_path) if "include(default)" in text: # The user didn't specify a custom profile if Version(conan_version) < Version("1.12.0"): cache = conan_api._client_cache else: cache = conan_api._cache default_profile_name = os.path.basename(cache.default_profile_path) if not os.path.exists(cache.default_profile_path): conan_api.create_profile(default_profile_name, detect=True) if default_profile_name != "default": # The user has a different default profile name # https://github.com/conan-io/conan-package-tools/issues/121 text = text.replace("include(default)", "include(%s)" % default_profile_name) tools.save(profile_abs_path, text)
python
def patch_default_base_profile(conan_api, profile_abs_path): text = tools.load(profile_abs_path) if "include(default)" in text: if Version(conan_version) < Version("1.12.0"): cache = conan_api._client_cache else: cache = conan_api._cache default_profile_name = os.path.basename(cache.default_profile_path) if not os.path.exists(cache.default_profile_path): conan_api.create_profile(default_profile_name, detect=True) if default_profile_name != "default": text = text.replace("include(default)", "include(%s)" % default_profile_name) tools.save(profile_abs_path, text)
[ "def", "patch_default_base_profile", "(", "conan_api", ",", "profile_abs_path", ")", ":", "text", "=", "tools", ".", "load", "(", "profile_abs_path", ")", "if", "\"include(default)\"", "in", "text", ":", "# User didn't specified a custom profile", "if", "Version", "(", "conan_version", ")", "<", "Version", "(", "\"1.12.0\"", ")", ":", "cache", "=", "conan_api", ".", "_client_cache", "else", ":", "cache", "=", "conan_api", ".", "_cache", "default_profile_name", "=", "os", ".", "path", ".", "basename", "(", "cache", ".", "default_profile_path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "cache", ".", "default_profile_path", ")", ":", "conan_api", ".", "create_profile", "(", "default_profile_name", ",", "detect", "=", "True", ")", "if", "default_profile_name", "!=", "\"default\"", ":", "# User have a different default profile name", "# https://github.com/conan-io/conan-package-tools/issues/121", "text", "=", "text", ".", "replace", "(", "\"include(default)\"", ",", "\"include(%s)\"", "%", "default_profile_name", ")", "tools", ".", "save", "(", "profile_abs_path", ",", "text", ")" ]
If we have a profile that includes 'default', but the user's default profile in the config has a different name, we have to change the include
[ "If", "we", "have", "a", "profile", "including", "default", "but", "the", "users", "default", "in", "config", "is", "that", "the", "default", "is", "other", "we", "have", "to", "change", "the", "include" ]
train
https://github.com/conan-io/conan-package-tools/blob/3d0f5f4dc5d9dc899a57626e8d8a125fc28b8324/cpt/profiles.py#L51-L68
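The include-rewrite at the core of patch_default_base_profile, reduced to plain strings; the profile text and the renamed default are invented.

text = 'include(default)\n[settings]\nos=Linux\n'
default_profile_name = 'linux-gcc9'          # the user's renamed default
if 'include(default)' in text and default_profile_name != 'default':
    text = text.replace('include(default)',
                        'include(%s)' % default_profile_name)
print(text.splitlines()[0])                  # include(linux-gcc9)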
edx/auth-backends
auth_backends/pipeline.py
get_user_if_exists
def get_user_if_exists(strategy, details, user=None, *args, **kwargs): """Return a User with the given username iff the User exists.""" if user: return {'is_new': False} try: username = details.get('username') # Return the user if it exists return { 'is_new': False, 'user': User.objects.get(username=username) } except User.DoesNotExist: # Fall to the default return value pass # Nothing to return since we don't have a user return {}
python
def get_user_if_exists(strategy, details, user=None, *args, **kwargs): if user: return {'is_new': False} try: username = details.get('username') return { 'is_new': False, 'user': User.objects.get(username=username) } except User.DoesNotExist: pass return {}
[ "def", "get_user_if_exists", "(", "strategy", ",", "details", ",", "user", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "user", ":", "return", "{", "'is_new'", ":", "False", "}", "try", ":", "username", "=", "details", ".", "get", "(", "'username'", ")", "# Return the user if it exists", "return", "{", "'is_new'", ":", "False", ",", "'user'", ":", "User", ".", "objects", ".", "get", "(", "username", "=", "username", ")", "}", "except", "User", ".", "DoesNotExist", ":", "# Fall to the default return value", "pass", "# Nothing to return since we don't have a user", "return", "{", "}" ]
Return a User with the given username iff the User exists.
[ "Return", "a", "User", "with", "the", "given", "username", "iff", "the", "User", "exists", "." ]
train
https://github.com/edx/auth-backends/blob/493f93e9d87d0237f0fea6d75c7b70646ad6d31e/auth_backends/pipeline.py#L14-L31
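Where a step like get_user_if_exists typically slots in: a hedged sketch of a python-social-auth SOCIAL_AUTH_PIPELINE setting. The surrounding entries are the library's stock steps, but the exact selection and ordering here are illustrative, not edX's actual configuration.

SOCIAL_AUTH_PIPELINE = (
    'social_core.pipeline.social_auth.social_details',
    'social_core.pipeline.social_auth.social_uid',
    'auth_backends.pipeline.get_user_if_exists',   # short-circuits user creation
    'social_core.pipeline.social_auth.associate_user',
    'social_core.pipeline.user.user_details',
)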
edx/auth-backends
auth_backends/pipeline.py
update_email
def update_email(strategy, details, user=None, *args, **kwargs): """Update the user's email address using data from the provider.""" if user: email = details.get('email') if email and user.email != email: user.email = email strategy.storage.user.changed(user)
python
def update_email(strategy, details, user=None, *args, **kwargs): if user: email = details.get('email') if email and user.email != email: user.email = email strategy.storage.user.changed(user)
[ "def", "update_email", "(", "strategy", ",", "details", ",", "user", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "user", ":", "email", "=", "details", ".", "get", "(", "'email'", ")", "if", "email", "and", "user", ".", "email", "!=", "email", ":", "user", ".", "email", "=", "email", "strategy", ".", "storage", ".", "user", ".", "changed", "(", "user", ")" ]
Update the user's email address using data from the provider.
[ "Update", "the", "user", "s", "email", "address", "using", "data", "from", "provider", "." ]
train
https://github.com/edx/auth-backends/blob/493f93e9d87d0237f0fea6d75c7b70646ad6d31e/auth_backends/pipeline.py#L34-L41
edx/auth-backends
auth_backends/backends.py
_merge_two_dicts
def _merge_two_dicts(x, y): """ Given two dicts, merge them into a new dict as a shallow copy. Once only Python 3.6+ is supported, this method can be replaced with ``z = {**x, **y}`` """ z = x.copy() z.update(y) return z
python
def _merge_two_dicts(x, y): z = x.copy() z.update(y) return z
[ "def", "_merge_two_dicts", "(", "x", ",", "y", ")", ":", "z", "=", "x", ".", "copy", "(", ")", "z", ".", "update", "(", "y", ")", "return", "z" ]
Given two dicts, merge them into a new dict as a shallow copy. Once only Python 3.6+ is supported, this method can be replaced with ``z = {**x, **y}``
[ "Given", "two", "dicts", "merge", "them", "into", "a", "new", "dict", "as", "a", "shallow", "copy", "." ]
train
https://github.com/edx/auth-backends/blob/493f93e9d87d0237f0fea6d75c7b70646ad6d31e/auth_backends/backends.py#L29-L37
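The Python 3.6+ replacement the docstring points to, with the same right-hand-side-wins and shallow-copy behaviour:

x, y = {'a': 1, 'b': 2}, {'b': 3}
z = {**x, **y}                          # y wins on key collisions
assert z == {'a': 1, 'b': 3} and x == {'a': 1, 'b': 2}   # x is untouched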
edx/auth-backends
auth_backends/backends.py
EdXBackendMixin._map_user_details
def _map_user_details(self, response): """Maps key/values from the response to key/values in the user model. Does not transfer any key/value that is empty or not present in the response. """ dest = {} for source_key, dest_key in self.CLAIMS_TO_DETAILS_KEY_MAP.items(): value = response.get(source_key) if value is not None: dest[dest_key] = value return dest
python
def _map_user_details(self, response): dest = {} for source_key, dest_key in self.CLAIMS_TO_DETAILS_KEY_MAP.items(): value = response.get(source_key) if value is not None: dest[dest_key] = value return dest
[ "def", "_map_user_details", "(", "self", ",", "response", ")", ":", "dest", "=", "{", "}", "for", "source_key", ",", "dest_key", "in", "self", ".", "CLAIMS_TO_DETAILS_KEY_MAP", ".", "items", "(", ")", ":", "value", "=", "response", ".", "get", "(", "source_key", ")", "if", "value", "is", "not", "None", ":", "dest", "[", "dest_key", "]", "=", "value", "return", "dest" ]
Maps key/values from the response to key/values in the user model. Does not transfer any key/value that is empty or not present in the response.
[ "Maps", "key", "/", "values", "from", "the", "response", "to", "key", "/", "values", "in", "the", "user", "model", "." ]
train
https://github.com/edx/auth-backends/blob/493f93e9d87d0237f0fea6d75c7b70646ad6d31e/auth_backends/backends.py#L66-L77
edx/auth-backends
auth_backends/backends.py
EdXOpenIdConnect.get_user_claims
def get_user_claims(self, access_token, claims=None, token_type='Bearer'): """Returns a dictionary with the values for each claim requested.""" data = self.get_json( self.USER_INFO_URL, headers={'Authorization': '{token_type} {token}'.format(token_type=token_type, token=access_token)} ) if claims: claims_names = set(claims) data = {k: v for (k, v) in six.iteritems(data) if k in claims_names} return data
python
def get_user_claims(self, access_token, claims=None, token_type='Bearer'): data = self.get_json( self.USER_INFO_URL, headers={'Authorization': '{token_type} {token}'.format(token_type=token_type, token=access_token)} ) if claims: claims_names = set(claims) data = {k: v for (k, v) in six.iteritems(data) if k in claims_names} return data
[ "def", "get_user_claims", "(", "self", ",", "access_token", ",", "claims", "=", "None", ",", "token_type", "=", "'Bearer'", ")", ":", "data", "=", "self", ".", "get_json", "(", "self", ".", "USER_INFO_URL", ",", "headers", "=", "{", "'Authorization'", ":", "'{token_type} {token}'", ".", "format", "(", "token_type", "=", "token_type", ",", "token", "=", "access_token", ")", "}", ")", "if", "claims", ":", "claims_names", "=", "set", "(", "claims", ")", "data", "=", "{", "k", ":", "v", "for", "(", "k", ",", "v", ")", "in", "six", ".", "iteritems", "(", "data", ")", "if", "k", "in", "claims_names", "}", "return", "data" ]
Returns a dictionary with the values for each claim requested.
[ "Returns", "a", "dictionary", "with", "the", "values", "for", "each", "claim", "requested", "." ]
train
https://github.com/edx/auth-backends/blob/493f93e9d87d0237f0fea6d75c7b70646ad6d31e/auth_backends/backends.py#L170-L181
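What the claims request reduces to on the wire: a hedged requests-based equivalent, with a placeholder endpoint and token (the real USER_INFO_URL comes from the backend's settings).

import requests

resp = requests.get(
    'https://example.com/oauth2/user_info',           # placeholder endpoint
    headers={'Authorization': 'Bearer <access-token>'})
data = resp.json()
wanted = {'email', 'preferred_username'}              # hypothetical claims
claims = {k: v for k, v in data.items() if k in wanted}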
napalm-automation/napalm-logs
napalm_logs/server.py
NapalmLogsServerProc._setup_ipc
def _setup_ipc(self): ''' Set up the IPC pub and sub. Subscribe to the listener IPC and publish to the device specific IPC. ''' log.debug('Setting up the server IPC puller to receive from the listener') self.ctx = zmq.Context() # subscribe to listener self.sub = self.ctx.socket(zmq.PULL) self.sub.bind(LST_IPC_URL) try: self.sub.setsockopt(zmq.HWM, self.opts['hwm']) # zmq 2 except AttributeError: # zmq 3 self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm']) # device publishers log.debug('Creating the router IPC on the server') self.pub = self.ctx.socket(zmq.ROUTER) self.pub.bind(DEV_IPC_URL) try: self.pub.setsockopt(zmq.HWM, self.opts['hwm']) # zmq 2 except AttributeError: # zmq 3 self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm']) # Pipe to the publishers self.publisher_pub = self.ctx.socket(zmq.PUB) self.publisher_pub.connect(PUB_PX_IPC_URL) try: self.publisher_pub.setsockopt(zmq.HWM, self.opts['hwm']) # zmq 2 except AttributeError: # zmq 3 self.publisher_pub.setsockopt(zmq.SNDHWM, self.opts['hwm'])
python
def _setup_ipc(self): log.debug('Setting up the server IPC puller to receive from the listener') self.ctx = zmq.Context() self.sub = self.ctx.socket(zmq.PULL) self.sub.bind(LST_IPC_URL) try: self.sub.setsockopt(zmq.HWM, self.opts['hwm']) except AttributeError: self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm']) log.debug('Creating the router IPC on the server') self.pub = self.ctx.socket(zmq.ROUTER) self.pub.bind(DEV_IPC_URL) try: self.pub.setsockopt(zmq.HWM, self.opts['hwm']) except AttributeError: self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm']) self.publisher_pub = self.ctx.socket(zmq.PUB) self.publisher_pub.connect(PUB_PX_IPC_URL) try: self.publisher_pub.setsockopt(zmq.HWM, self.opts['hwm']) except AttributeError: self.publisher_pub.setsockopt(zmq.SNDHWM, self.opts['hwm'])
[ "def", "_setup_ipc", "(", "self", ")", ":", "log", ".", "debug", "(", "'Setting up the server IPC puller to receive from the listener'", ")", "self", ".", "ctx", "=", "zmq", ".", "Context", "(", ")", "# subscribe to listener", "self", ".", "sub", "=", "self", ".", "ctx", ".", "socket", "(", "zmq", ".", "PULL", ")", "self", ".", "sub", ".", "bind", "(", "LST_IPC_URL", ")", "try", ":", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "HWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# zmq 2", "except", "AttributeError", ":", "# zmq 3", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "RCVHWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# device publishers", "log", ".", "debug", "(", "'Creating the router ICP on the server'", ")", "self", ".", "pub", "=", "self", ".", "ctx", ".", "socket", "(", "zmq", ".", "ROUTER", ")", "self", ".", "pub", ".", "bind", "(", "DEV_IPC_URL", ")", "try", ":", "self", ".", "pub", ".", "setsockopt", "(", "zmq", ".", "HWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# zmq 2", "except", "AttributeError", ":", "# zmq 3", "self", ".", "pub", ".", "setsockopt", "(", "zmq", ".", "SNDHWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# Pipe to the publishers", "self", ".", "publisher_pub", "=", "self", ".", "ctx", ".", "socket", "(", "zmq", ".", "PUB", ")", "self", ".", "publisher_pub", ".", "connect", "(", "PUB_PX_IPC_URL", ")", "try", ":", "self", ".", "publisher_pub", ".", "setsockopt", "(", "zmq", ".", "HWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# zmq 2", "except", "AttributeError", ":", "# zmq 3", "self", ".", "publisher_pub", ".", "setsockopt", "(", "zmq", ".", "SNDHWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")" ]
Set up the IPC pub and sub. Subscribe to the listener IPC and publish to the device-specific IPC.
[ "Setup", "the", "IPC", "pub", "and", "sub", ".", "Subscript", "to", "the", "listener", "IPC", "and", "publish", "to", "the", "device", "specific", "IPC", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/server.py#L55-L90
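The try/except around setsockopt is a compatibility shim: pyzmq 2.x exposed a single zmq.HWM option, while pyzmq 3+ splits it into zmq.SNDHWM and zmq.RCVHWM, so accessing zmq.HWM raises AttributeError on newer releases. A minimal standalone sketch of the same shim (endpoint and values are illustrative, not the real LST_IPC_URL):

import zmq

def set_hwm(sock, hwm, send=True):
    # pyzmq 2.x had a single zmq.HWM option; pyzmq 3+ split it into
    # SNDHWM/RCVHWM, so zmq.HWM raises AttributeError there.
    try:
        sock.setsockopt(zmq.HWM, hwm)  # pyzmq 2.x
    except AttributeError:
        sock.setsockopt(zmq.SNDHWM if send else zmq.RCVHWM, hwm)  # pyzmq 3+

ctx = zmq.Context()
pull = ctx.socket(zmq.PULL)
set_hwm(pull, 1000, send=False)
pull.bind('tcp://127.0.0.1:5599')  # illustrative endpoint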
napalm-automation/napalm-logs
napalm_logs/server.py
NapalmLogsServerProc._cleanup_buffer
def _cleanup_buffer(self): ''' Periodically cleanup the buffer. ''' if not self._buffer: return while True: time.sleep(60) log.debug('Cleaning up buffer') items = self._buffer.items() # The ``items`` function should also cleanup the buffer log.debug('Collected items') log.debug(list(items))
python
def _cleanup_buffer(self): if not self._buffer: return while True: time.sleep(60) log.debug('Cleaning up buffer') items = self._buffer.items() log.debug('Collected items') log.debug(list(items))
[ "def", "_cleanup_buffer", "(", "self", ")", ":", "if", "not", "self", ".", "_buffer", ":", "return", "while", "True", ":", "time", ".", "sleep", "(", "60", ")", "log", ".", "debug", "(", "'Cleaning up buffer'", ")", "items", "=", "self", ".", "_buffer", ".", "items", "(", ")", "# The ``items`` function should also cleanup the buffer", "log", ".", "debug", "(", "'Collected items'", ")", "log", ".", "debug", "(", "list", "(", "items", ")", ")" ]
Periodically clean up the buffer.
[ "Periodically", "cleanup", "the", "buffer", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/server.py#L92-L104
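The code above relies on a buffer whose items() call also evicts expired entries; that interface is an assumption here, so the following is only a toy in-memory stand-in showing how such a self-pruning buffer could behave:

import time

class ExpiringBuffer(object):
    """Toy stand-in for the napalm-logs buffer: items() also evicts."""
    def __init__(self, ttl=60):
        self.ttl = ttl
        self._data = {}  # key -> insertion timestamp

    def __setitem__(self, key, value):
        self._data[key] = time.time()

    def __getitem__(self, key):
        return 1 if key in self._data else 0

    def items(self):
        # prune entries older than the TTL, then return what is left
        cutoff = time.time() - self.ttl
        expired = [k for k, ts in self._data.items() if ts < cutoff]
        for key in expired:
            del self._data[key]
        return list(self._data.items())

buf = ExpiringBuffer(ttl=60)
buf['abc'] = 1
assert buf['abc'] == 1
buf.items()  # the periodic call from _cleanup_buffer prunes stale keys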
napalm-automation/napalm-logs
napalm_logs/server.py
NapalmLogsServerProc._compile_prefixes
def _compile_prefixes(self): ''' Create a dict of all OS prefixes and their compiled regexs ''' self.compiled_prefixes = {} for dev_os, os_config in self.config.items(): if not os_config: continue self.compiled_prefixes[dev_os] = [] for prefix in os_config.get('prefixes', []): values = prefix.get('values', {}) line = prefix.get('line', '') if prefix.get('__python_fun__'): self.compiled_prefixes[dev_os].append({ '__python_fun__': prefix['__python_fun__'], '__python_mod__': prefix['__python_mod__'] }) continue # if python profiler defined for this prefix, # no need to go further, but jump to the next prefix # Add 'pri' and 'message' to the line, and values line = '{{pri}}{}{{message}}'.format(line) # PRI https://tools.ietf.org/html/rfc5424#section-6.2.1 values['pri'] = r'\<(\d+)\>' values['message'] = '(.*)' # We will now figure out which position each value is in so we can use it with the match statement position = {} for key in values.keys(): position[line.find('{' + key + '}')] = key sorted_position = {} for i, elem in enumerate(sorted(position.items())): sorted_position[elem[1]] = i + 1 # Escape the line, then remove the escape for the curly bracets so they can be used when formatting escaped = re.escape(line).replace(r'\{', '{').replace(r'\}', '}') # Replace a whitespace with \s+ escaped = escaped.replace(r'\ ', r'\s+') self.compiled_prefixes[dev_os].append({ 'prefix': re.compile(escaped.format(**values)), 'prefix_positions': sorted_position, 'raw_prefix': escaped.format(**values), 'values': values })
python
def _compile_prefixes(self): self.compiled_prefixes = {} for dev_os, os_config in self.config.items(): if not os_config: continue self.compiled_prefixes[dev_os] = [] for prefix in os_config.get('prefixes', []): values = prefix.get('values', {}) line = prefix.get('line', '') if prefix.get('__python_fun__'): self.compiled_prefixes[dev_os].append({ '__python_fun__': prefix['__python_fun__'], '__python_mod__': prefix['__python_mod__'] }) continue line = '{{pri}}{}{{message}}'.format(line) values['pri'] = r'\<(\d+)\>' values['message'] = '(.*)' position = {} for key in values.keys(): position[line.find('{' + key + '}')] = key sorted_position = {} for i, elem in enumerate(sorted(position.items())): sorted_position[elem[1]] = i + 1 escaped = re.escape(line).replace(r'\{', '{').replace(r'\}', '}') escaped = escaped.replace(r'\ ', r'\s+') self.compiled_prefixes[dev_os].append({ 'prefix': re.compile(escaped.format(**values)), 'prefix_positions': sorted_position, 'raw_prefix': escaped.format(**values), 'values': values })
[ "def", "_compile_prefixes", "(", "self", ")", ":", "self", ".", "compiled_prefixes", "=", "{", "}", "for", "dev_os", ",", "os_config", "in", "self", ".", "config", ".", "items", "(", ")", ":", "if", "not", "os_config", ":", "continue", "self", ".", "compiled_prefixes", "[", "dev_os", "]", "=", "[", "]", "for", "prefix", "in", "os_config", ".", "get", "(", "'prefixes'", ",", "[", "]", ")", ":", "values", "=", "prefix", ".", "get", "(", "'values'", ",", "{", "}", ")", "line", "=", "prefix", ".", "get", "(", "'line'", ",", "''", ")", "if", "prefix", ".", "get", "(", "'__python_fun__'", ")", ":", "self", ".", "compiled_prefixes", "[", "dev_os", "]", ".", "append", "(", "{", "'__python_fun__'", ":", "prefix", "[", "'__python_fun__'", "]", ",", "'__python_mod__'", ":", "prefix", "[", "'__python_mod__'", "]", "}", ")", "continue", "# if python profiler defined for this prefix,", "# no need to go further, but jump to the next prefix", "# Add 'pri' and 'message' to the line, and values", "line", "=", "'{{pri}}{}{{message}}'", ".", "format", "(", "line", ")", "# PRI https://tools.ietf.org/html/rfc5424#section-6.2.1", "values", "[", "'pri'", "]", "=", "r'\\<(\\d+)\\>'", "values", "[", "'message'", "]", "=", "'(.*)'", "# We will now figure out which position each value is in so we can use it with the match statement", "position", "=", "{", "}", "for", "key", "in", "values", ".", "keys", "(", ")", ":", "position", "[", "line", ".", "find", "(", "'{'", "+", "key", "+", "'}'", ")", "]", "=", "key", "sorted_position", "=", "{", "}", "for", "i", ",", "elem", "in", "enumerate", "(", "sorted", "(", "position", ".", "items", "(", ")", ")", ")", ":", "sorted_position", "[", "elem", "[", "1", "]", "]", "=", "i", "+", "1", "# Escape the line, then remove the escape for the curly bracets so they can be used when formatting", "escaped", "=", "re", ".", "escape", "(", "line", ")", ".", "replace", "(", "r'\\{'", ",", "'{'", ")", ".", "replace", "(", "r'\\}'", ",", "'}'", ")", "# Replace a whitespace with \\s+", "escaped", "=", "escaped", ".", "replace", "(", "r'\\ '", ",", "r'\\s+'", ")", "self", ".", "compiled_prefixes", "[", "dev_os", "]", ".", "append", "(", "{", "'prefix'", ":", "re", ".", "compile", "(", "escaped", ".", "format", "(", "*", "*", "values", ")", ")", ",", "'prefix_positions'", ":", "sorted_position", ",", "'raw_prefix'", ":", "escaped", ".", "format", "(", "*", "*", "values", ")", ",", "'values'", ":", "values", "}", ")" ]
Create a dict of all OS prefixes and their compiled regexes
[ "Create", "a", "dict", "of", "all", "OS", "prefixes", "and", "their", "compiled", "regexs" ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/server.py#L106-L146
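To make the escape-and-format transformation concrete, here is the same sequence of steps applied to an invented prefix line (not taken from a real napalm-logs profile):

import re

line = '{date} {host} '
values = {'date': r'(\w+\s+\d+)', 'host': r'([^ ]+)'}

# mirror the steps above: wrap with pri/message, escape, relax whitespace
line = '{{pri}}{}{{message}}'.format(line)
values['pri'] = r'\<(\d+)\>'
values['message'] = '(.*)'
escaped = re.escape(line).replace(r'\{', '{').replace(r'\}', '}')
escaped = escaped.replace(r'\ ', r'\s+')
prefix = re.compile(escaped.format(**values))

m = prefix.search('<149>Jul 6 router1 UPDOWN: Interface xe-0/0/0, changed state to down')
print(m.groups())  # ('149', 'Jul 6', 'router1', 'UPDOWN: Interface xe-0/0/0, changed state to down')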
napalm-automation/napalm-logs
napalm_logs/server.py
NapalmLogsServerProc._identify_prefix
def _identify_prefix(self, msg, data): ''' Check the message again each OS prefix and if matched return the message dict ''' prefix_id = -1 for prefix in data: msg_dict = {} prefix_id += 1 match = None if '__python_fun__' in prefix: log.debug('Trying to match using the %s custom python profiler', prefix['__python_mod__']) try: match = prefix['__python_fun__'](msg) except Exception: log.error('Exception while parsing %s with the %s python profiler', msg, prefix['__python_mod__'], exc_info=True) else: log.debug('Matching using YAML-defined profiler:') log.debug(prefix['raw_prefix']) match = prefix['prefix'].search(msg) if not match: log.debug('Match not found') continue if '__python_fun__' in prefix: log.debug('%s matched using the custom python profiler %s', msg, prefix['__python_mod__']) msg_dict = match # the output as-is from the custom function else: positions = prefix.get('prefix_positions', {}) values = prefix.get('values') msg_dict = {} for key in values.keys(): msg_dict[key] = match.group(positions.get(key)) # Remove whitespace from the start or end of the message msg_dict['__prefix_id__'] = prefix_id msg_dict['message'] = msg_dict['message'].strip() # The pri has to be an int as it is retrived using regex '\<(\d+)\>' if 'pri' in msg_dict: msg_dict['facility'] = int(int(msg_dict['pri']) / 8) msg_dict['severity'] = int(int(msg_dict['pri']) - (msg_dict['facility'] * 8)) return msg_dict
python
def _identify_prefix(self, msg, data): prefix_id = -1 for prefix in data: msg_dict = {} prefix_id += 1 match = None if '__python_fun__' in prefix: log.debug('Trying to match using the %s custom python profiler', prefix['__python_mod__']) try: match = prefix['__python_fun__'](msg) except Exception: log.error('Exception while parsing %s with the %s python profiler', msg, prefix['__python_mod__'], exc_info=True) else: log.debug('Matching using YAML-defined profiler:') log.debug(prefix['raw_prefix']) match = prefix['prefix'].search(msg) if not match: log.debug('Match not found') continue if '__python_fun__' in prefix: log.debug('%s matched using the custom python profiler %s', msg, prefix['__python_mod__']) msg_dict = match else: positions = prefix.get('prefix_positions', {}) values = prefix.get('values') msg_dict = {} for key in values.keys(): msg_dict[key] = match.group(positions.get(key)) msg_dict['__prefix_id__'] = prefix_id msg_dict['message'] = msg_dict['message'].strip() if 'pri' in msg_dict: msg_dict['facility'] = int(int(msg_dict['pri']) / 8) msg_dict['severity'] = int(int(msg_dict['pri']) - (msg_dict['facility'] * 8)) return msg_dict
[ "def", "_identify_prefix", "(", "self", ",", "msg", ",", "data", ")", ":", "prefix_id", "=", "-", "1", "for", "prefix", "in", "data", ":", "msg_dict", "=", "{", "}", "prefix_id", "+=", "1", "match", "=", "None", "if", "'__python_fun__'", "in", "prefix", ":", "log", ".", "debug", "(", "'Trying to match using the %s custom python profiler'", ",", "prefix", "[", "'__python_mod__'", "]", ")", "try", ":", "match", "=", "prefix", "[", "'__python_fun__'", "]", "(", "msg", ")", "except", "Exception", ":", "log", ".", "error", "(", "'Exception while parsing %s with the %s python profiler'", ",", "msg", ",", "prefix", "[", "'__python_mod__'", "]", ",", "exc_info", "=", "True", ")", "else", ":", "log", ".", "debug", "(", "'Matching using YAML-defined profiler:'", ")", "log", ".", "debug", "(", "prefix", "[", "'raw_prefix'", "]", ")", "match", "=", "prefix", "[", "'prefix'", "]", ".", "search", "(", "msg", ")", "if", "not", "match", ":", "log", ".", "debug", "(", "'Match not found'", ")", "continue", "if", "'__python_fun__'", "in", "prefix", ":", "log", ".", "debug", "(", "'%s matched using the custom python profiler %s'", ",", "msg", ",", "prefix", "[", "'__python_mod__'", "]", ")", "msg_dict", "=", "match", "# the output as-is from the custom function", "else", ":", "positions", "=", "prefix", ".", "get", "(", "'prefix_positions'", ",", "{", "}", ")", "values", "=", "prefix", ".", "get", "(", "'values'", ")", "msg_dict", "=", "{", "}", "for", "key", "in", "values", ".", "keys", "(", ")", ":", "msg_dict", "[", "key", "]", "=", "match", ".", "group", "(", "positions", ".", "get", "(", "key", ")", ")", "# Remove whitespace from the start or end of the message", "msg_dict", "[", "'__prefix_id__'", "]", "=", "prefix_id", "msg_dict", "[", "'message'", "]", "=", "msg_dict", "[", "'message'", "]", ".", "strip", "(", ")", "# The pri has to be an int as it is retrived using regex '\\<(\\d+)\\>'", "if", "'pri'", "in", "msg_dict", ":", "msg_dict", "[", "'facility'", "]", "=", "int", "(", "int", "(", "msg_dict", "[", "'pri'", "]", ")", "/", "8", ")", "msg_dict", "[", "'severity'", "]", "=", "int", "(", "int", "(", "msg_dict", "[", "'pri'", "]", ")", "-", "(", "msg_dict", "[", "'facility'", "]", "*", "8", ")", ")", "return", "msg_dict" ]
Check the message against each OS prefix and, if matched, return the message dict
[ "Check", "the", "message", "again", "each", "OS", "prefix", "and", "if", "matched", "return", "the", "message", "dict" ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/server.py#L150-L191
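The facility/severity arithmetic at the end follows RFC 5424 section 6.2.1, where PRI = facility * 8 + severity; a quick worked check:

pri = 149              # e.g. from '<149>' in the syslog header
facility = pri // 8    # 18 -> local2
severity = pri % 8     # 5  -> notice
assert facility * 8 + severity == pri
print(facility, severity)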
napalm-automation/napalm-logs
napalm_logs/server.py
NapalmLogsServerProc._identify_os
def _identify_os(self, msg): ''' Using the prefix of the syslog message, we are able to identify the operating system and then continue parsing. ''' ret = [] for dev_os, data in self.compiled_prefixes.items(): # TODO Should we prevent attepmting to determine the OS for the blacklisted? # [mircea] I think its good from a logging perspective to know at least that # that the server found the matching and it tells that it won't be processed # further. Later, we could potentially add an option to control this. log.debug('Matching under %s', dev_os) msg_dict = self._identify_prefix(msg, data) if msg_dict: log.debug('Adding %s to list of matched OS', dev_os) ret.append((dev_os, msg_dict)) else: log.debug('No match found for %s', dev_os) if not ret: log.debug('Not matched any OS, returning original log') msg_dict = {'message': msg} ret.append((None, msg_dict)) return ret
python
def _identify_os(self, msg): ret = [] for dev_os, data in self.compiled_prefixes.items(): log.debug('Matching under %s', dev_os) msg_dict = self._identify_prefix(msg, data) if msg_dict: log.debug('Adding %s to list of matched OS', dev_os) ret.append((dev_os, msg_dict)) else: log.debug('No match found for %s', dev_os) if not ret: log.debug('Not matched any OS, returning original log') msg_dict = {'message': msg} ret.append((None, msg_dict)) return ret
[ "def", "_identify_os", "(", "self", ",", "msg", ")", ":", "ret", "=", "[", "]", "for", "dev_os", ",", "data", "in", "self", ".", "compiled_prefixes", ".", "items", "(", ")", ":", "# TODO Should we prevent attepmting to determine the OS for the blacklisted?", "# [mircea] I think its good from a logging perspective to know at least that", "# that the server found the matching and it tells that it won't be processed", "# further. Later, we could potentially add an option to control this.", "log", ".", "debug", "(", "'Matching under %s'", ",", "dev_os", ")", "msg_dict", "=", "self", ".", "_identify_prefix", "(", "msg", ",", "data", ")", "if", "msg_dict", ":", "log", ".", "debug", "(", "'Adding %s to list of matched OS'", ",", "dev_os", ")", "ret", ".", "append", "(", "(", "dev_os", ",", "msg_dict", ")", ")", "else", ":", "log", ".", "debug", "(", "'No match found for %s'", ",", "dev_os", ")", "if", "not", "ret", ":", "log", ".", "debug", "(", "'Not matched any OS, returning original log'", ")", "msg_dict", "=", "{", "'message'", ":", "msg", "}", "ret", ".", "append", "(", "(", "None", ",", "msg_dict", ")", ")", "return", "ret" ]
Using the prefix of the syslog message, we are able to identify the operating system and then continue parsing.
[ "Using", "the", "prefix", "of", "the", "syslog", "message", "we", "are", "able", "to", "identify", "the", "operating", "system", "and", "then", "continue", "parsing", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/server.py#L193-L215
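The return value is a list of (device_os, partially_parsed_dict) pairs, with a (None, {'message': msg}) fallback so unmatched logs can still be published raw. A toy consumer of that shape:

def dispatch(os_list):
    # toy consumer of the (device_os, partial_dict) pairs returned above
    for dev_os, msg_dict in os_list:
        if dev_os is None:
            print('no OS matched, raw message:', msg_dict['message'])
        else:
            print('queueing to', dev_os, 'with fields', sorted(msg_dict))

dispatch([('junos', {'message': 'SNMP_TRAP_LINK_DOWN ...', 'host': 'edge1'}),
          (None, {'message': 'unparsed line'})])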
napalm-automation/napalm-logs
napalm_logs/server.py
NapalmLogsServerProc.start
def start(self): ''' Take the messages from the queue, inspect and identify the operating system, then queue the message correspondingly. ''' # metric counters napalm_logs_server_messages_received = Counter( "napalm_logs_server_messages_received", "Count of messages received from listener processes" ) napalm_logs_server_skipped_buffered_messages = Counter( 'napalm_logs_server_skipped_buffered_messages', 'Count of messages skipped as they were already buffered', ['device_os'] ) napalm_logs_server_messages_with_identified_os = Counter( "napalm_logs_server_messages_with_identified_os", "Count of messages with positive os identification", ['device_os'] ) napalm_logs_server_messages_without_identified_os = Counter( "napalm_logs_server_messages_without_identified_os", "Count of messages with negative os identification" ) napalm_logs_server_messages_failed_device_queuing = Counter( "napalm_logs_server_messages_failed_device_queuing", "Count of messages per device os that fail to be queued to a device process", ['device_os'] ) napalm_logs_server_messages_device_queued = Counter( "napalm_logs_server_messages_device_queued", "Count of messages queued to device processes", ['device_os'] ) napalm_logs_server_messages_unknown_queued = Counter( "napalm_logs_server_messages_unknown_queued", "Count of messages queued as unknown" ) self._setup_ipc() # Start suicide polling thread cleanup = threading.Thread(target=self._cleanup_buffer) cleanup.start() thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),)) thread.start() signal.signal(signal.SIGTERM, self._exit_gracefully) self.__up = True while self.__up: # Take messages from the main queue try: bin_obj = self.sub.recv() msg, address = umsgpack.unpackb(bin_obj, use_list=False) except zmq.ZMQError as error: if self.__up is False: log.info('Exiting on process shutdown') return else: log.error(error, exc_info=True) raise NapalmLogsExit(error) if six.PY3: msg = str(msg, 'utf-8') else: msg = msg.encode('utf-8') log.debug('[%s] Dequeued message from %s: %s', address, msg, time.time()) napalm_logs_server_messages_received.inc() os_list = self._identify_os(msg) for dev_os, msg_dict in os_list: if dev_os and dev_os in self.started_os_proc: # Identified the OS and the corresponding process is started. # Then send the message in the right queue log.debug('Identified OS: %s', dev_os) log.debug('Queueing message to %s', dev_os) if six.PY3: dev_os = bytes(dev_os, 'utf-8') if self._buffer: message = '{dev_os}/{host}/{msg}'.format(dev_os=dev_os, host=msg_dict['host'], msg=msg_dict['message']) message_key = base64.b64encode(message) if self._buffer[message_key]: log.info('"%s" seems to be already buffered, skipping', msg_dict['message']) napalm_logs_server_skipped_buffered_messages.labels(device_os=dev_os).inc() continue log.debug('"%s" is not buffered yet, added', msg_dict['message']) self._buffer[message_key] = 1 self.pub.send_multipart([dev_os, umsgpack.packb((msg_dict, address))]) # self.os_pipes[dev_os].send((msg_dict, address)) napalm_logs_server_messages_with_identified_os.labels(device_os=dev_os).inc() napalm_logs_server_messages_device_queued.labels(device_os=dev_os).inc() elif dev_os and dev_os not in self.started_os_proc: # Identified the OS, but the corresponding process does not seem to be started. log.info('Unable to queue the message to %s. Is the sub-process started?', dev_os) napalm_logs_server_messages_with_identified_os.labels(device_os=dev_os).inc() napalm_logs_server_messages_failed_device_queuing.labels(device_os=dev_os).inc() elif not dev_os and self.opts['_server_send_unknown']: # OS not identified, but the user requested to publish the message as-is log.debug('Unable to identify the OS, sending directly to the publishers') to_publish = { 'ip': address, 'host': 'unknown', 'timestamp': int(time.time()), 'message_details': msg_dict, 'os': UNKNOWN_DEVICE_NAME, 'error': 'UNKNOWN', 'model_name': 'unknown' } self.publisher_pub.send(umsgpack.packb(to_publish)) napalm_logs_server_messages_unknown_queued.inc() napalm_logs_server_messages_without_identified_os.inc()
python
def start(self): napalm_logs_server_messages_received = Counter( "napalm_logs_server_messages_received", "Count of messages received from listener processes" ) napalm_logs_server_skipped_buffered_messages = Counter( 'napalm_logs_server_skipped_buffered_messages', 'Count of messages skipped as they were already buffered', ['device_os'] ) napalm_logs_server_messages_with_identified_os = Counter( "napalm_logs_server_messages_with_identified_os", "Count of messages with positive os identification", ['device_os'] ) napalm_logs_server_messages_without_identified_os = Counter( "napalm_logs_server_messages_without_identified_os", "Count of messages with negative os identification" ) napalm_logs_server_messages_failed_device_queuing = Counter( "napalm_logs_server_messages_failed_device_queuing", "Count of messages per device os that fail to be queued to a device process", ['device_os'] ) napalm_logs_server_messages_device_queued = Counter( "napalm_logs_server_messages_device_queued", "Count of messages queued to device processes", ['device_os'] ) napalm_logs_server_messages_unknown_queued = Counter( "napalm_logs_server_messages_unknown_queued", "Count of messages queued as unknown" ) self._setup_ipc() cleanup = threading.Thread(target=self._cleanup_buffer) cleanup.start() thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),)) thread.start() signal.signal(signal.SIGTERM, self._exit_gracefully) self.__up = True while self.__up: try: bin_obj = self.sub.recv() msg, address = umsgpack.unpackb(bin_obj, use_list=False) except zmq.ZMQError as error: if self.__up is False: log.info('Exiting on process shutdown') return else: log.error(error, exc_info=True) raise NapalmLogsExit(error) if six.PY3: msg = str(msg, 'utf-8') else: msg = msg.encode('utf-8') log.debug('[%s] Dequeued message from %s: %s', address, msg, time.time()) napalm_logs_server_messages_received.inc() os_list = self._identify_os(msg) for dev_os, msg_dict in os_list: if dev_os and dev_os in self.started_os_proc: log.debug('Identified OS: %s', dev_os) log.debug('Queueing message to %s', dev_os) if six.PY3: dev_os = bytes(dev_os, 'utf-8') if self._buffer: message = '{dev_os}/{host}/{msg}'.format(dev_os=dev_os, host=msg_dict['host'], msg=msg_dict['message']) message_key = base64.b64encode(message) if self._buffer[message_key]: log.info('"%s" seems to be already buffered, skipping', msg_dict['message']) napalm_logs_server_skipped_buffered_messages.labels(device_os=dev_os).inc() continue log.debug('"%s" is not buffered yet, added', msg_dict['message']) self._buffer[message_key] = 1 self.pub.send_multipart([dev_os, umsgpack.packb((msg_dict, address))]) napalm_logs_server_messages_with_identified_os.labels(device_os=dev_os).inc() napalm_logs_server_messages_device_queued.labels(device_os=dev_os).inc() elif dev_os and dev_os not in self.started_os_proc: log.info('Unable to queue the message to %s. Is the sub-process started?', dev_os) napalm_logs_server_messages_with_identified_os.labels(device_os=dev_os).inc() napalm_logs_server_messages_failed_device_queuing.labels(device_os=dev_os).inc() elif not dev_os and self.opts['_server_send_unknown']: log.debug('Unable to identify the OS, sending directly to the publishers') to_publish = { 'ip': address, 'host': 'unknown', 'timestamp': int(time.time()), 'message_details': msg_dict, 'os': UNKNOWN_DEVICE_NAME, 'error': 'UNKNOWN', 'model_name': 'unknown' } self.publisher_pub.send(umsgpack.packb(to_publish)) napalm_logs_server_messages_unknown_queued.inc() napalm_logs_server_messages_without_identified_os.inc()
[ "def", "start", "(", "self", ")", ":", "# metric counters", "napalm_logs_server_messages_received", "=", "Counter", "(", "\"napalm_logs_server_messages_received\"", ",", "\"Count of messages received from listener processes\"", ")", "napalm_logs_server_skipped_buffered_messages", "=", "Counter", "(", "'napalm_logs_server_skipped_buffered_messages'", ",", "'Count of messages skipped as they were already buffered'", ",", "[", "'device_os'", "]", ")", "napalm_logs_server_messages_with_identified_os", "=", "Counter", "(", "\"napalm_logs_server_messages_with_identified_os\"", ",", "\"Count of messages with positive os identification\"", ",", "[", "'device_os'", "]", ")", "napalm_logs_server_messages_without_identified_os", "=", "Counter", "(", "\"napalm_logs_server_messages_without_identified_os\"", ",", "\"Count of messages with negative os identification\"", ")", "napalm_logs_server_messages_failed_device_queuing", "=", "Counter", "(", "\"napalm_logs_server_messages_failed_device_queuing\"", ",", "\"Count of messages per device os that fail to be queued to a device process\"", ",", "[", "'device_os'", "]", ")", "napalm_logs_server_messages_device_queued", "=", "Counter", "(", "\"napalm_logs_server_messages_device_queued\"", ",", "\"Count of messages queued to device processes\"", ",", "[", "'device_os'", "]", ")", "napalm_logs_server_messages_unknown_queued", "=", "Counter", "(", "\"napalm_logs_server_messages_unknown_queued\"", ",", "\"Count of messages queued as unknown\"", ")", "self", ".", "_setup_ipc", "(", ")", "# Start suicide polling thread", "cleanup", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_cleanup_buffer", ")", "cleanup", ".", "start", "(", ")", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_suicide_when_without_parent", ",", "args", "=", "(", "os", ".", "getppid", "(", ")", ",", ")", ")", "thread", ".", "start", "(", ")", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "self", ".", "_exit_gracefully", ")", "self", ".", "__up", "=", "True", "while", "self", ".", "__up", ":", "# Take messages from the main queue", "try", ":", "bin_obj", "=", "self", ".", "sub", ".", "recv", "(", ")", "msg", ",", "address", "=", "umsgpack", ".", "unpackb", "(", "bin_obj", ",", "use_list", "=", "False", ")", "except", "zmq", ".", "ZMQError", "as", "error", ":", "if", "self", ".", "__up", "is", "False", ":", "log", ".", "info", "(", "'Exiting on process shutdown'", ")", "return", "else", ":", "log", ".", "error", "(", "error", ",", "exc_info", "=", "True", ")", "raise", "NapalmLogsExit", "(", "error", ")", "if", "six", ".", "PY3", ":", "msg", "=", "str", "(", "msg", ",", "'utf-8'", ")", "else", ":", "msg", "=", "msg", ".", "encode", "(", "'utf-8'", ")", "log", ".", "debug", "(", "'[%s] Dequeued message from %s: %s'", ",", "address", ",", "msg", ",", "time", ".", "time", "(", ")", ")", "napalm_logs_server_messages_received", ".", "inc", "(", ")", "os_list", "=", "self", ".", "_identify_os", "(", "msg", ")", "for", "dev_os", ",", "msg_dict", "in", "os_list", ":", "if", "dev_os", "and", "dev_os", "in", "self", ".", "started_os_proc", ":", "# Identified the OS and the corresponding process is started.", "# Then send the message in the right queue", "log", ".", "debug", "(", "'Identified OS: %s'", ",", "dev_os", ")", "log", ".", "debug", "(", "'Queueing message to %s'", ",", "dev_os", ")", "if", "six", ".", "PY3", ":", "dev_os", "=", "bytes", "(", "dev_os", ",", "'utf-8'", ")", "if", "self", ".", "_buffer", ":", "message", "=", 
"'{dev_os}/{host}/{msg}'", ".", "format", "(", "dev_os", "=", "dev_os", ",", "host", "=", "msg_dict", "[", "'host'", "]", ",", "msg", "=", "msg_dict", "[", "'message'", "]", ")", "message_key", "=", "base64", ".", "b64encode", "(", "message", ")", "if", "self", ".", "_buffer", "[", "message_key", "]", ":", "log", ".", "info", "(", "'\"%s\" seems to be already buffered, skipping'", ",", "msg_dict", "[", "'message'", "]", ")", "napalm_logs_server_skipped_buffered_messages", ".", "labels", "(", "device_os", "=", "dev_os", ")", ".", "inc", "(", ")", "continue", "log", ".", "debug", "(", "'\"%s\" is not buffered yet, added'", ",", "msg_dict", "[", "'message'", "]", ")", "self", ".", "_buffer", "[", "message_key", "]", "=", "1", "self", ".", "pub", ".", "send_multipart", "(", "[", "dev_os", ",", "umsgpack", ".", "packb", "(", "(", "msg_dict", ",", "address", ")", ")", "]", ")", "# self.os_pipes[dev_os].send((msg_dict, address))", "napalm_logs_server_messages_with_identified_os", ".", "labels", "(", "device_os", "=", "dev_os", ")", ".", "inc", "(", ")", "napalm_logs_server_messages_device_queued", ".", "labels", "(", "device_os", "=", "dev_os", ")", ".", "inc", "(", ")", "elif", "dev_os", "and", "dev_os", "not", "in", "self", ".", "started_os_proc", ":", "# Identified the OS, but the corresponding process does not seem to be started.", "log", ".", "info", "(", "'Unable to queue the message to %s. Is the sub-process started?'", ",", "dev_os", ")", "napalm_logs_server_messages_with_identified_os", ".", "labels", "(", "device_os", "=", "dev_os", ")", ".", "inc", "(", ")", "napalm_logs_server_messages_failed_device_queuing", ".", "labels", "(", "device_os", "=", "dev_os", ")", ".", "inc", "(", ")", "elif", "not", "dev_os", "and", "self", ".", "opts", "[", "'_server_send_unknown'", "]", ":", "# OS not identified, but the user requested to publish the message as-is", "log", ".", "debug", "(", "'Unable to identify the OS, sending directly to the publishers'", ")", "to_publish", "=", "{", "'ip'", ":", "address", ",", "'host'", ":", "'unknown'", ",", "'timestamp'", ":", "int", "(", "time", ".", "time", "(", ")", ")", ",", "'message_details'", ":", "msg_dict", ",", "'os'", ":", "UNKNOWN_DEVICE_NAME", ",", "'error'", ":", "'UNKNOWN'", ",", "'model_name'", ":", "'unknown'", "}", "self", ".", "publisher_pub", ".", "send", "(", "umsgpack", ".", "packb", "(", "to_publish", ")", ")", "napalm_logs_server_messages_unknown_queued", ".", "inc", "(", ")", "napalm_logs_server_messages_without_identified_os", ".", "inc", "(", ")" ]
Take the messages from the queue, inspect them to identify the operating system, then queue each message to the corresponding device process.
[ "Take", "the", "messages", "from", "the", "queue", "inspect", "and", "identify", "the", "operating", "system", "then", "queue", "the", "message", "correspondingly", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/server.py#L217-L329
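One portability wrinkle in the buffering branch: base64.b64encode() requires bytes, so on Python 3 the str built by format() would need encoding first. A hedged sketch of the deduplication key under that assumption (illustrative inputs, not a patch to the project):

import base64

def dedup_key(dev_os, host, message):
    raw = '{}/{}/{}'.format(dev_os, host, message)
    # b64encode needs bytes on Python 3, so encode explicitly
    return base64.b64encode(raw.encode('utf-8'))

print(dedup_key('junos', 'edge1', 'Interface xe-0/0/0 down'))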
napalm-automation/napalm-logs
napalm_logs/device.py
NapalmLogsDeviceProc._setup_ipc
def _setup_ipc(self): ''' Subscribe to the right topic in the device IPC and publish to the publisher proxy. ''' self.ctx = zmq.Context() # subscribe to device IPC log.debug('Creating the dealer IPC for %s', self._name) self.sub = self.ctx.socket(zmq.DEALER) if six.PY2: self.sub.setsockopt(zmq.IDENTITY, self._name) elif six.PY3: self.sub.setsockopt(zmq.IDENTITY, bytes(self._name, 'utf-8')) try: self.sub.setsockopt(zmq.HWM, self.opts['hwm']) # zmq 2 except AttributeError: # zmq 3 self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm']) # subscribe to the corresponding IPC pipe self.sub.connect(DEV_IPC_URL) # publish to the publisher IPC self.pub = self.ctx.socket(zmq.PUB) self.pub.connect(PUB_PX_IPC_URL) try: self.pub.setsockopt(zmq.HWM, self.opts['hwm']) # zmq 2 except AttributeError: # zmq 3 self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm'])
python
def _setup_ipc(self): self.ctx = zmq.Context() log.debug('Creating the dealer IPC for %s', self._name) self.sub = self.ctx.socket(zmq.DEALER) if six.PY2: self.sub.setsockopt(zmq.IDENTITY, self._name) elif six.PY3: self.sub.setsockopt(zmq.IDENTITY, bytes(self._name, 'utf-8')) try: self.sub.setsockopt(zmq.HWM, self.opts['hwm']) except AttributeError: self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm']) self.sub.connect(DEV_IPC_URL) self.pub = self.ctx.socket(zmq.PUB) self.pub.connect(PUB_PX_IPC_URL) try: self.pub.setsockopt(zmq.HWM, self.opts['hwm']) except AttributeError: self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm'])
[ "def", "_setup_ipc", "(", "self", ")", ":", "self", ".", "ctx", "=", "zmq", ".", "Context", "(", ")", "# subscribe to device IPC", "log", ".", "debug", "(", "'Creating the dealer IPC for %s'", ",", "self", ".", "_name", ")", "self", ".", "sub", "=", "self", ".", "ctx", ".", "socket", "(", "zmq", ".", "DEALER", ")", "if", "six", ".", "PY2", ":", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "IDENTITY", ",", "self", ".", "_name", ")", "elif", "six", ".", "PY3", ":", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "IDENTITY", ",", "bytes", "(", "self", ".", "_name", ",", "'utf-8'", ")", ")", "try", ":", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "HWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# zmq 2", "except", "AttributeError", ":", "# zmq 3", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "RCVHWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# subscribe to the corresponding IPC pipe", "self", ".", "sub", ".", "connect", "(", "DEV_IPC_URL", ")", "# publish to the publisher IPC", "self", ".", "pub", "=", "self", ".", "ctx", ".", "socket", "(", "zmq", ".", "PUB", ")", "self", ".", "pub", ".", "connect", "(", "PUB_PX_IPC_URL", ")", "try", ":", "self", ".", "pub", ".", "setsockopt", "(", "zmq", ".", "HWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# zmq 2", "except", "AttributeError", ":", "# zmq 3", "self", ".", "pub", ".", "setsockopt", "(", "zmq", ".", "SNDHWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")" ]
Subscribe to the right topic in the device IPC and publish to the publisher proxy.
[ "Subscribe", "to", "the", "right", "topic", "in", "the", "device", "IPC", "and", "publish", "to", "the", "publisher", "proxy", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/device.py#L52-L82
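Setting zmq.IDENTITY on the DEALER is what lets the server-side ROUTER address this process by device OS: the first frame of send_multipart() selects the peer whose identity matches, and that frame is stripped before delivery. A self-contained sketch of the pattern (endpoint and identity are illustrative, not the real DEV_IPC_URL):

import time
import zmq

ctx = zmq.Context()
router = ctx.socket(zmq.ROUTER)
router.bind('tcp://127.0.0.1:5591')

dealer = ctx.socket(zmq.DEALER)
dealer.setsockopt(zmq.IDENTITY, b'junos')  # identity == routing frame
dealer.connect('tcp://127.0.0.1:5591')
time.sleep(0.2)  # let the connection settle; ROUTER drops unroutable frames

router.send_multipart([b'junos', b'payload-for-junos'])
print(dealer.recv())  # b'payload-for-junos' (identity frame stripped)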
napalm-automation/napalm-logs
napalm_logs/device.py
NapalmLogsDeviceProc._compile_messages
def _compile_messages(self): ''' Create a list of all OS messages and their compiled regexs ''' self.compiled_messages = [] if not self._config: return for message_dict in self._config.get('messages', {}): error = message_dict['error'] tag = message_dict['tag'] model = message_dict['model'] match_on = message_dict.get('match_on', 'tag') if '__python_fun__' in message_dict: self.compiled_messages.append({ 'error': error, 'tag': tag, 'match_on': match_on, 'model': model, '__python_fun__': message_dict['__python_fun__'] }) continue values = message_dict['values'] line = message_dict['line'] mapping = message_dict['mapping'] # We will now figure out which position each value is in so we can use it with the match statement position = {} replace = {} for key in values.keys(): if '|' in key: new_key, replace[new_key] = key.replace(' ', '').split('|') values[new_key] = values.pop(key) key = new_key position[line.find('{' + key + '}')] = key sorted_position = {} for i, elem in enumerate(sorted(position.items())): sorted_position[elem[1]] = i + 1 # Escape the line, then remove the escape for the curly bracets so they can be used when formatting escaped = re.escape(line).replace(r'\{', '{').replace(r'\}', '}') # Replace a whitespace with \s+ escaped = escaped.replace(r'\ ', r'\s+') self.compiled_messages.append( { 'error': error, 'tag': tag, 'match_on': match_on, 'line': re.compile(escaped.format(**values)), 'positions': sorted_position, 'values': values, 'replace': replace, 'model': model, 'mapping': mapping } ) log.debug('Compiled messages:') log.debug(self.compiled_messages)
python
def _compile_messages(self): self.compiled_messages = [] if not self._config: return for message_dict in self._config.get('messages', {}): error = message_dict['error'] tag = message_dict['tag'] model = message_dict['model'] match_on = message_dict.get('match_on', 'tag') if '__python_fun__' in message_dict: self.compiled_messages.append({ 'error': error, 'tag': tag, 'match_on': match_on, 'model': model, '__python_fun__': message_dict['__python_fun__'] }) continue values = message_dict['values'] line = message_dict['line'] mapping = message_dict['mapping'] position = {} replace = {} for key in values.keys(): if '|' in key: new_key, replace[new_key] = key.replace(' ', '').split('|') values[new_key] = values.pop(key) key = new_key position[line.find('{' + key + '}')] = key sorted_position = {} for i, elem in enumerate(sorted(position.items())): sorted_position[elem[1]] = i + 1 escaped = re.escape(line).replace(r'\{', '{').replace(r'\}', '}') escaped = escaped.replace(r'\ ', r'\s+') self.compiled_messages.append( { 'error': error, 'tag': tag, 'match_on': match_on, 'line': re.compile(escaped.format(**values)), 'positions': sorted_position, 'values': values, 'replace': replace, 'model': model, 'mapping': mapping } ) log.debug('Compiled messages:') log.debug(self.compiled_messages)
[ "def", "_compile_messages", "(", "self", ")", ":", "self", ".", "compiled_messages", "=", "[", "]", "if", "not", "self", ".", "_config", ":", "return", "for", "message_dict", "in", "self", ".", "_config", ".", "get", "(", "'messages'", ",", "{", "}", ")", ":", "error", "=", "message_dict", "[", "'error'", "]", "tag", "=", "message_dict", "[", "'tag'", "]", "model", "=", "message_dict", "[", "'model'", "]", "match_on", "=", "message_dict", ".", "get", "(", "'match_on'", ",", "'tag'", ")", "if", "'__python_fun__'", "in", "message_dict", ":", "self", ".", "compiled_messages", ".", "append", "(", "{", "'error'", ":", "error", ",", "'tag'", ":", "tag", ",", "'match_on'", ":", "match_on", ",", "'model'", ":", "model", ",", "'__python_fun__'", ":", "message_dict", "[", "'__python_fun__'", "]", "}", ")", "continue", "values", "=", "message_dict", "[", "'values'", "]", "line", "=", "message_dict", "[", "'line'", "]", "mapping", "=", "message_dict", "[", "'mapping'", "]", "# We will now figure out which position each value is in so we can use it with the match statement", "position", "=", "{", "}", "replace", "=", "{", "}", "for", "key", "in", "values", ".", "keys", "(", ")", ":", "if", "'|'", "in", "key", ":", "new_key", ",", "replace", "[", "new_key", "]", "=", "key", ".", "replace", "(", "' '", ",", "''", ")", ".", "split", "(", "'|'", ")", "values", "[", "new_key", "]", "=", "values", ".", "pop", "(", "key", ")", "key", "=", "new_key", "position", "[", "line", ".", "find", "(", "'{'", "+", "key", "+", "'}'", ")", "]", "=", "key", "sorted_position", "=", "{", "}", "for", "i", ",", "elem", "in", "enumerate", "(", "sorted", "(", "position", ".", "items", "(", ")", ")", ")", ":", "sorted_position", "[", "elem", "[", "1", "]", "]", "=", "i", "+", "1", "# Escape the line, then remove the escape for the curly bracets so they can be used when formatting", "escaped", "=", "re", ".", "escape", "(", "line", ")", ".", "replace", "(", "r'\\{'", ",", "'{'", ")", ".", "replace", "(", "r'\\}'", ",", "'}'", ")", "# Replace a whitespace with \\s+", "escaped", "=", "escaped", ".", "replace", "(", "r'\\ '", ",", "r'\\s+'", ")", "self", ".", "compiled_messages", ".", "append", "(", "{", "'error'", ":", "error", ",", "'tag'", ":", "tag", ",", "'match_on'", ":", "match_on", ",", "'line'", ":", "re", ".", "compile", "(", "escaped", ".", "format", "(", "*", "*", "values", ")", ")", ",", "'positions'", ":", "sorted_position", ",", "'values'", ":", "values", ",", "'replace'", ":", "replace", ",", "'model'", ":", "model", ",", "'mapping'", ":", "mapping", "}", ")", "log", ".", "debug", "(", "'Compiled messages:'", ")", "log", ".", "debug", "(", "self", ".", "compiled_messages", ")" ]
Create a list of all OS messages and their compiled regexes
[ "Create", "a", "list", "of", "all", "OS", "messages", "and", "their", "compiled", "regexs" ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/device.py#L84-L138
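The 'name | function' key syntax splits each such key into a capture name and the name of a cast applied later via napalm_logs.utils.cast. A toy reproduction of just the key-splitting step, with invented profile values:

values = {'interface': r'([^ ]+)', 'state | upper': r'(\w+)'}
replace = {}
for key in list(values.keys()):  # copy: mutating while iterating is unsafe
    if '|' in key:
        new_key, replace[new_key] = key.replace(' ', '').split('|')
        values[new_key] = values.pop(key)
print(values)   # {'interface': '([^ ]+)', 'state': '(\\w+)'}
print(replace)  # {'state': 'upper'}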
napalm-automation/napalm-logs
napalm_logs/device.py
NapalmLogsDeviceProc._parse
def _parse(self, msg_dict): ''' Parse a syslog message and check what OpenConfig object should be generated. ''' error_present = False # log.debug('Matching the message:') # log.debug(msg_dict) for message in self.compiled_messages: # log.debug('Matching using:') # log.debug(message) match_on = message['match_on'] if match_on not in msg_dict: # log.debug('%s is not a valid key in the partially parsed dict', match_on) continue if message['tag'] != msg_dict[match_on]: continue if '__python_fun__' in message: return { 'model': message['model'], 'error': message['error'], '__python_fun__': message['__python_fun__'] } error_present = True match = message['line'].search(msg_dict['message']) if not match: continue positions = message.get('positions', {}) values = message.get('values') ret = { 'model': message['model'], 'mapping': message['mapping'], 'replace': message['replace'], 'error': message['error'] } for key in values.keys(): # Check if the value needs to be replaced if key in message['replace']: result = napalm_logs.utils.cast(match.group(positions.get(key)), message['replace'][key]) else: result = match.group(positions.get(key)) ret[key] = result return ret if error_present is True: log.info('Configured regex did not match for os: %s tag %s', self._name, msg_dict.get('tag', '')) else: log.info('Syslog message not configured for os: %s tag %s', self._name, msg_dict.get('tag', ''))
python
def _parse(self, msg_dict): error_present = False for message in self.compiled_messages: match_on = message['match_on'] if match_on not in msg_dict: continue if message['tag'] != msg_dict[match_on]: continue if '__python_fun__' in message: return { 'model': message['model'], 'error': message['error'], '__python_fun__': message['__python_fun__'] } error_present = True match = message['line'].search(msg_dict['message']) if not match: continue positions = message.get('positions', {}) values = message.get('values') ret = { 'model': message['model'], 'mapping': message['mapping'], 'replace': message['replace'], 'error': message['error'] } for key in values.keys(): if key in message['replace']: result = napalm_logs.utils.cast(match.group(positions.get(key)), message['replace'][key]) else: result = match.group(positions.get(key)) ret[key] = result return ret if error_present is True: log.info('Configured regex did not match for os: %s tag %s', self._name, msg_dict.get('tag', '')) else: log.info('Syslog message not configured for os: %s tag %s', self._name, msg_dict.get('tag', ''))
[ "def", "_parse", "(", "self", ",", "msg_dict", ")", ":", "error_present", "=", "False", "# log.debug('Matching the message:')", "# log.debug(msg_dict)", "for", "message", "in", "self", ".", "compiled_messages", ":", "# log.debug('Matching using:')", "# log.debug(message)", "match_on", "=", "message", "[", "'match_on'", "]", "if", "match_on", "not", "in", "msg_dict", ":", "# log.debug('%s is not a valid key in the partially parsed dict', match_on)", "continue", "if", "message", "[", "'tag'", "]", "!=", "msg_dict", "[", "match_on", "]", ":", "continue", "if", "'__python_fun__'", "in", "message", ":", "return", "{", "'model'", ":", "message", "[", "'model'", "]", ",", "'error'", ":", "message", "[", "'error'", "]", ",", "'__python_fun__'", ":", "message", "[", "'__python_fun__'", "]", "}", "error_present", "=", "True", "match", "=", "message", "[", "'line'", "]", ".", "search", "(", "msg_dict", "[", "'message'", "]", ")", "if", "not", "match", ":", "continue", "positions", "=", "message", ".", "get", "(", "'positions'", ",", "{", "}", ")", "values", "=", "message", ".", "get", "(", "'values'", ")", "ret", "=", "{", "'model'", ":", "message", "[", "'model'", "]", ",", "'mapping'", ":", "message", "[", "'mapping'", "]", ",", "'replace'", ":", "message", "[", "'replace'", "]", ",", "'error'", ":", "message", "[", "'error'", "]", "}", "for", "key", "in", "values", ".", "keys", "(", ")", ":", "# Check if the value needs to be replaced", "if", "key", "in", "message", "[", "'replace'", "]", ":", "result", "=", "napalm_logs", ".", "utils", ".", "cast", "(", "match", ".", "group", "(", "positions", ".", "get", "(", "key", ")", ")", ",", "message", "[", "'replace'", "]", "[", "key", "]", ")", "else", ":", "result", "=", "match", ".", "group", "(", "positions", ".", "get", "(", "key", ")", ")", "ret", "[", "key", "]", "=", "result", "return", "ret", "if", "error_present", "is", "True", ":", "log", ".", "info", "(", "'Configured regex did not match for os: %s tag %s'", ",", "self", ".", "_name", ",", "msg_dict", ".", "get", "(", "'tag'", ",", "''", ")", ")", "else", ":", "log", ".", "info", "(", "'Syslog message not configured for os: %s tag %s'", ",", "self", ".", "_name", ",", "msg_dict", ".", "get", "(", "'tag'", ",", "''", ")", ")" ]
Parse a syslog message and determine which OpenConfig object should be generated.
[ "Parse", "a", "syslog", "message", "and", "check", "what", "OpenConfig", "object", "should", "be", "generated", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/device.py#L140-L186
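napalm_logs.utils.cast(value, name) is applied above to post-process captured groups; its exact behavior is not shown here, so the following is only a stand-in with the behavior assumed by this usage (resolve a named transform and apply it):

CASTS = {'int': int, 'upper': lambda s: s.upper(), 'lower': lambda s: s.lower()}

def cast(value, fun_name):
    # toy stand-in for napalm_logs.utils.cast: apply a named transform,
    # returning the value unchanged when the name is unknown
    return CASTS.get(fun_name, lambda v: v)(value)

print(cast('ge-0/0/1', 'upper'))  # GE-0/0/1
print(cast('42', 'int'))          # 42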
napalm-automation/napalm-logs
napalm_logs/device.py
NapalmLogsDeviceProc._emit
def _emit(self, **kwargs): ''' Emit an OpenConfig object given a certain combination of fields mappeed in the config to the corresponding hierarchy. ''' oc_dict = {} for mapping, result_key in kwargs['mapping']['variables'].items(): result = kwargs[result_key] oc_dict = napalm_logs.utils.setval(mapping.format(**kwargs), result, oc_dict) for mapping, result in kwargs['mapping']['static'].items(): oc_dict = napalm_logs.utils.setval(mapping.format(**kwargs), result, oc_dict) return oc_dict
python
def _emit(self, **kwargs): oc_dict = {} for mapping, result_key in kwargs['mapping']['variables'].items(): result = kwargs[result_key] oc_dict = napalm_logs.utils.setval(mapping.format(**kwargs), result, oc_dict) for mapping, result in kwargs['mapping']['static'].items(): oc_dict = napalm_logs.utils.setval(mapping.format(**kwargs), result, oc_dict) return oc_dict
[ "def", "_emit", "(", "self", ",", "*", "*", "kwargs", ")", ":", "oc_dict", "=", "{", "}", "for", "mapping", ",", "result_key", "in", "kwargs", "[", "'mapping'", "]", "[", "'variables'", "]", ".", "items", "(", ")", ":", "result", "=", "kwargs", "[", "result_key", "]", "oc_dict", "=", "napalm_logs", ".", "utils", ".", "setval", "(", "mapping", ".", "format", "(", "*", "*", "kwargs", ")", ",", "result", ",", "oc_dict", ")", "for", "mapping", ",", "result", "in", "kwargs", "[", "'mapping'", "]", "[", "'static'", "]", ".", "items", "(", ")", ":", "oc_dict", "=", "napalm_logs", ".", "utils", ".", "setval", "(", "mapping", ".", "format", "(", "*", "*", "kwargs", ")", ",", "result", ",", "oc_dict", ")", "return", "oc_dict" ]
Emit an OpenConfig object given a certain combination of fields mapped in the config to the corresponding hierarchy.
[ "Emit", "an", "OpenConfig", "object", "given", "a", "certain", "combination", "of", "fields", "mappeed", "in", "the", "config", "to", "the", "corresponding", "hierarchy", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/device.py#L188-L200
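napalm_logs.utils.setval(path, value, dct) is assumed here to build nested dictionaries from a formatted path; the path separator below is an illustrative choice, not necessarily what the real helper uses:

def setval(path, value, dct, sep='//'):
    # toy nested-dict setter; the real napalm_logs.utils.setval may differ
    keys = path.split(sep)
    node = dct
    for key in keys[:-1]:
        node = node.setdefault(key, {})
    node[keys[-1]] = value
    return dct

oc = {}
path = 'interfaces//interface//{ifname}//state//oper_status'.format(ifname='xe-0/0/0')
setval(path, 'DOWN', oc)
print(oc)  # {'interfaces': {'interface': {'xe-0/0/0': {'state': {'oper_status': 'DOWN'}}}}}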
napalm-automation/napalm-logs
napalm_logs/device.py
NapalmLogsDeviceProc._publish
def _publish(self, obj): ''' Publish the OC object. ''' bin_obj = umsgpack.packb(obj) self.pub.send(bin_obj)
python
def _publish(self, obj): bin_obj = umsgpack.packb(obj) self.pub.send(bin_obj)
[ "def", "_publish", "(", "self", ",", "obj", ")", ":", "bin_obj", "=", "umsgpack", ".", "packb", "(", "obj", ")", "self", ".", "pub", ".", "send", "(", "bin_obj", ")" ]
Publish the OC object.
[ "Publish", "the", "OC", "object", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/device.py#L202-L207
napalm-automation/napalm-logs
napalm_logs/device.py
NapalmLogsDeviceProc.start
def start(self): ''' Start the worker process. ''' # metrics napalm_logs_device_messages_received = Counter( 'napalm_logs_device_messages_received', "Count of messages received by the device process", ['device_os'] ) napalm_logs_device_raw_published_messages = Counter( 'napalm_logs_device_raw_published_messages', "Count of raw type published messages", ['device_os'] ) napalm_logs_device_published_messages = Counter( 'napalm_logs_device_published_messages', "Count of published messages", ['device_os'] ) napalm_logs_device_oc_object_failed = Counter( 'napalm_logs_device_oc_object_failed', "Counter of failed OpenConfig object generations", ['device_os'] ) self._setup_ipc() # Start suicide polling thread thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),)) thread.start() signal.signal(signal.SIGTERM, self._exit_gracefully) self.__up = True while self.__up: # bin_obj = self.sub.recv() # msg_dict, address = umsgpack.unpackb(bin_obj, use_list=False) try: bin_obj = self.sub.recv() msg_dict, address = umsgpack.unpackb(bin_obj, use_list=False) except zmq.ZMQError as error: if self.__up is False: log.info('Exiting on process shutdown [%s]', self._name) return else: raise NapalmLogsExit(error) log.debug('%s: dequeued %s, received from %s', self._name, msg_dict, address) napalm_logs_device_messages_received.labels(device_os=self._name).inc() host = msg_dict.get('host') prefix_id = msg_dict.pop('__prefix_id__') if 'timestamp' in msg_dict: timestamp = msg_dict.pop('timestamp') else: timestamp = self._format_time(msg_dict.get('time', ''), msg_dict.get('date', ''), msg_dict.get('timeZone', 'UTC'), prefix_id) facility = msg_dict.get('facility') severity = msg_dict.get('severity') kwargs = self._parse(msg_dict) if not kwargs: # Unable to identify what model to generate for the message in cause. # But publish the message when the user requested to push raw messages. to_publish = { 'ip': address, 'host': host, 'timestamp': timestamp, 'message_details': msg_dict, 'os': self._name, 'error': 'RAW', 'model_name': 'raw', 'facility': facility, 'severity': severity } log.debug('Queueing to be published:') log.debug(to_publish) # self.pub_pipe.send(to_publish) self.pub.send(umsgpack.packb(to_publish)) napalm_logs_device_raw_published_messages.labels(device_os=self._name).inc() continue try: if '__python_fun__' in kwargs: log.debug('Using the Python parser to determine the YANG-equivalent object') yang_obj = kwargs['__python_fun__'](msg_dict) else: yang_obj = self._emit(**kwargs) except Exception: log.exception('Unexpected error when generating the OC object.', exc_info=True) napalm_logs_device_oc_object_failed.labels(device_os=self._name).inc() continue log.debug('Generated OC object:') log.debug(yang_obj) error = kwargs.get('error') model_name = kwargs.get('model') to_publish = { 'error': error, 'host': host, 'ip': address, 'timestamp': timestamp, 'yang_message': yang_obj, 'message_details': msg_dict, 'yang_model': model_name, 'os': self._name, 'facility': facility, 'severity': severity } log.debug('Queueing to be published:') log.debug(to_publish) # self.pub_pipe.send(to_publish) self.pub.send(umsgpack.packb(to_publish)) # self._publish(to_publish) napalm_logs_device_published_messages.labels(device_os=self._name).inc()
python
def start(self): napalm_logs_device_messages_received = Counter( 'napalm_logs_device_messages_received', "Count of messages received by the device process", ['device_os'] ) napalm_logs_device_raw_published_messages = Counter( 'napalm_logs_device_raw_published_messages', "Count of raw type published messages", ['device_os'] ) napalm_logs_device_published_messages = Counter( 'napalm_logs_device_published_messages', "Count of published messages", ['device_os'] ) napalm_logs_device_oc_object_failed = Counter( 'napalm_logs_device_oc_object_failed', "Counter of failed OpenConfig object generations", ['device_os'] ) self._setup_ipc() thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),)) thread.start() signal.signal(signal.SIGTERM, self._exit_gracefully) self.__up = True while self.__up: try: bin_obj = self.sub.recv() msg_dict, address = umsgpack.unpackb(bin_obj, use_list=False) except zmq.ZMQError as error: if self.__up is False: log.info('Exiting on process shutdown [%s]', self._name) return else: raise NapalmLogsExit(error) log.debug('%s: dequeued %s, received from %s', self._name, msg_dict, address) napalm_logs_device_messages_received.labels(device_os=self._name).inc() host = msg_dict.get('host') prefix_id = msg_dict.pop('__prefix_id__') if 'timestamp' in msg_dict: timestamp = msg_dict.pop('timestamp') else: timestamp = self._format_time(msg_dict.get('time', ''), msg_dict.get('date', ''), msg_dict.get('timeZone', 'UTC'), prefix_id) facility = msg_dict.get('facility') severity = msg_dict.get('severity') kwargs = self._parse(msg_dict) if not kwargs: to_publish = { 'ip': address, 'host': host, 'timestamp': timestamp, 'message_details': msg_dict, 'os': self._name, 'error': 'RAW', 'model_name': 'raw', 'facility': facility, 'severity': severity } log.debug('Queueing to be published:') log.debug(to_publish) self.pub.send(umsgpack.packb(to_publish)) napalm_logs_device_raw_published_messages.labels(device_os=self._name).inc() continue try: if '__python_fun__' in kwargs: log.debug('Using the Python parser to determine the YANG-equivalent object') yang_obj = kwargs['__python_fun__'](msg_dict) else: yang_obj = self._emit(**kwargs) except Exception: log.exception('Unexpected error when generating the OC object.', exc_info=True) napalm_logs_device_oc_object_failed.labels(device_os=self._name).inc() continue log.debug('Generated OC object:') log.debug(yang_obj) error = kwargs.get('error') model_name = kwargs.get('model') to_publish = { 'error': error, 'host': host, 'ip': address, 'timestamp': timestamp, 'yang_message': yang_obj, 'message_details': msg_dict, 'yang_model': model_name, 'os': self._name, 'facility': facility, 'severity': severity } log.debug('Queueing to be published:') log.debug(to_publish) self.pub.send(umsgpack.packb(to_publish)) napalm_logs_device_published_messages.labels(device_os=self._name).inc()
[ "def", "start", "(", "self", ")", ":", "# metrics", "napalm_logs_device_messages_received", "=", "Counter", "(", "'napalm_logs_device_messages_received'", ",", "\"Count of messages received by the device process\"", ",", "[", "'device_os'", "]", ")", "napalm_logs_device_raw_published_messages", "=", "Counter", "(", "'napalm_logs_device_raw_published_messages'", ",", "\"Count of raw type published messages\"", ",", "[", "'device_os'", "]", ")", "napalm_logs_device_published_messages", "=", "Counter", "(", "'napalm_logs_device_published_messages'", ",", "\"Count of published messages\"", ",", "[", "'device_os'", "]", ")", "napalm_logs_device_oc_object_failed", "=", "Counter", "(", "'napalm_logs_device_oc_object_failed'", ",", "\"Counter of failed OpenConfig object generations\"", ",", "[", "'device_os'", "]", ")", "self", ".", "_setup_ipc", "(", ")", "# Start suicide polling thread", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_suicide_when_without_parent", ",", "args", "=", "(", "os", ".", "getppid", "(", ")", ",", ")", ")", "thread", ".", "start", "(", ")", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "self", ".", "_exit_gracefully", ")", "self", ".", "__up", "=", "True", "while", "self", ".", "__up", ":", "# bin_obj = self.sub.recv()", "# msg_dict, address = umsgpack.unpackb(bin_obj, use_list=False)", "try", ":", "bin_obj", "=", "self", ".", "sub", ".", "recv", "(", ")", "msg_dict", ",", "address", "=", "umsgpack", ".", "unpackb", "(", "bin_obj", ",", "use_list", "=", "False", ")", "except", "zmq", ".", "ZMQError", "as", "error", ":", "if", "self", ".", "__up", "is", "False", ":", "log", ".", "info", "(", "'Exiting on process shutdown [%s]'", ",", "self", ".", "_name", ")", "return", "else", ":", "raise", "NapalmLogsExit", "(", "error", ")", "log", ".", "debug", "(", "'%s: dequeued %s, received from %s'", ",", "self", ".", "_name", ",", "msg_dict", ",", "address", ")", "napalm_logs_device_messages_received", ".", "labels", "(", "device_os", "=", "self", ".", "_name", ")", ".", "inc", "(", ")", "host", "=", "msg_dict", ".", "get", "(", "'host'", ")", "prefix_id", "=", "msg_dict", ".", "pop", "(", "'__prefix_id__'", ")", "if", "'timestamp'", "in", "msg_dict", ":", "timestamp", "=", "msg_dict", ".", "pop", "(", "'timestamp'", ")", "else", ":", "timestamp", "=", "self", ".", "_format_time", "(", "msg_dict", ".", "get", "(", "'time'", ",", "''", ")", ",", "msg_dict", ".", "get", "(", "'date'", ",", "''", ")", ",", "msg_dict", ".", "get", "(", "'timeZone'", ",", "'UTC'", ")", ",", "prefix_id", ")", "facility", "=", "msg_dict", ".", "get", "(", "'facility'", ")", "severity", "=", "msg_dict", ".", "get", "(", "'severity'", ")", "kwargs", "=", "self", ".", "_parse", "(", "msg_dict", ")", "if", "not", "kwargs", ":", "# Unable to identify what model to generate for the message in cause.", "# But publish the message when the user requested to push raw messages.", "to_publish", "=", "{", "'ip'", ":", "address", ",", "'host'", ":", "host", ",", "'timestamp'", ":", "timestamp", ",", "'message_details'", ":", "msg_dict", ",", "'os'", ":", "self", ".", "_name", ",", "'error'", ":", "'RAW'", ",", "'model_name'", ":", "'raw'", ",", "'facility'", ":", "facility", ",", "'severity'", ":", "severity", "}", "log", ".", "debug", "(", "'Queueing to be published:'", ")", "log", ".", "debug", "(", "to_publish", ")", "# self.pub_pipe.send(to_publish)", "self", ".", "pub", ".", "send", "(", "umsgpack", ".", "packb", "(", "to_publish", ")", ")", 
"napalm_logs_device_raw_published_messages", ".", "labels", "(", "device_os", "=", "self", ".", "_name", ")", ".", "inc", "(", ")", "continue", "try", ":", "if", "'__python_fun__'", "in", "kwargs", ":", "log", ".", "debug", "(", "'Using the Python parser to determine the YANG-equivalent object'", ")", "yang_obj", "=", "kwargs", "[", "'__python_fun__'", "]", "(", "msg_dict", ")", "else", ":", "yang_obj", "=", "self", ".", "_emit", "(", "*", "*", "kwargs", ")", "except", "Exception", ":", "log", ".", "exception", "(", "'Unexpected error when generating the OC object.'", ",", "exc_info", "=", "True", ")", "napalm_logs_device_oc_object_failed", ".", "labels", "(", "device_os", "=", "self", ".", "_name", ")", ".", "inc", "(", ")", "continue", "log", ".", "debug", "(", "'Generated OC object:'", ")", "log", ".", "debug", "(", "yang_obj", ")", "error", "=", "kwargs", ".", "get", "(", "'error'", ")", "model_name", "=", "kwargs", ".", "get", "(", "'model'", ")", "to_publish", "=", "{", "'error'", ":", "error", ",", "'host'", ":", "host", ",", "'ip'", ":", "address", ",", "'timestamp'", ":", "timestamp", ",", "'yang_message'", ":", "yang_obj", ",", "'message_details'", ":", "msg_dict", ",", "'yang_model'", ":", "model_name", ",", "'os'", ":", "self", ".", "_name", ",", "'facility'", ":", "facility", ",", "'severity'", ":", "severity", "}", "log", ".", "debug", "(", "'Queueing to be published:'", ")", "log", ".", "debug", "(", "to_publish", ")", "# self.pub_pipe.send(to_publish)", "self", ".", "pub", ".", "send", "(", "umsgpack", ".", "packb", "(", "to_publish", ")", ")", "# self._publish(to_publish)", "napalm_logs_device_published_messages", ".", "labels", "(", "device_os", "=", "self", ".", "_name", ")", ".", "inc", "(", ")" ]
Start the worker process.
[ "Start", "the", "worker", "process", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/device.py#L241-L351
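On the wire, each published object is a umsgpack-packed dict with the keys assembled above; a minimal round-trip check with invented field values (note that napalm-logs publishers may add further framing, such as encryption, on top of this):

import time
import umsgpack  # pip install u-msgpack-python

to_publish = {
    'error': 'INTERFACE_DOWN',  # invented example values throughout
    'host': 'edge1',
    'ip': '10.0.0.1',
    'timestamp': int(time.time()),
    'yang_message': {'interfaces': {'interface': {}}},
    'yang_model': 'openconfig-interfaces',
    'os': 'junos',
    'facility': 23,
    'severity': 5,
}
wire = umsgpack.packb(to_publish)
assert umsgpack.unpackb(wire) == to_publish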