repository_name (string, 5-67 chars) | func_path_in_repository (string, 4-234 chars) | func_name (string, 0-314 chars) | whole_func_string (string, 52-3.87M chars) | language (string, 6 distinct values) | func_code_string (string, 39-1.84M chars) | func_code_tokens (list, 15-672k items) | func_documentation_string (string, 1-47.2k chars) | func_documentation_tokens (list, 1-3.92k items) | split_name (string, 1 distinct value) | func_code_url (string, 85-339 chars) |
---|---|---|---|---|---|---|---|---|---|---|
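Each row below pairs a function's source code with its extracted documentation, following the CodeSearchNet-style schema listed above. As an illustration only (the record type and the helper below are sketches mirroring the column names, not part of this table), one way to represent and summarise such a row in Python:

```python
# Minimal sketch of one record in the schema above; field names mirror the
# column headers, while the helper function itself is hypothetical.
from dataclasses import dataclass
from typing import List


@dataclass
class CodeDocRecord:
    repository_name: str
    func_path_in_repository: str
    func_name: str
    whole_func_string: str               # full source, including the docstring
    language: str
    func_code_string: str                # source with the docstring stripped
    func_code_tokens: List[str]          # pre-tokenized code
    func_documentation_string: str       # the extracted docstring
    func_documentation_tokens: List[str]
    split_name: str                      # e.g. "train"
    func_code_url: str                   # permalink to the original source lines


def doc_summary(rec: CodeDocRecord) -> str:
    """Return '<func_name>: <first documentation line>' for quick inspection."""
    doc = rec.func_documentation_string.strip()
    first_line = doc.splitlines()[0] if doc else "<no documentation>"
    return f"{rec.func_name}: {first_line}"
```

Corpora with this exact column set are commonly loaded through the Hugging Face `datasets` library; whether that applies to this particular dump is an assumption, so the sketch above is kept self-contained.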
gem/oq-engine | openquake/hazardlib/geo/surface/gridded.py | GriddedSurface.get_surface_boundaries | def get_surface_boundaries(self):
"""
:returns: (min_max lons, min_max lats)
"""
min_lon, min_lat, max_lon, max_lat = self.get_bounding_box()
return [[min_lon, max_lon]], [[min_lat, max_lat]] | python | def get_surface_boundaries(self):
min_lon, min_lat, max_lon, max_lat = self.get_bounding_box()
return [[min_lon, max_lon]], [[min_lat, max_lat]] | [
"def",
"get_surface_boundaries",
"(",
"self",
")",
":",
"min_lon",
",",
"min_lat",
",",
"max_lon",
",",
"max_lat",
"=",
"self",
".",
"get_bounding_box",
"(",
")",
"return",
"[",
"[",
"min_lon",
",",
"max_lon",
"]",
"]",
",",
"[",
"[",
"min_lat",
",",
"max_lat",
"]",
"]"
]
| :returns: (min_max lons, min_max lats) | [
":",
"returns",
":",
"(",
"min_max",
"lons",
"min_max",
"lats",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/gridded.py#L82-L87 |
gem/oq-engine | openquake/hazardlib/geo/surface/gridded.py | GriddedSurface.get_middle_point | def get_middle_point(self):
"""
Compute coordinates of surface middle point.
The actual definition of ``middle point`` depends on the type of
surface geometry.
:return:
instance of :class:`openquake.hazardlib.geo.point.Point`
representing surface middle point.
"""
lons = self.mesh.lons.squeeze()
lats = self.mesh.lats.squeeze()
depths = self.mesh.depths.squeeze()
lon_bar = lons.mean()
lat_bar = lats.mean()
idx = np.argmin((lons - lon_bar)**2 + (lats - lat_bar)**2)
return Point(lons[idx], lats[idx], depths[idx]) | python | def get_middle_point(self):
lons = self.mesh.lons.squeeze()
lats = self.mesh.lats.squeeze()
depths = self.mesh.depths.squeeze()
lon_bar = lons.mean()
lat_bar = lats.mean()
idx = np.argmin((lons - lon_bar)**2 + (lats - lat_bar)**2)
return Point(lons[idx], lats[idx], depths[idx]) | [
"def",
"get_middle_point",
"(",
"self",
")",
":",
"lons",
"=",
"self",
".",
"mesh",
".",
"lons",
".",
"squeeze",
"(",
")",
"lats",
"=",
"self",
".",
"mesh",
".",
"lats",
".",
"squeeze",
"(",
")",
"depths",
"=",
"self",
".",
"mesh",
".",
"depths",
".",
"squeeze",
"(",
")",
"lon_bar",
"=",
"lons",
".",
"mean",
"(",
")",
"lat_bar",
"=",
"lats",
".",
"mean",
"(",
")",
"idx",
"=",
"np",
".",
"argmin",
"(",
"(",
"lons",
"-",
"lon_bar",
")",
"**",
"2",
"+",
"(",
"lats",
"-",
"lat_bar",
")",
"**",
"2",
")",
"return",
"Point",
"(",
"lons",
"[",
"idx",
"]",
",",
"lats",
"[",
"idx",
"]",
",",
"depths",
"[",
"idx",
"]",
")"
]
| Compute coordinates of surface middle point.
The actual definition of ``middle point`` depends on the type of
surface geometry.
:return:
instance of :class:`openquake.hazardlib.geo.point.Point`
representing surface middle point. | [
"Compute",
"coordinates",
"of",
"surface",
"middle",
"point",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/gridded.py#L164-L181 |
gem/oq-engine | openquake/hazardlib/gsim/boore_atkinson_2011.py | BooreAtkinson2011.get_mean_and_stddevs | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
# get mean and std using the superclass
mean, stddevs = super().get_mean_and_stddevs(
sites, rup, dists, imt, stddev_types)
# correction factor (see Atkinson and Boore, 2011; equation 5 at
# page 1126 and nga08_gm_tmr.for line 508
corr_fact = 10.0**(np.max([0, 3.888 - 0.674 * rup.mag]) -
(np.max([0, 2.933 - 0.510 * rup.mag]) *
np.log10(dists.rjb + 10.)))
return np.log(np.exp(mean)*corr_fact), stddevs | python | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
mean, stddevs = super().get_mean_and_stddevs(
sites, rup, dists, imt, stddev_types)
corr_fact = 10.0**(np.max([0, 3.888 - 0.674 * rup.mag]) -
(np.max([0, 2.933 - 0.510 * rup.mag]) *
np.log10(dists.rjb + 10.)))
return np.log(np.exp(mean)*corr_fact), stddevs | [
"def",
"get_mean_and_stddevs",
"(",
"self",
",",
"sites",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stddev_types",
")",
":",
"# get mean and std using the superclass",
"mean",
",",
"stddevs",
"=",
"super",
"(",
")",
".",
"get_mean_and_stddevs",
"(",
"sites",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stddev_types",
")",
"# correction factor (see Atkinson and Boore, 2011; equation 5 at",
"# page 1126 and nga08_gm_tmr.for line 508",
"corr_fact",
"=",
"10.0",
"**",
"(",
"np",
".",
"max",
"(",
"[",
"0",
",",
"3.888",
"-",
"0.674",
"*",
"rup",
".",
"mag",
"]",
")",
"-",
"(",
"np",
".",
"max",
"(",
"[",
"0",
",",
"2.933",
"-",
"0.510",
"*",
"rup",
".",
"mag",
"]",
")",
"*",
"np",
".",
"log10",
"(",
"dists",
".",
"rjb",
"+",
"10.",
")",
")",
")",
"return",
"np",
".",
"log",
"(",
"np",
".",
"exp",
"(",
"mean",
")",
"*",
"corr_fact",
")",
",",
"stddevs"
]
| See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. | [
"See",
":",
"meth",
":",
"superclass",
"method",
"<",
".",
"base",
".",
"GroundShakingIntensityModel",
".",
"get_mean_and_stddevs",
">",
"for",
"spec",
"of",
"input",
"and",
"result",
"values",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_atkinson_2011.py#L39-L56 |
gem/oq-engine | openquake/hazardlib/gsim/boore_atkinson_2011.py | Atkinson2008prime.get_mean_and_stddevs | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
# get mean and std using the superclass
mean, stddevs = super().get_mean_and_stddevs(
sites, rup, dists, imt, stddev_types)
A08 = self.A08_COEFFS[imt]
f_ena = 10.0 ** (A08["c"] + A08["d"] * dists.rjb)
return np.log(np.exp(mean)*f_ena), stddevs | python | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
mean, stddevs = super().get_mean_and_stddevs(
sites, rup, dists, imt, stddev_types)
A08 = self.A08_COEFFS[imt]
f_ena = 10.0 ** (A08["c"] + A08["d"] * dists.rjb)
return np.log(np.exp(mean)*f_ena), stddevs | [
"def",
"get_mean_and_stddevs",
"(",
"self",
",",
"sites",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stddev_types",
")",
":",
"# get mean and std using the superclass",
"mean",
",",
"stddevs",
"=",
"super",
"(",
")",
".",
"get_mean_and_stddevs",
"(",
"sites",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stddev_types",
")",
"A08",
"=",
"self",
".",
"A08_COEFFS",
"[",
"imt",
"]",
"f_ena",
"=",
"10.0",
"**",
"(",
"A08",
"[",
"\"c\"",
"]",
"+",
"A08",
"[",
"\"d\"",
"]",
"*",
"dists",
".",
"rjb",
")",
"return",
"np",
".",
"log",
"(",
"np",
".",
"exp",
"(",
"mean",
")",
"*",
"f_ena",
")",
",",
"stddevs"
]
| See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. | [
"See",
":",
"meth",
":",
"superclass",
"method",
"<",
".",
"base",
".",
"GroundShakingIntensityModel",
".",
"get_mean_and_stddevs",
">",
"for",
"spec",
"of",
"input",
"and",
"result",
"values",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_atkinson_2011.py#L67-L81 |
gem/oq-engine | openquake/risklib/countries.py | get_country_code | def get_country_code(longname):
"""
:returns: the code of the country contained in `longname`, or a ValuError
>>> for country, code in country2code.items():
... assert get_country_code('Exp_' + country) == code, (country, code)
"""
mo = re.search(REGEX, longname, re.I)
if mo is None:
raise ValueError('Could not find a valid country in %s' % longname)
return country2code[COUNTRIES[mo.lastindex - 1]] | python | def get_country_code(longname):
mo = re.search(REGEX, longname, re.I)
if mo is None:
raise ValueError('Could not find a valid country in %s' % longname)
return country2code[COUNTRIES[mo.lastindex - 1]] | [
"def",
"get_country_code",
"(",
"longname",
")",
":",
"mo",
"=",
"re",
".",
"search",
"(",
"REGEX",
",",
"longname",
",",
"re",
".",
"I",
")",
"if",
"mo",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Could not find a valid country in %s'",
"%",
"longname",
")",
"return",
"country2code",
"[",
"COUNTRIES",
"[",
"mo",
".",
"lastindex",
"-",
"1",
"]",
"]"
]
| :returns: the code of the country contained in `longname`, or a ValuError
>>> for country, code in country2code.items():
... assert get_country_code('Exp_' + country) == code, (country, code) | [
":",
"returns",
":",
"the",
"code",
"of",
"the",
"country",
"contained",
"in",
"longname",
"or",
"a",
"ValuError"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/countries.py#L249-L259 |
gem/oq-engine | openquake/risklib/countries.py | from_exposures | def from_exposures(expnames):
"""
:returns: a dictionary E??_ -> country
"""
dic = {}
for i, expname in enumerate(expnames, 1):
cc = get_country_code(expname)
dic['E%02d_' % i] = cc
return dic | python | def from_exposures(expnames):
dic = {}
for i, expname in enumerate(expnames, 1):
cc = get_country_code(expname)
dic['E%02d_' % i] = cc
return dic | [
"def",
"from_exposures",
"(",
"expnames",
")",
":",
"dic",
"=",
"{",
"}",
"for",
"i",
",",
"expname",
"in",
"enumerate",
"(",
"expnames",
",",
"1",
")",
":",
"cc",
"=",
"get_country_code",
"(",
"expname",
")",
"dic",
"[",
"'E%02d_'",
"%",
"i",
"]",
"=",
"cc",
"return",
"dic"
]
| :returns: a dictionary E??_ -> country | [
":",
"returns",
":",
"a",
"dictionary",
"E??_",
"-",
">",
"country"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/countries.py#L262-L270 |
gem/oq-engine | openquake/hazardlib/mfd/base.py | BaseMFD.modify | def modify(self, modification, parameters):
"""
Apply a single modification to an MFD parameters.
Reflects the modification method and calls it passing ``parameters``
as keyword arguments. See also :attr:`MODIFICATIONS`.
Modifications can be applied one on top of another. The logic
of stacking modifications is up to a specific MFD implementation.
:param modification:
String name representing the type of modification.
:param parameters:
Dictionary of parameters needed for modification.
:raises ValueError:
If ``modification`` is missing from :attr:`MODIFICATIONS`.
"""
if modification not in self.MODIFICATIONS:
raise ValueError('Modification %s is not supported by %s' %
(modification, type(self).__name__))
meth = getattr(self, 'modify_%s' % modification)
meth(**parameters)
self.check_constraints() | python | def modify(self, modification, parameters):
if modification not in self.MODIFICATIONS:
raise ValueError('Modification %s is not supported by %s' %
(modification, type(self).__name__))
meth = getattr(self, 'modify_%s' % modification)
meth(**parameters)
self.check_constraints() | [
"def",
"modify",
"(",
"self",
",",
"modification",
",",
"parameters",
")",
":",
"if",
"modification",
"not",
"in",
"self",
".",
"MODIFICATIONS",
":",
"raise",
"ValueError",
"(",
"'Modification %s is not supported by %s'",
"%",
"(",
"modification",
",",
"type",
"(",
"self",
")",
".",
"__name__",
")",
")",
"meth",
"=",
"getattr",
"(",
"self",
",",
"'modify_%s'",
"%",
"modification",
")",
"meth",
"(",
"*",
"*",
"parameters",
")",
"self",
".",
"check_constraints",
"(",
")"
]
| Apply a single modification to an MFD parameters.
Reflects the modification method and calls it passing ``parameters``
as keyword arguments. See also :attr:`MODIFICATIONS`.
Modifications can be applied one on top of another. The logic
of stacking modifications is up to a specific MFD implementation.
:param modification:
String name representing the type of modification.
:param parameters:
Dictionary of parameters needed for modification.
:raises ValueError:
If ``modification`` is missing from :attr:`MODIFICATIONS`. | [
"Apply",
"a",
"single",
"modification",
"to",
"an",
"MFD",
"parameters",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/mfd/base.py#L34-L56 |
gem/oq-engine | openquake/hazardlib/gsim/travasarou_2003.py | TravasarouEtAl2003.get_mean_and_stddevs | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
# extracting dictionary of coefficients specific to required
# intensity measure type.
C = self.COEFFS[imt]
# Implements mean model (equation 12)
mean = (self._compute_magnitude(rup, C) +
self._compute_distance(dists, C) +
self._get_site_amplification(sites, rup, C) +
self._get_mechanism(rup, C))
stddevs = self._get_stddevs(rup, np.exp(mean), stddev_types, sites)
return mean, stddevs | python | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
C = self.COEFFS[imt]
mean = (self._compute_magnitude(rup, C) +
self._compute_distance(dists, C) +
self._get_site_amplification(sites, rup, C) +
self._get_mechanism(rup, C))
stddevs = self._get_stddevs(rup, np.exp(mean), stddev_types, sites)
return mean, stddevs | [
"def",
"get_mean_and_stddevs",
"(",
"self",
",",
"sites",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stddev_types",
")",
":",
"# extracting dictionary of coefficients specific to required",
"# intensity measure type.",
"C",
"=",
"self",
".",
"COEFFS",
"[",
"imt",
"]",
"# Implements mean model (equation 12)",
"mean",
"=",
"(",
"self",
".",
"_compute_magnitude",
"(",
"rup",
",",
"C",
")",
"+",
"self",
".",
"_compute_distance",
"(",
"dists",
",",
"C",
")",
"+",
"self",
".",
"_get_site_amplification",
"(",
"sites",
",",
"rup",
",",
"C",
")",
"+",
"self",
".",
"_get_mechanism",
"(",
"rup",
",",
"C",
")",
")",
"stddevs",
"=",
"self",
".",
"_get_stddevs",
"(",
"rup",
",",
"np",
".",
"exp",
"(",
"mean",
")",
",",
"stddev_types",
",",
"sites",
")",
"return",
"mean",
",",
"stddevs"
]
| See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. | [
"See",
":",
"meth",
":",
"superclass",
"method",
"<",
".",
"base",
".",
"GroundShakingIntensityModel",
".",
"get_mean_and_stddevs",
">",
"for",
"spec",
"of",
"input",
"and",
"result",
"values",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/travasarou_2003.py#L76-L93 |
gem/oq-engine | openquake/hazardlib/gsim/travasarou_2003.py | TravasarouEtAl2003._get_stddevs | def _get_stddevs(self, rup, arias, stddev_types, sites):
"""
Return standard deviations as defined in table 1, p. 200.
"""
stddevs = []
# Magnitude dependent inter-event term (Eq. 13)
if rup.mag < 4.7:
tau = 0.611
elif rup.mag > 7.6:
tau = 0.475
else:
tau = 0.611 - 0.047 * (rup.mag - 4.7)
# Retrieve site-class dependent sigma
sigma1, sigma2 = self._get_intra_event_sigmas(sites)
sigma = np.copy(sigma1)
# Implements the nonlinear intra-event sigma (Eq. 14)
idx = arias >= 0.125
sigma[idx] = sigma2[idx]
idx = np.logical_and(arias > 0.013, arias < 0.125)
sigma[idx] = sigma1[idx] - 0.106 * (np.log(arias[idx]) -
np.log(0.0132))
sigma_total = np.sqrt(tau ** 2. + sigma ** 2.)
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(sigma_total)
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(sigma)
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(tau * np.ones_like(sites.vs30))
return stddevs | python | def _get_stddevs(self, rup, arias, stddev_types, sites):
stddevs = []
if rup.mag < 4.7:
tau = 0.611
elif rup.mag > 7.6:
tau = 0.475
else:
tau = 0.611 - 0.047 * (rup.mag - 4.7)
sigma1, sigma2 = self._get_intra_event_sigmas(sites)
sigma = np.copy(sigma1)
idx = arias >= 0.125
sigma[idx] = sigma2[idx]
idx = np.logical_and(arias > 0.013, arias < 0.125)
sigma[idx] = sigma1[idx] - 0.106 * (np.log(arias[idx]) -
np.log(0.0132))
sigma_total = np.sqrt(tau ** 2. + sigma ** 2.)
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(sigma_total)
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(sigma)
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(tau * np.ones_like(sites.vs30))
return stddevs | [
"def",
"_get_stddevs",
"(",
"self",
",",
"rup",
",",
"arias",
",",
"stddev_types",
",",
"sites",
")",
":",
"stddevs",
"=",
"[",
"]",
"# Magnitude dependent inter-event term (Eq. 13)",
"if",
"rup",
".",
"mag",
"<",
"4.7",
":",
"tau",
"=",
"0.611",
"elif",
"rup",
".",
"mag",
">",
"7.6",
":",
"tau",
"=",
"0.475",
"else",
":",
"tau",
"=",
"0.611",
"-",
"0.047",
"*",
"(",
"rup",
".",
"mag",
"-",
"4.7",
")",
"# Retrieve site-class dependent sigma",
"sigma1",
",",
"sigma2",
"=",
"self",
".",
"_get_intra_event_sigmas",
"(",
"sites",
")",
"sigma",
"=",
"np",
".",
"copy",
"(",
"sigma1",
")",
"# Implements the nonlinear intra-event sigma (Eq. 14)",
"idx",
"=",
"arias",
">=",
"0.125",
"sigma",
"[",
"idx",
"]",
"=",
"sigma2",
"[",
"idx",
"]",
"idx",
"=",
"np",
".",
"logical_and",
"(",
"arias",
">",
"0.013",
",",
"arias",
"<",
"0.125",
")",
"sigma",
"[",
"idx",
"]",
"=",
"sigma1",
"[",
"idx",
"]",
"-",
"0.106",
"*",
"(",
"np",
".",
"log",
"(",
"arias",
"[",
"idx",
"]",
")",
"-",
"np",
".",
"log",
"(",
"0.0132",
")",
")",
"sigma_total",
"=",
"np",
".",
"sqrt",
"(",
"tau",
"**",
"2.",
"+",
"sigma",
"**",
"2.",
")",
"for",
"stddev_type",
"in",
"stddev_types",
":",
"assert",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
"if",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"TOTAL",
":",
"stddevs",
".",
"append",
"(",
"sigma_total",
")",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTRA_EVENT",
":",
"stddevs",
".",
"append",
"(",
"sigma",
")",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTER_EVENT",
":",
"stddevs",
".",
"append",
"(",
"tau",
"*",
"np",
".",
"ones_like",
"(",
"sites",
".",
"vs30",
")",
")",
"return",
"stddevs"
]
| Return standard deviations as defined in table 1, p. 200. | [
"Return",
"standard",
"deviations",
"as",
"defined",
"in",
"table",
"1",
"p",
".",
"200",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/travasarou_2003.py#L95-L128 |
gem/oq-engine | openquake/hazardlib/gsim/travasarou_2003.py | TravasarouEtAl2003._get_intra_event_sigmas | def _get_intra_event_sigmas(self, sites):
"""
The intra-event term nonlinear and dependent on both the site class
and the expected ground motion. In this case the sigma coefficients
are determined from the site class as described below Eq. 14
"""
sigma1 = 1.18 * np.ones_like(sites.vs30)
sigma2 = 0.94 * np.ones_like(sites.vs30)
idx1 = np.logical_and(sites.vs30 >= 360.0, sites.vs30 < 760.0)
idx2 = sites.vs30 < 360.0
sigma1[idx1] = 1.17
sigma2[idx1] = 0.93
sigma1[idx2] = 0.96
sigma2[idx2] = 0.73
return sigma1, sigma2 | python | def _get_intra_event_sigmas(self, sites):
sigma1 = 1.18 * np.ones_like(sites.vs30)
sigma2 = 0.94 * np.ones_like(sites.vs30)
idx1 = np.logical_and(sites.vs30 >= 360.0, sites.vs30 < 760.0)
idx2 = sites.vs30 < 360.0
sigma1[idx1] = 1.17
sigma2[idx1] = 0.93
sigma1[idx2] = 0.96
sigma2[idx2] = 0.73
return sigma1, sigma2 | [
"def",
"_get_intra_event_sigmas",
"(",
"self",
",",
"sites",
")",
":",
"sigma1",
"=",
"1.18",
"*",
"np",
".",
"ones_like",
"(",
"sites",
".",
"vs30",
")",
"sigma2",
"=",
"0.94",
"*",
"np",
".",
"ones_like",
"(",
"sites",
".",
"vs30",
")",
"idx1",
"=",
"np",
".",
"logical_and",
"(",
"sites",
".",
"vs30",
">=",
"360.0",
",",
"sites",
".",
"vs30",
"<",
"760.0",
")",
"idx2",
"=",
"sites",
".",
"vs30",
"<",
"360.0",
"sigma1",
"[",
"idx1",
"]",
"=",
"1.17",
"sigma2",
"[",
"idx1",
"]",
"=",
"0.93",
"sigma1",
"[",
"idx2",
"]",
"=",
"0.96",
"sigma2",
"[",
"idx2",
"]",
"=",
"0.73",
"return",
"sigma1",
",",
"sigma2"
]
| The intra-event term nonlinear and dependent on both the site class
and the expected ground motion. In this case the sigma coefficients
are determined from the site class as described below Eq. 14 | [
"The",
"intra",
"-",
"event",
"term",
"nonlinear",
"and",
"dependent",
"on",
"both",
"the",
"site",
"class",
"and",
"the",
"expected",
"ground",
"motion",
".",
"In",
"this",
"case",
"the",
"sigma",
"coefficients",
"are",
"determined",
"from",
"the",
"site",
"class",
"as",
"described",
"below",
"Eq",
".",
"14"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/travasarou_2003.py#L130-L145 |
gem/oq-engine | openquake/hazardlib/gsim/travasarou_2003.py | TravasarouEtAl2003._compute_magnitude | def _compute_magnitude(self, rup, C):
"""
Compute the first term of the equation described on p. 1144:
``c1 + c2 * (M - 6) + c3 * log(M / 6)``
"""
return C['c1'] + C['c2'] * (rup.mag - 6.0) +\
(C['c3'] * np.log(rup.mag / 6.0)) | python | def _compute_magnitude(self, rup, C):
return C['c1'] + C['c2'] * (rup.mag - 6.0) +\
(C['c3'] * np.log(rup.mag / 6.0)) | [
"def",
"_compute_magnitude",
"(",
"self",
",",
"rup",
",",
"C",
")",
":",
"return",
"C",
"[",
"'c1'",
"]",
"+",
"C",
"[",
"'c2'",
"]",
"*",
"(",
"rup",
".",
"mag",
"-",
"6.0",
")",
"+",
"(",
"C",
"[",
"'c3'",
"]",
"*",
"np",
".",
"log",
"(",
"rup",
".",
"mag",
"/",
"6.0",
")",
")"
]
| Compute the first term of the equation described on p. 1144:
``c1 + c2 * (M - 6) + c3 * log(M / 6)`` | [
"Compute",
"the",
"first",
"term",
"of",
"the",
"equation",
"described",
"on",
"p",
".",
"1144",
":"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/travasarou_2003.py#L147-L154 |
gem/oq-engine | openquake/hazardlib/gsim/travasarou_2003.py | TravasarouEtAl2003._compute_distance | def _compute_distance(self, dists, C):
"""
Compute the second term of the equation described on p. 1144:
`` c4 * np.log(sqrt(R ** 2. + h ** 2.)
"""
return C["c4"] * np.log(np.sqrt(dists.rrup ** 2. + C["h"] ** 2.)) | python | def _compute_distance(self, dists, C):
return C["c4"] * np.log(np.sqrt(dists.rrup ** 2. + C["h"] ** 2.)) | [
"def",
"_compute_distance",
"(",
"self",
",",
"dists",
",",
"C",
")",
":",
"return",
"C",
"[",
"\"c4\"",
"]",
"*",
"np",
".",
"log",
"(",
"np",
".",
"sqrt",
"(",
"dists",
".",
"rrup",
"**",
"2.",
"+",
"C",
"[",
"\"h\"",
"]",
"**",
"2.",
")",
")"
]
| Compute the second term of the equation described on p. 1144:
`` c4 * np.log(sqrt(R ** 2. + h ** 2.) | [
"Compute",
"the",
"second",
"term",
"of",
"the",
"equation",
"described",
"on",
"p",
".",
"1144",
":"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/travasarou_2003.py#L156-L162 |
gem/oq-engine | openquake/hazardlib/gsim/travasarou_2003.py | TravasarouEtAl2003._get_site_amplification | def _get_site_amplification(self, sites, rup, C):
"""
Compute the third term of the equation described on p. 1144:
``(s11 + s12 * (M - 6)) * Sc + (s21 + s22 * (M - 6)) * Sd`
"""
Sc, Sd = self._get_site_type_dummy_variables(sites)
return (C["s11"] + C["s12"] * (rup.mag - 6.0)) * Sc +\
(C["s21"] + C["s22"] * (rup.mag - 6.0)) * Sd | python | def _get_site_amplification(self, sites, rup, C):
Sc, Sd = self._get_site_type_dummy_variables(sites)
return (C["s11"] + C["s12"] * (rup.mag - 6.0)) * Sc +\
(C["s21"] + C["s22"] * (rup.mag - 6.0)) * Sd | [
"def",
"_get_site_amplification",
"(",
"self",
",",
"sites",
",",
"rup",
",",
"C",
")",
":",
"Sc",
",",
"Sd",
"=",
"self",
".",
"_get_site_type_dummy_variables",
"(",
"sites",
")",
"return",
"(",
"C",
"[",
"\"s11\"",
"]",
"+",
"C",
"[",
"\"s12\"",
"]",
"*",
"(",
"rup",
".",
"mag",
"-",
"6.0",
")",
")",
"*",
"Sc",
"+",
"(",
"C",
"[",
"\"s21\"",
"]",
"+",
"C",
"[",
"\"s22\"",
"]",
"*",
"(",
"rup",
".",
"mag",
"-",
"6.0",
")",
")",
"*",
"Sd"
]
| Compute the third term of the equation described on p. 1144:
``(s11 + s12 * (M - 6)) * Sc + (s21 + s22 * (M - 6)) * Sd` | [
"Compute",
"the",
"third",
"term",
"of",
"the",
"equation",
"described",
"on",
"p",
".",
"1144",
":"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/travasarou_2003.py#L164-L172 |
gem/oq-engine | openquake/hazardlib/gsim/travasarou_2003.py | TravasarouEtAl2003._get_site_type_dummy_variables | def _get_site_type_dummy_variables(self, sites):
"""
Get site type dummy variables, ``Sc`` (for soft and stiff soil sites)
and ``Sd`` (for rock sites).
"""
Sc = np.zeros_like(sites.vs30)
Sd = np.zeros_like(sites.vs30)
# Soft soil; Vs30 < 360 m/s. Page 199.
Sd[sites.vs30 < 360.0] = 1
# Stiff soil 360 <= Vs30 < 760
Sc[np.logical_and(sites.vs30 >= 360.0, sites.vs30 < 760.0)] = 1
return Sc, Sd | python | def _get_site_type_dummy_variables(self, sites):
Sc = np.zeros_like(sites.vs30)
Sd = np.zeros_like(sites.vs30)
Sd[sites.vs30 < 360.0] = 1
Sc[np.logical_and(sites.vs30 >= 360.0, sites.vs30 < 760.0)] = 1
return Sc, Sd | [
"def",
"_get_site_type_dummy_variables",
"(",
"self",
",",
"sites",
")",
":",
"Sc",
"=",
"np",
".",
"zeros_like",
"(",
"sites",
".",
"vs30",
")",
"Sd",
"=",
"np",
".",
"zeros_like",
"(",
"sites",
".",
"vs30",
")",
"# Soft soil; Vs30 < 360 m/s. Page 199.",
"Sd",
"[",
"sites",
".",
"vs30",
"<",
"360.0",
"]",
"=",
"1",
"# Stiff soil 360 <= Vs30 < 760",
"Sc",
"[",
"np",
".",
"logical_and",
"(",
"sites",
".",
"vs30",
">=",
"360.0",
",",
"sites",
".",
"vs30",
"<",
"760.0",
")",
"]",
"=",
"1",
"return",
"Sc",
",",
"Sd"
]
| Get site type dummy variables, ``Sc`` (for soft and stiff soil sites)
and ``Sd`` (for rock sites). | [
"Get",
"site",
"type",
"dummy",
"variables",
"Sc",
"(",
"for",
"soft",
"and",
"stiff",
"soil",
"sites",
")",
"and",
"Sd",
"(",
"for",
"rock",
"sites",
")",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/travasarou_2003.py#L174-L186 |
gem/oq-engine | openquake/hazardlib/gsim/travasarou_2003.py | TravasarouEtAl2003._get_mechanism | def _get_mechanism(self, rup, C):
"""
Compute the fourth term of the equation described on p. 199:
``f1 * Fn + f2 * Fr``
"""
Fn, Fr = self._get_fault_type_dummy_variables(rup)
return (C['f1'] * Fn) + (C['f2'] * Fr) | python | def _get_mechanism(self, rup, C):
Fn, Fr = self._get_fault_type_dummy_variables(rup)
return (C['f1'] * Fn) + (C['f2'] * Fr) | [
"def",
"_get_mechanism",
"(",
"self",
",",
"rup",
",",
"C",
")",
":",
"Fn",
",",
"Fr",
"=",
"self",
".",
"_get_fault_type_dummy_variables",
"(",
"rup",
")",
"return",
"(",
"C",
"[",
"'f1'",
"]",
"*",
"Fn",
")",
"+",
"(",
"C",
"[",
"'f2'",
"]",
"*",
"Fr",
")"
]
| Compute the fourth term of the equation described on p. 199:
``f1 * Fn + f2 * Fr`` | [
"Compute",
"the",
"fourth",
"term",
"of",
"the",
"equation",
"described",
"on",
"p",
".",
"199",
":"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/travasarou_2003.py#L188-L195 |
gem/oq-engine | openquake/hazardlib/gsim/travasarou_2003.py | TravasarouEtAl2003._get_fault_type_dummy_variables | def _get_fault_type_dummy_variables(self, rup):
"""
The original classification considers four style of faulting categories
(normal, strike-slip, reverse-oblique and reverse).
"""
Fn, Fr = 0, 0
if rup.rake >= -112.5 and rup.rake <= -67.5:
# normal
Fn = 1
elif rup.rake >= 22.5 and rup.rake <= 157.5:
# Joins both the reverse and reverse-oblique categories
Fr = 1
return Fn, Fr | python | def _get_fault_type_dummy_variables(self, rup):
Fn, Fr = 0, 0
if rup.rake >= -112.5 and rup.rake <= -67.5:
Fn = 1
elif rup.rake >= 22.5 and rup.rake <= 157.5:
Fr = 1
return Fn, Fr | [
"def",
"_get_fault_type_dummy_variables",
"(",
"self",
",",
"rup",
")",
":",
"Fn",
",",
"Fr",
"=",
"0",
",",
"0",
"if",
"rup",
".",
"rake",
">=",
"-",
"112.5",
"and",
"rup",
".",
"rake",
"<=",
"-",
"67.5",
":",
"# normal",
"Fn",
"=",
"1",
"elif",
"rup",
".",
"rake",
">=",
"22.5",
"and",
"rup",
".",
"rake",
"<=",
"157.5",
":",
"# Joins both the reverse and reverse-oblique categories",
"Fr",
"=",
"1",
"return",
"Fn",
",",
"Fr"
]
| The original classification considers four style of faulting categories
(normal, strike-slip, reverse-oblique and reverse). | [
"The",
"original",
"classification",
"considers",
"four",
"style",
"of",
"faulting",
"categories",
"(",
"normal",
"strike",
"-",
"slip",
"reverse",
"-",
"oblique",
"and",
"reverse",
")",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/travasarou_2003.py#L197-L210 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | california_basin_model | def california_basin_model(vs30):
"""
Returns the centred z1.0 (mu_z1) based on the California model
(equation 11)
"""
coeff = 570.94 ** 4.0
model = (-7.15 / 4.0) * np.log(
((vs30 ** 4.0) + coeff) / ((1360.0 ** 4.0) + coeff)
) - np.log(1000.)
return np.exp(model) | python | def california_basin_model(vs30):
coeff = 570.94 ** 4.0
model = (-7.15 / 4.0) * np.log(
((vs30 ** 4.0) + coeff) / ((1360.0 ** 4.0) + coeff)
) - np.log(1000.)
return np.exp(model) | [
"def",
"california_basin_model",
"(",
"vs30",
")",
":",
"coeff",
"=",
"570.94",
"**",
"4.0",
"model",
"=",
"(",
"-",
"7.15",
"/",
"4.0",
")",
"*",
"np",
".",
"log",
"(",
"(",
"(",
"vs30",
"**",
"4.0",
")",
"+",
"coeff",
")",
"/",
"(",
"(",
"1360.0",
"**",
"4.0",
")",
"+",
"coeff",
")",
")",
"-",
"np",
".",
"log",
"(",
"1000.",
")",
"return",
"np",
".",
"exp",
"(",
"model",
")"
]
| Returns the centred z1.0 (mu_z1) based on the California model
(equation 11) | [
"Returns",
"the",
"centred",
"z1",
".",
"0",
"(",
"mu_z1",
")",
"based",
"on",
"the",
"California",
"model",
"(",
"equation",
"11",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L614-L623 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | japan_basin_model | def japan_basin_model(vs30):
"""
Returns the centred z1.0 (mu_z1) based on the Japan model
(equation 12)
"""
coeff = 412.39 ** 2.0
model = (-5.23 / 2.0) * np.log(
((vs30 ** 2.0) + coeff) / ((1360.0 ** 2.0) + coeff)
) - np.log(1000.)
return np.exp(model) | python | def japan_basin_model(vs30):
coeff = 412.39 ** 2.0
model = (-5.23 / 2.0) * np.log(
((vs30 ** 2.0) + coeff) / ((1360.0 ** 2.0) + coeff)
) - np.log(1000.)
return np.exp(model) | [
"def",
"japan_basin_model",
"(",
"vs30",
")",
":",
"coeff",
"=",
"412.39",
"**",
"2.0",
"model",
"=",
"(",
"-",
"5.23",
"/",
"2.0",
")",
"*",
"np",
".",
"log",
"(",
"(",
"(",
"vs30",
"**",
"2.0",
")",
"+",
"coeff",
")",
"/",
"(",
"(",
"1360.0",
"**",
"2.0",
")",
"+",
"coeff",
")",
")",
"-",
"np",
".",
"log",
"(",
"1000.",
")",
"return",
"np",
".",
"exp",
"(",
"model",
")"
]
| Returns the centred z1.0 (mu_z1) based on the Japan model
(equation 12) | [
"Returns",
"the",
"centred",
"z1",
".",
"0",
"(",
"mu_z1",
")",
"based",
"on",
"the",
"Japan",
"model",
"(",
"equation",
"12",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L704-L713 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | BooreEtAl2014.get_mean_and_stddevs | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
# extracting dictionary of coefficients specific to required
# intensity measure type.
C = self.COEFFS[imt]
C_PGA = self.COEFFS[PGA()]
imt_per = 0 if imt.name == 'PGV' else imt.period
pga_rock = self._get_pga_on_rock(C_PGA, rup, dists)
mean = (self._get_magnitude_scaling_term(C, rup) +
self._get_path_scaling(C, dists, rup.mag) +
self._get_site_scaling(C, pga_rock, sites, imt_per, dists.rjb))
stddevs = self._get_stddevs(C, rup, dists, sites, stddev_types)
return mean, stddevs | python | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
C = self.COEFFS[imt]
C_PGA = self.COEFFS[PGA()]
imt_per = 0 if imt.name == 'PGV' else imt.period
pga_rock = self._get_pga_on_rock(C_PGA, rup, dists)
mean = (self._get_magnitude_scaling_term(C, rup) +
self._get_path_scaling(C, dists, rup.mag) +
self._get_site_scaling(C, pga_rock, sites, imt_per, dists.rjb))
stddevs = self._get_stddevs(C, rup, dists, sites, stddev_types)
return mean, stddevs | [
"def",
"get_mean_and_stddevs",
"(",
"self",
",",
"sites",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stddev_types",
")",
":",
"# extracting dictionary of coefficients specific to required",
"# intensity measure type.",
"C",
"=",
"self",
".",
"COEFFS",
"[",
"imt",
"]",
"C_PGA",
"=",
"self",
".",
"COEFFS",
"[",
"PGA",
"(",
")",
"]",
"imt_per",
"=",
"0",
"if",
"imt",
".",
"name",
"==",
"'PGV'",
"else",
"imt",
".",
"period",
"pga_rock",
"=",
"self",
".",
"_get_pga_on_rock",
"(",
"C_PGA",
",",
"rup",
",",
"dists",
")",
"mean",
"=",
"(",
"self",
".",
"_get_magnitude_scaling_term",
"(",
"C",
",",
"rup",
")",
"+",
"self",
".",
"_get_path_scaling",
"(",
"C",
",",
"dists",
",",
"rup",
".",
"mag",
")",
"+",
"self",
".",
"_get_site_scaling",
"(",
"C",
",",
"pga_rock",
",",
"sites",
",",
"imt_per",
",",
"dists",
".",
"rjb",
")",
")",
"stddevs",
"=",
"self",
".",
"_get_stddevs",
"(",
"C",
",",
"rup",
",",
"dists",
",",
"sites",
",",
"stddev_types",
")",
"return",
"mean",
",",
"stddevs"
]
| See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. | [
"See",
":",
"meth",
":",
"superclass",
"method",
"<",
".",
"base",
".",
"GroundShakingIntensityModel",
".",
"get_mean_and_stddevs",
">",
"for",
"spec",
"of",
"input",
"and",
"result",
"values",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L85-L101 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | BooreEtAl2014._get_pga_on_rock | def _get_pga_on_rock(self, C, rup, dists):
"""
Returns the median PGA on rock, which is a sum of the
magnitude and distance scaling
"""
return np.exp(self._get_magnitude_scaling_term(C, rup) +
self._get_path_scaling(C, dists, rup.mag)) | python | def _get_pga_on_rock(self, C, rup, dists):
return np.exp(self._get_magnitude_scaling_term(C, rup) +
self._get_path_scaling(C, dists, rup.mag)) | [
"def",
"_get_pga_on_rock",
"(",
"self",
",",
"C",
",",
"rup",
",",
"dists",
")",
":",
"return",
"np",
".",
"exp",
"(",
"self",
".",
"_get_magnitude_scaling_term",
"(",
"C",
",",
"rup",
")",
"+",
"self",
".",
"_get_path_scaling",
"(",
"C",
",",
"dists",
",",
"rup",
".",
"mag",
")",
")"
]
| Returns the median PGA on rock, which is a sum of the
magnitude and distance scaling | [
"Returns",
"the",
"median",
"PGA",
"on",
"rock",
"which",
"is",
"a",
"sum",
"of",
"the",
"magnitude",
"and",
"distance",
"scaling"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L103-L109 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | BooreEtAl2014._get_magnitude_scaling_term | def _get_magnitude_scaling_term(self, C, rup):
"""
Returns the magnitude scling term defined in equation (2)
"""
dmag = rup.mag - C["Mh"]
if rup.mag <= C["Mh"]:
mag_term = (C["e4"] * dmag) + (C["e5"] * (dmag ** 2.0))
else:
mag_term = C["e6"] * dmag
return self._get_style_of_faulting_term(C, rup) + mag_term | python | def _get_magnitude_scaling_term(self, C, rup):
dmag = rup.mag - C["Mh"]
if rup.mag <= C["Mh"]:
mag_term = (C["e4"] * dmag) + (C["e5"] * (dmag ** 2.0))
else:
mag_term = C["e6"] * dmag
return self._get_style_of_faulting_term(C, rup) + mag_term | [
"def",
"_get_magnitude_scaling_term",
"(",
"self",
",",
"C",
",",
"rup",
")",
":",
"dmag",
"=",
"rup",
".",
"mag",
"-",
"C",
"[",
"\"Mh\"",
"]",
"if",
"rup",
".",
"mag",
"<=",
"C",
"[",
"\"Mh\"",
"]",
":",
"mag_term",
"=",
"(",
"C",
"[",
"\"e4\"",
"]",
"*",
"dmag",
")",
"+",
"(",
"C",
"[",
"\"e5\"",
"]",
"*",
"(",
"dmag",
"**",
"2.0",
")",
")",
"else",
":",
"mag_term",
"=",
"C",
"[",
"\"e6\"",
"]",
"*",
"dmag",
"return",
"self",
".",
"_get_style_of_faulting_term",
"(",
"C",
",",
"rup",
")",
"+",
"mag_term"
]
| Returns the magnitude scling term defined in equation (2) | [
"Returns",
"the",
"magnitude",
"scling",
"term",
"defined",
"in",
"equation",
"(",
"2",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L111-L120 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | BooreEtAl2014._get_style_of_faulting_term | def _get_style_of_faulting_term(self, C, rup):
"""
Get fault type dummy variables
Fault type (Strike-slip, Normal, Thrust/reverse) is
derived from rake angle.
Rakes angles within 30 of horizontal are strike-slip,
angles from 30 to 150 are reverse, and angles from
-30 to -150 are normal.
Note that the 'Unspecified' case is not considered here as
rake is always given.
"""
if np.abs(rup.rake) <= 30.0 or (180.0 - np.abs(rup.rake)) <= 30.0:
# strike-slip
return C["e1"]
elif rup.rake > 30.0 and rup.rake < 150.0:
# reverse
return C["e3"]
else:
# normal
return C["e2"] | python | def _get_style_of_faulting_term(self, C, rup):
if np.abs(rup.rake) <= 30.0 or (180.0 - np.abs(rup.rake)) <= 30.0:
return C["e1"]
elif rup.rake > 30.0 and rup.rake < 150.0:
return C["e3"]
else:
return C["e2"] | [
"def",
"_get_style_of_faulting_term",
"(",
"self",
",",
"C",
",",
"rup",
")",
":",
"if",
"np",
".",
"abs",
"(",
"rup",
".",
"rake",
")",
"<=",
"30.0",
"or",
"(",
"180.0",
"-",
"np",
".",
"abs",
"(",
"rup",
".",
"rake",
")",
")",
"<=",
"30.0",
":",
"# strike-slip",
"return",
"C",
"[",
"\"e1\"",
"]",
"elif",
"rup",
".",
"rake",
">",
"30.0",
"and",
"rup",
".",
"rake",
"<",
"150.0",
":",
"# reverse",
"return",
"C",
"[",
"\"e3\"",
"]",
"else",
":",
"# normal",
"return",
"C",
"[",
"\"e2\"",
"]"
]
| Get fault type dummy variables
Fault type (Strike-slip, Normal, Thrust/reverse) is
derived from rake angle.
Rakes angles within 30 of horizontal are strike-slip,
angles from 30 to 150 are reverse, and angles from
-30 to -150 are normal.
Note that the 'Unspecified' case is not considered here as
rake is always given. | [
"Get",
"fault",
"type",
"dummy",
"variables",
"Fault",
"type",
"(",
"Strike",
"-",
"slip",
"Normal",
"Thrust",
"/",
"reverse",
")",
"is",
"derived",
"from",
"rake",
"angle",
".",
"Rakes",
"angles",
"within",
"30",
"of",
"horizontal",
"are",
"strike",
"-",
"slip",
"angles",
"from",
"30",
"to",
"150",
"are",
"reverse",
"and",
"angles",
"from",
"-",
"30",
"to",
"-",
"150",
"are",
"normal",
".",
"Note",
"that",
"the",
"Unspecified",
"case",
"is",
"not",
"considered",
"here",
"as",
"rake",
"is",
"always",
"given",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L122-L141 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | BooreEtAl2014._get_path_scaling | def _get_path_scaling(self, C, dists, mag):
"""
Returns the path scaling term given by equation (3)
"""
rval = np.sqrt((dists.rjb ** 2.0) + (C["h"] ** 2.0))
scaling = (C["c1"] + C["c2"] * (mag - self.CONSTS["Mref"])) *\
np.log(rval / self.CONSTS["Rref"])
return scaling + ((C["c3"] + C["Dc3"]) * (rval - self.CONSTS["Rref"])) | python | def _get_path_scaling(self, C, dists, mag):
rval = np.sqrt((dists.rjb ** 2.0) + (C["h"] ** 2.0))
scaling = (C["c1"] + C["c2"] * (mag - self.CONSTS["Mref"])) *\
np.log(rval / self.CONSTS["Rref"])
return scaling + ((C["c3"] + C["Dc3"]) * (rval - self.CONSTS["Rref"])) | [
"def",
"_get_path_scaling",
"(",
"self",
",",
"C",
",",
"dists",
",",
"mag",
")",
":",
"rval",
"=",
"np",
".",
"sqrt",
"(",
"(",
"dists",
".",
"rjb",
"**",
"2.0",
")",
"+",
"(",
"C",
"[",
"\"h\"",
"]",
"**",
"2.0",
")",
")",
"scaling",
"=",
"(",
"C",
"[",
"\"c1\"",
"]",
"+",
"C",
"[",
"\"c2\"",
"]",
"*",
"(",
"mag",
"-",
"self",
".",
"CONSTS",
"[",
"\"Mref\"",
"]",
")",
")",
"*",
"np",
".",
"log",
"(",
"rval",
"/",
"self",
".",
"CONSTS",
"[",
"\"Rref\"",
"]",
")",
"return",
"scaling",
"+",
"(",
"(",
"C",
"[",
"\"c3\"",
"]",
"+",
"C",
"[",
"\"Dc3\"",
"]",
")",
"*",
"(",
"rval",
"-",
"self",
".",
"CONSTS",
"[",
"\"Rref\"",
"]",
")",
")"
]
| Returns the path scaling term given by equation (3) | [
"Returns",
"the",
"path",
"scaling",
"term",
"given",
"by",
"equation",
"(",
"3",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L143-L150 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | BooreEtAl2014._get_site_scaling | def _get_site_scaling(self, C, pga_rock, sites, period, rjb):
"""
Returns the site-scaling term (equation 5), broken down into a
linear scaling, a nonlinear scaling and a basin scaling term
"""
flin = self._get_linear_site_term(C, sites.vs30)
fnl = self._get_nonlinear_site_term(C, sites.vs30, pga_rock)
fbd = self._get_basin_depth_term(C, sites, period)
return flin + fnl + fbd | python | def _get_site_scaling(self, C, pga_rock, sites, period, rjb):
flin = self._get_linear_site_term(C, sites.vs30)
fnl = self._get_nonlinear_site_term(C, sites.vs30, pga_rock)
fbd = self._get_basin_depth_term(C, sites, period)
return flin + fnl + fbd | [
"def",
"_get_site_scaling",
"(",
"self",
",",
"C",
",",
"pga_rock",
",",
"sites",
",",
"period",
",",
"rjb",
")",
":",
"flin",
"=",
"self",
".",
"_get_linear_site_term",
"(",
"C",
",",
"sites",
".",
"vs30",
")",
"fnl",
"=",
"self",
".",
"_get_nonlinear_site_term",
"(",
"C",
",",
"sites",
".",
"vs30",
",",
"pga_rock",
")",
"fbd",
"=",
"self",
".",
"_get_basin_depth_term",
"(",
"C",
",",
"sites",
",",
"period",
")",
"return",
"flin",
"+",
"fnl",
"+",
"fbd"
]
| Returns the site-scaling term (equation 5), broken down into a
linear scaling, a nonlinear scaling and a basin scaling term | [
"Returns",
"the",
"site",
"-",
"scaling",
"term",
"(",
"equation",
"5",
")",
"broken",
"down",
"into",
"a",
"linear",
"scaling",
"a",
"nonlinear",
"scaling",
"and",
"a",
"basin",
"scaling",
"term"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L152-L160 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | BooreEtAl2014._get_linear_site_term | def _get_linear_site_term(self, C, vs30):
"""
Returns the linear site scaling term (equation 6)
"""
flin = vs30 / self.CONSTS["Vref"]
flin[vs30 > C["Vc"]] = C["Vc"] / self.CONSTS["Vref"]
return C["c"] * np.log(flin) | python | def _get_linear_site_term(self, C, vs30):
flin = vs30 / self.CONSTS["Vref"]
flin[vs30 > C["Vc"]] = C["Vc"] / self.CONSTS["Vref"]
return C["c"] * np.log(flin) | [
"def",
"_get_linear_site_term",
"(",
"self",
",",
"C",
",",
"vs30",
")",
":",
"flin",
"=",
"vs30",
"/",
"self",
".",
"CONSTS",
"[",
"\"Vref\"",
"]",
"flin",
"[",
"vs30",
">",
"C",
"[",
"\"Vc\"",
"]",
"]",
"=",
"C",
"[",
"\"Vc\"",
"]",
"/",
"self",
".",
"CONSTS",
"[",
"\"Vref\"",
"]",
"return",
"C",
"[",
"\"c\"",
"]",
"*",
"np",
".",
"log",
"(",
"flin",
")"
]
| Returns the linear site scaling term (equation 6) | [
"Returns",
"the",
"linear",
"site",
"scaling",
"term",
"(",
"equation",
"6",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L162-L168 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | BooreEtAl2014._get_nonlinear_site_term | def _get_nonlinear_site_term(self, C, vs30, pga_rock):
"""
Returns the nonlinear site scaling term (equation 7)
"""
v_s = np.copy(vs30)
v_s[vs30 > 760.] = 760.
# Nonlinear controlling parameter (equation 8)
f_2 = C["f4"] * (np.exp(C["f5"] * (v_s - 360.)) -
np.exp(C["f5"] * 400.))
fnl = self.CONSTS["f1"] + f_2 * np.log((pga_rock + self.CONSTS["f3"]) /
self.CONSTS["f3"])
return fnl | python | def _get_nonlinear_site_term(self, C, vs30, pga_rock):
v_s = np.copy(vs30)
v_s[vs30 > 760.] = 760.
f_2 = C["f4"] * (np.exp(C["f5"] * (v_s - 360.)) -
np.exp(C["f5"] * 400.))
fnl = self.CONSTS["f1"] + f_2 * np.log((pga_rock + self.CONSTS["f3"]) /
self.CONSTS["f3"])
return fnl | [
"def",
"_get_nonlinear_site_term",
"(",
"self",
",",
"C",
",",
"vs30",
",",
"pga_rock",
")",
":",
"v_s",
"=",
"np",
".",
"copy",
"(",
"vs30",
")",
"v_s",
"[",
"vs30",
">",
"760.",
"]",
"=",
"760.",
"# Nonlinear controlling parameter (equation 8)",
"f_2",
"=",
"C",
"[",
"\"f4\"",
"]",
"*",
"(",
"np",
".",
"exp",
"(",
"C",
"[",
"\"f5\"",
"]",
"*",
"(",
"v_s",
"-",
"360.",
")",
")",
"-",
"np",
".",
"exp",
"(",
"C",
"[",
"\"f5\"",
"]",
"*",
"400.",
")",
")",
"fnl",
"=",
"self",
".",
"CONSTS",
"[",
"\"f1\"",
"]",
"+",
"f_2",
"*",
"np",
".",
"log",
"(",
"(",
"pga_rock",
"+",
"self",
".",
"CONSTS",
"[",
"\"f3\"",
"]",
")",
"/",
"self",
".",
"CONSTS",
"[",
"\"f3\"",
"]",
")",
"return",
"fnl"
]
| Returns the nonlinear site scaling term (equation 7) | [
"Returns",
"the",
"nonlinear",
"site",
"scaling",
"term",
"(",
"equation",
"7",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L170-L181 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | BooreEtAl2014._get_basin_depth_term | def _get_basin_depth_term(self, C, sites, period):
"""
In the case of the base model the basin depth term is switched off.
Therefore we return an array of zeros.
"""
return np.zeros(len(sites.vs30), dtype=float) | python | def _get_basin_depth_term(self, C, sites, period):
return np.zeros(len(sites.vs30), dtype=float) | [
"def",
"_get_basin_depth_term",
"(",
"self",
",",
"C",
",",
"sites",
",",
"period",
")",
":",
"return",
"np",
".",
"zeros",
"(",
"len",
"(",
"sites",
".",
"vs30",
")",
",",
"dtype",
"=",
"float",
")"
]
| In the case of the base model the basin depth term is switched off.
Therefore we return an array of zeros. | [
"In",
"the",
"case",
"of",
"the",
"base",
"model",
"the",
"basin",
"depth",
"term",
"is",
"switched",
"off",
".",
"Therefore",
"we",
"return",
"an",
"array",
"of",
"zeros",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L183-L188 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | BooreEtAl2014._get_stddevs | def _get_stddevs(self, C, rup, dists, sites, stddev_types):
"""
Returns the aleatory uncertainty terms described in equations (13) to
(17)
"""
stddevs = []
num_sites = len(sites.vs30)
tau = self._get_inter_event_tau(C, rup.mag, num_sites)
phi = self._get_intra_event_phi(C,
rup.mag,
dists.rjb,
sites.vs30,
num_sites)
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(np.sqrt((tau ** 2.0) + (phi ** 2.0)))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(phi)
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(tau)
return stddevs | python | def _get_stddevs(self, C, rup, dists, sites, stddev_types):
stddevs = []
num_sites = len(sites.vs30)
tau = self._get_inter_event_tau(C, rup.mag, num_sites)
phi = self._get_intra_event_phi(C,
rup.mag,
dists.rjb,
sites.vs30,
num_sites)
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(np.sqrt((tau ** 2.0) + (phi ** 2.0)))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(phi)
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(tau)
return stddevs | [
"def",
"_get_stddevs",
"(",
"self",
",",
"C",
",",
"rup",
",",
"dists",
",",
"sites",
",",
"stddev_types",
")",
":",
"stddevs",
"=",
"[",
"]",
"num_sites",
"=",
"len",
"(",
"sites",
".",
"vs30",
")",
"tau",
"=",
"self",
".",
"_get_inter_event_tau",
"(",
"C",
",",
"rup",
".",
"mag",
",",
"num_sites",
")",
"phi",
"=",
"self",
".",
"_get_intra_event_phi",
"(",
"C",
",",
"rup",
".",
"mag",
",",
"dists",
".",
"rjb",
",",
"sites",
".",
"vs30",
",",
"num_sites",
")",
"for",
"stddev_type",
"in",
"stddev_types",
":",
"assert",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
"if",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"TOTAL",
":",
"stddevs",
".",
"append",
"(",
"np",
".",
"sqrt",
"(",
"(",
"tau",
"**",
"2.0",
")",
"+",
"(",
"phi",
"**",
"2.0",
")",
")",
")",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTRA_EVENT",
":",
"stddevs",
".",
"append",
"(",
"phi",
")",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTER_EVENT",
":",
"stddevs",
".",
"append",
"(",
"tau",
")",
"return",
"stddevs"
]
| Returns the aleatory uncertainty terms described in equations (13) to
(17) | [
"Returns",
"the",
"aleatory",
"uncertainty",
"terms",
"described",
"in",
"equations",
"(",
"13",
")",
"to",
"(",
"17",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L190-L211 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | BooreEtAl2014._get_inter_event_tau | def _get_inter_event_tau(self, C, mag, num_sites):
"""
Returns the inter-event standard deviation (tau), which is dependent
on magnitude
"""
base_vals = np.zeros(num_sites)
if mag <= 4.5:
return base_vals + C["t1"]
elif mag >= 5.5:
return base_vals + C["t2"]
else:
return base_vals + C["t1"] + (C["t2"] - C["t1"]) * (mag - 4.5) | python | def _get_inter_event_tau(self, C, mag, num_sites):
base_vals = np.zeros(num_sites)
if mag <= 4.5:
return base_vals + C["t1"]
elif mag >= 5.5:
return base_vals + C["t2"]
else:
return base_vals + C["t1"] + (C["t2"] - C["t1"]) * (mag - 4.5) | [
"def",
"_get_inter_event_tau",
"(",
"self",
",",
"C",
",",
"mag",
",",
"num_sites",
")",
":",
"base_vals",
"=",
"np",
".",
"zeros",
"(",
"num_sites",
")",
"if",
"mag",
"<=",
"4.5",
":",
"return",
"base_vals",
"+",
"C",
"[",
"\"t1\"",
"]",
"elif",
"mag",
">=",
"5.5",
":",
"return",
"base_vals",
"+",
"C",
"[",
"\"t2\"",
"]",
"else",
":",
"return",
"base_vals",
"+",
"C",
"[",
"\"t1\"",
"]",
"+",
"(",
"C",
"[",
"\"t2\"",
"]",
"-",
"C",
"[",
"\"t1\"",
"]",
")",
"*",
"(",
"mag",
"-",
"4.5",
")"
]
| Returns the inter-event standard deviation (tau), which is dependent
on magnitude | [
"Returns",
"the",
"inter",
"-",
"event",
"standard",
"deviation",
"(",
"tau",
")",
"which",
"is",
"dependent",
"on",
"magnitude"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L213-L224 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | BooreEtAl2014._get_intra_event_phi | def _get_intra_event_phi(self, C, mag, rjb, vs30, num_sites):
"""
Returns the intra-event standard deviation (phi), dependent on
magnitude, distance and vs30
"""
base_vals = np.zeros(num_sites)
# Magnitude Dependent phi (Equation 17)
if mag <= 4.5:
base_vals += C["f1"]
elif mag >= 5.5:
base_vals += C["f2"]
else:
base_vals += (C["f1"] + (C["f2"] - C["f1"]) * (mag - 4.5))
# Distance dependent phi (Equation 16)
idx1 = rjb > C["R2"]
base_vals[idx1] += C["DfR"]
idx2 = np.logical_and(rjb > C["R1"], rjb <= C["R2"])
base_vals[idx2] += (C["DfR"] * (np.log(rjb[idx2] / C["R1"]) /
np.log(C["R2"] / C["R1"])))
# Site-dependent phi (Equation 15)
idx1 = vs30 <= self.CONSTS["v1"]
base_vals[idx1] -= C["DfV"]
idx2 = np.logical_and(vs30 >= self.CONSTS["v1"],
vs30 <= self.CONSTS["v2"])
base_vals[idx2] -= (
C["DfV"] * (np.log(self.CONSTS["v2"] / vs30[idx2]) /
np.log(self.CONSTS["v2"] / self.CONSTS["v1"])))
return base_vals | python | def _get_intra_event_phi(self, C, mag, rjb, vs30, num_sites):
base_vals = np.zeros(num_sites)
if mag <= 4.5:
base_vals += C["f1"]
elif mag >= 5.5:
base_vals += C["f2"]
else:
base_vals += (C["f1"] + (C["f2"] - C["f1"]) * (mag - 4.5))
idx1 = rjb > C["R2"]
base_vals[idx1] += C["DfR"]
idx2 = np.logical_and(rjb > C["R1"], rjb <= C["R2"])
base_vals[idx2] += (C["DfR"] * (np.log(rjb[idx2] / C["R1"]) /
np.log(C["R2"] / C["R1"])))
idx1 = vs30 <= self.CONSTS["v1"]
base_vals[idx1] -= C["DfV"]
idx2 = np.logical_and(vs30 >= self.CONSTS["v1"],
vs30 <= self.CONSTS["v2"])
base_vals[idx2] -= (
C["DfV"] * (np.log(self.CONSTS["v2"] / vs30[idx2]) /
np.log(self.CONSTS["v2"] / self.CONSTS["v1"])))
return base_vals | [
"def",
"_get_intra_event_phi",
"(",
"self",
",",
"C",
",",
"mag",
",",
"rjb",
",",
"vs30",
",",
"num_sites",
")",
":",
"base_vals",
"=",
"np",
".",
"zeros",
"(",
"num_sites",
")",
"# Magnitude Dependent phi (Equation 17)",
"if",
"mag",
"<=",
"4.5",
":",
"base_vals",
"+=",
"C",
"[",
"\"f1\"",
"]",
"elif",
"mag",
">=",
"5.5",
":",
"base_vals",
"+=",
"C",
"[",
"\"f2\"",
"]",
"else",
":",
"base_vals",
"+=",
"(",
"C",
"[",
"\"f1\"",
"]",
"+",
"(",
"C",
"[",
"\"f2\"",
"]",
"-",
"C",
"[",
"\"f1\"",
"]",
")",
"*",
"(",
"mag",
"-",
"4.5",
")",
")",
"# Distance dependent phi (Equation 16)",
"idx1",
"=",
"rjb",
">",
"C",
"[",
"\"R2\"",
"]",
"base_vals",
"[",
"idx1",
"]",
"+=",
"C",
"[",
"\"DfR\"",
"]",
"idx2",
"=",
"np",
".",
"logical_and",
"(",
"rjb",
">",
"C",
"[",
"\"R1\"",
"]",
",",
"rjb",
"<=",
"C",
"[",
"\"R2\"",
"]",
")",
"base_vals",
"[",
"idx2",
"]",
"+=",
"(",
"C",
"[",
"\"DfR\"",
"]",
"*",
"(",
"np",
".",
"log",
"(",
"rjb",
"[",
"idx2",
"]",
"/",
"C",
"[",
"\"R1\"",
"]",
")",
"/",
"np",
".",
"log",
"(",
"C",
"[",
"\"R2\"",
"]",
"/",
"C",
"[",
"\"R1\"",
"]",
")",
")",
")",
"# Site-dependent phi (Equation 15)",
"idx1",
"=",
"vs30",
"<=",
"self",
".",
"CONSTS",
"[",
"\"v1\"",
"]",
"base_vals",
"[",
"idx1",
"]",
"-=",
"C",
"[",
"\"DfV\"",
"]",
"idx2",
"=",
"np",
".",
"logical_and",
"(",
"vs30",
">=",
"self",
".",
"CONSTS",
"[",
"\"v1\"",
"]",
",",
"vs30",
"<=",
"self",
".",
"CONSTS",
"[",
"\"v2\"",
"]",
")",
"base_vals",
"[",
"idx2",
"]",
"-=",
"(",
"C",
"[",
"\"DfV\"",
"]",
"*",
"(",
"np",
".",
"log",
"(",
"self",
".",
"CONSTS",
"[",
"\"v2\"",
"]",
"/",
"vs30",
"[",
"idx2",
"]",
")",
"/",
"np",
".",
"log",
"(",
"self",
".",
"CONSTS",
"[",
"\"v2\"",
"]",
"/",
"self",
".",
"CONSTS",
"[",
"\"v1\"",
"]",
")",
")",
")",
"return",
"base_vals"
]
| Returns the intra-event standard deviation (phi), dependent on
magnitude, distance and vs30 | [
"Returns",
"the",
"intra",
"-",
"event",
"standard",
"deviation",
"(",
"phi",
")",
"dependent",
"on",
"magnitude",
"distance",
"and",
"vs30"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L226-L253 |
gem/oq-engine | openquake/hazardlib/gsim/boore_2014.py | BooreEtAl2014CaliforniaBasin._get_basin_depth_term | def _get_basin_depth_term(self, C, sites, period):
"""
In the case of the base model the basin depth term is switched off.
Therefore we return an array of zeros.
"""
if period < 0.65:
f_dz1 = np.zeros(len(sites.vs30), dtype=float)
else:
f_dz1 = C["f7"] + np.zeros(len(sites.vs30), dtype=float)
f_ratio = C["f7"] / C["f6"]
dz1 = (sites.z1pt0 / 1000.0) - california_basin_model(sites.vs30)
idx = dz1 <= f_ratio
f_dz1[idx] = C["f6"] * dz1[idx]
return f_dz1 | python | def _get_basin_depth_term(self, C, sites, period):
if period < 0.65:
f_dz1 = np.zeros(len(sites.vs30), dtype=float)
else:
f_dz1 = C["f7"] + np.zeros(len(sites.vs30), dtype=float)
f_ratio = C["f7"] / C["f6"]
dz1 = (sites.z1pt0 / 1000.0) - california_basin_model(sites.vs30)
idx = dz1 <= f_ratio
f_dz1[idx] = C["f6"] * dz1[idx]
return f_dz1 | [
"def",
"_get_basin_depth_term",
"(",
"self",
",",
"C",
",",
"sites",
",",
"period",
")",
":",
"if",
"period",
"<",
"0.65",
":",
"f_dz1",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"sites",
".",
"vs30",
")",
",",
"dtype",
"=",
"float",
")",
"else",
":",
"f_dz1",
"=",
"C",
"[",
"\"f7\"",
"]",
"+",
"np",
".",
"zeros",
"(",
"len",
"(",
"sites",
".",
"vs30",
")",
",",
"dtype",
"=",
"float",
")",
"f_ratio",
"=",
"C",
"[",
"\"f7\"",
"]",
"/",
"C",
"[",
"\"f6\"",
"]",
"dz1",
"=",
"(",
"sites",
".",
"z1pt0",
"/",
"1000.0",
")",
"-",
"california_basin_model",
"(",
"sites",
".",
"vs30",
")",
"idx",
"=",
"dz1",
"<=",
"f_ratio",
"f_dz1",
"[",
"idx",
"]",
"=",
"C",
"[",
"\"f6\"",
"]",
"*",
"dz1",
"[",
"idx",
"]",
"return",
"f_dz1"
]
| In the case of the base model the basin depth term is switched off.
Therefore we return an array of zeros. | [
"In",
"the",
"case",
"of",
"the",
"base",
"model",
"the",
"basin",
"depth",
"term",
"is",
"switched",
"off",
".",
"Therefore",
"we",
"return",
"an",
"array",
"of",
"zeros",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L636-L649 |
gem/oq-engine | openquake/hazardlib/gsim/pankow_pechmann_2004.py | PankowPechmann2004.get_mean_and_stddevs | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
C = self.COEFFS[imt]
M = rup.mag - 6
R = np.sqrt(dists.rjb ** 2 + C['h'] ** 2)
# In the original formulation of the GMPE, distinction is only made
# between rock and soil sites, which I assumed separated by the Vs30
# value of 910m/s (see equation 5 of the paper)
gamma = np.array([0 if v > 910. else 1 for v in sites.vs30])
mean = np.zeros_like(R)
mean += C['b1'] + \
C['b2'] * M + \
C['b3'] * M ** 2 + \
C['b5'] * np.log10(R) + \
C['b6'] * gamma
# Convert from base 10 to base e
mean /= np.log10(np.e)
# Converting PSV to PSA
if imt != PGA() and imt != PGV():
omega = 2.*np.pi/imt.period
mean += np.log(omega/(gravity*100))
# Computing standard deviation
stddevs = self._get_stddevs(C, stddev_types, dists.rjb.shape[0])
# Convert from base 10 to base e
stddevs = [sd/np.log10(np.e) for sd in stddevs]
return mean, stddevs | python | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
C = self.COEFFS[imt]
M = rup.mag - 6
R = np.sqrt(dists.rjb ** 2 + C['h'] ** 2)
gamma = np.array([0 if v > 910. else 1 for v in sites.vs30])
mean = np.zeros_like(R)
mean += C['b1'] + \
C['b2'] * M + \
C['b3'] * M ** 2 + \
C['b5'] * np.log10(R) + \
C['b6'] * gamma
mean /= np.log10(np.e)
if imt != PGA() and imt != PGV():
omega = 2.*np.pi/imt.period
mean += np.log(omega/(gravity*100))
stddevs = self._get_stddevs(C, stddev_types, dists.rjb.shape[0])
stddevs = [sd/np.log10(np.e) for sd in stddevs]
return mean, stddevs | [
"def",
"get_mean_and_stddevs",
"(",
"self",
",",
"sites",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stddev_types",
")",
":",
"C",
"=",
"self",
".",
"COEFFS",
"[",
"imt",
"]",
"M",
"=",
"rup",
".",
"mag",
"-",
"6",
"R",
"=",
"np",
".",
"sqrt",
"(",
"dists",
".",
"rjb",
"**",
"2",
"+",
"C",
"[",
"'h'",
"]",
"**",
"2",
")",
"# In the original formulation of the GMPE, distinction is only made",
"# between rock and soil sites, which I assumed separated by the Vs30",
"# value of 910m/s (see equation 5 of the paper)",
"gamma",
"=",
"np",
".",
"array",
"(",
"[",
"0",
"if",
"v",
">",
"910.",
"else",
"1",
"for",
"v",
"in",
"sites",
".",
"vs30",
"]",
")",
"mean",
"=",
"np",
".",
"zeros_like",
"(",
"R",
")",
"mean",
"+=",
"C",
"[",
"'b1'",
"]",
"+",
"C",
"[",
"'b2'",
"]",
"*",
"M",
"+",
"C",
"[",
"'b3'",
"]",
"*",
"M",
"**",
"2",
"+",
"C",
"[",
"'b5'",
"]",
"*",
"np",
".",
"log10",
"(",
"R",
")",
"+",
"C",
"[",
"'b6'",
"]",
"*",
"gamma",
"# Convert from base 10 to base e",
"mean",
"/=",
"np",
".",
"log10",
"(",
"np",
".",
"e",
")",
"# Converting PSV to PSA",
"if",
"imt",
"!=",
"PGA",
"(",
")",
"and",
"imt",
"!=",
"PGV",
"(",
")",
":",
"omega",
"=",
"2.",
"*",
"np",
".",
"pi",
"/",
"imt",
".",
"period",
"mean",
"+=",
"np",
".",
"log",
"(",
"omega",
"/",
"(",
"gravity",
"*",
"100",
")",
")",
"# Computing standard deviation",
"stddevs",
"=",
"self",
".",
"_get_stddevs",
"(",
"C",
",",
"stddev_types",
",",
"dists",
".",
"rjb",
".",
"shape",
"[",
"0",
"]",
")",
"# Convert from base 10 to base e",
"stddevs",
"=",
"[",
"sd",
"/",
"np",
".",
"log10",
"(",
"np",
".",
"e",
")",
"for",
"sd",
"in",
"stddevs",
"]",
"return",
"mean",
",",
"stddevs"
]
| See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. | [
"See",
":",
"meth",
":",
"superclass",
"method",
"<",
".",
"base",
".",
"GroundShakingIntensityModel",
".",
"get_mean_and_stddevs",
">",
"for",
"spec",
"of",
"input",
"and",
"result",
"values",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/pankow_pechmann_2004.py#L76-L114 |
gem/oq-engine | openquake/hazardlib/gsim/pankow_pechmann_2004.py | PankowPechmann2004._get_stddevs | def _get_stddevs(self, C, stddev_types, num_sites):
"""
Return total standard deviation.
"""
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
if self.DEFINED_FOR_INTENSITY_MEASURE_COMPONENT == 'Random horizontal':
# Using equation 8 of the paper,
# corrected as indicated in the erratum
Sr = np.sqrt(C['SlZ']**2 + (C['S3']/np.sqrt(2))**2)
else:
Sr = C['SlZ']
stddevs = [np.zeros(num_sites) + Sr for _ in stddev_types]
return stddevs | python | def _get_stddevs(self, C, stddev_types, num_sites):
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
if self.DEFINED_FOR_INTENSITY_MEASURE_COMPONENT == 'Random horizontal':
Sr = np.sqrt(C['SlZ']**2 + (C['S3']/np.sqrt(2))**2)
else:
Sr = C['SlZ']
stddevs = [np.zeros(num_sites) + Sr for _ in stddev_types]
return stddevs | [
"def",
"_get_stddevs",
"(",
"self",
",",
"C",
",",
"stddev_types",
",",
"num_sites",
")",
":",
"assert",
"all",
"(",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
"for",
"stddev_type",
"in",
"stddev_types",
")",
"if",
"self",
".",
"DEFINED_FOR_INTENSITY_MEASURE_COMPONENT",
"==",
"'Random horizontal'",
":",
"# Using equation 8 of the paper,",
"# corrected as indicated in the erratum",
"Sr",
"=",
"np",
".",
"sqrt",
"(",
"C",
"[",
"'SlZ'",
"]",
"**",
"2",
"+",
"(",
"C",
"[",
"'S3'",
"]",
"/",
"np",
".",
"sqrt",
"(",
"2",
")",
")",
"**",
"2",
")",
"else",
":",
"Sr",
"=",
"C",
"[",
"'SlZ'",
"]",
"stddevs",
"=",
"[",
"np",
".",
"zeros",
"(",
"num_sites",
")",
"+",
"Sr",
"for",
"_",
"in",
"stddev_types",
"]",
"return",
"stddevs"
]
| Return total standard deviation. | [
"Return",
"total",
"standard",
"deviation",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/pankow_pechmann_2004.py#L116-L131 |
gem/oq-engine | openquake/hazardlib/mfd/multi_mfd.py | MultiMFD.get_min_max_mag | def get_min_max_mag(self):

"""
        :returns: minimum and maximum magnitudes from the underlying MFDs
"""
m1s, m2s = [], []
for mfd in self:
m1, m2 = mfd.get_min_max_mag()
m1s.append(m1)
m2s.append(m2)
return min(m1s), max(m2s) | python | def get_min_max_mag(self):
m1s, m2s = [], []
for mfd in self:
m1, m2 = mfd.get_min_max_mag()
m1s.append(m1)
m2s.append(m2)
return min(m1s), max(m2s) | [
"def",
"get_min_max_mag",
"(",
"self",
")",
":",
"m1s",
",",
"m2s",
"=",
"[",
"]",
",",
"[",
"]",
"for",
"mfd",
"in",
"self",
":",
"m1",
",",
"m2",
"=",
"mfd",
".",
"get_min_max_mag",
"(",
")",
"m1s",
".",
"append",
"(",
"m1",
")",
"m2s",
".",
"append",
"(",
"m2",
")",
"return",
"min",
"(",
"m1s",
")",
",",
"max",
"(",
"m2s",
")"
]
| :returns: minimum and maximum magnitudes from the underlying MFDs | [
":",
"returns",
":",
"minumum",
"and",
"maximum",
"magnitudes",
"from",
"the",
"underlying",
"MFDs"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/mfd/multi_mfd.py#L121-L130 |
gem/oq-engine | openquake/hazardlib/mfd/multi_mfd.py | MultiMFD.modify | def modify(self, modification, parameters):
"""
Apply a modification to the underlying point sources, with the
same parameters for all sources
"""
for src in self:
src.modify(modification, parameters) | python | def modify(self, modification, parameters):
for src in self:
src.modify(modification, parameters) | [
"def",
"modify",
"(",
"self",
",",
"modification",
",",
"parameters",
")",
":",
"for",
"src",
"in",
"self",
":",
"src",
".",
"modify",
"(",
"modification",
",",
"parameters",
")"
]
| Apply a modification to the underlying point sources, with the
same parameters for all sources | [
"Apply",
"a",
"modification",
"to",
"the",
"underlying",
"point",
"sources",
"with",
"the",
"same",
"parameters",
"for",
"all",
"sources"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/mfd/multi_mfd.py#L143-L149 |
gem/oq-engine | openquake/hazardlib/gsim/geomatrix_1993.py | Geomatrix1993SSlabNSHMP2008._compute_mean | def _compute_mean(self, C, mag, ztor, rrup):
"""
Compute mean value as in ``subroutine getGeom`` in ``hazgridXnga2.f``
"""
gc0 = 0.2418
ci = 0.3846
gch = 0.00607
g4 = 1.7818
ge = 0.554
gm = 1.414
mean = (
gc0 + ci + ztor * gch + C['gc1'] +
gm * mag + C['gc2'] * (10 - mag) ** 3 +
C['gc3'] * np.log(rrup + g4 * np.exp(ge * mag))
)
return mean | python | def _compute_mean(self, C, mag, ztor, rrup):
gc0 = 0.2418
ci = 0.3846
gch = 0.00607
g4 = 1.7818
ge = 0.554
gm = 1.414
mean = (
gc0 + ci + ztor * gch + C['gc1'] +
gm * mag + C['gc2'] * (10 - mag) ** 3 +
C['gc3'] * np.log(rrup + g4 * np.exp(ge * mag))
)
return mean | [
"def",
"_compute_mean",
"(",
"self",
",",
"C",
",",
"mag",
",",
"ztor",
",",
"rrup",
")",
":",
"gc0",
"=",
"0.2418",
"ci",
"=",
"0.3846",
"gch",
"=",
"0.00607",
"g4",
"=",
"1.7818",
"ge",
"=",
"0.554",
"gm",
"=",
"1.414",
"mean",
"=",
"(",
"gc0",
"+",
"ci",
"+",
"ztor",
"*",
"gch",
"+",
"C",
"[",
"'gc1'",
"]",
"+",
"gm",
"*",
"mag",
"+",
"C",
"[",
"'gc2'",
"]",
"*",
"(",
"10",
"-",
"mag",
")",
"**",
"3",
"+",
"C",
"[",
"'gc3'",
"]",
"*",
"np",
".",
"log",
"(",
"rrup",
"+",
"g4",
"*",
"np",
".",
"exp",
"(",
"ge",
"*",
"mag",
")",
")",
")",
"return",
"mean"
]
| Compute mean value as in ``subroutine getGeom`` in ``hazgridXnga2.f`` | [
"Compute",
"mean",
"value",
"as",
"in",
"subroutine",
"getGeom",
"in",
"hazgridXnga2",
".",
"f"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/geomatrix_1993.py#L92-L109 |
gem/oq-engine | openquake/commands/abort.py | abort | def abort(job_id):
"""
Abort the given job
"""
job = logs.dbcmd('get_job', job_id) # job_id can be negative
if job is None:
print('There is no job %d' % job_id)
return
elif job.status not in ('executing', 'running'):
print('Job %d is %s' % (job.id, job.status))
return
name = 'oq-job-%d' % job.id
for p in psutil.process_iter():
if p.name() == name:
try:
os.kill(p.pid, signal.SIGTERM)
logs.dbcmd('set_status', job.id, 'aborted')
print('Job %d aborted' % job.id)
except Exception as exc:
print(exc)
break
else: # no break
# set job as failed if it is set as 'executing' or 'running' in the db
# but the corresponding process is not running anymore
logs.dbcmd('set_status', job.id, 'failed')
print('Unable to find a process for job %d,'
' setting it as failed' % job.id) | python | def abort(job_id):
job = logs.dbcmd('get_job', job_id)
if job is None:
print('There is no job %d' % job_id)
return
elif job.status not in ('executing', 'running'):
print('Job %d is %s' % (job.id, job.status))
return
name = 'oq-job-%d' % job.id
for p in psutil.process_iter():
if p.name() == name:
try:
os.kill(p.pid, signal.SIGTERM)
logs.dbcmd('set_status', job.id, 'aborted')
print('Job %d aborted' % job.id)
except Exception as exc:
print(exc)
break
else:
logs.dbcmd('set_status', job.id, 'failed')
print('Unable to find a process for job %d,'
' setting it as failed' % job.id) | [
"def",
"abort",
"(",
"job_id",
")",
":",
"job",
"=",
"logs",
".",
"dbcmd",
"(",
"'get_job'",
",",
"job_id",
")",
"# job_id can be negative",
"if",
"job",
"is",
"None",
":",
"print",
"(",
"'There is no job %d'",
"%",
"job_id",
")",
"return",
"elif",
"job",
".",
"status",
"not",
"in",
"(",
"'executing'",
",",
"'running'",
")",
":",
"print",
"(",
"'Job %d is %s'",
"%",
"(",
"job",
".",
"id",
",",
"job",
".",
"status",
")",
")",
"return",
"name",
"=",
"'oq-job-%d'",
"%",
"job",
".",
"id",
"for",
"p",
"in",
"psutil",
".",
"process_iter",
"(",
")",
":",
"if",
"p",
".",
"name",
"(",
")",
"==",
"name",
":",
"try",
":",
"os",
".",
"kill",
"(",
"p",
".",
"pid",
",",
"signal",
".",
"SIGTERM",
")",
"logs",
".",
"dbcmd",
"(",
"'set_status'",
",",
"job",
".",
"id",
",",
"'aborted'",
")",
"print",
"(",
"'Job %d aborted'",
"%",
"job",
".",
"id",
")",
"except",
"Exception",
"as",
"exc",
":",
"print",
"(",
"exc",
")",
"break",
"else",
":",
"# no break",
"# set job as failed if it is set as 'executing' or 'running' in the db",
"# but the corresponding process is not running anymore",
"logs",
".",
"dbcmd",
"(",
"'set_status'",
",",
"job",
".",
"id",
",",
"'failed'",
")",
"print",
"(",
"'Unable to find a process for job %d,'",
"' setting it as failed'",
"%",
"job",
".",
"id",
")"
]
| Abort the given job | [
"Abort",
"the",
"given",
"job"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/abort.py#L27-L53 |
gem/oq-engine | openquake/baselib/sap.py | get_parentparser | def get_parentparser(parser, description=None, help=True):
"""
:param parser: :class:`argparse.ArgumentParser` instance or None
:param description: string used to build a new parser if parser is None
:param help: flag used to build a new parser if parser is None
:returns: if parser is None the new parser; otherwise the `.parentparser`
attribute (if set) or the parser itself (if not set)
"""
if parser is None:
return argparse.ArgumentParser(
description=description, add_help=help)
elif hasattr(parser, 'parentparser'):
return parser.parentparser
else:
return parser | python | def get_parentparser(parser, description=None, help=True):
if parser is None:
return argparse.ArgumentParser(
description=description, add_help=help)
elif hasattr(parser, 'parentparser'):
return parser.parentparser
else:
return parser | [
"def",
"get_parentparser",
"(",
"parser",
",",
"description",
"=",
"None",
",",
"help",
"=",
"True",
")",
":",
"if",
"parser",
"is",
"None",
":",
"return",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"description",
",",
"add_help",
"=",
"help",
")",
"elif",
"hasattr",
"(",
"parser",
",",
"'parentparser'",
")",
":",
"return",
"parser",
".",
"parentparser",
"else",
":",
"return",
"parser"
]
| :param parser: :class:`argparse.ArgumentParser` instance or None
:param description: string used to build a new parser if parser is None
:param help: flag used to build a new parser if parser is None
:returns: if parser is None the new parser; otherwise the `.parentparser`
attribute (if set) or the parser itself (if not set) | [
":",
"param",
"parser",
":",
":",
"class",
":",
"argparse",
".",
"ArgumentParser",
"instance",
"or",
"None",
":",
"param",
"description",
":",
"string",
"used",
"to",
"build",
"a",
"new",
"parser",
"if",
"parser",
"is",
"None",
":",
"param",
"help",
":",
"flag",
"used",
"to",
"build",
"a",
"new",
"parser",
"if",
"parser",
"is",
"None",
":",
"returns",
":",
"if",
"parser",
"is",
"None",
"the",
"new",
"parser",
";",
"otherwise",
"the",
".",
"parentparser",
"attribute",
"(",
"if",
"set",
")",
"or",
"the",
"parser",
"itself",
"(",
"if",
"not",
"set",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L60-L74 |
gem/oq-engine | openquake/baselib/sap.py | compose | def compose(scripts, name='main', description=None, prog=None,
version=None):
"""
Collects together different scripts and builds a single
script dispatching to the subparsers depending on
the first argument, i.e. the name of the subparser to invoke.
:param scripts: a list of script instances
:param name: the name of the composed parser
:param description: description of the composed parser
:param prog: name of the script printed in the usage message
:param version: version of the script printed with --version
"""
assert len(scripts) >= 1, scripts
parentparser = argparse.ArgumentParser(
description=description, add_help=False)
parentparser.add_argument(
'--version', '-v', action='version', version=version)
subparsers = parentparser.add_subparsers(
help='available subcommands; use %s help <subcmd>' % prog,
prog=prog)
def gethelp(cmd=None):
if cmd is None:
print(parentparser.format_help())
return
subp = subparsers._name_parser_map.get(cmd)
if subp is None:
print('No help for unknown command %r' % cmd)
else:
print(subp.format_help())
help_script = Script(gethelp, 'help', help=False)
progname = '%s ' % prog if prog else ''
help_script.arg('cmd', progname + 'subcommand')
for s in list(scripts) + [help_script]:
subp = subparsers.add_parser(s.name, description=s.description)
for args, kw in s.all_arguments:
subp.add_argument(*args, **kw)
subp.set_defaults(_func=s.func)
def main(**kw):
try:
func = kw.pop('_func')
except KeyError:
parentparser.print_usage()
else:
return func(**kw)
main.__name__ = name
return Script(main, name, parentparser) | python | def compose(scripts, name='main', description=None, prog=None,
version=None):
assert len(scripts) >= 1, scripts
parentparser = argparse.ArgumentParser(
description=description, add_help=False)
parentparser.add_argument(
'--version', '-v', action='version', version=version)
subparsers = parentparser.add_subparsers(
help='available subcommands; use %s help <subcmd>' % prog,
prog=prog)
def gethelp(cmd=None):
if cmd is None:
print(parentparser.format_help())
return
subp = subparsers._name_parser_map.get(cmd)
if subp is None:
print('No help for unknown command %r' % cmd)
else:
print(subp.format_help())
help_script = Script(gethelp, 'help', help=False)
progname = '%s ' % prog if prog else ''
help_script.arg('cmd', progname + 'subcommand')
for s in list(scripts) + [help_script]:
subp = subparsers.add_parser(s.name, description=s.description)
for args, kw in s.all_arguments:
subp.add_argument(*args, **kw)
subp.set_defaults(_func=s.func)
def main(**kw):
try:
func = kw.pop('_func')
except KeyError:
parentparser.print_usage()
else:
return func(**kw)
main.__name__ = name
return Script(main, name, parentparser) | [
"def",
"compose",
"(",
"scripts",
",",
"name",
"=",
"'main'",
",",
"description",
"=",
"None",
",",
"prog",
"=",
"None",
",",
"version",
"=",
"None",
")",
":",
"assert",
"len",
"(",
"scripts",
")",
">=",
"1",
",",
"scripts",
"parentparser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"description",
",",
"add_help",
"=",
"False",
")",
"parentparser",
".",
"add_argument",
"(",
"'--version'",
",",
"'-v'",
",",
"action",
"=",
"'version'",
",",
"version",
"=",
"version",
")",
"subparsers",
"=",
"parentparser",
".",
"add_subparsers",
"(",
"help",
"=",
"'available subcommands; use %s help <subcmd>'",
"%",
"prog",
",",
"prog",
"=",
"prog",
")",
"def",
"gethelp",
"(",
"cmd",
"=",
"None",
")",
":",
"if",
"cmd",
"is",
"None",
":",
"print",
"(",
"parentparser",
".",
"format_help",
"(",
")",
")",
"return",
"subp",
"=",
"subparsers",
".",
"_name_parser_map",
".",
"get",
"(",
"cmd",
")",
"if",
"subp",
"is",
"None",
":",
"print",
"(",
"'No help for unknown command %r'",
"%",
"cmd",
")",
"else",
":",
"print",
"(",
"subp",
".",
"format_help",
"(",
")",
")",
"help_script",
"=",
"Script",
"(",
"gethelp",
",",
"'help'",
",",
"help",
"=",
"False",
")",
"progname",
"=",
"'%s '",
"%",
"prog",
"if",
"prog",
"else",
"''",
"help_script",
".",
"arg",
"(",
"'cmd'",
",",
"progname",
"+",
"'subcommand'",
")",
"for",
"s",
"in",
"list",
"(",
"scripts",
")",
"+",
"[",
"help_script",
"]",
":",
"subp",
"=",
"subparsers",
".",
"add_parser",
"(",
"s",
".",
"name",
",",
"description",
"=",
"s",
".",
"description",
")",
"for",
"args",
",",
"kw",
"in",
"s",
".",
"all_arguments",
":",
"subp",
".",
"add_argument",
"(",
"*",
"args",
",",
"*",
"*",
"kw",
")",
"subp",
".",
"set_defaults",
"(",
"_func",
"=",
"s",
".",
"func",
")",
"def",
"main",
"(",
"*",
"*",
"kw",
")",
":",
"try",
":",
"func",
"=",
"kw",
".",
"pop",
"(",
"'_func'",
")",
"except",
"KeyError",
":",
"parentparser",
".",
"print_usage",
"(",
")",
"else",
":",
"return",
"func",
"(",
"*",
"*",
"kw",
")",
"main",
".",
"__name__",
"=",
"name",
"return",
"Script",
"(",
"main",
",",
"name",
",",
"parentparser",
")"
]
| Collects together different scripts and builds a single
script dispatching to the subparsers depending on
the first argument, i.e. the name of the subparser to invoke.
:param scripts: a list of script instances
:param name: the name of the composed parser
:param description: description of the composed parser
:param prog: name of the script printed in the usage message
:param version: version of the script printed with --version | [
"Collects",
"together",
"different",
"scripts",
"and",
"builds",
"a",
"single",
"script",
"dispatching",
"to",
"the",
"subparsers",
"depending",
"on",
"the",
"first",
"argument",
"i",
".",
"e",
".",
"the",
"name",
"of",
"the",
"subparser",
"to",
"invoke",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L205-L253 |
gem/oq-engine | openquake/baselib/sap.py | Script._add | def _add(self, name, *args, **kw):
"""
Add an argument to the underlying parser and grow the list
.all_arguments and the set .names
"""
argname = list(self.argdict)[self._argno]
if argname != name:
raise NameError(
'Setting argument %s, but it should be %s' % (name, argname))
self._group.add_argument(*args, **kw)
self.all_arguments.append((args, kw))
self.names.append(name)
self._argno += 1 | python | def _add(self, name, *args, **kw):
argname = list(self.argdict)[self._argno]
if argname != name:
raise NameError(
'Setting argument %s, but it should be %s' % (name, argname))
self._group.add_argument(*args, **kw)
self.all_arguments.append((args, kw))
self.names.append(name)
self._argno += 1 | [
"def",
"_add",
"(",
"self",
",",
"name",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"argname",
"=",
"list",
"(",
"self",
".",
"argdict",
")",
"[",
"self",
".",
"_argno",
"]",
"if",
"argname",
"!=",
"name",
":",
"raise",
"NameError",
"(",
"'Setting argument %s, but it should be %s'",
"%",
"(",
"name",
",",
"argname",
")",
")",
"self",
".",
"_group",
".",
"add_argument",
"(",
"*",
"args",
",",
"*",
"*",
"kw",
")",
"self",
".",
"all_arguments",
".",
"append",
"(",
"(",
"args",
",",
"kw",
")",
")",
"self",
".",
"names",
".",
"append",
"(",
"name",
")",
"self",
".",
"_argno",
"+=",
"1"
]
| Add an argument to the underlying parser and grow the list
.all_arguments and the set .names | [
"Add",
"an",
"argument",
"to",
"the",
"underlying",
"parser",
"and",
"grow",
"the",
"list",
".",
"all_arguments",
"and",
"the",
"set",
".",
"names"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L116-L128 |
gem/oq-engine | openquake/baselib/sap.py | Script.arg | def arg(self, name, help, type=None, choices=None, metavar=None,
nargs=None):
"""Describe a positional argument"""
kw = dict(help=help, type=type, choices=choices, metavar=metavar,
nargs=nargs)
default = self.argdict[name]
if default is not NODEFAULT:
kw['nargs'] = nargs or '?'
kw['default'] = default
kw['help'] = kw['help'] + ' [default: %s]' % repr(default)
self._add(name, name, **kw) | python | def arg(self, name, help, type=None, choices=None, metavar=None,
nargs=None):
kw = dict(help=help, type=type, choices=choices, metavar=metavar,
nargs=nargs)
default = self.argdict[name]
if default is not NODEFAULT:
kw['nargs'] = nargs or '?'
kw['default'] = default
kw['help'] = kw['help'] + ' [default: %s]' % repr(default)
self._add(name, name, **kw) | [
"def",
"arg",
"(",
"self",
",",
"name",
",",
"help",
",",
"type",
"=",
"None",
",",
"choices",
"=",
"None",
",",
"metavar",
"=",
"None",
",",
"nargs",
"=",
"None",
")",
":",
"kw",
"=",
"dict",
"(",
"help",
"=",
"help",
",",
"type",
"=",
"type",
",",
"choices",
"=",
"choices",
",",
"metavar",
"=",
"metavar",
",",
"nargs",
"=",
"nargs",
")",
"default",
"=",
"self",
".",
"argdict",
"[",
"name",
"]",
"if",
"default",
"is",
"not",
"NODEFAULT",
":",
"kw",
"[",
"'nargs'",
"]",
"=",
"nargs",
"or",
"'?'",
"kw",
"[",
"'default'",
"]",
"=",
"default",
"kw",
"[",
"'help'",
"]",
"=",
"kw",
"[",
"'help'",
"]",
"+",
"' [default: %s]'",
"%",
"repr",
"(",
"default",
")",
"self",
".",
"_add",
"(",
"name",
",",
"name",
",",
"*",
"*",
"kw",
")"
]
| Describe a positional argument | [
"Describe",
"a",
"positional",
"argument"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L130-L140 |
gem/oq-engine | openquake/baselib/sap.py | Script.opt | def opt(self, name, help, abbrev=None,
type=None, choices=None, metavar=None, nargs=None):
"""Describe an option"""
kw = dict(help=help, type=type, choices=choices, metavar=metavar,
nargs=nargs)
default = self.argdict[name]
if default is not NODEFAULT:
kw['default'] = default
kw['metavar'] = metavar or str_choices(choices) or str(default)
abbrev = abbrev or '-' + name[0]
abbrevs = set(args[0] for args, kw in self.all_arguments)
longname = '--' + name.replace('_', '-')
if abbrev == '-h' or abbrev in abbrevs:
# avoid conflicts with predefined abbreviations
self._add(name, longname, **kw)
else:
self._add(name, abbrev, longname, **kw) | python | def opt(self, name, help, abbrev=None,
type=None, choices=None, metavar=None, nargs=None):
kw = dict(help=help, type=type, choices=choices, metavar=metavar,
nargs=nargs)
default = self.argdict[name]
if default is not NODEFAULT:
kw['default'] = default
kw['metavar'] = metavar or str_choices(choices) or str(default)
abbrev = abbrev or '-' + name[0]
abbrevs = set(args[0] for args, kw in self.all_arguments)
longname = '--' + name.replace('_', '-')
if abbrev == '-h' or abbrev in abbrevs:
self._add(name, longname, **kw)
else:
self._add(name, abbrev, longname, **kw) | [
"def",
"opt",
"(",
"self",
",",
"name",
",",
"help",
",",
"abbrev",
"=",
"None",
",",
"type",
"=",
"None",
",",
"choices",
"=",
"None",
",",
"metavar",
"=",
"None",
",",
"nargs",
"=",
"None",
")",
":",
"kw",
"=",
"dict",
"(",
"help",
"=",
"help",
",",
"type",
"=",
"type",
",",
"choices",
"=",
"choices",
",",
"metavar",
"=",
"metavar",
",",
"nargs",
"=",
"nargs",
")",
"default",
"=",
"self",
".",
"argdict",
"[",
"name",
"]",
"if",
"default",
"is",
"not",
"NODEFAULT",
":",
"kw",
"[",
"'default'",
"]",
"=",
"default",
"kw",
"[",
"'metavar'",
"]",
"=",
"metavar",
"or",
"str_choices",
"(",
"choices",
")",
"or",
"str",
"(",
"default",
")",
"abbrev",
"=",
"abbrev",
"or",
"'-'",
"+",
"name",
"[",
"0",
"]",
"abbrevs",
"=",
"set",
"(",
"args",
"[",
"0",
"]",
"for",
"args",
",",
"kw",
"in",
"self",
".",
"all_arguments",
")",
"longname",
"=",
"'--'",
"+",
"name",
".",
"replace",
"(",
"'_'",
",",
"'-'",
")",
"if",
"abbrev",
"==",
"'-h'",
"or",
"abbrev",
"in",
"abbrevs",
":",
"# avoid conflicts with predefined abbreviations",
"self",
".",
"_add",
"(",
"name",
",",
"longname",
",",
"*",
"*",
"kw",
")",
"else",
":",
"self",
".",
"_add",
"(",
"name",
",",
"abbrev",
",",
"longname",
",",
"*",
"*",
"kw",
")"
]
| Describe an option | [
"Describe",
"an",
"option"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L142-L158 |
gem/oq-engine | openquake/baselib/sap.py | Script.flg | def flg(self, name, help, abbrev=None):
"""Describe a flag"""
abbrev = abbrev or '-' + name[0]
longname = '--' + name.replace('_', '-')
self._add(name, abbrev, longname, action='store_true', help=help) | python | def flg(self, name, help, abbrev=None):
abbrev = abbrev or '-' + name[0]
longname = '--' + name.replace('_', '-')
self._add(name, abbrev, longname, action='store_true', help=help) | [
"def",
"flg",
"(",
"self",
",",
"name",
",",
"help",
",",
"abbrev",
"=",
"None",
")",
":",
"abbrev",
"=",
"abbrev",
"or",
"'-'",
"+",
"name",
"[",
"0",
"]",
"longname",
"=",
"'--'",
"+",
"name",
".",
"replace",
"(",
"'_'",
",",
"'-'",
")",
"self",
".",
"_add",
"(",
"name",
",",
"abbrev",
",",
"longname",
",",
"action",
"=",
"'store_true'",
",",
"help",
"=",
"help",
")"
]
| Describe a flag | [
"Describe",
"a",
"flag"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L160-L164 |
gem/oq-engine | openquake/baselib/sap.py | Script.check_arguments | def check_arguments(self):
"""Make sure all arguments have a specification"""
for name, default in self.argdict.items():
if name not in self.names and default is NODEFAULT:
raise NameError('Missing argparse specification for %r' % name) | python | def check_arguments(self):
for name, default in self.argdict.items():
if name not in self.names and default is NODEFAULT:
raise NameError('Missing argparse specification for %r' % name) | [
"def",
"check_arguments",
"(",
"self",
")",
":",
"for",
"name",
",",
"default",
"in",
"self",
".",
"argdict",
".",
"items",
"(",
")",
":",
"if",
"name",
"not",
"in",
"self",
".",
"names",
"and",
"default",
"is",
"NODEFAULT",
":",
"raise",
"NameError",
"(",
"'Missing argparse specification for %r'",
"%",
"name",
")"
]
| Make sure all arguments have a specification | [
"Make",
"sure",
"all",
"arguments",
"have",
"a",
"specification"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L166-L170 |
gem/oq-engine | openquake/baselib/sap.py | Script.callfunc | def callfunc(self, argv=None):
"""
Parse the argv list and extract a dictionary of arguments which
is then passed to the function underlying the script.
"""
if not self.checked:
self.check_arguments()
self.checked = True
namespace = self.parentparser.parse_args(argv or sys.argv[1:])
return self.func(**vars(namespace)) | python | def callfunc(self, argv=None):
if not self.checked:
self.check_arguments()
self.checked = True
namespace = self.parentparser.parse_args(argv or sys.argv[1:])
return self.func(**vars(namespace)) | [
"def",
"callfunc",
"(",
"self",
",",
"argv",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"checked",
":",
"self",
".",
"check_arguments",
"(",
")",
"self",
".",
"checked",
"=",
"True",
"namespace",
"=",
"self",
".",
"parentparser",
".",
"parse_args",
"(",
"argv",
"or",
"sys",
".",
"argv",
"[",
"1",
":",
"]",
")",
"return",
"self",
".",
"func",
"(",
"*",
"*",
"vars",
"(",
"namespace",
")",
")"
]
| Parse the argv list and extract a dictionary of arguments which
is then passed to the function underlying the script. | [
"Parse",
"the",
"argv",
"list",
"and",
"extract",
"a",
"dictionary",
"of",
"arguments",
"which",
"is",
"then",
"passed",
"to",
"the",
"function",
"underlying",
"the",
"script",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L172-L181 |
gem/oq-engine | openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py | Type1RecurrenceModel.cumulative_value | def cumulative_value(self, slip_moment, mmax, mag_value, bbar, dbar):
'''
Returns the rate of events with M > mag_value
        :param float slip_moment:
Product of slip (cm/yr) * Area (cm ^ 2) * shear_modulus (dyne-cm)
:param float mmax:
Maximum magnitude
:param float mag_value:
Magnitude value
:param float bbar:
\bar{b} parameter (effectively = b * log(10.))
:param float dbar:
\bar{d} parameter
'''
delta_m = mmax - mag_value
a_1 = self._get_a1(bbar, dbar, slip_moment, mmax)
return a_1 * np.exp(bbar * (delta_m)) * (delta_m > 0.0) | python | def cumulative_value(self, slip_moment, mmax, mag_value, bbar, dbar):
delta_m = mmax - mag_value
a_1 = self._get_a1(bbar, dbar, slip_moment, mmax)
return a_1 * np.exp(bbar * (delta_m)) * (delta_m > 0.0) | [
"def",
"cumulative_value",
"(",
"self",
",",
"slip_moment",
",",
"mmax",
",",
"mag_value",
",",
"bbar",
",",
"dbar",
")",
":",
"delta_m",
"=",
"mmax",
"-",
"mag_value",
"a_1",
"=",
"self",
".",
"_get_a1",
"(",
"bbar",
",",
"dbar",
",",
"slip_moment",
",",
"mmax",
")",
"return",
"a_1",
"*",
"np",
".",
"exp",
"(",
"bbar",
"*",
"(",
"delta_m",
")",
")",
"*",
"(",
"delta_m",
">",
"0.0",
")"
]
| Returns the rate of events with M > mag_value
        :param float slip_moment:
Product of slip (cm/yr) * Area (cm ^ 2) * shear_modulus (dyne-cm)
:param float mmax:
Maximum magnitude
:param float mag_value:
Magnitude value
:param float bbar:
\bar{b} parameter (effectively = b * log(10.))
:param float dbar:
\bar{d} parameter | [
"Returns",
"the",
"rate",
"of",
"events",
"with",
"M",
">",
"mag_value"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L101-L119 |
gem/oq-engine | openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py | Type1RecurrenceModel._get_a1 | def _get_a1(bbar, dbar, slip_moment, mmax):
"""
Returns the A1 term (I.4 of Table 2 in Anderson & Luco)
"""
return ((dbar - bbar) / dbar) * (slip_moment / _scale_moment(mmax)) | python | def _get_a1(bbar, dbar, slip_moment, mmax):
return ((dbar - bbar) / dbar) * (slip_moment / _scale_moment(mmax)) | [
"def",
"_get_a1",
"(",
"bbar",
",",
"dbar",
",",
"slip_moment",
",",
"mmax",
")",
":",
"return",
"(",
"(",
"dbar",
"-",
"bbar",
")",
"/",
"dbar",
")",
"*",
"(",
"slip_moment",
"/",
"_scale_moment",
"(",
"mmax",
")",
")"
]
| Returns the A1 term (I.4 of Table 2 in Anderson & Luco) | [
"Returns",
"the",
"A1",
"term",
"(",
"I",
".",
"4",
"of",
"Table",
"2",
"in",
"Anderson",
"&",
"Luco",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L122-L126 |
gem/oq-engine | openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py | Type1RecurrenceModel.incremental_value | def incremental_value(self, slip_moment, mmax, mag_value, bbar, dbar):
"""
Returns the incremental rate of earthquakes with M = mag_value
"""
delta_m = mmax - mag_value
dirac_term = np.zeros_like(mag_value)
dirac_term[np.fabs(delta_m) < 1.0E-12] = 1.0
a_1 = self._get_a1(bbar, dbar, slip_moment, mmax)
return a_1 * (bbar * np.exp(bbar * delta_m) * (delta_m > 0.0)) +\
a_1 * dirac_term | python | def incremental_value(self, slip_moment, mmax, mag_value, bbar, dbar):
delta_m = mmax - mag_value
dirac_term = np.zeros_like(mag_value)
dirac_term[np.fabs(delta_m) < 1.0E-12] = 1.0
a_1 = self._get_a1(bbar, dbar, slip_moment, mmax)
return a_1 * (bbar * np.exp(bbar * delta_m) * (delta_m > 0.0)) +\
a_1 * dirac_term | [
"def",
"incremental_value",
"(",
"self",
",",
"slip_moment",
",",
"mmax",
",",
"mag_value",
",",
"bbar",
",",
"dbar",
")",
":",
"delta_m",
"=",
"mmax",
"-",
"mag_value",
"dirac_term",
"=",
"np",
".",
"zeros_like",
"(",
"mag_value",
")",
"dirac_term",
"[",
"np",
".",
"fabs",
"(",
"delta_m",
")",
"<",
"1.0E-12",
"]",
"=",
"1.0",
"a_1",
"=",
"self",
".",
"_get_a1",
"(",
"bbar",
",",
"dbar",
",",
"slip_moment",
",",
"mmax",
")",
"return",
"a_1",
"*",
"(",
"bbar",
"*",
"np",
".",
"exp",
"(",
"bbar",
"*",
"delta_m",
")",
"*",
"(",
"delta_m",
">",
"0.0",
")",
")",
"+",
"a_1",
"*",
"dirac_term"
]
| Returns the incremental rate of earthquakes with M = mag_value | [
"Returns",
"the",
"incremental",
"rate",
"of",
"earthquakes",
"with",
"M",
"=",
"mag_value"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L128-L137 |
gem/oq-engine | openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py | Type2RecurrenceModel.cumulative_value | def cumulative_value(self, slip_moment, mmax, mag_value, bbar, dbar):
'''
Returns the rate of events with M > mag_value
:param float slip_moment:
Product of slip (cm/yr) * Area (cm ^ 2) * shear_modulus (dyne-cm)
:param float mmax:
Maximum magnitude
:param float mag_value:
Magnitude value
:param float bbar:
\bar{b} parameter (effectively = b * log(10.))
:param float dbar:
\bar{d} parameter
'''
delta_m = mmax - mag_value
a_2 = self._get_a2(bbar, dbar, slip_moment, mmax)
return a_2 * (np.exp(bbar * delta_m) - 1.) * (delta_m > 0.0) | python | def cumulative_value(self, slip_moment, mmax, mag_value, bbar, dbar):
delta_m = mmax - mag_value
a_2 = self._get_a2(bbar, dbar, slip_moment, mmax)
return a_2 * (np.exp(bbar * delta_m) - 1.) * (delta_m > 0.0) | [
"def",
"cumulative_value",
"(",
"self",
",",
"slip_moment",
",",
"mmax",
",",
"mag_value",
",",
"bbar",
",",
"dbar",
")",
":",
"delta_m",
"=",
"mmax",
"-",
"mag_value",
"a_2",
"=",
"self",
".",
"_get_a2",
"(",
"bbar",
",",
"dbar",
",",
"slip_moment",
",",
"mmax",
")",
"return",
"a_2",
"*",
"(",
"np",
".",
"exp",
"(",
"bbar",
"*",
"delta_m",
")",
"-",
"1.",
")",
"*",
"(",
"delta_m",
">",
"0.0",
")"
]
| Returns the rate of events with M > mag_value
:param float slip_moment:
Product of slip (cm/yr) * Area (cm ^ 2) * shear_modulus (dyne-cm)
:param float mmax:
Maximum magnitude
:param float mag_value:
Magnitude value
:param float bbar:
\bar{b} parameter (effectively = b * log(10.))
:param float dbar:
\bar{d} parameter | [
"Returns",
"the",
"rate",
"of",
"events",
"with",
"M",
">",
"mag_value"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L146-L163 |
gem/oq-engine | openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py | Type2RecurrenceModel._get_a2 | def _get_a2(bbar, dbar, slip_moment, mmax):
"""
Returns the A2 value defined in II.4 of Table 2
"""
return ((dbar - bbar) / bbar) * (slip_moment / _scale_moment(mmax)) | python | def _get_a2(bbar, dbar, slip_moment, mmax):
return ((dbar - bbar) / bbar) * (slip_moment / _scale_moment(mmax)) | [
"def",
"_get_a2",
"(",
"bbar",
",",
"dbar",
",",
"slip_moment",
",",
"mmax",
")",
":",
"return",
"(",
"(",
"dbar",
"-",
"bbar",
")",
"/",
"bbar",
")",
"*",
"(",
"slip_moment",
"/",
"_scale_moment",
"(",
"mmax",
")",
")"
]
| Returns the A2 value defined in II.4 of Table 2 | [
"Returns",
"the",
"A2",
"value",
"defined",
"in",
"II",
".",
"4",
"of",
"Table",
"2"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L166-L170 |
gem/oq-engine | openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py | Type3RecurrenceModel.cumulative_value | def cumulative_value(self, slip_moment, mmax, mag_value, bbar, dbar):
'''
Returns the rate of events with M > mag_value
:param float slip_moment:
Product of slip (cm/yr) * Area (cm ^ 2) * shear_modulus (dyne-cm)
:param float mmax:
Maximum magnitude
:param float mag_value:
Magnitude value
:param float bbar:
\bar{b} parameter (effectively = b * log(10.))
:param float dbar:
\bar{d} parameter
'''
delta_m = mmax - mag_value
a_3 = self._get_a3(bbar, dbar, slip_moment, mmax)
central_term = np.exp(bbar * delta_m) - 1.0 - (bbar * delta_m)
return a_3 * central_term * (delta_m > 0.0) | python | def cumulative_value(self, slip_moment, mmax, mag_value, bbar, dbar):
delta_m = mmax - mag_value
a_3 = self._get_a3(bbar, dbar, slip_moment, mmax)
central_term = np.exp(bbar * delta_m) - 1.0 - (bbar * delta_m)
return a_3 * central_term * (delta_m > 0.0) | [
"def",
"cumulative_value",
"(",
"self",
",",
"slip_moment",
",",
"mmax",
",",
"mag_value",
",",
"bbar",
",",
"dbar",
")",
":",
"delta_m",
"=",
"mmax",
"-",
"mag_value",
"a_3",
"=",
"self",
".",
"_get_a3",
"(",
"bbar",
",",
"dbar",
",",
"slip_moment",
",",
"mmax",
")",
"central_term",
"=",
"np",
".",
"exp",
"(",
"bbar",
"*",
"delta_m",
")",
"-",
"1.0",
"-",
"(",
"bbar",
"*",
"delta_m",
")",
"return",
"a_3",
"*",
"central_term",
"*",
"(",
"delta_m",
">",
"0.0",
")"
]
| Returns the rate of events with M > mag_value
:param float slip_moment:
Product of slip (cm/yr) * Area (cm ^ 2) * shear_modulus (dyne-cm)
:param float mmax:
Maximum magnitude
:param float mag_value:
Magnitude value
:param float bbar:
\bar{b} parameter (effectively = b * log(10.))
:param float dbar:
\bar{d} parameter | [
"Returns",
"the",
"rate",
"of",
"events",
"with",
"M",
">",
"mag_value"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L187-L205 |
gem/oq-engine | openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py | Type3RecurrenceModel._get_a3 | def _get_a3(bbar, dbar, slip_moment, mmax):
"""
Returns the A3 term (III.4 in Table 4)
"""
return ((dbar * (dbar - bbar)) / (bbar ** 2.)) * (slip_moment /
_scale_moment(mmax)) | python | def _get_a3(bbar, dbar, slip_moment, mmax):
return ((dbar * (dbar - bbar)) / (bbar ** 2.)) * (slip_moment /
_scale_moment(mmax)) | [
"def",
"_get_a3",
"(",
"bbar",
",",
"dbar",
",",
"slip_moment",
",",
"mmax",
")",
":",
"return",
"(",
"(",
"dbar",
"*",
"(",
"dbar",
"-",
"bbar",
")",
")",
"/",
"(",
"bbar",
"**",
"2.",
")",
")",
"*",
"(",
"slip_moment",
"/",
"_scale_moment",
"(",
"mmax",
")",
")"
]
| Returns the A3 term (III.4 in Table 4) | [
"Returns",
"the",
"A3",
"term",
"(",
"III",
".",
"4",
"in",
"Table",
"4",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L208-L213 |
gem/oq-engine | openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py | Type3RecurrenceModel.incremental_value | def incremental_value(self, slip_moment, mmax, mag_value, bbar, dbar):
"""
Returns the incremental rate with Mmax = Mag_value
"""
delta_m = mmax - mag_value
a_3 = self._get_a3(bbar, dbar, slip_moment, mmax)
return a_3 * bbar * (np.exp(bbar * delta_m) - 1.0) * (delta_m > 0.0) | python | def incremental_value(self, slip_moment, mmax, mag_value, bbar, dbar):
delta_m = mmax - mag_value
a_3 = self._get_a3(bbar, dbar, slip_moment, mmax)
return a_3 * bbar * (np.exp(bbar * delta_m) - 1.0) * (delta_m > 0.0) | [
"def",
"incremental_value",
"(",
"self",
",",
"slip_moment",
",",
"mmax",
",",
"mag_value",
",",
"bbar",
",",
"dbar",
")",
":",
"delta_m",
"=",
"mmax",
"-",
"mag_value",
"a_3",
"=",
"self",
".",
"_get_a3",
"(",
"bbar",
",",
"dbar",
",",
"slip_moment",
",",
"mmax",
")",
"return",
"a_3",
"*",
"bbar",
"*",
"(",
"np",
".",
"exp",
"(",
"bbar",
"*",
"delta_m",
")",
"-",
"1.0",
")",
"*",
"(",
"delta_m",
">",
"0.0",
")"
]
| Returns the incremental rate with Mmax = Mag_value | [
"Returns",
"the",
"incremental",
"rate",
"with",
"Mmax",
"=",
"Mag_value"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L215-L221 |
gem/oq-engine | openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py | AndersonLucoArbitrary.setUp | def setUp(self, mfd_conf):
'''
Input core configuration parameters as specified in the
configuration file
:param dict mfd_conf:
Configuration file containing the following attributes:
        * 'Model_Type' - Choose between the 1st, 2nd or 3rd type of recurrence
model {'First' | 'Second' | 'Third'}
* 'Model_Weight' - Logic tree weight of model type (float)
* 'MFD_spacing' - Width of MFD bin (float)
* 'Minimum_Magnitude' - Minimum magnitude of activity rates (float)
* 'b_value' - Tuple of (b-value, b-value uncertainty)
* 'Maximum_Magnitude' - Maximum magnitude on fault (if not defined
will use scaling relation)
* 'Maximum_Magnitude_Uncertainty' - Uncertainty on maximum
magnitude (If not defined and the MSR has a sigma term then this
will be taken from sigma)
'''
self.mfd_type = mfd_conf['Model_Type']
self.mfd_model = 'Anderson & Luco (Arbitrary) ' + self.mfd_type
self.mfd_weight = mfd_conf['Model_Weight']
self.bin_width = mfd_conf['MFD_spacing']
self.mmin = mfd_conf['Minimum_Magnitude']
self.mmax = None
self.mmax_sigma = None
self.b_value = mfd_conf['b_value'][0]
self.b_value_sigma = mfd_conf['b_value'][1]
self.occurrence_rate = None | python | def setUp(self, mfd_conf):
self.mfd_type = mfd_conf['Model_Type']
self.mfd_model = 'Anderson & Luco (Arbitrary) ' + self.mfd_type
self.mfd_weight = mfd_conf['Model_Weight']
self.bin_width = mfd_conf['MFD_spacing']
self.mmin = mfd_conf['Minimum_Magnitude']
self.mmax = None
self.mmax_sigma = None
self.b_value = mfd_conf['b_value'][0]
self.b_value_sigma = mfd_conf['b_value'][1]
self.occurrence_rate = None | [
"def",
"setUp",
"(",
"self",
",",
"mfd_conf",
")",
":",
"self",
".",
"mfd_type",
"=",
"mfd_conf",
"[",
"'Model_Type'",
"]",
"self",
".",
"mfd_model",
"=",
"'Anderson & Luco (Arbitrary) '",
"+",
"self",
".",
"mfd_type",
"self",
".",
"mfd_weight",
"=",
"mfd_conf",
"[",
"'Model_Weight'",
"]",
"self",
".",
"bin_width",
"=",
"mfd_conf",
"[",
"'MFD_spacing'",
"]",
"self",
".",
"mmin",
"=",
"mfd_conf",
"[",
"'Minimum_Magnitude'",
"]",
"self",
".",
"mmax",
"=",
"None",
"self",
".",
"mmax_sigma",
"=",
"None",
"self",
".",
"b_value",
"=",
"mfd_conf",
"[",
"'b_value'",
"]",
"[",
"0",
"]",
"self",
".",
"b_value_sigma",
"=",
"mfd_conf",
"[",
"'b_value'",
"]",
"[",
"1",
"]",
"self",
".",
"occurrence_rate",
"=",
"None"
]
| Input core configuration parameters as specified in the
configuration file
:param dict mfd_conf:
Configuration file containing the following attributes:
        * 'Model_Type' - Choose between the 1st, 2nd or 3rd type of recurrence
model {'First' | 'Second' | 'Third'}
* 'Model_Weight' - Logic tree weight of model type (float)
* 'MFD_spacing' - Width of MFD bin (float)
* 'Minimum_Magnitude' - Minimum magnitude of activity rates (float)
* 'b_value' - Tuple of (b-value, b-value uncertainty)
* 'Maximum_Magnitude' - Maximum magnitude on fault (if not defined
will use scaling relation)
* 'Maximum_Magnitude_Uncertainty' - Uncertainty on maximum
magnitude (If not defined and the MSR has a sigma term then this
will be taken from sigma) | [
"Input",
"core",
"configuration",
"parameters",
"as",
"specified",
"in",
"the",
"configuration",
"file"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L260-L288 |
gem/oq-engine | openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py | AndersonLucoArbitrary.get_mmax | def get_mmax(self, mfd_conf, msr, rake, area):
'''
Gets the mmax for the fault - reading directly from the config file
or using the msr otherwise
        :param dict mfd_conf:
            Configuration file (see setUp for parameters)
:param msr:
Instance of :class:`nhlib.scalerel`
:param float rake:
Rake of the fault (in range -180 to 180)
:param float area:
Area of the fault surface (km^2)
'''
if mfd_conf['Maximum_Magnitude']:
self.mmax = mfd_conf['Maximum_Magnitude']
else:
self.mmax = msr.get_median_mag(area, rake)
if ('Maximum_Magnitude_Uncertainty' in mfd_conf and
mfd_conf['Maximum_Magnitude_Uncertainty']):
self.mmax_sigma = mfd_conf['Maximum_Magnitude_Uncertainty']
else:
self.mmax_sigma = msr.get_std_dev_mag(rake) | python | def get_mmax(self, mfd_conf, msr, rake, area):
if mfd_conf['Maximum_Magnitude']:
self.mmax = mfd_conf['Maximum_Magnitude']
else:
self.mmax = msr.get_median_mag(area, rake)
if ('Maximum_Magnitude_Uncertainty' in mfd_conf and
mfd_conf['Maximum_Magnitude_Uncertainty']):
self.mmax_sigma = mfd_conf['Maximum_Magnitude_Uncertainty']
else:
self.mmax_sigma = msr.get_std_dev_mag(rake) | [
"def",
"get_mmax",
"(",
"self",
",",
"mfd_conf",
",",
"msr",
",",
"rake",
",",
"area",
")",
":",
"if",
"mfd_conf",
"[",
"'Maximum_Magnitude'",
"]",
":",
"self",
".",
"mmax",
"=",
"mfd_conf",
"[",
"'Maximum_Magnitude'",
"]",
"else",
":",
"self",
".",
"mmax",
"=",
"msr",
".",
"get_median_mag",
"(",
"area",
",",
"rake",
")",
"if",
"(",
"'Maximum_Magnitude_Uncertainty'",
"in",
"mfd_conf",
"and",
"mfd_conf",
"[",
"'Maximum_Magnitude_Uncertainty'",
"]",
")",
":",
"self",
".",
"mmax_sigma",
"=",
"mfd_conf",
"[",
"'Maximum_Magnitude_Uncertainty'",
"]",
"else",
":",
"self",
".",
"mmax_sigma",
"=",
"msr",
".",
"get_std_dev_mag",
"(",
"rake",
")"
]
| Gets the mmax for the fault - reading directly from the config file
or using the msr otherwise
        :param dict mfd_conf:
            Configuration file (see setUp for parameters)
:param msr:
Instance of :class:`nhlib.scalerel`
:param float rake:
Rake of the fault (in range -180 to 180)
:param float area:
Area of the fault surface (km^2) | [
"Gets",
"the",
"mmax",
"for",
"the",
"fault",
"-",
"reading",
"directly",
"from",
"the",
"config",
"file",
"or",
"using",
"the",
"msr",
"otherwise"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L290-L316 |
gem/oq-engine | openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py | AndersonLucoArbitrary.get_mfd | def get_mfd(self, slip, area, shear_modulus=30.0):
'''
Calculates activity rate on the fault
:param float slip:
Slip rate in mm/yr
:param fault_area:
            Area of the fault surface (km ** 2)
:param float shear_modulus:
Shear modulus of the fault (GPa)
:returns:
* Minimum Magnitude (float)
* Bin width (float)
* Occurrence Rates (numpy.ndarray)
'''
# Convert shear modulus GPa -> dyne-cm, area km ** 2 -> cm ** 2 and
# slip mm/yr -> cm/yr
slip_moment = (shear_modulus * 1E10) * (area * 1E10) * (slip / 10.)
dbar = D_VALUE * np.log(10.0)
bbar = self.b_value * np.log(10.0)
mags = np.arange(self.mmin - (self.bin_width / 2.),
self.mmax + self.bin_width,
self.bin_width)
if bbar >= dbar:
print('b-value larger than 1.5 will produce invalid results in '
'Anderson & Luco models')
self.occurrence_rate = np.nan * np.ones(len(mags) - 1)
return self.mmin, self.bin_width, self.occurrence_rate
self.occurrence_rate = np.zeros(len(mags) - 1, dtype=float)
for ival in range(0, len(mags) - 1):
self.occurrence_rate[ival] = \
RECURRENCE_MAP[self.mfd_type].cumulative_value(
slip_moment, self.mmax, mags[ival], bbar, dbar) - \
RECURRENCE_MAP[self.mfd_type].cumulative_value(
slip_moment, self.mmax, mags[ival + 1], bbar, dbar)
return self.mmin, self.bin_width, self.occurrence_rate | python | def get_mfd(self, slip, area, shear_modulus=30.0):
slip_moment = (shear_modulus * 1E10) * (area * 1E10) * (slip / 10.)
dbar = D_VALUE * np.log(10.0)
bbar = self.b_value * np.log(10.0)
mags = np.arange(self.mmin - (self.bin_width / 2.),
self.mmax + self.bin_width,
self.bin_width)
if bbar >= dbar:
print('b-value larger than 1.5 will produce invalid results in '
'Anderson & Luco models')
self.occurrence_rate = np.nan * np.ones(len(mags) - 1)
return self.mmin, self.bin_width, self.occurrence_rate
self.occurrence_rate = np.zeros(len(mags) - 1, dtype=float)
for ival in range(0, len(mags) - 1):
self.occurrence_rate[ival] = \
RECURRENCE_MAP[self.mfd_type].cumulative_value(
slip_moment, self.mmax, mags[ival], bbar, dbar) - \
RECURRENCE_MAP[self.mfd_type].cumulative_value(
slip_moment, self.mmax, mags[ival + 1], bbar, dbar)
return self.mmin, self.bin_width, self.occurrence_rate | [
"def",
"get_mfd",
"(",
"self",
",",
"slip",
",",
"area",
",",
"shear_modulus",
"=",
"30.0",
")",
":",
"# Convert shear modulus GPa -> dyne-cm, area km ** 2 -> cm ** 2 and",
"# slip mm/yr -> cm/yr",
"slip_moment",
"=",
"(",
"shear_modulus",
"*",
"1E10",
")",
"*",
"(",
"area",
"*",
"1E10",
")",
"*",
"(",
"slip",
"/",
"10.",
")",
"dbar",
"=",
"D_VALUE",
"*",
"np",
".",
"log",
"(",
"10.0",
")",
"bbar",
"=",
"self",
".",
"b_value",
"*",
"np",
".",
"log",
"(",
"10.0",
")",
"mags",
"=",
"np",
".",
"arange",
"(",
"self",
".",
"mmin",
"-",
"(",
"self",
".",
"bin_width",
"/",
"2.",
")",
",",
"self",
".",
"mmax",
"+",
"self",
".",
"bin_width",
",",
"self",
".",
"bin_width",
")",
"if",
"bbar",
">=",
"dbar",
":",
"print",
"(",
"'b-value larger than 1.5 will produce invalid results in '",
"'Anderson & Luco models'",
")",
"self",
".",
"occurrence_rate",
"=",
"np",
".",
"nan",
"*",
"np",
".",
"ones",
"(",
"len",
"(",
"mags",
")",
"-",
"1",
")",
"return",
"self",
".",
"mmin",
",",
"self",
".",
"bin_width",
",",
"self",
".",
"occurrence_rate",
"self",
".",
"occurrence_rate",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"mags",
")",
"-",
"1",
",",
"dtype",
"=",
"float",
")",
"for",
"ival",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"mags",
")",
"-",
"1",
")",
":",
"self",
".",
"occurrence_rate",
"[",
"ival",
"]",
"=",
"RECURRENCE_MAP",
"[",
"self",
".",
"mfd_type",
"]",
".",
"cumulative_value",
"(",
"slip_moment",
",",
"self",
".",
"mmax",
",",
"mags",
"[",
"ival",
"]",
",",
"bbar",
",",
"dbar",
")",
"-",
"RECURRENCE_MAP",
"[",
"self",
".",
"mfd_type",
"]",
".",
"cumulative_value",
"(",
"slip_moment",
",",
"self",
".",
"mmax",
",",
"mags",
"[",
"ival",
"+",
"1",
"]",
",",
"bbar",
",",
"dbar",
")",
"return",
"self",
".",
"mmin",
",",
"self",
".",
"bin_width",
",",
"self",
".",
"occurrence_rate"
]
| Calculates activity rate on the fault
:param float slip:
Slip rate in mm/yr
:param fault_area:
Width of the fault (km)
:param float shear_modulus:
Shear modulus of the fault (GPa)
:returns:
* Minimum Magnitude (float)
* Bin width (float)
* Occurrence Rates (numpy.ndarray) | [
"Calculates",
"activity",
"rate",
"on",
"the",
"fault"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L318-L358 |
gem/oq-engine | openquake/commands/reduce_sm.py | reduce_sm | def reduce_sm(calc_id):
"""
Reduce the source model of the given (pre)calculation by discarding all
sources that do not contribute to the hazard.
"""
with datastore.read(calc_id) as dstore:
oqparam = dstore['oqparam']
info = dstore['source_info'].value
ok = info['weight'] > 0
source_ids = set(info[ok]['source_id'])
with performance.Monitor() as mon:
readinput.reduce_source_model(
oqparam.inputs['source_model_logic_tree'], source_ids)
print(mon) | python | def reduce_sm(calc_id):
with datastore.read(calc_id) as dstore:
oqparam = dstore['oqparam']
info = dstore['source_info'].value
ok = info['weight'] > 0
source_ids = set(info[ok]['source_id'])
with performance.Monitor() as mon:
readinput.reduce_source_model(
oqparam.inputs['source_model_logic_tree'], source_ids)
print(mon) | [
"def",
"reduce_sm",
"(",
"calc_id",
")",
":",
"with",
"datastore",
".",
"read",
"(",
"calc_id",
")",
"as",
"dstore",
":",
"oqparam",
"=",
"dstore",
"[",
"'oqparam'",
"]",
"info",
"=",
"dstore",
"[",
"'source_info'",
"]",
".",
"value",
"ok",
"=",
"info",
"[",
"'weight'",
"]",
">",
"0",
"source_ids",
"=",
"set",
"(",
"info",
"[",
"ok",
"]",
"[",
"'source_id'",
"]",
")",
"with",
"performance",
".",
"Monitor",
"(",
")",
"as",
"mon",
":",
"readinput",
".",
"reduce_source_model",
"(",
"oqparam",
".",
"inputs",
"[",
"'source_model_logic_tree'",
"]",
",",
"source_ids",
")",
"print",
"(",
"mon",
")"
]
| Reduce the source model of the given (pre)calculation by discarding all
sources that do not contribute to the hazard. | [
"Reduce",
"the",
"source",
"model",
"of",
"the",
"given",
"(",
"pre",
")",
"calculation",
"by",
"discarding",
"all",
"sources",
"that",
"do",
"not",
"contribute",
"to",
"the",
"hazard",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/reduce_sm.py#L24-L37 |
gem/oq-engine | openquake/hazardlib/gsim/lin_2009.py | Lin2009.get_mean_and_stddevs | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
C = self.COEFFS[imt]
mean = (
self._get_magnitude_term(C, rup.mag) +
self._get_distance_term(C, rup.mag, dists.rrup) +
self._get_style_of_faulting_term(C, rup.rake) +
self._get_site_response_term(C, sites.vs30))
stddevs = self._get_stddevs(C, stddev_types, len(sites.vs30))
return mean, stddevs | python | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
C = self.COEFFS[imt]
mean = (
self._get_magnitude_term(C, rup.mag) +
self._get_distance_term(C, rup.mag, dists.rrup) +
self._get_style_of_faulting_term(C, rup.rake) +
self._get_site_response_term(C, sites.vs30))
stddevs = self._get_stddevs(C, stddev_types, len(sites.vs30))
return mean, stddevs | [
"def",
"get_mean_and_stddevs",
"(",
"self",
",",
"sites",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stddev_types",
")",
":",
"C",
"=",
"self",
".",
"COEFFS",
"[",
"imt",
"]",
"mean",
"=",
"(",
"self",
".",
"_get_magnitude_term",
"(",
"C",
",",
"rup",
".",
"mag",
")",
"+",
"self",
".",
"_get_distance_term",
"(",
"C",
",",
"rup",
".",
"mag",
",",
"dists",
".",
"rrup",
")",
"+",
"self",
".",
"_get_style_of_faulting_term",
"(",
"C",
",",
"rup",
".",
"rake",
")",
"+",
"self",
".",
"_get_site_response_term",
"(",
"C",
",",
"sites",
".",
"vs30",
")",
")",
"stddevs",
"=",
"self",
".",
"_get_stddevs",
"(",
"C",
",",
"stddev_types",
",",
"len",
"(",
"sites",
".",
"vs30",
")",
")",
"return",
"mean",
",",
"stddevs"
]
| See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. | [
"See",
":",
"meth",
":",
"superclass",
"method",
"<",
".",
"base",
".",
"GroundShakingIntensityModel",
".",
"get_mean_and_stddevs",
">",
"for",
"spec",
"of",
"input",
"and",
"result",
"values",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/lin_2009.py#L67-L81 |
gem/oq-engine | openquake/hazardlib/gsim/lin_2009.py | Lin2009._get_magnitude_term | def _get_magnitude_term(self, C, mag):
"""
Returns the magnitude scaling term.
"""
lny = C['C1'] + (C['C3'] * ((8.5 - mag) ** 2.))
if mag > 6.3:
return lny + (-C['H'] * C['C5']) * (mag - 6.3)
else:
return lny + C['C2'] * (mag - 6.3) | python | def _get_magnitude_term(self, C, mag):
lny = C['C1'] + (C['C3'] * ((8.5 - mag) ** 2.))
if mag > 6.3:
return lny + (-C['H'] * C['C5']) * (mag - 6.3)
else:
return lny + C['C2'] * (mag - 6.3) | [
"def",
"_get_magnitude_term",
"(",
"self",
",",
"C",
",",
"mag",
")",
":",
"lny",
"=",
"C",
"[",
"'C1'",
"]",
"+",
"(",
"C",
"[",
"'C3'",
"]",
"*",
"(",
"(",
"8.5",
"-",
"mag",
")",
"**",
"2.",
")",
")",
"if",
"mag",
">",
"6.3",
":",
"return",
"lny",
"+",
"(",
"-",
"C",
"[",
"'H'",
"]",
"*",
"C",
"[",
"'C5'",
"]",
")",
"*",
"(",
"mag",
"-",
"6.3",
")",
"else",
":",
"return",
"lny",
"+",
"C",
"[",
"'C2'",
"]",
"*",
"(",
"mag",
"-",
"6.3",
")"
]
| Returns the magnitude scaling term. | [
"Returns",
"the",
"magnitude",
"scaling",
"term",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/lin_2009.py#L83-L91 |
gem/oq-engine | openquake/hazardlib/gsim/lin_2009.py | Lin2009._get_distance_term | def _get_distance_term(self, C, mag, rrup):
"""
Returns the distance scaling term
"""
return (C['C4'] + C['C5'] * (mag - 6.3)) *\
np.log(np.sqrt(rrup ** 2. + np.exp(C['H']) ** 2.)) | python | def _get_distance_term(self, C, mag, rrup):
return (C['C4'] + C['C5'] * (mag - 6.3)) *\
np.log(np.sqrt(rrup ** 2. + np.exp(C['H']) ** 2.)) | [func_code_tokens omitted: the code above split into tokens]
| Returns the distance scaling term | [func_documentation_tokens omitted: the docstring above split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/lin_2009.py#L93-L98 |
gem/oq-engine | openquake/hazardlib/gsim/lin_2009.py | Lin2009._get_style_of_faulting_term | def _get_style_of_faulting_term(self, C, rake):
"""
Returns the style of faulting factor
"""
f_n, f_r = self._get_fault_type_dummy_variables(rake)
return C['C6'] * f_n + C['C7'] * f_r | python | def _get_style_of_faulting_term(self, C, rake):
f_n, f_r = self._get_fault_type_dummy_variables(rake)
return C['C6'] * f_n + C['C7'] * f_r | [func_code_tokens omitted: the code above split into tokens]
| Returns the style of faulting factor | [func_documentation_tokens omitted: the docstring above split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/lin_2009.py#L100-L105 |
gem/oq-engine | openquake/hazardlib/gsim/lin_2009.py | Lin2009._get_fault_type_dummy_variables | def _get_fault_type_dummy_variables(self, rake):
"""
Defines the fault type dummy variables for normal faulting (f_n) and
reverse faulting (f_r) from rake. Classification based on that
found in the original fortran code of Lin (2009)
"""
f_n, f_r = 0, 0
if rake >= -120 and rake <= -60:
# normal
f_n = 1
elif rake >= 30 and rake <= 150:
# reverse
f_r = 1
return f_n, f_r | python | def _get_fault_type_dummy_variables(self, rake):
f_n, f_r = 0, 0
if rake >= -120 and rake <= -60:
f_n = 1
elif rake >= 30 and rake <= 150:
f_r = 1
return f_n, f_r | [func_code_tokens omitted: the code above split into tokens]
| Defines the fault type dummy variables for normal faulting (f_n) and
reverse faulting (f_r) from rake. Classification based on that
found in the original fortran code of Lin (2009) | [func_documentation_tokens omitted: the docstring above split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/lin_2009.py#L107-L120 |
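A minimal stand-alone sketch of the rake classification used above, for a few representative rake angles (the helper name is hypothetical; the logic simply mirrors the method shown in this row):
# Illustrative only: reproduces the rake -> (f_n, f_r) mapping of _get_fault_type_dummy_variables
def fault_type_dummies(rake):
    f_n, f_r = 0, 0
    if -120 <= rake <= -60:       # normal faulting
        f_n = 1
    elif 30 <= rake <= 150:       # reverse faulting
        f_r = 1
    return f_n, f_r

for rake in (-90, 0, 90, 180):
    print(rake, fault_type_dummies(rake))
# -90 -> (1, 0) normal; 0 and 180 -> (0, 0) strike-slip; 90 -> (0, 1) reverse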
gem/oq-engine | openquake/hazardlib/gsim/lin_2009.py | Lin2009._get_stddevs | def _get_stddevs(self, C, stddev_types, nsites):
"""
Compute total standard deviation, see table 4.2, page 50.
"""
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(C['sigma'] + np.zeros(nsites, dtype=float))
return stddevs | python | def _get_stddevs(self, C, stddev_types, nsites):
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(C['sigma'] + np.zeros(nsites, dtype=float))
return stddevs | [func_code_tokens omitted: the code above split into tokens]
| Compute total standard deviation, see table 4.2, page 50. | [func_documentation_tokens omitted: the docstring above split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/lin_2009.py#L128-L137 |
gem/oq-engine | openquake/hazardlib/imt.py | imt2tup | def imt2tup(string):
"""
>>> imt2tup('PGA')
('PGA',)
>>> imt2tup('SA(1.0)')
('SA', 1.0)
>>> imt2tup('SA(1)')
('SA', 1.0)
"""
s = string.strip()
if not s.endswith(')'):
# no parenthesis, PGA is considered the same as PGA()
return (s,)
name, rest = s.split('(', 1)
return (name,) + tuple(float(x) for x in ast.literal_eval(rest[:-1] + ',')) | python | def imt2tup(string):
s = string.strip()
if not s.endswith(')'):
return (s,)
name, rest = s.split('(', 1)
return (name,) + tuple(float(x) for x in ast.literal_eval(rest[:-1] + ',')) | [func_code_tokens omitted: the code above split into tokens]
| >>> imt2tup('PGA')
('PGA',)
>>> imt2tup('SA(1.0)')
('SA', 1.0)
>>> imt2tup('SA(1)')
('SA', 1.0) | [func_documentation_tokens omitted: the doctest above split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/imt.py#L41-L55 |
gem/oq-engine | openquake/baselib/slots.py | with_slots | def with_slots(cls):
"""
Decorator for a class with _slots_. It automatically defines
the methods __eq__, __ne__, assert_equal.
"""
def _compare(self, other):
for slot in self.__class__._slots_:
attr = operator.attrgetter(slot)
source = attr(self)
target = attr(other)
if isinstance(source, numpy.ndarray):
eq = numpy.array_equal(source, target)
elif hasattr(source, '_slots_'):
source.assert_equal(target)
eq = True
else:
eq = source == target
yield slot, source, target, eq
def __eq__(self, other):
return all(eq for slot, source, target, eq in _compare(self, other))
def __ne__(self, other):
return not self.__eq__(other)
def assert_equal(self, other, ignore=()):
for slot, source, target, eq in _compare(self, other):
if not eq and slot not in ignore:
raise AssertionError('slot %s: %s is different from %s' %
(slot, source, target))
cls._slots_ # raise an AttributeError for missing slots
cls.__eq__ = __eq__
cls.__ne__ = __ne__
cls.assert_equal = assert_equal
return cls | python | def with_slots(cls):
def _compare(self, other):
for slot in self.__class__._slots_:
attr = operator.attrgetter(slot)
source = attr(self)
target = attr(other)
if isinstance(source, numpy.ndarray):
eq = numpy.array_equal(source, target)
elif hasattr(source, '_slots_'):
source.assert_equal(target)
eq = True
else:
eq = source == target
yield slot, source, target, eq
def __eq__(self, other):
return all(eq for slot, source, target, eq in _compare(self, other))
def __ne__(self, other):
return not self.__eq__(other)
def assert_equal(self, other, ignore=()):
for slot, source, target, eq in _compare(self, other):
if not eq and slot not in ignore:
raise AssertionError('slot %s: %s is different from %s' %
(slot, source, target))
cls._slots_
cls.__eq__ = __eq__
cls.__ne__ = __ne__
cls.assert_equal = assert_equal
return cls | [func_code_tokens omitted: the code above split into tokens]
| Decorator for a class with _slots_. It automatically defines
the methods __eq__, __ne__, assert_equal. | [func_documentation_tokens omitted: the docstring above split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/slots.py#L23-L58 |
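A small usage sketch for the decorator in this row (the import path follows the file path shown above; the Point class and its values are hypothetical):
import numpy
from openquake.baselib.slots import with_slots  # path as listed in this row

@with_slots
class Point(object):
    _slots_ = ['x', 'arr']
    def __init__(self, x, arr):
        self.x = x
        self.arr = numpy.array(arr)

p1 = Point(1, [1.0, 2.0])
p2 = Point(1, [1.0, 2.0])
print(p1 == p2)                      # True: slots compared one by one, arrays via numpy.array_equal
p2.x = 2
p1.assert_equal(p2, ignore=('x',))   # passes, since the differing slot 'x' is ignored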
gem/oq-engine | openquake/hazardlib/calc/disagg.py | make_iml4 | def make_iml4(R, iml_disagg, imtls=None, poes_disagg=(None,), curves=()):
"""
:returns: an ArrayWrapper over a 4D array of shape (N, R, M, P)
"""
if imtls is None:
imtls = {imt: [iml] for imt, iml in iml_disagg.items()}
N = len(curves) or 1
M = len(imtls)
P = len(poes_disagg)
arr = numpy.zeros((N, R, M, P))
imts = [from_string(imt) for imt in imtls]
for m, imt in enumerate(imtls):
imls = imtls[imt]
for p, poe in enumerate(poes_disagg):
for r in range(R):
arr[:, r, m, p] = _imls(curves, poe, imt, imls, r)
return ArrayWrapper(arr, dict(poes_disagg=poes_disagg, imts=imts)) | python | def make_iml4(R, iml_disagg, imtls=None, poes_disagg=(None,), curves=()):
if imtls is None:
imtls = {imt: [iml] for imt, iml in iml_disagg.items()}
N = len(curves) or 1
M = len(imtls)
P = len(poes_disagg)
arr = numpy.zeros((N, R, M, P))
imts = [from_string(imt) for imt in imtls]
for m, imt in enumerate(imtls):
imls = imtls[imt]
for p, poe in enumerate(poes_disagg):
for r in range(R):
arr[:, r, m, p] = _imls(curves, poe, imt, imls, r)
return ArrayWrapper(arr, dict(poes_disagg=poes_disagg, imts=imts)) | [func_code_tokens omitted: the code above split into tokens]
| :returns: an ArrayWrapper over a 4D array of shape (N, R, M, P) | [func_documentation_tokens omitted: the docstring above split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/disagg.py#L53-L69 |
gem/oq-engine | openquake/hazardlib/calc/disagg.py | collect_bin_data | def collect_bin_data(ruptures, sitecol, cmaker, iml4,
truncation_level, n_epsilons, monitor=Monitor()):
"""
:param ruptures: a list of ruptures
:param sitecol: a SiteCollection instance
:param cmaker: a ContextMaker instance
:param iml4: an ArrayWrapper of intensities of shape (N, R, M, P)
:param truncation_level: the truncation level
:param n_epsilons: the number of epsilons
:param monitor: a Monitor instance
:returns: a dictionary (poe, imt, rlzi) -> probabilities of shape (N, E)
"""
# NB: instantiating truncnorm is slow and calls the infamous "doccer"
truncnorm = scipy.stats.truncnorm(-truncation_level, truncation_level)
epsilons = numpy.linspace(truncnorm.a, truncnorm.b, n_epsilons + 1)
acc = cmaker.disaggregate(
sitecol, ruptures, iml4, truncnorm, epsilons, monitor)
return pack(acc, 'mags dists lons lats'.split()) | python | def collect_bin_data(ruptures, sitecol, cmaker, iml4,
truncation_level, n_epsilons, monitor=Monitor()):
truncnorm = scipy.stats.truncnorm(-truncation_level, truncation_level)
epsilons = numpy.linspace(truncnorm.a, truncnorm.b, n_epsilons + 1)
acc = cmaker.disaggregate(
sitecol, ruptures, iml4, truncnorm, epsilons, monitor)
return pack(acc, 'mags dists lons lats'.split()) | [func_code_tokens omitted: the code above split into tokens]
| :param ruptures: a list of ruptures
:param sitecol: a SiteCollection instance
:param cmaker: a ContextMaker instance
:param iml4: an ArrayWrapper of intensities of shape (N, R, M, P)
:param truncation_level: the truncation level
:param n_epsilons: the number of epsilons
:param monitor: a Monitor instance
:returns: a dictionary (poe, imt, rlzi) -> probabilities of shape (N, E) | [func_documentation_tokens omitted: the docstring above split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/disagg.py#L72-L89 |
gem/oq-engine | openquake/hazardlib/calc/disagg.py | lon_lat_bins | def lon_lat_bins(bb, coord_bin_width):
"""
Define bin edges for disaggregation histograms.
Given bins data as provided by :func:`collect_bin_data`, this function
finds edges of histograms, taking into account maximum and minimum values
of magnitude, distance and coordinates as well as requested sizes/numbers
of bins.
"""
west, south, east, north = bb
west = numpy.floor(west / coord_bin_width) * coord_bin_width
east = numpy.ceil(east / coord_bin_width) * coord_bin_width
lon_extent = get_longitudinal_extent(west, east)
lon_bins, _, _ = npoints_between(
west, 0, 0, east, 0, 0,
numpy.round(lon_extent / coord_bin_width + 1))
lat_bins = coord_bin_width * numpy.arange(
int(numpy.floor(south / coord_bin_width)),
int(numpy.ceil(north / coord_bin_width) + 1))
return lon_bins, lat_bins | python | def lon_lat_bins(bb, coord_bin_width):
west, south, east, north = bb
west = numpy.floor(west / coord_bin_width) * coord_bin_width
east = numpy.ceil(east / coord_bin_width) * coord_bin_width
lon_extent = get_longitudinal_extent(west, east)
lon_bins, _, _ = npoints_between(
west, 0, 0, east, 0, 0,
numpy.round(lon_extent / coord_bin_width + 1))
lat_bins = coord_bin_width * numpy.arange(
int(numpy.floor(south / coord_bin_width)),
int(numpy.ceil(north / coord_bin_width) + 1))
return lon_bins, lat_bins | [func_code_tokens omitted: the code above split into tokens]
| Define bin edges for disaggregation histograms.
Given bins data as provided by :func:`collect_bin_data`, this function
finds edges of histograms, taking into account maximum and minimum values
of magnitude, distance and coordinates as well as requested sizes/numbers
of bins. | [func_documentation_tokens omitted: the first sentence of the docstring split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/disagg.py#L92-L111 |
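Away from the international date line, the edge computation above reduces to snapping the bounding box to multiples of the bin width; a numpy-only sketch of that simple case (the bounding box values are hypothetical):
import numpy
west, south, east, north = 9.93, 44.98, 10.27, 45.06   # hypothetical bounding box, degrees
width = 0.1
west_snap = numpy.floor(west / width) * width            # ~9.9
east_snap = numpy.ceil(east / width) * width              # ~10.3
lon_bins = numpy.linspace(west_snap, east_snap, int(round((east_snap - west_snap) / width)) + 1)
lat_bins = width * numpy.arange(int(numpy.floor(south / width)),
                                int(numpy.ceil(north / width)) + 1)
# lon_bins ~ [9.9, 10.0, 10.1, 10.2, 10.3]; lat_bins ~ [44.9, 45.0, 45.1]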
gem/oq-engine | openquake/hazardlib/calc/disagg.py | get_shape | def get_shape(bin_edges, sid):
"""
:returns:
the shape of the disaggregation matrix for the given site, of form
(#mags-1, #dists-1, #lons-1, #lats-1, #eps-1)
"""
mag_bins, dist_bins, lon_bins, lat_bins, eps_bins = bin_edges
return (len(mag_bins) - 1, len(dist_bins) - 1,
len(lon_bins[sid]) - 1, len(lat_bins[sid]) - 1, len(eps_bins) - 1) | python | def get_shape(bin_edges, sid):
mag_bins, dist_bins, lon_bins, lat_bins, eps_bins = bin_edges
return (len(mag_bins) - 1, len(dist_bins) - 1,
len(lon_bins[sid]) - 1, len(lat_bins[sid]) - 1, len(eps_bins) - 1) | [func_code_tokens omitted: the code above split into tokens]
| :returns:
the shape of the disaggregation matrix for the given site, of form
(#mags-1, #dists-1, #lons-1, #lats-1, #eps-1) | [func_documentation_tokens omitted: the docstring above split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/disagg.py#L114-L122 |
gem/oq-engine | openquake/hazardlib/calc/disagg.py | build_disagg_matrix | def build_disagg_matrix(bdata, bin_edges, sid, mon=Monitor):
"""
:param bdata: a dictionary of probabilities of no exceedence
:param bin_edges: bin edges
:param sid: site index
:param mon: a Monitor instance
:returns: a dictionary key -> matrix|pmf for each key in bdata
"""
with mon('build_disagg_matrix'):
mag_bins, dist_bins, lon_bins, lat_bins, eps_bins = bin_edges
dim1, dim2, dim3, dim4, dim5 = shape = get_shape(bin_edges, sid)
# find bin indexes of rupture attributes; bins are assumed closed
# on the lower bound, and open on the upper bound, that is [ )
# longitude values need an ad-hoc method to take into account
# the 'international date line' issue
# the 'minus 1' is needed because the digitize method returns the
# index of the upper bound of the bin
mags_idx = numpy.digitize(bdata.mags+pmf.PRECISION, mag_bins) - 1
dists_idx = numpy.digitize(bdata.dists[:, sid], dist_bins) - 1
lons_idx = _digitize_lons(bdata.lons[:, sid], lon_bins[sid])
lats_idx = numpy.digitize(bdata.lats[:, sid], lat_bins[sid]) - 1
# because of the way numpy.digitize works, values equal to the last bin
# edge are associated to an index equal to len(bins) which is not a
# valid index for the disaggregation matrix. Such values are assumed
# to fall in the last bin
mags_idx[mags_idx == dim1] = dim1 - 1
dists_idx[dists_idx == dim2] = dim2 - 1
lons_idx[lons_idx == dim3] = dim3 - 1
lats_idx[lats_idx == dim4] = dim4 - 1
out = {}
cache = {}
cache_hit = 0
num_zeros = 0
for k, allpnes in bdata.items():
pnes = allpnes[:, sid, :] # shape (U, N, E)
cache_key = pnes.sum()
if cache_key == pnes.size: # all pnes are 1
num_zeros += 1
continue # zero matrices are not transferred
try:
matrix = cache[cache_key]
cache_hit += 1
except KeyError:
mat = numpy.ones(shape)
for i_mag, i_dist, i_lon, i_lat, pne in zip(
mags_idx, dists_idx, lons_idx, lats_idx, pnes):
mat[i_mag, i_dist, i_lon, i_lat] *= pne
matrix = 1. - mat
cache[cache_key] = matrix
out[k] = matrix
# operations, hits, num_zeros
if hasattr(mon, 'cache_info'):
mon.cache_info += numpy.array([len(bdata), cache_hit, num_zeros])
else:
mon.cache_info = numpy.array([len(bdata), cache_hit, num_zeros])
return out | python | def build_disagg_matrix(bdata, bin_edges, sid, mon=Monitor):
with mon('build_disagg_matrix'):
mag_bins, dist_bins, lon_bins, lat_bins, eps_bins = bin_edges
dim1, dim2, dim3, dim4, dim5 = shape = get_shape(bin_edges, sid)
mags_idx = numpy.digitize(bdata.mags+pmf.PRECISION, mag_bins) - 1
dists_idx = numpy.digitize(bdata.dists[:, sid], dist_bins) - 1
lons_idx = _digitize_lons(bdata.lons[:, sid], lon_bins[sid])
lats_idx = numpy.digitize(bdata.lats[:, sid], lat_bins[sid]) - 1
mags_idx[mags_idx == dim1] = dim1 - 1
dists_idx[dists_idx == dim2] = dim2 - 1
lons_idx[lons_idx == dim3] = dim3 - 1
lats_idx[lats_idx == dim4] = dim4 - 1
out = {}
cache = {}
cache_hit = 0
num_zeros = 0
for k, allpnes in bdata.items():
pnes = allpnes[:, sid, :]
cache_key = pnes.sum()
if cache_key == pnes.size:
num_zeros += 1
continue
try:
matrix = cache[cache_key]
cache_hit += 1
except KeyError:
mat = numpy.ones(shape)
for i_mag, i_dist, i_lon, i_lat, pne in zip(
mags_idx, dists_idx, lons_idx, lats_idx, pnes):
mat[i_mag, i_dist, i_lon, i_lat] *= pne
matrix = 1. - mat
cache[cache_key] = matrix
out[k] = matrix
if hasattr(mon, 'cache_info'):
mon.cache_info += numpy.array([len(bdata), cache_hit, num_zeros])
else:
mon.cache_info = numpy.array([len(bdata), cache_hit, num_zeros])
return out | [func_code_tokens omitted: the code above split into tokens]
| :param bdata: a dictionary of probabilities of no exceedence
:param bin_edges: bin edges
:param sid: site index
:param mon: a Monitor instance
:returns: a dictionary key -> matrix|pmf for each key in bdata | [func_documentation_tokens omitted: the docstring above split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/disagg.py#L126-L185 |
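The index adjustments in the row above compensate for two conventions of numpy.digitize: it returns the index of the upper bin edge (hence the "- 1") and it maps a value equal to the last edge to len(bins); a tiny sketch of that behaviour with made-up magnitude bins:
import numpy
mag_bins = numpy.array([5.0, 5.5, 6.0, 6.5])   # 4 edges -> 3 magnitude bins
mags = numpy.array([5.2, 5.5, 6.5])             # 6.5 sits exactly on the last edge
idx = numpy.digitize(mags, mag_bins) - 1
print(idx)             # [0 1 3]: 3 is out of range for a 3-bin histogram
idx[idx == 3] = 2      # same fix as in build_disagg_matrix: last-edge values fall in the last bin
print(idx)             # [0 1 2]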
gem/oq-engine | openquake/hazardlib/calc/disagg.py | _digitize_lons | def _digitize_lons(lons, lon_bins):
"""
Return indices of the bins to which each value in lons belongs.
Takes into account the case in which longitude values cross the
international date line.
:parameter lons:
An instance of `numpy.ndarray`.
:parameter lons_bins:
An instance of `numpy.ndarray`.
"""
if cross_idl(lon_bins[0], lon_bins[-1]):
idx = numpy.zeros_like(lons, dtype=numpy.int)
for i_lon in range(len(lon_bins) - 1):
extents = get_longitudinal_extent(lons, lon_bins[i_lon + 1])
lon_idx = extents > 0
if i_lon != 0:
extents = get_longitudinal_extent(lon_bins[i_lon], lons)
lon_idx &= extents >= 0
idx[lon_idx] = i_lon
return numpy.array(idx)
else:
return numpy.digitize(lons, lon_bins) - 1 | python | def _digitize_lons(lons, lon_bins):
if cross_idl(lon_bins[0], lon_bins[-1]):
idx = numpy.zeros_like(lons, dtype=numpy.int)
for i_lon in range(len(lon_bins) - 1):
extents = get_longitudinal_extent(lons, lon_bins[i_lon + 1])
lon_idx = extents > 0
if i_lon != 0:
extents = get_longitudinal_extent(lon_bins[i_lon], lons)
lon_idx &= extents >= 0
idx[lon_idx] = i_lon
return numpy.array(idx)
else:
return numpy.digitize(lons, lon_bins) - 1 | [func_code_tokens omitted: the code above split into tokens]
| Return indices of the bins to which each value in lons belongs.
Takes into account the case in which longitude values cross the
international date line.
:parameter lons:
An instance of `numpy.ndarray`.
:parameter lons_bins:
An instance of `numpy.ndarray`. | [func_documentation_tokens omitted: the first part of the docstring split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/disagg.py#L188-L210 |
gem/oq-engine | openquake/hazardlib/calc/disagg.py | disaggregation | def disaggregation(
sources, site, imt, iml, gsim_by_trt, truncation_level,
n_epsilons, mag_bin_width, dist_bin_width, coord_bin_width,
source_filter=filters.nofilter, filter_distance='rjb'):
"""
Compute "Disaggregation" matrix representing conditional probability of an
intensity mesaure type ``imt`` exceeding, at least once, an intensity
measure level ``iml`` at a geographical location ``site``, given rupture
scenarios classified in terms of:
- rupture magnitude
- Joyner-Boore distance from rupture surface to site
- longitude and latitude of the surface projection of a rupture's point
closest to ``site``
- epsilon: number of standard deviations by which an intensity measure
level deviates from the median value predicted by a GSIM, given the
rupture parameters
- rupture tectonic region type
In other words, the disaggregation matrix allows to compute the probability
of each scenario with the specified properties (e.g., magnitude, or the
magnitude and distance) to cause one or more exceedences of a given hazard
level.
For more detailed information about the disaggregation, see for instance
"Disaggregation of Seismic Hazard", Paolo Bazzurro, C. Allin Cornell,
Bulletin of the Seismological Society of America, Vol. 89, pp. 501-520,
April 1999.
:param sources:
Seismic source model, as for
:mod:`PSHA <openquake.hazardlib.calc.hazard_curve>` calculator it
should be an iterator of seismic sources.
:param site:
:class:`~openquake.hazardlib.site.Site` of interest to calculate
disaggregation matrix for.
:param imt:
Instance of :mod:`intensity measure type <openquake.hazardlib.imt>`
class.
:param iml:
Intensity measure level. A float value in units of ``imt``.
:param gsim_by_trt:
Tectonic region type to GSIM objects mapping.
:param truncation_level:
Float, number of standard deviations for truncation of the intensity
distribution.
:param n_epsilons:
Integer number of epsilon histogram bins in the result matrix.
:param mag_bin_width:
Magnitude discretization step, width of one magnitude histogram bin.
:param dist_bin_width:
Distance histogram discretization step, in km.
:param coord_bin_width:
Longitude and latitude histograms discretization step,
in decimal degrees.
:param source_filter:
Optional source-site filter function. See
:mod:`openquake.hazardlib.calc.filters`.
:returns:
A tuple of two items. First is itself a tuple of bin edges information
for (in specified order) magnitude, distance, longitude, latitude,
epsilon and tectonic region types.
Second item is 6d-array representing the full disaggregation matrix.
Dimensions are in the same order as bin edges in the first item
of the result tuple. The matrix can be used directly by pmf-extractor
functions.
"""
trts = sorted(set(src.tectonic_region_type for src in sources))
trt_num = dict((trt, i) for i, trt in enumerate(trts))
rlzs_by_gsim = {gsim_by_trt[trt]: [0] for trt in trts}
iml4 = make_iml4(1, {str(imt): iml})
by_trt = groupby(sources, operator.attrgetter('tectonic_region_type'))
bdata = {}
sitecol = SiteCollection([site])
for trt, srcs in by_trt.items():
ruptures = []
for src in srcs:
ruptures.extend(src.iter_ruptures())
cmaker = ContextMaker(
trt, rlzs_by_gsim, source_filter.integration_distance,
{'filter_distance': filter_distance})
bdata[trt] = collect_bin_data(
ruptures, sitecol, cmaker, iml4, truncation_level, n_epsilons)
if sum(len(bd.mags) for bd in bdata.values()) == 0:
warnings.warn(
'No ruptures have contributed to the hazard at site %s'
% site, RuntimeWarning)
return None, None
min_mag = min(bd.mags.min() for bd in bdata.values())
max_mag = max(bd.mags.max() for bd in bdata.values())
mag_bins = mag_bin_width * numpy.arange(
int(numpy.floor(min_mag / mag_bin_width)),
int(numpy.ceil(max_mag / mag_bin_width) + 1))
min_dist = min(bd.dists.min() for bd in bdata.values())
max_dist = max(bd.dists.max() for bd in bdata.values())
dist_bins = dist_bin_width * numpy.arange(
int(numpy.floor(min_dist / dist_bin_width)),
int(numpy.ceil(max_dist / dist_bin_width) + 1))
bb = (min(bd.lons.min() for bd in bdata.values()),
min(bd.lats.min() for bd in bdata.values()),
max(bd.lons.max() for bd in bdata.values()),
max(bd.lats.max() for bd in bdata.values()))
lon_bins, lat_bins = lon_lat_bins(bb, coord_bin_width)
eps_bins = numpy.linspace(-truncation_level, truncation_level,
n_epsilons + 1)
bin_edges = (mag_bins, dist_bins, [lon_bins], [lat_bins], eps_bins)
matrix = numpy.zeros((len(mag_bins) - 1, len(dist_bins) - 1,
len(lon_bins) - 1, len(lat_bins) - 1,
len(eps_bins) - 1, len(trts)))
for trt in bdata:
dic = build_disagg_matrix(bdata[trt], bin_edges, sid=0)
if dic: # (poe, imt, rlzi) -> matrix
[mat] = dic.values()
matrix[..., trt_num[trt]] = mat
return bin_edges + (trts,), matrix | python | def disaggregation(
sources, site, imt, iml, gsim_by_trt, truncation_level,
n_epsilons, mag_bin_width, dist_bin_width, coord_bin_width,
source_filter=filters.nofilter, filter_distance='rjb'):
trts = sorted(set(src.tectonic_region_type for src in sources))
trt_num = dict((trt, i) for i, trt in enumerate(trts))
rlzs_by_gsim = {gsim_by_trt[trt]: [0] for trt in trts}
iml4 = make_iml4(1, {str(imt): iml})
by_trt = groupby(sources, operator.attrgetter('tectonic_region_type'))
bdata = {}
sitecol = SiteCollection([site])
for trt, srcs in by_trt.items():
ruptures = []
for src in srcs:
ruptures.extend(src.iter_ruptures())
cmaker = ContextMaker(
trt, rlzs_by_gsim, source_filter.integration_distance,
{'filter_distance': filter_distance})
bdata[trt] = collect_bin_data(
ruptures, sitecol, cmaker, iml4, truncation_level, n_epsilons)
if sum(len(bd.mags) for bd in bdata.values()) == 0:
warnings.warn(
'No ruptures have contributed to the hazard at site %s'
% site, RuntimeWarning)
return None, None
min_mag = min(bd.mags.min() for bd in bdata.values())
max_mag = max(bd.mags.max() for bd in bdata.values())
mag_bins = mag_bin_width * numpy.arange(
int(numpy.floor(min_mag / mag_bin_width)),
int(numpy.ceil(max_mag / mag_bin_width) + 1))
min_dist = min(bd.dists.min() for bd in bdata.values())
max_dist = max(bd.dists.max() for bd in bdata.values())
dist_bins = dist_bin_width * numpy.arange(
int(numpy.floor(min_dist / dist_bin_width)),
int(numpy.ceil(max_dist / dist_bin_width) + 1))
bb = (min(bd.lons.min() for bd in bdata.values()),
min(bd.lats.min() for bd in bdata.values()),
max(bd.lons.max() for bd in bdata.values()),
max(bd.lats.max() for bd in bdata.values()))
lon_bins, lat_bins = lon_lat_bins(bb, coord_bin_width)
eps_bins = numpy.linspace(-truncation_level, truncation_level,
n_epsilons + 1)
bin_edges = (mag_bins, dist_bins, [lon_bins], [lat_bins], eps_bins)
matrix = numpy.zeros((len(mag_bins) - 1, len(dist_bins) - 1,
len(lon_bins) - 1, len(lat_bins) - 1,
len(eps_bins) - 1, len(trts)))
for trt in bdata:
dic = build_disagg_matrix(bdata[trt], bin_edges, sid=0)
if dic:
[mat] = dic.values()
matrix[..., trt_num[trt]] = mat
return bin_edges + (trts,), matrix | [func_code_tokens omitted: the code above split into tokens]
| Compute "Disaggregation" matrix representing conditional probability of an
intensity mesaure type ``imt`` exceeding, at least once, an intensity
measure level ``iml`` at a geographical location ``site``, given rupture
scenarios classified in terms of:
- rupture magnitude
- Joyner-Boore distance from rupture surface to site
- longitude and latitude of the surface projection of a rupture's point
closest to ``site``
- epsilon: number of standard deviations by which an intensity measure
level deviates from the median value predicted by a GSIM, given the
rupture parameters
- rupture tectonic region type
In other words, the disaggregation matrix allows to compute the probability
of each scenario with the specified properties (e.g., magnitude, or the
magnitude and distance) to cause one or more exceedences of a given hazard
level.
For more detailed information about the disaggregation, see for instance
"Disaggregation of Seismic Hazard", Paolo Bazzurro, C. Allin Cornell,
Bulletin of the Seismological Society of America, Vol. 89, pp. 501-520,
April 1999.
:param sources:
Seismic source model, as for
:mod:`PSHA <openquake.hazardlib.calc.hazard_curve>` calculator it
should be an iterator of seismic sources.
:param site:
:class:`~openquake.hazardlib.site.Site` of interest to calculate
disaggregation matrix for.
:param imt:
Instance of :mod:`intensity measure type <openquake.hazardlib.imt>`
class.
:param iml:
Intensity measure level. A float value in units of ``imt``.
:param gsim_by_trt:
Tectonic region type to GSIM objects mapping.
:param truncation_level:
Float, number of standard deviations for truncation of the intensity
distribution.
:param n_epsilons:
Integer number of epsilon histogram bins in the result matrix.
:param mag_bin_width:
Magnitude discretization step, width of one magnitude histogram bin.
:param dist_bin_width:
Distance histogram discretization step, in km.
:param coord_bin_width:
Longitude and latitude histograms discretization step,
in decimal degrees.
:param source_filter:
Optional source-site filter function. See
:mod:`openquake.hazardlib.calc.filters`.
:returns:
A tuple of two items. First is itself a tuple of bin edges information
for (in specified order) magnitude, distance, longitude, latitude,
epsilon and tectonic region types.
Second item is 6d-array representing the full disaggregation matrix.
Dimensions are in the same order as bin edges in the first item
of the result tuple. The matrix can be used directly by pmf-extractor
functions. | [func_documentation_tokens omitted: the opening of the docstring split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/disagg.py#L214-L335 |
gem/oq-engine | openquake/hazardlib/calc/disagg.py | mag_pmf | def mag_pmf(matrix):
"""
Fold full disaggregation matrix to magnitude PMF.
:returns:
1d array, a histogram representing magnitude PMF.
"""
nmags, ndists, nlons, nlats, neps = matrix.shape
mag_pmf = numpy.zeros(nmags)
for i in range(nmags):
mag_pmf[i] = numpy.prod(
[1. - matrix[i, j, k, l, m]
for j in range(ndists)
for k in range(nlons)
for l in range(nlats)
for m in range(neps)])
return 1. - mag_pmf | python | def mag_pmf(matrix):
nmags, ndists, nlons, nlats, neps = matrix.shape
mag_pmf = numpy.zeros(nmags)
for i in range(nmags):
mag_pmf[i] = numpy.prod(
[1. - matrix[i, j, k, l, m]
for j in range(ndists)
for k in range(nlons)
for l in range(nlats)
for m in range(neps)])
return 1. - mag_pmf | [func_code_tokens omitted: the code above split into tokens]
| Fold full disaggregation matrix to magnitude PMF.
:returns:
1d array, a histogram representing magnitude PMF. | [func_documentation_tokens omitted: the first sentence of the docstring split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/disagg.py#L338-L354 |
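The folding in the row above combines the probabilities over every non-magnitude cell as 1 - prod(1 - p); a vectorised numpy equivalent on a small synthetic matrix (the values are illustrative only):
import numpy
numpy.random.seed(0)
matrix = numpy.random.uniform(0, 0.01, (4, 3, 2, 2, 5))     # (mag, dist, lon, lat, eps) bins
pmf = 1. - numpy.prod(1. - matrix.reshape(4, -1), axis=1)   # P(at least one exceedance) per magnitude bin
print(pmf.shape)   # (4,)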
gem/oq-engine | openquake/hazardlib/calc/disagg.py | trt_pmf | def trt_pmf(matrices):
"""
Fold full disaggregation matrix to tectonic region type PMF.
:param matrices:
a matrix with T submatrices
:returns:
an array of T probabilities one per each tectonic region type
"""
ntrts, nmags, ndists, nlons, nlats, neps = matrices.shape
pmf = numpy.zeros(ntrts)
for t in range(ntrts):
pmf[t] = 1. - numpy.prod(
[1. - matrices[t, i, j, k, l, m]
for i in range(nmags)
for j in range(ndists)
for k in range(nlons)
for l in range(nlats)
for m in range(neps)])
return pmf | python | def trt_pmf(matrices):
ntrts, nmags, ndists, nlons, nlats, neps = matrices.shape
pmf = numpy.zeros(ntrts)
for t in range(ntrts):
pmf[t] = 1. - numpy.prod(
[1. - matrices[t, i, j, k, l, m]
for i in range(nmags)
for j in range(ndists)
for k in range(nlons)
for l in range(nlats)
for m in range(neps)])
return pmf | [func_code_tokens omitted: the code above split into tokens]
| Fold full disaggregation matrix to tectonic region type PMF.
:param matrices:
a matrix with T submatrices
:returns:
an array of T probabilities one per each tectonic region type | [func_documentation_tokens omitted: the first sentence of the docstring split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/disagg.py#L376-L395 |
gem/oq-engine | openquake/hazardlib/calc/disagg.py | lon_lat_trt_pmf | def lon_lat_trt_pmf(matrices):
"""
Fold full disaggregation matrices to lon / lat / TRT PMF.
:param matrices:
a matrix with T submatrices
:returns:
3d array. First dimension represents longitude histogram bins,
second one latitude histogram bins, third one trt histogram bins.
"""
res = numpy.array([lon_lat_pmf(mat) for mat in matrices])
return res.transpose(1, 2, 0) | python | def lon_lat_trt_pmf(matrices):
res = numpy.array([lon_lat_pmf(mat) for mat in matrices])
return res.transpose(1, 2, 0) | [func_code_tokens omitted: the code above split into tokens]
| Fold full disaggregation matrices to lon / lat / TRT PMF.
:param matrices:
a matrix with T submatrices
:returns:
3d array. First dimension represents longitude histogram bins,
second one latitude histogram bins, third one trt histogram bins. | [func_documentation_tokens omitted: the first sentence of the docstring split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/disagg.py#L459-L470 |
gem/oq-engine | openquake/commands/db.py | db | def db(cmd, args=()):
"""
Run a database command
"""
if cmd not in commands:
okcmds = '\n'.join(
'%s %s' % (name, repr(' '.join(args)) if args else '')
for name, args in sorted(commands.items()))
print('Invalid command "%s": choose one from\n%s' % (cmd, okcmds))
elif len(args) != len(commands[cmd]):
print('Wrong number of arguments, expected %s, got %s' % (
commands[cmd], args))
else:
dbserver.ensure_on()
res = logs.dbcmd(cmd, *convert(args))
if hasattr(res, '_fields') and res.__class__.__name__ != 'Row':
print(rst_table(res))
else:
print(res) | python | def db(cmd, args=()):
if cmd not in commands:
okcmds = '\n'.join(
'%s %s' % (name, repr(' '.join(args)) if args else '')
for name, args in sorted(commands.items()))
print('Invalid command "%s": choose one from\n%s' % (cmd, okcmds))
elif len(args) != len(commands[cmd]):
print('Wrong number of arguments, expected %s, got %s' % (
commands[cmd], args))
else:
dbserver.ensure_on()
res = logs.dbcmd(cmd, *convert(args))
if hasattr(res, '_fields') and res.__class__.__name__ != 'Row':
print(rst_table(res))
else:
print(res) | [func_code_tokens omitted: the code above split into tokens]
| Run a database command | [func_documentation_tokens omitted: the docstring above split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/db.py#L47-L65 |
gem/oq-engine | openquake/hmtk/faults/fault_geometries.py | SimpleFaultGeometry.get_area | def get_area(self):
'''
Calculates the area of the fault (km ** 2.) as the product of length
(km) and downdip width (km)
'''
d_z = self.lower_depth - self.upper_depth
self.downdip_width = d_z / np.sin(self.dip * np.pi / 180.)
self.surface_width = self.downdip_width * np.cos(self.dip *
np.pi / 180.)
self.area = self.length * self.downdip_width
return self.area | python | def get_area(self):
d_z = self.lower_depth - self.upper_depth
self.downdip_width = d_z / np.sin(self.dip * np.pi / 180.)
self.surface_width = self.downdip_width * np.cos(self.dip *
np.pi / 180.)
self.area = self.length * self.downdip_width
return self.area | [func_code_tokens omitted: the code above split into tokens]
| Calculates the area of the fault (km ** 2.) as the product of length
(km) and downdip width (km) | [func_documentation_tokens omitted: the docstring above split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/fault_geometries.py#L112-L122 |
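A worked instance of the width/area relation in the row above: with an upper seismogenic depth of 0 km, a lower depth of 10 km and a dip of 45 degrees, the down-dip width is 10 / sin(45 deg) ~ 14.14 km, so a 20 km long fault has an area of ~282.8 km^2 (the input values are hypothetical):
import numpy as np
upper_depth, lower_depth, dip, length = 0.0, 10.0, 45.0, 20.0   # km, km, degrees, km
downdip_width = (lower_depth - upper_depth) / np.sin(np.radians(dip))
surface_width = downdip_width * np.cos(np.radians(dip))
area = length * downdip_width
print(round(downdip_width, 2), round(surface_width, 2), round(area, 1))   # ~14.14 ~10.0 ~282.8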
gem/oq-engine | openquake/commands/download_shakemap.py | download_shakemap | def download_shakemap(id):
"""
Example of usage: utils/shakemap usp000fjta
"""
with performance.Monitor('shakemap', measuremem=True) as mon:
dest = '%s.npy' % id
numpy.save(dest, download_array(id))
print(mon)
print('Saved %s' % dest) | python | def download_shakemap(id):
with performance.Monitor('shakemap', measuremem=True) as mon:
dest = '%s.npy' % id
numpy.save(dest, download_array(id))
print(mon)
print('Saved %s' % dest) | [func_code_tokens omitted: the code above split into tokens]
| Example of usage: utils/shakemap usp000fjta | [func_documentation_tokens omitted: the docstring above split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/download_shakemap.py#L25-L33 |
gem/oq-engine | openquake/hazardlib/stats.py | mean_curve | def mean_curve(values, weights=None):
"""
Compute the mean by using numpy.average on the first axis.
"""
if weights is None:
weights = [1. / len(values)] * len(values)
if not isinstance(values, numpy.ndarray):
values = numpy.array(values)
return numpy.average(values, axis=0, weights=weights) | python | def mean_curve(values, weights=None):
if weights is None:
weights = [1. / len(values)] * len(values)
if not isinstance(values, numpy.ndarray):
values = numpy.array(values)
return numpy.average(values, axis=0, weights=weights) | [func_code_tokens omitted: the code above split into tokens]
| Compute the mean by using numpy.average on the first axis. | [func_documentation_tokens omitted: the docstring above split into tokens]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/stats.py#L25-L33 |
gem/oq-engine | openquake/hazardlib/stats.py | quantile_curve | def quantile_curve(quantile, curves, weights=None):
"""
Compute the weighted quantile aggregate of a set of curves.
:param quantile:
Quantile value to calculate. Should be in the range [0.0, 1.0].
:param curves:
Array of R PoEs (possibly arrays)
:param weights:
Array-like of weights, 1 for each input curve, or None
:returns:
A numpy array representing the quantile aggregate
"""
if not isinstance(curves, numpy.ndarray):
curves = numpy.array(curves)
R = len(curves)
if weights is None:
weights = numpy.ones(R) / R
else:
weights = numpy.array(weights)
assert len(weights) == R, (len(weights), R)
result = numpy.zeros(curves.shape[1:])
for idx, _ in numpy.ndenumerate(result):
data = numpy.array([a[idx] for a in curves])
sorted_idxs = numpy.argsort(data)
sorted_weights = weights[sorted_idxs]
sorted_data = data[sorted_idxs]
cum_weights = numpy.cumsum(sorted_weights)
# get the quantile from the interpolated CDF
result[idx] = numpy.interp(quantile, cum_weights, sorted_data)
return result | python | def quantile_curve(quantile, curves, weights=None):
if not isinstance(curves, numpy.ndarray):
curves = numpy.array(curves)
R = len(curves)
if weights is None:
weights = numpy.ones(R) / R
else:
weights = numpy.array(weights)
assert len(weights) == R, (len(weights), R)
result = numpy.zeros(curves.shape[1:])
for idx, _ in numpy.ndenumerate(result):
data = numpy.array([a[idx] for a in curves])
sorted_idxs = numpy.argsort(data)
sorted_weights = weights[sorted_idxs]
sorted_data = data[sorted_idxs]
cum_weights = numpy.cumsum(sorted_weights)
result[idx] = numpy.interp(quantile, cum_weights, sorted_data)
return result | [
"def",
"quantile_curve",
"(",
"quantile",
",",
"curves",
",",
"weights",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"curves",
",",
"numpy",
".",
"ndarray",
")",
":",
"curves",
"=",
"numpy",
".",
"array",
"(",
"curves",
")",
"R",
"=",
"len",
"(",
"curves",
")",
"if",
"weights",
"is",
"None",
":",
"weights",
"=",
"numpy",
".",
"ones",
"(",
"R",
")",
"/",
"R",
"else",
":",
"weights",
"=",
"numpy",
".",
"array",
"(",
"weights",
")",
"assert",
"len",
"(",
"weights",
")",
"==",
"R",
",",
"(",
"len",
"(",
"weights",
")",
",",
"R",
")",
"result",
"=",
"numpy",
".",
"zeros",
"(",
"curves",
".",
"shape",
"[",
"1",
":",
"]",
")",
"for",
"idx",
",",
"_",
"in",
"numpy",
".",
"ndenumerate",
"(",
"result",
")",
":",
"data",
"=",
"numpy",
".",
"array",
"(",
"[",
"a",
"[",
"idx",
"]",
"for",
"a",
"in",
"curves",
"]",
")",
"sorted_idxs",
"=",
"numpy",
".",
"argsort",
"(",
"data",
")",
"sorted_weights",
"=",
"weights",
"[",
"sorted_idxs",
"]",
"sorted_data",
"=",
"data",
"[",
"sorted_idxs",
"]",
"cum_weights",
"=",
"numpy",
".",
"cumsum",
"(",
"sorted_weights",
")",
"# get the quantile from the interpolated CDF",
"result",
"[",
"idx",
"]",
"=",
"numpy",
".",
"interp",
"(",
"quantile",
",",
"cum_weights",
",",
"sorted_data",
")",
"return",
"result"
]
| Compute the weighted quantile aggregate of a set of curves.
:param quantile:
Quantile value to calculate. Should be in the range [0.0, 1.0].
:param curves:
Array of R PoEs (possibly arrays)
:param weights:
Array-like of weights, 1 for each input curve, or None
:returns:
A numpy array representing the quantile aggregate | [
"Compute",
"the",
"weighted",
"quantile",
"aggregate",
"of",
"a",
"set",
"of",
"curves",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/stats.py#L44-L74 |
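A hand-checked example of the weighted-quantile logic above, written with plain numpy so it can run in isolation (the data and weights are illustrative):

import numpy

data = numpy.array([1.0, 2.0, 3.0])         # one PoE per realization (R=3)
weights = numpy.array([0.2, 0.5, 0.3])

order = numpy.argsort(data)
cum_weights = numpy.cumsum(weights[order])  # [0.2, 0.7, 1.0]
# Interpolate the 0.5 quantile on the weighted CDF, as in the loop above.
q50 = numpy.interp(0.5, cum_weights, data[order])
# -> 1.6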
gem/oq-engine | openquake/hazardlib/stats.py | compute_pmap_stats | def compute_pmap_stats(pmaps, stats, weights, imtls):
"""
:param pmaps:
a list of R probability maps
:param stats:
a sequence of S statistic functions
:param weights:
a list of ImtWeights
:param imtls:
a DictArray of intensity measure types
:returns:
a probability map with S internal values
"""
sids = set()
p0 = next(iter(pmaps))
L = p0.shape_y
for pmap in pmaps:
sids.update(pmap)
assert pmap.shape_y == L, (pmap.shape_y, L)
if len(sids) == 0:
raise ValueError('All empty probability maps!')
sids = numpy.array(sorted(sids), numpy.uint32)
nstats = len(stats)
curves = numpy.zeros((len(pmaps), len(sids), L), numpy.float64)
for i, pmap in enumerate(pmaps):
for j, sid in enumerate(sids):
if sid in pmap:
curves[i, j] = pmap[sid].array[:, 0]
out = p0.__class__.build(L, nstats, sids)
for imt in imtls:
slc = imtls(imt)
w = [weight[imt] for weight in weights]
for i, array in enumerate(compute_stats(curves[:, :, slc], stats, w)):
for j, sid in numpy.ndenumerate(sids):
out[sid].array[slc, i] = array[j]
return out | python | def compute_pmap_stats(pmaps, stats, weights, imtls):
sids = set()
p0 = next(iter(pmaps))
L = p0.shape_y
for pmap in pmaps:
sids.update(pmap)
assert pmap.shape_y == L, (pmap.shape_y, L)
if len(sids) == 0:
raise ValueError('All empty probability maps!')
sids = numpy.array(sorted(sids), numpy.uint32)
nstats = len(stats)
curves = numpy.zeros((len(pmaps), len(sids), L), numpy.float64)
for i, pmap in enumerate(pmaps):
for j, sid in enumerate(sids):
if sid in pmap:
curves[i, j] = pmap[sid].array[:, 0]
out = p0.__class__.build(L, nstats, sids)
for imt in imtls:
slc = imtls(imt)
w = [weight[imt] for weight in weights]
for i, array in enumerate(compute_stats(curves[:, :, slc], stats, w)):
for j, sid in numpy.ndenumerate(sids):
out[sid].array[slc, i] = array[j]
return out | [
"def",
"compute_pmap_stats",
"(",
"pmaps",
",",
"stats",
",",
"weights",
",",
"imtls",
")",
":",
"sids",
"=",
"set",
"(",
")",
"p0",
"=",
"next",
"(",
"iter",
"(",
"pmaps",
")",
")",
"L",
"=",
"p0",
".",
"shape_y",
"for",
"pmap",
"in",
"pmaps",
":",
"sids",
".",
"update",
"(",
"pmap",
")",
"assert",
"pmap",
".",
"shape_y",
"==",
"L",
",",
"(",
"pmap",
".",
"shape_y",
",",
"L",
")",
"if",
"len",
"(",
"sids",
")",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"'All empty probability maps!'",
")",
"sids",
"=",
"numpy",
".",
"array",
"(",
"sorted",
"(",
"sids",
")",
",",
"numpy",
".",
"uint32",
")",
"nstats",
"=",
"len",
"(",
"stats",
")",
"curves",
"=",
"numpy",
".",
"zeros",
"(",
"(",
"len",
"(",
"pmaps",
")",
",",
"len",
"(",
"sids",
")",
",",
"L",
")",
",",
"numpy",
".",
"float64",
")",
"for",
"i",
",",
"pmap",
"in",
"enumerate",
"(",
"pmaps",
")",
":",
"for",
"j",
",",
"sid",
"in",
"enumerate",
"(",
"sids",
")",
":",
"if",
"sid",
"in",
"pmap",
":",
"curves",
"[",
"i",
",",
"j",
"]",
"=",
"pmap",
"[",
"sid",
"]",
".",
"array",
"[",
":",
",",
"0",
"]",
"out",
"=",
"p0",
".",
"__class__",
".",
"build",
"(",
"L",
",",
"nstats",
",",
"sids",
")",
"for",
"imt",
"in",
"imtls",
":",
"slc",
"=",
"imtls",
"(",
"imt",
")",
"w",
"=",
"[",
"weight",
"[",
"imt",
"]",
"for",
"weight",
"in",
"weights",
"]",
"for",
"i",
",",
"array",
"in",
"enumerate",
"(",
"compute_stats",
"(",
"curves",
"[",
":",
",",
":",
",",
"slc",
"]",
",",
"stats",
",",
"w",
")",
")",
":",
"for",
"j",
",",
"sid",
"in",
"numpy",
".",
"ndenumerate",
"(",
"sids",
")",
":",
"out",
"[",
"sid",
"]",
".",
"array",
"[",
"slc",
",",
"i",
"]",
"=",
"array",
"[",
"j",
"]",
"return",
"out"
]
| :param pmaps:
a list of R probability maps
:param stats:
a sequence of S statistic functions
:param weights:
a list of ImtWeights
:param imtls:
a DictArray of intensity measure types
:returns:
a probability map with S internal values | [
":",
"param",
"pmaps",
":",
"a",
"list",
"of",
"R",
"probability",
"maps",
":",
"param",
"stats",
":",
"a",
"sequence",
"of",
"S",
"statistic",
"functions",
":",
"param",
"weights",
":",
"a",
"list",
"of",
"ImtWeights",
":",
"param",
"imtls",
":",
"a",
"DictArray",
"of",
"intensity",
"measure",
"types",
":",
"returns",
":",
"a",
"probability",
"map",
"with",
"S",
"internal",
"values"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/stats.py#L89-L124 |
gem/oq-engine | openquake/hazardlib/stats.py | compute_stats | def compute_stats(array, stats, weights):
"""
:param array:
an array of R elements (which can be arrays)
:param stats:
a sequence of S statistic functions
:param weights:
a list of R weights
:returns:
an array of S elements (which can be arrays)
"""
result = numpy.zeros((len(stats),) + array.shape[1:], array.dtype)
for i, func in enumerate(stats):
result[i] = apply_stat(func, array, weights)
return result | python | def compute_stats(array, stats, weights):
result = numpy.zeros((len(stats),) + array.shape[1:], array.dtype)
for i, func in enumerate(stats):
result[i] = apply_stat(func, array, weights)
return result | [
"def",
"compute_stats",
"(",
"array",
",",
"stats",
",",
"weights",
")",
":",
"result",
"=",
"numpy",
".",
"zeros",
"(",
"(",
"len",
"(",
"stats",
")",
",",
")",
"+",
"array",
".",
"shape",
"[",
"1",
":",
"]",
",",
"array",
".",
"dtype",
")",
"for",
"i",
",",
"func",
"in",
"enumerate",
"(",
"stats",
")",
":",
"result",
"[",
"i",
"]",
"=",
"apply_stat",
"(",
"func",
",",
"array",
",",
"weights",
")",
"return",
"result"
]
| :param array:
an array of R elements (which can be arrays)
:param stats:
a sequence of S statistic functions
:param weights:
a list of R weights
:returns:
an array of S elements (which can be arrays) | [
":",
"param",
"array",
":",
"an",
"array",
"of",
"R",
"elements",
"(",
"which",
"can",
"be",
"arrays",
")",
":",
"param",
"stats",
":",
"a",
"sequence",
"of",
"S",
"statistic",
"functions",
":",
"param",
"weights",
":",
"a",
"list",
"of",
"R",
"weights",
":",
"returns",
":",
"an",
"array",
"of",
"S",
"elements",
"(",
"which",
"can",
"be",
"arrays",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/stats.py#L128-L142 |
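A possible way to call it, assuming the oq-engine package is importable; quantile_curve is turned into a one-argument statistic with functools.partial:

import functools
import numpy
from openquake.hazardlib.stats import compute_stats, mean_curve, quantile_curve

curves = numpy.array([[0.10, 0.08, 0.05, 0.01],    # R=3 realizations,
                      [0.12, 0.09, 0.06, 0.02],    # 4 values each
                      [0.20, 0.15, 0.10, 0.05]])
weights = [0.5, 0.3, 0.2]
stats = [mean_curve, functools.partial(quantile_curve, 0.85)]
out = compute_stats(curves, stats, weights)   # shape (S, 4) = (2, 4)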
gem/oq-engine | openquake/hazardlib/stats.py | compute_stats2 | def compute_stats2(arrayNR, stats, weights):
"""
:param arrayNR:
an array of (N, R) elements
:param stats:
a sequence of S statistic functions
:param weights:
a list of R weights
:returns:
an array of (N, S) elements
"""
newshape = list(arrayNR.shape)
if newshape[1] != len(weights):
raise ValueError('Got %d weights but %d values!' %
(len(weights), newshape[1]))
newshape[1] = len(stats) # number of statistical outputs
newarray = numpy.zeros(newshape, arrayNR.dtype)
data = [arrayNR[:, i] for i in range(len(weights))]
for i, func in enumerate(stats):
newarray[:, i] = apply_stat(func, data, weights)
return newarray | python | def compute_stats2(arrayNR, stats, weights):
newshape = list(arrayNR.shape)
if newshape[1] != len(weights):
raise ValueError('Got %d weights but %d values!' %
(len(weights), newshape[1]))
newshape[1] = len(stats)
newarray = numpy.zeros(newshape, arrayNR.dtype)
data = [arrayNR[:, i] for i in range(len(weights))]
for i, func in enumerate(stats):
newarray[:, i] = apply_stat(func, data, weights)
return newarray | [
"def",
"compute_stats2",
"(",
"arrayNR",
",",
"stats",
",",
"weights",
")",
":",
"newshape",
"=",
"list",
"(",
"arrayNR",
".",
"shape",
")",
"if",
"newshape",
"[",
"1",
"]",
"!=",
"len",
"(",
"weights",
")",
":",
"raise",
"ValueError",
"(",
"'Got %d weights but %d values!'",
"%",
"(",
"len",
"(",
"weights",
")",
",",
"newshape",
"[",
"1",
"]",
")",
")",
"newshape",
"[",
"1",
"]",
"=",
"len",
"(",
"stats",
")",
"# number of statistical outputs",
"newarray",
"=",
"numpy",
".",
"zeros",
"(",
"newshape",
",",
"arrayNR",
".",
"dtype",
")",
"data",
"=",
"[",
"arrayNR",
"[",
":",
",",
"i",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"weights",
")",
")",
"]",
"for",
"i",
",",
"func",
"in",
"enumerate",
"(",
"stats",
")",
":",
"newarray",
"[",
":",
",",
"i",
"]",
"=",
"apply_stat",
"(",
"func",
",",
"data",
",",
"weights",
")",
"return",
"newarray"
]
| :param arrayNR:
an array of (N, R) elements
:param stats:
a sequence of S statistic functions
:param weights:
a list of R weights
:returns:
an array of (N, S) elements | [
":",
"param",
"arrayNR",
":",
"an",
"array",
"of",
"(",
"N",
"R",
")",
"elements",
":",
"param",
"stats",
":",
"a",
"sequence",
"of",
"S",
"statistic",
"functions",
":",
"param",
"weights",
":",
"a",
"list",
"of",
"R",
"weights",
":",
"returns",
":",
"an",
"array",
"of",
"(",
"N",
"S",
")",
"elements"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/stats.py#L146-L166 |
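The same statistics with the transposed (N, R) layout expected here; again a sketch that assumes oq-engine is importable:

import functools
import numpy
from openquake.hazardlib.stats import compute_stats2, mean_curve, quantile_curve

arrayNR = numpy.array([[0.10, 0.12, 0.20],    # N=4 rows,
                       [0.08, 0.09, 0.15],    # R=3 realizations in columns
                       [0.05, 0.06, 0.10],
                       [0.01, 0.02, 0.05]])
weights = [0.5, 0.3, 0.2]
stats = [mean_curve, functools.partial(quantile_curve, 0.85)]
out = compute_stats2(arrayNR, stats, weights)   # shape (N, S) = (4, 2)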
gem/oq-engine | openquake/hazardlib/stats.py | apply_stat | def apply_stat(f, arraylist, *extra, **kw):
"""
:param f: a callable arraylist -> array (of the same shape and dtype)
:param arraylist: a list of arrays of the same shape and dtype
:param extra: additional positional arguments
:param kw: keyword arguments
:returns: an array of the same shape and dtype
Broadcast statistical functions to composite arrays. Here is an example:
>>> dt = numpy.dtype([('a', (float, 2)), ('b', float)])
>>> a1 = numpy.array([([1, 2], 3)], dt)
>>> a2 = numpy.array([([4, 5], 6)], dt)
>>> apply_stat(mean_curve, [a1, a2])
array([([2.5, 3.5], 4.5)], dtype=[('a', '<f8', (2,)), ('b', '<f8')])
"""
dtype = arraylist[0].dtype
shape = arraylist[0].shape
if dtype.names: # composite array
new = numpy.zeros(shape, dtype)
for name in dtype.names:
new[name] = f([arr[name] for arr in arraylist], *extra, **kw)
return new
else: # simple array
return f(arraylist, *extra, **kw) | python | def apply_stat(f, arraylist, *extra, **kw):
dtype = arraylist[0].dtype
shape = arraylist[0].shape
if dtype.names:
new = numpy.zeros(shape, dtype)
for name in dtype.names:
new[name] = f([arr[name] for arr in arraylist], *extra, **kw)
return new
else:
return f(arraylist, *extra, **kw) | [
"def",
"apply_stat",
"(",
"f",
",",
"arraylist",
",",
"*",
"extra",
",",
"*",
"*",
"kw",
")",
":",
"dtype",
"=",
"arraylist",
"[",
"0",
"]",
".",
"dtype",
"shape",
"=",
"arraylist",
"[",
"0",
"]",
".",
"shape",
"if",
"dtype",
".",
"names",
":",
"# composite array",
"new",
"=",
"numpy",
".",
"zeros",
"(",
"shape",
",",
"dtype",
")",
"for",
"name",
"in",
"dtype",
".",
"names",
":",
"new",
"[",
"name",
"]",
"=",
"f",
"(",
"[",
"arr",
"[",
"name",
"]",
"for",
"arr",
"in",
"arraylist",
"]",
",",
"*",
"extra",
",",
"*",
"*",
"kw",
")",
"return",
"new",
"else",
":",
"# simple array",
"return",
"f",
"(",
"arraylist",
",",
"*",
"extra",
",",
"*",
"*",
"kw",
")"
]
| :param f: a callable arraylist -> array (of the same shape and dtype)
:param arraylist: a list of arrays of the same shape and dtype
:param extra: additional positional arguments
:param kw: keyword arguments
:returns: an array of the same shape and dtype
Broadcast statistical functions to composite arrays. Here is an example:
>>> dt = numpy.dtype([('a', (float, 2)), ('b', float)])
>>> a1 = numpy.array([([1, 2], 3)], dt)
>>> a2 = numpy.array([([4, 5], 6)], dt)
>>> apply_stat(mean_curve, [a1, a2])
array([([2.5, 3.5], 4.5)], dtype=[('a', '<f8', (2,)), ('b', '<f8')]) | [
":",
"param",
"f",
":",
"a",
"callable",
"arraylist",
"-",
">",
"array",
"(",
"of",
"the",
"same",
"shape",
"and",
"dtype",
")",
":",
"param",
"arraylist",
":",
"a",
"list",
"of",
"arrays",
"of",
"the",
"same",
"shape",
"and",
"dtype",
":",
"param",
"extra",
":",
"additional",
"positional",
"arguments",
":",
"param",
"kw",
":",
"keyword",
"arguments",
":",
"returns",
":",
"an",
"array",
"of",
"the",
"same",
"shape",
"and",
"dtype"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/stats.py#L169-L193 |
gem/oq-engine | openquake/hazardlib/stats.py | set_rlzs_stats | def set_rlzs_stats(dstore, prefix, arrayNR=None):
"""
:param dstore: a DataStore object
:param prefix: dataset prefix
:param arrayNR: an array of shape (N, R, ...)
"""
if arrayNR is None:
# assume the -rlzs array is already stored
arrayNR = dstore[prefix + '-rlzs'].value
else:
# store passed the -rlzs array
dstore[prefix + '-rlzs'] = arrayNR
R = arrayNR.shape[1]
if R > 1:
stats = dstore['oqparam'].hazard_stats()
statnames, statfuncs = zip(*stats.items())
weights = dstore['weights'][:, 0]
dstore[prefix + '-stats'] = compute_stats2(arrayNR, statfuncs, weights)
dstore.set_attrs(prefix + '-stats', stats=encode(statnames)) | python | def set_rlzs_stats(dstore, prefix, arrayNR=None):
if arrayNR is None:
arrayNR = dstore[prefix + '-rlzs'].value
else:
dstore[prefix + '-rlzs'] = arrayNR
R = arrayNR.shape[1]
if R > 1:
stats = dstore['oqparam'].hazard_stats()
statnames, statfuncs = zip(*stats.items())
weights = dstore['weights'][:, 0]
dstore[prefix + '-stats'] = compute_stats2(arrayNR, statfuncs, weights)
dstore.set_attrs(prefix + '-stats', stats=encode(statnames)) | [
"def",
"set_rlzs_stats",
"(",
"dstore",
",",
"prefix",
",",
"arrayNR",
"=",
"None",
")",
":",
"if",
"arrayNR",
"is",
"None",
":",
"# assume the -rlzs array is already stored",
"arrayNR",
"=",
"dstore",
"[",
"prefix",
"+",
"'-rlzs'",
"]",
".",
"value",
"else",
":",
"# store passed the -rlzs array",
"dstore",
"[",
"prefix",
"+",
"'-rlzs'",
"]",
"=",
"arrayNR",
"R",
"=",
"arrayNR",
".",
"shape",
"[",
"1",
"]",
"if",
"R",
">",
"1",
":",
"stats",
"=",
"dstore",
"[",
"'oqparam'",
"]",
".",
"hazard_stats",
"(",
")",
"statnames",
",",
"statfuncs",
"=",
"zip",
"(",
"*",
"stats",
".",
"items",
"(",
")",
")",
"weights",
"=",
"dstore",
"[",
"'weights'",
"]",
"[",
":",
",",
"0",
"]",
"dstore",
"[",
"prefix",
"+",
"'-stats'",
"]",
"=",
"compute_stats2",
"(",
"arrayNR",
",",
"statfuncs",
",",
"weights",
")",
"dstore",
".",
"set_attrs",
"(",
"prefix",
"+",
"'-stats'",
",",
"stats",
"=",
"encode",
"(",
"statnames",
")",
")"
]
| :param dstore: a DataStore object
:param prefix: dataset prefix
:param arrayNR: an array of shape (N, R, ...) | [
":",
"param",
"dstore",
":",
"a",
"DataStore",
"object",
":",
"param",
"prefix",
":",
"dataset",
"prefix",
":",
"param",
"arrayNR",
":",
"an",
"array",
"of",
"shape",
"(",
"N",
"R",
"...",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/stats.py#L196-L214 |
gem/oq-engine | openquake/hazardlib/gsim/toro_2002.py | ToroEtAl2002.get_mean_and_stddevs | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
C = self.COEFFS[imt]
mean = self._compute_mean(C, rup.mag, dists.rjb)
stddevs = self._compute_stddevs(C, rup.mag, dists.rjb, imt,
stddev_types)
# apply decay factor for 3 and 4 seconds (not originally supported
# by the equations)
if imt.period == 3.0:
mean /= 0.612
if imt.period == 4.0:
mean /= 0.559
return mean, stddevs | python | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
C = self.COEFFS[imt]
mean = self._compute_mean(C, rup.mag, dists.rjb)
stddevs = self._compute_stddevs(C, rup.mag, dists.rjb, imt,
stddev_types)
if imt.period == 3.0:
mean /= 0.612
if imt.period == 4.0:
mean /= 0.559
return mean, stddevs | [
"def",
"get_mean_and_stddevs",
"(",
"self",
",",
"sites",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stddev_types",
")",
":",
"assert",
"all",
"(",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
"for",
"stddev_type",
"in",
"stddev_types",
")",
"C",
"=",
"self",
".",
"COEFFS",
"[",
"imt",
"]",
"mean",
"=",
"self",
".",
"_compute_mean",
"(",
"C",
",",
"rup",
".",
"mag",
",",
"dists",
".",
"rjb",
")",
"stddevs",
"=",
"self",
".",
"_compute_stddevs",
"(",
"C",
",",
"rup",
".",
"mag",
",",
"dists",
".",
"rjb",
",",
"imt",
",",
"stddev_types",
")",
"# apply decay factor for 3 and 4 seconds (not originally supported",
"# by the equations)",
"if",
"imt",
".",
"period",
"==",
"3.0",
":",
"mean",
"/=",
"0.612",
"if",
"imt",
".",
"period",
"==",
"4.0",
":",
"mean",
"/=",
"0.559",
"return",
"mean",
",",
"stddevs"
]
| See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. | [
"See",
":",
"meth",
":",
"superclass",
"method",
"<",
".",
"base",
".",
"GroundShakingIntensityModel",
".",
"get_mean_and_stddevs",
">",
"for",
"spec",
"of",
"input",
"and",
"result",
"values",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/toro_2002.py#L76-L97 |
gem/oq-engine | openquake/hazardlib/gsim/toro_2002.py | ToroEtAl2002._compute_term1 | def _compute_term1(self, C, mag):
"""
Compute magnitude dependent terms (2nd and 3rd) in equation 3
page 46.
"""
mag_diff = mag - 6
return C['c2'] * mag_diff + C['c3'] * mag_diff ** 2 | python | def _compute_term1(self, C, mag):
mag_diff = mag - 6
return C['c2'] * mag_diff + C['c3'] * mag_diff ** 2 | [
"def",
"_compute_term1",
"(",
"self",
",",
"C",
",",
"mag",
")",
":",
"mag_diff",
"=",
"mag",
"-",
"6",
"return",
"C",
"[",
"'c2'",
"]",
"*",
"mag_diff",
"+",
"C",
"[",
"'c3'",
"]",
"*",
"mag_diff",
"**",
"2"
]
| Compute magnitude dependent terms (2nd and 3rd) in equation 3
page 46. | [
"Compute",
"magnitude",
"dependent",
"terms",
"(",
"2nd",
"and",
"3rd",
")",
"in",
"equation",
"3",
"page",
"46",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/toro_2002.py#L99-L106 |
gem/oq-engine | openquake/hazardlib/gsim/toro_2002.py | ToroEtAl2002._compute_term2 | def _compute_term2(self, C, mag, rjb):
"""
Compute distance dependent terms (4th, 5th and 6th) in equation 3
page 46. The factor 'RM' is computed according to the 2002 model
(equation 4-3).
"""
RM = np.sqrt(rjb ** 2 + (C['c7'] ** 2) *
np.exp(-1.25 + 0.227 * mag) ** 2)
return (-C['c4'] * np.log(RM) -
(C['c5'] - C['c4']) *
np.maximum(np.log(RM / 100), 0) - C['c6'] * RM) | python | def _compute_term2(self, C, mag, rjb):
RM = np.sqrt(rjb ** 2 + (C['c7'] ** 2) *
np.exp(-1.25 + 0.227 * mag) ** 2)
return (-C['c4'] * np.log(RM) -
(C['c5'] - C['c4']) *
np.maximum(np.log(RM / 100), 0) - C['c6'] * RM) | [
"def",
"_compute_term2",
"(",
"self",
",",
"C",
",",
"mag",
",",
"rjb",
")",
":",
"RM",
"=",
"np",
".",
"sqrt",
"(",
"rjb",
"**",
"2",
"+",
"(",
"C",
"[",
"'c7'",
"]",
"**",
"2",
")",
"*",
"np",
".",
"exp",
"(",
"-",
"1.25",
"+",
"0.227",
"*",
"mag",
")",
"**",
"2",
")",
"return",
"(",
"-",
"C",
"[",
"'c4'",
"]",
"*",
"np",
".",
"log",
"(",
"RM",
")",
"-",
"(",
"C",
"[",
"'c5'",
"]",
"-",
"C",
"[",
"'c4'",
"]",
")",
"*",
"np",
".",
"maximum",
"(",
"np",
".",
"log",
"(",
"RM",
"/",
"100",
")",
",",
"0",
")",
"-",
"C",
"[",
"'c6'",
"]",
"*",
"RM",
")"
]
| Compute distance dependent terms (4th, 5th and 6th) in equation 3
page 46. The factor 'RM' is computed according to the 2002 model
(equation 4-3). | [
"Compute",
"distance",
"dependent",
"terms",
"(",
"4th",
"5th",
"and",
"6th",
")",
"in",
"equation",
"3",
"page",
"46",
".",
"The",
"factor",
"RM",
"is",
"computed",
"according",
"to",
"the",
"2002",
"model",
"(",
"equation",
"4",
"-",
"3",
")",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/toro_2002.py#L108-L119 |
gem/oq-engine | openquake/hazardlib/gsim/toro_2002.py | ToroEtAl2002._compute_mean | def _compute_mean(self, C, mag, rjb):
"""
Compute mean value according to equation 3, page 46.
"""
mean = (C['c1'] +
self._compute_term1(C, mag) +
self._compute_term2(C, mag, rjb))
return mean | python | def _compute_mean(self, C, mag, rjb):
mean = (C['c1'] +
self._compute_term1(C, mag) +
self._compute_term2(C, mag, rjb))
return mean | [
"def",
"_compute_mean",
"(",
"self",
",",
"C",
",",
"mag",
",",
"rjb",
")",
":",
"mean",
"=",
"(",
"C",
"[",
"'c1'",
"]",
"+",
"self",
".",
"_compute_term1",
"(",
"C",
",",
"mag",
")",
"+",
"self",
".",
"_compute_term2",
"(",
"C",
",",
"mag",
",",
"rjb",
")",
")",
"return",
"mean"
]
| Compute mean value according to equation 3, page 46. | [
"Compute",
"mean",
"value",
"according",
"to",
"equation",
"3",
"page",
"46",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/toro_2002.py#L121-L128 |
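A standalone numpy sketch of equation 3 as coded in the helper methods above; the coefficient values are placeholders for illustration, not the published COEFFS table:

import numpy as np

C = dict(c1=2.20, c2=0.81, c3=0.0, c4=1.27, c5=1.16, c6=0.0021, c7=9.3)  # illustrative
mag, rjb = 6.5, np.array([10.0, 50.0, 200.0])

term1 = C['c2'] * (mag - 6) + C['c3'] * (mag - 6) ** 2
RM = np.sqrt(rjb ** 2 + (C['c7'] ** 2) * np.exp(-1.25 + 0.227 * mag) ** 2)
term2 = (-C['c4'] * np.log(RM)
         - (C['c5'] - C['c4']) * np.maximum(np.log(RM / 100), 0)
         - C['c6'] * RM)
mean = C['c1'] + term1 + term2   # ln of the ground motion, as returned above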
gem/oq-engine | openquake/hazardlib/gsim/toro_2002.py | ToroEtAl2002._compute_stddevs | def _compute_stddevs(self, C, mag, rjb, imt, stddev_types):
"""
Compute total standard deviation, equations 5 and 6, page 48.
"""
# aleatory uncertainty
sigma_ale_m = np.interp(mag, [5.0, 5.5, 8.0],
[C['m50'], C['m55'], C['m80']])
sigma_ale_rjb = np.interp(rjb, [5.0, 20.0], [C['r5'], C['r20']])
sigma_ale = np.sqrt(sigma_ale_m ** 2 + sigma_ale_rjb ** 2)
# epistemic uncertainty
if imt.period < 1:
sigma_epi = 0.36 + 0.07 * (mag - 6)
else:
sigma_epi = 0.34 + 0.06 * (mag - 6)
sigma_total = np.sqrt(sigma_ale ** 2 + sigma_epi ** 2)
stddevs = []
for _ in stddev_types:
stddevs.append(sigma_total)
return stddevs | python | def _compute_stddevs(self, C, mag, rjb, imt, stddev_types):
sigma_ale_m = np.interp(mag, [5.0, 5.5, 8.0],
[C['m50'], C['m55'], C['m80']])
sigma_ale_rjb = np.interp(rjb, [5.0, 20.0], [C['r5'], C['r20']])
sigma_ale = np.sqrt(sigma_ale_m ** 2 + sigma_ale_rjb ** 2)
if imt.period < 1:
sigma_epi = 0.36 + 0.07 * (mag - 6)
else:
sigma_epi = 0.34 + 0.06 * (mag - 6)
sigma_total = np.sqrt(sigma_ale ** 2 + sigma_epi ** 2)
stddevs = []
for _ in stddev_types:
stddevs.append(sigma_total)
return stddevs | [
"def",
"_compute_stddevs",
"(",
"self",
",",
"C",
",",
"mag",
",",
"rjb",
",",
"imt",
",",
"stddev_types",
")",
":",
"# aleatory uncertainty",
"sigma_ale_m",
"=",
"np",
".",
"interp",
"(",
"mag",
",",
"[",
"5.0",
",",
"5.5",
",",
"8.0",
"]",
",",
"[",
"C",
"[",
"'m50'",
"]",
",",
"C",
"[",
"'m55'",
"]",
",",
"C",
"[",
"'m80'",
"]",
"]",
")",
"sigma_ale_rjb",
"=",
"np",
".",
"interp",
"(",
"rjb",
",",
"[",
"5.0",
",",
"20.0",
"]",
",",
"[",
"C",
"[",
"'r5'",
"]",
",",
"C",
"[",
"'r20'",
"]",
"]",
")",
"sigma_ale",
"=",
"np",
".",
"sqrt",
"(",
"sigma_ale_m",
"**",
"2",
"+",
"sigma_ale_rjb",
"**",
"2",
")",
"# epistemic uncertainty",
"if",
"imt",
".",
"period",
"<",
"1",
":",
"sigma_epi",
"=",
"0.36",
"+",
"0.07",
"*",
"(",
"mag",
"-",
"6",
")",
"else",
":",
"sigma_epi",
"=",
"0.34",
"+",
"0.06",
"*",
"(",
"mag",
"-",
"6",
")",
"sigma_total",
"=",
"np",
".",
"sqrt",
"(",
"sigma_ale",
"**",
"2",
"+",
"sigma_epi",
"**",
"2",
")",
"stddevs",
"=",
"[",
"]",
"for",
"_",
"in",
"stddev_types",
":",
"stddevs",
".",
"append",
"(",
"sigma_total",
")",
"return",
"stddevs"
]
| Compute total standard deviation, equations 5 and 6, page 48. | [
"Compute",
"total",
"standard",
"deviation",
"equations",
"5",
"and",
"6",
"page",
"48",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/toro_2002.py#L130-L152 |
gem/oq-engine | openquake/hazardlib/gsim/mgmpe/nrcan15_site_term.py | NRCan15SiteTerm.get_mean_and_stddevs | def get_mean_and_stddevs(self, sites, rup, dists, imt, stds_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
# Prepare sites
sites_rock = copy.deepcopy(sites)
sites_rock.vs30 = np.ones_like(sites_rock.vs30) * 760.
# compute mean and standard deviation
mean, stddvs = self.gmpe.get_mean_and_stddevs(sites_rock, rup, dists,
imt, stds_types)
if not str(imt) == 'PGA':
# compute mean and standard deviation on rock
mean_rock, stddvs_rock = self.gmpe.get_mean_and_stddevs(
sites_rock, rup, dists, imt, stds_types)
else:
mean_rock = mean
fa = self.BA08_AB06(sites.vs30, imt, np.exp(mean_rock))
mean = np.log(np.exp(mean) * fa)
return mean, stddvs | python | def get_mean_and_stddevs(self, sites, rup, dists, imt, stds_types):
sites_rock = copy.deepcopy(sites)
sites_rock.vs30 = np.ones_like(sites_rock.vs30) * 760.
mean, stddvs = self.gmpe.get_mean_and_stddevs(sites_rock, rup, dists,
imt, stds_types)
if not str(imt) == 'PGA':
mean_rock, stddvs_rock = self.gmpe.get_mean_and_stddevs(
sites_rock, rup, dists, imt, stds_types)
else:
mean_rock = mean
fa = self.BA08_AB06(sites.vs30, imt, np.exp(mean_rock))
mean = np.log(np.exp(mean) * fa)
return mean, stddvs | [
"def",
"get_mean_and_stddevs",
"(",
"self",
",",
"sites",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stds_types",
")",
":",
"# Prepare sites",
"sites_rock",
"=",
"copy",
".",
"deepcopy",
"(",
"sites",
")",
"sites_rock",
".",
"vs30",
"=",
"np",
".",
"ones_like",
"(",
"sites_rock",
".",
"vs30",
")",
"*",
"760.",
"# compute mean and standard deviation",
"mean",
",",
"stddvs",
"=",
"self",
".",
"gmpe",
".",
"get_mean_and_stddevs",
"(",
"sites_rock",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stds_types",
")",
"if",
"not",
"str",
"(",
"imt",
")",
"==",
"'PGA'",
":",
"# compute mean and standard deviation on rock",
"mean_rock",
",",
"stddvs_rock",
"=",
"self",
".",
"gmpe",
".",
"get_mean_and_stddevs",
"(",
"sites_rock",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stds_types",
")",
"else",
":",
"mean_rock",
"=",
"mean",
"fa",
"=",
"self",
".",
"BA08_AB06",
"(",
"sites",
".",
"vs30",
",",
"imt",
",",
"np",
".",
"exp",
"(",
"mean_rock",
")",
")",
"mean",
"=",
"np",
".",
"log",
"(",
"np",
".",
"exp",
"(",
"mean",
")",
"*",
"fa",
")",
"return",
"mean",
",",
"stddvs"
]
| See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. | [
"See",
":",
"meth",
":",
"superclass",
"method",
"<",
".",
"base",
".",
"GroundShakingIntensityModel",
".",
"get_mean_and_stddevs",
">",
"for",
"spec",
"of",
"input",
"and",
"result",
"values",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/mgmpe/nrcan15_site_term.py#L67-L87 |
gem/oq-engine | openquake/hazardlib/gsim/mgmpe/nrcan15_site_term.py | NRCan15SiteTerm.BA08_AB06 | def BA08_AB06(self, vs30, imt, pgar):
"""
Computes amplification factor similarly to what is done in the 2015
version of the Canada building code. An initial version of this code
was kindly provided by Michal Kolaj - Geological Survey of Canada
:param vs30:
Can be either a scalar or a :class:`~numpy.ndarray` instance
:param imt:
The intensity measure type
:param pgar:
The value of hazard on rock (vs30=760). Can be either a scalar or
a :class:`~numpy.ndarray` instance. Unit of measure is fractions
of gravity acceleration.
:return:
A scalar or a :class:`~numpy.ndarray` instance with the
amplification factor.
"""
fa = np.ones_like(vs30)
if np.isscalar(vs30):
vs30 = np.array([vs30])
if np.isscalar(pgar):
pgar = np.array([pgar])
#
# Fixing vs30 for hard rock to 1999 m/s. Beyond this threshold the
# motion will not be deamplified further
vs = copy.copy(vs30)
vs[vs >= 2000] = 1999.
#
# Computing motion on rock
idx = np.where(vs30 > 760)
if np.size(idx) > 0:
"""
# This is the original implementation - Since this code is
# experimental we keep it for possible further developments
# For values of Vs30 greater than 760 a linear interpolation is
# used between the gm factor at 2000 m/s and 760 m/s
C2 = self.COEFFS_AB06r[imt]
fa[idx] = 10**(np.interp(np.log10(vs[idx]),
np.log10([760.0, 2000.0]),
np.log10([1.0, C2['c']])))
"""
C = self.COEFFS_BA08[imt]
nl = BooreAtkinson2008()._get_site_amplification_non_linear(
vs[idx], pgar[idx], C)
lin = BooreAtkinson2008()._get_site_amplification_linear(
vs[idx], C)
tmp = np.exp(nl+lin)
fa[idx] = tmp
#
# For values of Vs30 lower than 760 the amplification is computed
# using the site term of Boore and Atkinson (2008)
idx = np.where(vs < 760.)
if np.size(idx) > 0:
C = self.COEFFS_BA08[imt]
nl = BooreAtkinson2008()._get_site_amplification_non_linear(
vs[idx], pgar[idx], C)
lin = BooreAtkinson2008()._get_site_amplification_linear(
vs[idx], C)
fa[idx] = np.exp(nl+lin)
return fa | python | def BA08_AB06(self, vs30, imt, pgar):
fa = np.ones_like(vs30)
if np.isscalar(vs30):
vs30 = np.array([vs30])
if np.isscalar(pgar):
pgar = np.array([pgar])
vs = copy.copy(vs30)
vs[vs >= 2000] = 1999.
idx = np.where(vs30 > 760)
if np.size(idx) > 0:
C = self.COEFFS_BA08[imt]
nl = BooreAtkinson2008()._get_site_amplification_non_linear(
vs[idx], pgar[idx], C)
lin = BooreAtkinson2008()._get_site_amplification_linear(
vs[idx], C)
tmp = np.exp(nl+lin)
fa[idx] = tmp
idx = np.where(vs < 760.)
if np.size(idx) > 0:
C = self.COEFFS_BA08[imt]
nl = BooreAtkinson2008()._get_site_amplification_non_linear(
vs[idx], pgar[idx], C)
lin = BooreAtkinson2008()._get_site_amplification_linear(
vs[idx], C)
fa[idx] = np.exp(nl+lin)
return fa | [
"def",
"BA08_AB06",
"(",
"self",
",",
"vs30",
",",
"imt",
",",
"pgar",
")",
":",
"fa",
"=",
"np",
".",
"ones_like",
"(",
"vs30",
")",
"if",
"np",
".",
"isscalar",
"(",
"vs30",
")",
":",
"vs30",
"=",
"np",
".",
"array",
"(",
"[",
"vs30",
"]",
")",
"if",
"np",
".",
"isscalar",
"(",
"pgar",
")",
":",
"pgar",
"=",
"np",
".",
"array",
"(",
"[",
"pgar",
"]",
")",
"#",
"# Fixing vs30 for hard rock to 1999 m/s. Beyond this threshold the",
"# motion will not be deamplified further",
"vs",
"=",
"copy",
".",
"copy",
"(",
"vs30",
")",
"vs",
"[",
"vs",
">=",
"2000",
"]",
"=",
"1999.",
"#",
"# Computing motion on rock",
"idx",
"=",
"np",
".",
"where",
"(",
"vs30",
">",
"760",
")",
"if",
"np",
".",
"size",
"(",
"idx",
")",
">",
"0",
":",
"\"\"\"\n # This is the original implementation - Since this code is\n # experimental we keep it for possible further developments\n # For values of Vs30 greater than 760 a linear interpolation is\n # used between the gm factor at 2000 m/s and 760 m/s\n C2 = self.COEFFS_AB06r[imt]\n fa[idx] = 10**(np.interp(np.log10(vs[idx]),\n np.log10([760.0, 2000.0]),\n np.log10([1.0, C2['c']])))\n \"\"\"",
"C",
"=",
"self",
".",
"COEFFS_BA08",
"[",
"imt",
"]",
"nl",
"=",
"BooreAtkinson2008",
"(",
")",
".",
"_get_site_amplification_non_linear",
"(",
"vs",
"[",
"idx",
"]",
",",
"pgar",
"[",
"idx",
"]",
",",
"C",
")",
"lin",
"=",
"BooreAtkinson2008",
"(",
")",
".",
"_get_site_amplification_linear",
"(",
"vs",
"[",
"idx",
"]",
",",
"C",
")",
"tmp",
"=",
"np",
".",
"exp",
"(",
"nl",
"+",
"lin",
")",
"fa",
"[",
"idx",
"]",
"=",
"tmp",
"#",
"# For values of Vs30 lower than 760 the amplification is computed",
"# using the site term of Boore and Atkinson (2008)",
"idx",
"=",
"np",
".",
"where",
"(",
"vs",
"<",
"760.",
")",
"if",
"np",
".",
"size",
"(",
"idx",
")",
">",
"0",
":",
"C",
"=",
"self",
".",
"COEFFS_BA08",
"[",
"imt",
"]",
"nl",
"=",
"BooreAtkinson2008",
"(",
")",
".",
"_get_site_amplification_non_linear",
"(",
"vs",
"[",
"idx",
"]",
",",
"pgar",
"[",
"idx",
"]",
",",
"C",
")",
"lin",
"=",
"BooreAtkinson2008",
"(",
")",
".",
"_get_site_amplification_linear",
"(",
"vs",
"[",
"idx",
"]",
",",
"C",
")",
"fa",
"[",
"idx",
"]",
"=",
"np",
".",
"exp",
"(",
"nl",
"+",
"lin",
")",
"return",
"fa"
]
| Computes amplification factor similarly to what is done in the 2015
version of the Canada building code. An initial version of this code
was kindly provided by Michal Kolaj - Geological Survey of Canada
:param vs30:
Can be either a scalar or a :class:`~numpy.ndarray` instance
:param imt:
The intensity measure type
:param pgar:
The value of hazard on rock (vs30=760). Can be either a scalar or
a :class:`~numpy.ndarray` instance. Unit of measure is fractions
of gravity acceleration.
:return:
A scalar or a :class:`~numpy.ndarray` instance with the
amplification factor. | [
"Computes",
"amplification",
"factor",
"similarly",
"to",
"what",
"is",
"done",
"in",
"the",
"2015",
"version",
"of",
"the",
"Canada",
"building",
"code",
".",
"An",
"initial",
"version",
"of",
"this",
"code",
"was",
"kindly",
"provided",
"by",
"Michal",
"Kolaj",
"-",
"Geological",
"Survey",
"of",
"Canada"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/mgmpe/nrcan15_site_term.py#L89-L149 |
gem/oq-engine | openquake/calculators/scenario_damage.py | scenario_damage | def scenario_damage(riskinputs, riskmodel, param, monitor):
"""
Core function for a damage computation.
:param riskinputs:
:class:`openquake.risklib.riskinput.RiskInput` objects
:param riskmodel:
a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
:param monitor:
:class:`openquake.baselib.performance.Monitor` instance
:param param:
dictionary of extra parameters
:returns:
a dictionary {'d_asset': [(l, r, a, mean-stddev), ...],
'd_event': damage array of shape R, L, E, D,
'c_asset': [(l, r, a, mean-stddev), ...],
'c_event': damage array of shape R, L, E}
`d_asset` and `d_tag` are related to the damage distributions
whereas `c_asset` and `c_tag` are the consequence distributions.
If there is no consequence model `c_asset` is an empty list and
`c_tag` is a zero-valued array.
"""
L = len(riskmodel.loss_types)
D = len(riskmodel.damage_states)
E = param['number_of_ground_motion_fields']
R = riskinputs[0].hazard_getter.num_rlzs
result = dict(d_asset=[], d_event=numpy.zeros((E, R, L, D), F64),
c_asset=[], c_event=numpy.zeros((E, R, L), F64))
for ri in riskinputs:
for out in riskmodel.gen_outputs(ri, monitor):
r = out.rlzi
for l, loss_type in enumerate(riskmodel.loss_types):
for asset, fractions in zip(ri.assets, out[loss_type]):
dmg = fractions[:, :D] * asset['number'] # shape (E, D)
result['d_event'][:, r, l] += dmg
result['d_asset'].append(
(l, r, asset['ordinal'], scientific.mean_std(dmg)))
if riskmodel.consequences:
csq = fractions[:, D] * asset['value-' + loss_type]
result['c_asset'].append(
(l, r, asset['ordinal'], scientific.mean_std(csq)))
result['c_event'][:, r, l] += csq
return result | python | def scenario_damage(riskinputs, riskmodel, param, monitor):
L = len(riskmodel.loss_types)
D = len(riskmodel.damage_states)
E = param['number_of_ground_motion_fields']
R = riskinputs[0].hazard_getter.num_rlzs
result = dict(d_asset=[], d_event=numpy.zeros((E, R, L, D), F64),
c_asset=[], c_event=numpy.zeros((E, R, L), F64))
for ri in riskinputs:
for out in riskmodel.gen_outputs(ri, monitor):
r = out.rlzi
for l, loss_type in enumerate(riskmodel.loss_types):
for asset, fractions in zip(ri.assets, out[loss_type]):
dmg = fractions[:, :D] * asset['number']
result['d_event'][:, r, l] += dmg
result['d_asset'].append(
(l, r, asset['ordinal'], scientific.mean_std(dmg)))
if riskmodel.consequences:
csq = fractions[:, D] * asset['value-' + loss_type]
result['c_asset'].append(
(l, r, asset['ordinal'], scientific.mean_std(csq)))
result['c_event'][:, r, l] += csq
return result | [
"def",
"scenario_damage",
"(",
"riskinputs",
",",
"riskmodel",
",",
"param",
",",
"monitor",
")",
":",
"L",
"=",
"len",
"(",
"riskmodel",
".",
"loss_types",
")",
"D",
"=",
"len",
"(",
"riskmodel",
".",
"damage_states",
")",
"E",
"=",
"param",
"[",
"'number_of_ground_motion_fields'",
"]",
"R",
"=",
"riskinputs",
"[",
"0",
"]",
".",
"hazard_getter",
".",
"num_rlzs",
"result",
"=",
"dict",
"(",
"d_asset",
"=",
"[",
"]",
",",
"d_event",
"=",
"numpy",
".",
"zeros",
"(",
"(",
"E",
",",
"R",
",",
"L",
",",
"D",
")",
",",
"F64",
")",
",",
"c_asset",
"=",
"[",
"]",
",",
"c_event",
"=",
"numpy",
".",
"zeros",
"(",
"(",
"E",
",",
"R",
",",
"L",
")",
",",
"F64",
")",
")",
"for",
"ri",
"in",
"riskinputs",
":",
"for",
"out",
"in",
"riskmodel",
".",
"gen_outputs",
"(",
"ri",
",",
"monitor",
")",
":",
"r",
"=",
"out",
".",
"rlzi",
"for",
"l",
",",
"loss_type",
"in",
"enumerate",
"(",
"riskmodel",
".",
"loss_types",
")",
":",
"for",
"asset",
",",
"fractions",
"in",
"zip",
"(",
"ri",
".",
"assets",
",",
"out",
"[",
"loss_type",
"]",
")",
":",
"dmg",
"=",
"fractions",
"[",
":",
",",
":",
"D",
"]",
"*",
"asset",
"[",
"'number'",
"]",
"# shape (E, D)",
"result",
"[",
"'d_event'",
"]",
"[",
":",
",",
"r",
",",
"l",
"]",
"+=",
"dmg",
"result",
"[",
"'d_asset'",
"]",
".",
"append",
"(",
"(",
"l",
",",
"r",
",",
"asset",
"[",
"'ordinal'",
"]",
",",
"scientific",
".",
"mean_std",
"(",
"dmg",
")",
")",
")",
"if",
"riskmodel",
".",
"consequences",
":",
"csq",
"=",
"fractions",
"[",
":",
",",
"D",
"]",
"*",
"asset",
"[",
"'value-'",
"+",
"loss_type",
"]",
"result",
"[",
"'c_asset'",
"]",
".",
"append",
"(",
"(",
"l",
",",
"r",
",",
"asset",
"[",
"'ordinal'",
"]",
",",
"scientific",
".",
"mean_std",
"(",
"csq",
")",
")",
")",
"result",
"[",
"'c_event'",
"]",
"[",
":",
",",
"r",
",",
"l",
"]",
"+=",
"csq",
"return",
"result"
]
| Core function for a damage computation.
:param riskinputs:
:class:`openquake.risklib.riskinput.RiskInput` objects
:param riskmodel:
a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
:param monitor:
:class:`openquake.baselib.performance.Monitor` instance
:param param:
dictionary of extra parameters
:returns:
a dictionary {'d_asset': [(l, r, a, mean-stddev), ...],
'd_event': damage array of shape R, L, E, D,
'c_asset': [(l, r, a, mean-stddev), ...],
'c_event': damage array of shape R, L, E}
`d_asset` and `d_tag` are related to the damage distributions
whereas `c_asset` and `c_tag` are the consequence distributions.
If there is no consequence model `c_asset` is an empty list and
`c_tag` is a zero-valued array. | [
"Core",
"function",
"for",
"a",
"damage",
"computation",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/scenario_damage.py#L28-L71 |
gem/oq-engine | openquake/calculators/views.py | form | def form(value):
"""
Format numbers in a nice way.
>>> form(0)
'0'
>>> form(0.0)
'0.0'
>>> form(0.0001)
'1.000E-04'
>>> form(1003.4)
'1,003'
>>> form(103.4)
'103'
>>> form(9.3)
'9.30000'
>>> form(-1.2)
'-1.2'
"""
if isinstance(value, FLOAT + INT):
if value <= 0:
return str(value)
elif value < .001:
return '%.3E' % value
elif value < 10 and isinstance(value, FLOAT):
return '%.5f' % value
elif value > 1000:
return '{:,d}'.format(int(round(value)))
elif numpy.isnan(value):
return 'NaN'
else: # in the range 10-1000
return str(int(value))
elif isinstance(value, bytes):
return decode(value)
elif isinstance(value, str):
return value
elif isinstance(value, numpy.object_):
return str(value)
elif hasattr(value, '__len__') and len(value) > 1:
return ' '.join(map(form, value))
return str(value) | python | def form(value):
if isinstance(value, FLOAT + INT):
if value <= 0:
return str(value)
elif value < .001:
return '%.3E' % value
elif value < 10 and isinstance(value, FLOAT):
return '%.5f' % value
elif value > 1000:
return '{:,d}'.format(int(round(value)))
elif numpy.isnan(value):
return 'NaN'
else:
return str(int(value))
elif isinstance(value, bytes):
return decode(value)
elif isinstance(value, str):
return value
elif isinstance(value, numpy.object_):
return str(value)
elif hasattr(value, '__len__') and len(value) > 1:
return ' '.join(map(form, value))
return str(value) | [
"def",
"form",
"(",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"FLOAT",
"+",
"INT",
")",
":",
"if",
"value",
"<=",
"0",
":",
"return",
"str",
"(",
"value",
")",
"elif",
"value",
"<",
".001",
":",
"return",
"'%.3E'",
"%",
"value",
"elif",
"value",
"<",
"10",
"and",
"isinstance",
"(",
"value",
",",
"FLOAT",
")",
":",
"return",
"'%.5f'",
"%",
"value",
"elif",
"value",
">",
"1000",
":",
"return",
"'{:,d}'",
".",
"format",
"(",
"int",
"(",
"round",
"(",
"value",
")",
")",
")",
"elif",
"numpy",
".",
"isnan",
"(",
"value",
")",
":",
"return",
"'NaN'",
"else",
":",
"# in the range 10-1000",
"return",
"str",
"(",
"int",
"(",
"value",
")",
")",
"elif",
"isinstance",
"(",
"value",
",",
"bytes",
")",
":",
"return",
"decode",
"(",
"value",
")",
"elif",
"isinstance",
"(",
"value",
",",
"str",
")",
":",
"return",
"value",
"elif",
"isinstance",
"(",
"value",
",",
"numpy",
".",
"object_",
")",
":",
"return",
"str",
"(",
"value",
")",
"elif",
"hasattr",
"(",
"value",
",",
"'__len__'",
")",
"and",
"len",
"(",
"value",
")",
">",
"1",
":",
"return",
"' '",
".",
"join",
"(",
"map",
"(",
"form",
",",
"value",
")",
")",
"return",
"str",
"(",
"value",
")"
]
| Format numbers in a nice way.
>>> form(0)
'0'
>>> form(0.0)
'0.0'
>>> form(0.0001)
'1.000E-04'
>>> form(1003.4)
'1,003'
>>> form(103.4)
'103'
>>> form(9.3)
'9.30000'
>>> form(-1.2)
'-1.2' | [
"Format",
"numbers",
"in",
"a",
"nice",
"way",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L51-L91 |
gem/oq-engine | openquake/calculators/views.py | rst_table | def rst_table(data, header=None, fmt=None):
"""
Build a .rst table from a matrix.
>>> tbl = [['a', 1], ['b', 2]]
>>> print(rst_table(tbl, header=['Name', 'Value']))
==== =====
Name Value
==== =====
a 1
b 2
==== =====
"""
if header is None and hasattr(data, '_fields'):
header = data._fields
try:
# see if data is a composite numpy array
data.dtype.fields
except AttributeError:
# not a composite array
header = header or ()
else:
if not header:
header = [col.split(':')[0] for col in build_header(data.dtype)]
if header:
col_sizes = [len(col) for col in header]
else:
col_sizes = [len(str(col)) for col in data[0]]
body = []
fmt = functools.partial(scientificformat, fmt=fmt) if fmt else form
for row in data:
tup = tuple(fmt(c) for c in row)
for (i, col) in enumerate(tup):
col_sizes[i] = max(col_sizes[i], len(col))
if len(tup) != len(col_sizes):
raise ValueError('The header has %d fields but the row %d fields!'
% (len(col_sizes), len(tup)))
body.append(tup)
sepline = ' '.join(('=' * size for size in col_sizes))
templ = ' '.join(('%-{}s'.format(size) for size in col_sizes))
if header:
lines = [sepline, templ % tuple(header), sepline]
else:
lines = [sepline]
for row in body:
lines.append(templ % row)
lines.append(sepline)
return '\n'.join(lines) | python | def rst_table(data, header=None, fmt=None):
if header is None and hasattr(data, '_fields'):
header = data._fields
try:
data.dtype.fields
except AttributeError:
header = header or ()
else:
if not header:
header = [col.split(':')[0] for col in build_header(data.dtype)]
if header:
col_sizes = [len(col) for col in header]
else:
col_sizes = [len(str(col)) for col in data[0]]
body = []
fmt = functools.partial(scientificformat, fmt=fmt) if fmt else form
for row in data:
tup = tuple(fmt(c) for c in row)
for (i, col) in enumerate(tup):
col_sizes[i] = max(col_sizes[i], len(col))
if len(tup) != len(col_sizes):
raise ValueError('The header has %d fields but the row %d fields!'
% (len(col_sizes), len(tup)))
body.append(tup)
sepline = ' '.join(('=' * size for size in col_sizes))
templ = ' '.join(('%-{}s'.format(size) for size in col_sizes))
if header:
lines = [sepline, templ % tuple(header), sepline]
else:
lines = [sepline]
for row in body:
lines.append(templ % row)
lines.append(sepline)
return '\n'.join(lines) | [
"def",
"rst_table",
"(",
"data",
",",
"header",
"=",
"None",
",",
"fmt",
"=",
"None",
")",
":",
"if",
"header",
"is",
"None",
"and",
"hasattr",
"(",
"data",
",",
"'_fields'",
")",
":",
"header",
"=",
"data",
".",
"_fields",
"try",
":",
"# see if data is a composite numpy array",
"data",
".",
"dtype",
".",
"fields",
"except",
"AttributeError",
":",
"# not a composite array",
"header",
"=",
"header",
"or",
"(",
")",
"else",
":",
"if",
"not",
"header",
":",
"header",
"=",
"[",
"col",
".",
"split",
"(",
"':'",
")",
"[",
"0",
"]",
"for",
"col",
"in",
"build_header",
"(",
"data",
".",
"dtype",
")",
"]",
"if",
"header",
":",
"col_sizes",
"=",
"[",
"len",
"(",
"col",
")",
"for",
"col",
"in",
"header",
"]",
"else",
":",
"col_sizes",
"=",
"[",
"len",
"(",
"str",
"(",
"col",
")",
")",
"for",
"col",
"in",
"data",
"[",
"0",
"]",
"]",
"body",
"=",
"[",
"]",
"fmt",
"=",
"functools",
".",
"partial",
"(",
"scientificformat",
",",
"fmt",
"=",
"fmt",
")",
"if",
"fmt",
"else",
"form",
"for",
"row",
"in",
"data",
":",
"tup",
"=",
"tuple",
"(",
"fmt",
"(",
"c",
")",
"for",
"c",
"in",
"row",
")",
"for",
"(",
"i",
",",
"col",
")",
"in",
"enumerate",
"(",
"tup",
")",
":",
"col_sizes",
"[",
"i",
"]",
"=",
"max",
"(",
"col_sizes",
"[",
"i",
"]",
",",
"len",
"(",
"col",
")",
")",
"if",
"len",
"(",
"tup",
")",
"!=",
"len",
"(",
"col_sizes",
")",
":",
"raise",
"ValueError",
"(",
"'The header has %d fields but the row %d fields!'",
"%",
"(",
"len",
"(",
"col_sizes",
")",
",",
"len",
"(",
"tup",
")",
")",
")",
"body",
".",
"append",
"(",
"tup",
")",
"sepline",
"=",
"' '",
".",
"join",
"(",
"(",
"'='",
"*",
"size",
"for",
"size",
"in",
"col_sizes",
")",
")",
"templ",
"=",
"' '",
".",
"join",
"(",
"(",
"'%-{}s'",
".",
"format",
"(",
"size",
")",
"for",
"size",
"in",
"col_sizes",
")",
")",
"if",
"header",
":",
"lines",
"=",
"[",
"sepline",
",",
"templ",
"%",
"tuple",
"(",
"header",
")",
",",
"sepline",
"]",
"else",
":",
"lines",
"=",
"[",
"sepline",
"]",
"for",
"row",
"in",
"body",
":",
"lines",
".",
"append",
"(",
"templ",
"%",
"row",
")",
"lines",
".",
"append",
"(",
"sepline",
")",
"return",
"'\\n'",
".",
"join",
"(",
"lines",
")"
]
| Build a .rst table from a matrix.
>>> tbl = [['a', 1], ['b', 2]]
>>> print(rst_table(tbl, header=['Name', 'Value']))
==== =====
Name Value
==== =====
a 1
b 2
==== ===== | [
"Build",
"a",
".",
"rst",
"table",
"from",
"a",
"matrix",
".",
">>>",
"tbl",
"=",
"[[",
"a",
"1",
"]",
"[",
"b",
"2",
"]]",
">>>",
"print",
"(",
"rst_table",
"(",
"tbl",
"header",
"=",
"[",
"Name",
"Value",
"]",
"))",
"====",
"=====",
"Name",
"Value",
"====",
"=====",
"a",
"1",
"b",
"2",
"====",
"====="
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L94-L142 |
gem/oq-engine | openquake/calculators/views.py | sum_tbl | def sum_tbl(tbl, kfield, vfields):
"""
Aggregate a composite array and compute the totals on a given key.
>>> dt = numpy.dtype([('name', (bytes, 10)), ('value', int)])
>>> tbl = numpy.array([('a', 1), ('a', 2), ('b', 3)], dt)
>>> sum_tbl(tbl, 'name', ['value'])['value']
array([3, 3])
"""
pairs = [(n, tbl.dtype[n]) for n in [kfield] + vfields]
dt = numpy.dtype(pairs + [('counts', int)])
def sum_all(group):
vals = numpy.zeros(1, dt)[0]
for rec in group:
for vfield in vfields:
vals[vfield] += rec[vfield]
vals['counts'] += 1
vals[kfield] = rec[kfield]
return vals
rows = groupby(tbl, operator.itemgetter(kfield), sum_all).values()
array = numpy.zeros(len(rows), dt)
for i, row in enumerate(rows):
for j, name in enumerate(dt.names):
array[i][name] = row[j]
return array | python | def sum_tbl(tbl, kfield, vfields):
pairs = [(n, tbl.dtype[n]) for n in [kfield] + vfields]
dt = numpy.dtype(pairs + [('counts', int)])
def sum_all(group):
vals = numpy.zeros(1, dt)[0]
for rec in group:
for vfield in vfields:
vals[vfield] += rec[vfield]
vals['counts'] += 1
vals[kfield] = rec[kfield]
return vals
rows = groupby(tbl, operator.itemgetter(kfield), sum_all).values()
array = numpy.zeros(len(rows), dt)
for i, row in enumerate(rows):
for j, name in enumerate(dt.names):
array[i][name] = row[j]
return array | [
"def",
"sum_tbl",
"(",
"tbl",
",",
"kfield",
",",
"vfields",
")",
":",
"pairs",
"=",
"[",
"(",
"n",
",",
"tbl",
".",
"dtype",
"[",
"n",
"]",
")",
"for",
"n",
"in",
"[",
"kfield",
"]",
"+",
"vfields",
"]",
"dt",
"=",
"numpy",
".",
"dtype",
"(",
"pairs",
"+",
"[",
"(",
"'counts'",
",",
"int",
")",
"]",
")",
"def",
"sum_all",
"(",
"group",
")",
":",
"vals",
"=",
"numpy",
".",
"zeros",
"(",
"1",
",",
"dt",
")",
"[",
"0",
"]",
"for",
"rec",
"in",
"group",
":",
"for",
"vfield",
"in",
"vfields",
":",
"vals",
"[",
"vfield",
"]",
"+=",
"rec",
"[",
"vfield",
"]",
"vals",
"[",
"'counts'",
"]",
"+=",
"1",
"vals",
"[",
"kfield",
"]",
"=",
"rec",
"[",
"kfield",
"]",
"return",
"vals",
"rows",
"=",
"groupby",
"(",
"tbl",
",",
"operator",
".",
"itemgetter",
"(",
"kfield",
")",
",",
"sum_all",
")",
".",
"values",
"(",
")",
"array",
"=",
"numpy",
".",
"zeros",
"(",
"len",
"(",
"rows",
")",
",",
"dt",
")",
"for",
"i",
",",
"row",
"in",
"enumerate",
"(",
"rows",
")",
":",
"for",
"j",
",",
"name",
"in",
"enumerate",
"(",
"dt",
".",
"names",
")",
":",
"array",
"[",
"i",
"]",
"[",
"name",
"]",
"=",
"row",
"[",
"j",
"]",
"return",
"array"
]
| Aggregate a composite array and compute the totals on a given key.
>>> dt = numpy.dtype([('name', (bytes, 10)), ('value', int)])
>>> tbl = numpy.array([('a', 1), ('a', 2), ('b', 3)], dt)
>>> sum_tbl(tbl, 'name', ['value'])['value']
array([3, 3]) | [
"Aggregate",
"a",
"composite",
"array",
"and",
"compute",
"the",
"totals",
"on",
"a",
"given",
"key",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L145-L170 |
gem/oq-engine | openquake/calculators/views.py | view_slow_sources | def view_slow_sources(token, dstore, maxrows=20):
"""
Returns the slowest sources
"""
info = dstore['source_info'].value
info.sort(order='calc_time')
return rst_table(info[::-1][:maxrows]) | python | def view_slow_sources(token, dstore, maxrows=20):
info = dstore['source_info'].value
info.sort(order='calc_time')
return rst_table(info[::-1][:maxrows]) | [
"def",
"view_slow_sources",
"(",
"token",
",",
"dstore",
",",
"maxrows",
"=",
"20",
")",
":",
"info",
"=",
"dstore",
"[",
"'source_info'",
"]",
".",
"value",
"info",
".",
"sort",
"(",
"order",
"=",
"'calc_time'",
")",
"return",
"rst_table",
"(",
"info",
"[",
":",
":",
"-",
"1",
"]",
"[",
":",
"maxrows",
"]",
")"
]
| Returns the slowest sources | [
"Returns",
"the",
"slowest",
"sources"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L183-L189 |
gem/oq-engine | openquake/calculators/views.py | view_contents | def view_contents(token, dstore):
"""
Returns the size of the contents of the datastore and its total size
"""
try:
desc = dstore['oqparam'].description
except KeyError:
desc = ''
data = sorted((dstore.getsize(key), key) for key in dstore)
rows = [(key, humansize(nbytes)) for nbytes, key in data]
total = '\n%s : %s' % (
dstore.filename, humansize(os.path.getsize(dstore.filename)))
return rst_table(rows, header=(desc, '')) + total | python | def view_contents(token, dstore):
try:
desc = dstore['oqparam'].description
except KeyError:
desc = ''
data = sorted((dstore.getsize(key), key) for key in dstore)
rows = [(key, humansize(nbytes)) for nbytes, key in data]
total = '\n%s : %s' % (
dstore.filename, humansize(os.path.getsize(dstore.filename)))
return rst_table(rows, header=(desc, '')) + total | [
"def",
"view_contents",
"(",
"token",
",",
"dstore",
")",
":",
"try",
":",
"desc",
"=",
"dstore",
"[",
"'oqparam'",
"]",
".",
"description",
"except",
"KeyError",
":",
"desc",
"=",
"''",
"data",
"=",
"sorted",
"(",
"(",
"dstore",
".",
"getsize",
"(",
"key",
")",
",",
"key",
")",
"for",
"key",
"in",
"dstore",
")",
"rows",
"=",
"[",
"(",
"key",
",",
"humansize",
"(",
"nbytes",
")",
")",
"for",
"nbytes",
",",
"key",
"in",
"data",
"]",
"total",
"=",
"'\\n%s : %s'",
"%",
"(",
"dstore",
".",
"filename",
",",
"humansize",
"(",
"os",
".",
"path",
".",
"getsize",
"(",
"dstore",
".",
"filename",
")",
")",
")",
"return",
"rst_table",
"(",
"rows",
",",
"header",
"=",
"(",
"desc",
",",
"''",
")",
")",
"+",
"total"
]
| Returns the size of the contents of the datastore and its total size | [
"Returns",
"the",
"size",
"of",
"the",
"contents",
"of",
"the",
"datastore",
"and",
"its",
"total",
"size"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L193-L205 |
gem/oq-engine | openquake/calculators/views.py | view_job_info | def view_job_info(token, dstore):
"""
Determine the amount of data transferred from the controller node
to the workers and back in a classical calculation.
"""
data = [['task', 'sent', 'received']]
for task in dstore['task_info']:
dset = dstore['task_info/' + task]
if 'argnames' in dset.attrs:
argnames = dset.attrs['argnames'].split()
totsent = dset.attrs['sent']
sent = ['%s=%s' % (a, humansize(s))
for s, a in sorted(zip(totsent, argnames), reverse=True)]
recv = dset['received'].sum()
data.append((task, ' '.join(sent), humansize(recv)))
return rst_table(data) | python | def view_job_info(token, dstore):
data = [['task', 'sent', 'received']]
for task in dstore['task_info']:
dset = dstore['task_info/' + task]
if 'argnames' in dset.attrs:
argnames = dset.attrs['argnames'].split()
totsent = dset.attrs['sent']
sent = ['%s=%s' % (a, humansize(s))
for s, a in sorted(zip(totsent, argnames), reverse=True)]
recv = dset['received'].sum()
data.append((task, ' '.join(sent), humansize(recv)))
return rst_table(data) | [
"def",
"view_job_info",
"(",
"token",
",",
"dstore",
")",
":",
"data",
"=",
"[",
"[",
"'task'",
",",
"'sent'",
",",
"'received'",
"]",
"]",
"for",
"task",
"in",
"dstore",
"[",
"'task_info'",
"]",
":",
"dset",
"=",
"dstore",
"[",
"'task_info/'",
"+",
"task",
"]",
"if",
"'argnames'",
"in",
"dset",
".",
"attrs",
":",
"argnames",
"=",
"dset",
".",
"attrs",
"[",
"'argnames'",
"]",
".",
"split",
"(",
")",
"totsent",
"=",
"dset",
".",
"attrs",
"[",
"'sent'",
"]",
"sent",
"=",
"[",
"'%s=%s'",
"%",
"(",
"a",
",",
"humansize",
"(",
"s",
")",
")",
"for",
"s",
",",
"a",
"in",
"sorted",
"(",
"zip",
"(",
"totsent",
",",
"argnames",
")",
",",
"reverse",
"=",
"True",
")",
"]",
"recv",
"=",
"dset",
"[",
"'received'",
"]",
".",
"sum",
"(",
")",
"data",
".",
"append",
"(",
"(",
"task",
",",
"' '",
".",
"join",
"(",
"sent",
")",
",",
"humansize",
"(",
"recv",
")",
")",
")",
"return",
"rst_table",
"(",
"data",
")"
]
| Determine the amount of data transferred from the controller node
to the workers and back in a classical calculation. | [
"Determine",
"the",
"amount",
"of",
"data",
"transferred",
"from",
"the",
"controller",
"node",
"to",
"the",
"workers",
"and",
"back",
"in",
"a",
"classical",
"calculation",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L309-L324 |
gem/oq-engine | openquake/calculators/views.py | avglosses_data_transfer | def avglosses_data_transfer(token, dstore):
"""
Determine the amount of average losses transferred from the workers to the
controller node in a risk calculation.
"""
oq = dstore['oqparam']
N = len(dstore['assetcol'])
R = dstore['csm_info'].get_num_rlzs()
L = len(dstore.get_attr('risk_model', 'loss_types'))
ct = oq.concurrent_tasks
size_bytes = N * R * L * 8 * ct # 8 byte floats
return (
'%d asset(s) x %d realization(s) x %d loss type(s) losses x '
'8 bytes x %d tasks = %s' % (N, R, L, ct, humansize(size_bytes))) | python | def avglosses_data_transfer(token, dstore):
oq = dstore['oqparam']
N = len(dstore['assetcol'])
R = dstore['csm_info'].get_num_rlzs()
L = len(dstore.get_attr('risk_model', 'loss_types'))
ct = oq.concurrent_tasks
size_bytes = N * R * L * 8 * ct
return (
'%d asset(s) x %d realization(s) x %d loss type(s) losses x '
'8 bytes x %d tasks = %s' % (N, R, L, ct, humansize(size_bytes))) | [
"def",
"avglosses_data_transfer",
"(",
"token",
",",
"dstore",
")",
":",
"oq",
"=",
"dstore",
"[",
"'oqparam'",
"]",
"N",
"=",
"len",
"(",
"dstore",
"[",
"'assetcol'",
"]",
")",
"R",
"=",
"dstore",
"[",
"'csm_info'",
"]",
".",
"get_num_rlzs",
"(",
")",
"L",
"=",
"len",
"(",
"dstore",
".",
"get_attr",
"(",
"'risk_model'",
",",
"'loss_types'",
")",
")",
"ct",
"=",
"oq",
".",
"concurrent_tasks",
"size_bytes",
"=",
"N",
"*",
"R",
"*",
"L",
"*",
"8",
"*",
"ct",
"# 8 byte floats",
"return",
"(",
"'%d asset(s) x %d realization(s) x %d loss type(s) losses x '",
"'8 bytes x %d tasks = %s'",
"%",
"(",
"N",
",",
"R",
",",
"L",
",",
"ct",
",",
"humansize",
"(",
"size_bytes",
")",
")",
")"
]
| Determine the amount of average losses transferred from the workers to the
controller node in a risk calculation. | [
"Determine",
"the",
"amount",
"of",
"average",
"losses",
"transferred",
"from",
"the",
"workers",
"to",
"the",
"controller",
"node",
"in",
"a",
"risk",
"calculation",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L328-L341 |
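A back-of-the-envelope check of the estimate above, with hypothetical calculation sizes:

N, R, L, ct = 10_000, 100, 2, 512            # assets, realizations, loss types, tasks
size_bytes = N * R * L * 8 * ct              # 8-byte floats
# 8,192,000,000 bytes, i.e. roughly 7.6 GiB moved from the workers to the controller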