Dataset column schema (string lengths and list lengths give the minimum and maximum observed values):

| Column | Type | Observed range |
|---|---|---|
| repository_name | string | length 5-67 |
| func_path_in_repository | string | length 4-234 |
| func_name | string | length 0-314 |
| whole_func_string | string | length 52-3.87M |
| language | class label | 6 values |
| func_code_string | string | length 39-1.84M |
| func_code_tokens | list | length 15-672k |
| func_documentation_string | string | length 1-47.2k |
| func_documentation_tokens | list | length 1-3.92k |
| split_name | class label | 1 value |
| func_code_url | string | length 85-339 |

Each record below gives the metadata fields (repository | path | function | language | split | source URL) on one line, followed by the full function text.
gem/oq-engine | openquake/hmtk/plotting/seismicity/catalogue_plots.py | plot_depth_histogram | python | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L117-L158

```python
def plot_depth_histogram(
        catalogue, bin_width,
        normalisation=False, bootstrap=None, filename=None,
        figure_size=(8, 6), filetype='png', dpi=300, ax=None):
    """
    Creates a histogram of the depths in the catalogue

    :param catalogue:
        Earthquake catalogue as instance of
        :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
    :param float bin_width:
        Width of the histogram for the depth bins
    :param bool normalisation:
        Normalise the histogram to give output as PMF (True) or count (False)
    :param int bootstrap:
        Number of samples used to bootstrap the depth uncertainty
    """
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    # Create depth range
    if len(catalogue.data['depth']) == 0:  # pylint: disable=len-as-condition
        raise ValueError('No depths reported in catalogue!')
    depth_bins = np.arange(0.,
                           np.max(catalogue.data['depth']) + bin_width,
                           bin_width)
    depth_hist = catalogue.get_depth_distribution(depth_bins,
                                                  normalisation,
                                                  bootstrap)
    ax.bar(depth_bins[:-1],
           depth_hist,
           width=0.95 * bin_width,
           edgecolor='k')
    ax.set_xlabel('Depth (km)')
    if normalisation:
        ax.set_ylabel('Probability Mass Function')
    else:
        ax.set_ylabel('Count')
    ax.set_title('Depth Histogram')
    _save_image(fig, filename, filetype, dpi)
```
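A minimal usage sketch, assuming the hmtk CSV catalogue parser is importable from `openquake.hmtk.parsers.catalogue`; the file name `catalogue.csv` and the 2 km bin width are illustrative, not from the source:

```python
from openquake.hmtk.parsers.catalogue import CsvCatalogueParser
from openquake.hmtk.plotting.seismicity.catalogue_plots import (
    plot_depth_histogram)

# 'catalogue.csv' is a placeholder path to an hmtk-format catalogue file
catalogue = CsvCatalogueParser('catalogue.csv').read_file()
# Depth distribution in 2 km bins, normalised to a probability mass function
plot_depth_histogram(catalogue, bin_width=2.0, normalisation=True)
```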
gem/oq-engine | openquake/hmtk/plotting/seismicity/catalogue_plots.py | plot_magnitude_depth_density | python | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L161-L218

```python
def plot_magnitude_depth_density(
        catalogue, mag_int, depth_int,
        logscale=False, normalisation=False, bootstrap=None, filename=None,
        figure_size=(8, 6), filetype='png', dpi=300, ax=None):
    """
    Creates a density plot of the magnitude and depth distribution

    :param catalogue:
        Earthquake catalogue as instance of
        :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
    :param float mag_int:
        Width of the histogram for the magnitude bins
    :param float depth_int:
        Width of the histogram for the depth bins
    :param bool logscale:
        Choose to scale the colours in a log-scale (True) or linear (False)
    :param bool normalisation:
        Normalise the histogram to give output as PMF (True) or count (False)
    :param int bootstrap:
        Number of samples used to bootstrap the magnitude and depth
        uncertainties
    """
    if len(catalogue.data['depth']) == 0:  # pylint: disable=len-as-condition
        raise ValueError('No depths reported in catalogue!')
    depth_bins = np.arange(0.,
                           np.max(catalogue.data['depth']) + depth_int,
                           depth_int)
    mag_bins = _get_catalogue_bin_limits(catalogue, mag_int)
    mag_depth_dist = catalogue.get_magnitude_depth_distribution(mag_bins,
                                                                depth_bins,
                                                                normalisation,
                                                                bootstrap)
    vmin_val = np.min(mag_depth_dist[mag_depth_dist > 0.])
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    if logscale:
        normaliser = LogNorm(vmin=vmin_val, vmax=np.max(mag_depth_dist))
    else:
        normaliser = Normalize(vmin=0, vmax=np.max(mag_depth_dist))
    im = ax.pcolor(mag_bins[:-1],
                   depth_bins[:-1],
                   mag_depth_dist.T,
                   norm=normaliser)
    ax.set_xlabel('Magnitude')
    ax.set_ylabel('Depth (km)')
    ax.set_xlim(mag_bins[0], mag_bins[-1])
    ax.set_ylim(depth_bins[0], depth_bins[-1])
    fig.colorbar(im, ax=ax)
    if normalisation:
        ax.set_title('Magnitude-Depth Density')
    else:
        ax.set_title('Magnitude-Depth Count')
    _save_image(fig, filename, filetype, dpi)
```
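A usage sketch continuing from the `catalogue` loaded in the first example; the 0.1 magnitude-unit and 5 km bin widths are illustrative:

```python
from openquake.hmtk.plotting.seismicity.catalogue_plots import (
    plot_magnitude_depth_density)

# Joint magnitude-depth density with a logarithmic colour scale
plot_magnitude_depth_density(catalogue, mag_int=0.1, depth_int=5.0,
                             logscale=True, normalisation=True)
```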
gem/oq-engine | openquake/hmtk/plotting/seismicity/catalogue_plots.py | plot_magnitude_time_scatter | python | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L221-L258

```python
def plot_magnitude_time_scatter(
        catalogue, plot_error=False, fmt_string='o', filename=None,
        figure_size=(8, 6), filetype='png', dpi=300, ax=None):
    """
    Creates a simple scatter plot of magnitude with time

    :param catalogue:
        Earthquake catalogue as instance of
        :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
    :param bool plot_error:
        Choose to plot error bars (True) or not (False)
    :param str fmt_string:
        Symbology of the plot (matplotlib format string)
    """
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    dtime = catalogue.get_decimal_time()
    # pylint: disable=len-as-condition
    if len(catalogue.data['sigmaMagnitude']) == 0:
        print('Magnitude Error is missing - neglecting error bars!')
        plot_error = False
    if plot_error:
        ax.errorbar(dtime,
                    catalogue.data['magnitude'],
                    xerr=None,
                    yerr=catalogue.data['sigmaMagnitude'],
                    fmt=fmt_string)
    else:
        ax.plot(dtime, catalogue.data['magnitude'], fmt_string)
    ax.set_xlabel('Year')
    ax.set_ylabel('Magnitude')
    ax.set_title('Magnitude-Time Plot')
    _save_image(fig, filename, filetype, dpi)
```
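A sketch of the scatter plot, again reusing the `catalogue` from the first example; error bars are drawn only when the catalogue carries a populated `sigmaMagnitude` column:

```python
from openquake.hmtk.plotting.seismicity.catalogue_plots import (
    plot_magnitude_time_scatter)

# Magnitude against decimal time, with error bars where available
plot_magnitude_time_scatter(catalogue, plot_error=True, fmt_string='o')
```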
gem/oq-engine | openquake/hmtk/plotting/seismicity/catalogue_plots.py | plot_magnitude_time_density | python | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L261-L342

```python
def plot_magnitude_time_density(
        catalogue, mag_int, time_int, completeness=None,
        normalisation=False, logscale=True, bootstrap=None, xlim=[], ylim=[],
        filename=None, figure_size=(8, 6), filetype='png', dpi=300, ax=None):
    """
    Creates a plot of magnitude-time density

    :param catalogue:
        Earthquake catalogue as instance of
        :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
    :param float mag_int:
        Width of the histogram for the magnitude bins
    :param float time_int:
        Width of the histogram for the time bin (in decimal years)
    :param bool normalisation:
        Normalise the histogram to give output as PMF (True) or count (False)
    :param bool logscale:
        Choose to scale the colours in a log-scale (True) or linear (False)
    :param int bootstrap:
        Number of samples used to bootstrap the magnitude and depth
        uncertainties
    """
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    # Create the magnitude bins
    if isinstance(mag_int, (np.ndarray, list)):
        mag_bins = mag_int
    else:
        mag_bins = np.arange(
            np.min(catalogue.data['magnitude']),
            np.max(catalogue.data['magnitude']) + mag_int / 2.,
            mag_int)
    # Creates the time bins
    if isinstance(time_int, (np.ndarray, list)):
        time_bins = time_int
    else:
        time_bins = np.arange(
            float(np.min(catalogue.data['year'])),
            float(np.max(catalogue.data['year'])) + 1.,
            float(time_int))
    # Get magnitude-time distribution
    mag_time_dist = catalogue.get_magnitude_time_distribution(
        mag_bins,
        time_bins,
        normalisation,
        bootstrap)
    # Get smallest non-zero value
    vmin_val = np.min(mag_time_dist[mag_time_dist > 0.])
    # Create plot
    if logscale:
        norm_data = LogNorm(vmin=vmin_val, vmax=np.max(mag_time_dist))
    else:
        if normalisation:
            norm_data = Normalize(vmin=vmin_val, vmax=np.max(mag_time_dist))
        else:
            norm_data = Normalize(vmin=1.0, vmax=np.max(mag_time_dist))
    im = ax.pcolor(time_bins[:-1],
                   mag_bins[:-1],
                   mag_time_dist.T,
                   norm=norm_data)
    ax.set_xlabel('Time (year)')
    ax.set_ylabel('Magnitude')
    if len(xlim) == 2:
        ax.set_xlim(xlim[0], xlim[1])
    else:
        ax.set_xlim(time_bins[0], time_bins[-1])
    if len(ylim) == 2:
        ax.set_ylim(ylim[0], ylim[1])
    else:
        ax.set_ylim(mag_bins[0], mag_bins[-1] + (mag_bins[-1] - mag_bins[-2]))
    # Fix the title
    if normalisation:
        fig.colorbar(im, label='Event Density', shrink=0.9, ax=ax)
    else:
        fig.colorbar(im, label='Event Count', shrink=0.9, ax=ax)
    ax.grid(True)
    # Plot completeness
    if completeness is not None:
        _plot_completeness(ax, completeness, time_bins[0], time_bins[-1])
    _save_image(fig, filename, filetype, dpi)
```
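A sketch combining the density plot with a completeness overlay, reusing the earlier `catalogue`; the completeness table is hypothetical, with each row read as (year, magnitude), i.e. the catalogue is treated as complete above that magnitude from that year onwards:

```python
import numpy as np
from openquake.hmtk.plotting.seismicity.catalogue_plots import (
    plot_magnitude_time_density)

# Hypothetical completeness windows: rows of (year, magnitude)
completeness = np.array([[1990., 4.0],
                         [1960., 5.0],
                         [1900., 6.5]])
plot_magnitude_time_density(catalogue, mag_int=0.1, time_int=5.0,
                            completeness=completeness)
```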
gem/oq-engine | openquake/hmtk/plotting/seismicity/catalogue_plots.py | _plot_completeness | python | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L345-L353

```python
def _plot_completeness(ax, comw, start_time, end_time):
    '''
    Adds completeness intervals to a plot
    '''
    comw = np.array(comw)
    comp = np.column_stack([np.hstack([end_time, comw[:, 0], start_time]),
                            np.hstack([comw[0, 1], comw[:, 1], comw[-1, 1]])])
    ax.step(comp[:-1, 0], comp[1:, 1], linestyle='-',
            where="post", linewidth=3, color='brown')
```
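A standalone sketch of the helper; `comw` rows are (completeness year, magnitude) and all values below are illustrative:

```python
import numpy as np
import matplotlib.pyplot as plt
from openquake.hmtk.plotting.seismicity.catalogue_plots import (
    _plot_completeness)

# Draw the completeness steps on a fresh magnitude-time axes
comw = np.array([[1990., 4.0], [1960., 5.0], [1900., 6.5]])
fig, ax = plt.subplots()
_plot_completeness(ax, comw, start_time=1900., end_time=2014.)
plt.show()
```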
gem/oq-engine | openquake/hmtk/plotting/seismicity/catalogue_plots.py | get_completeness_adjusted_table | python | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L356-L417

```python
def get_completeness_adjusted_table(catalogue, completeness, dmag,
                                    offset=1.0E-5, end_year=None, plot=False,
                                    figure_size=(8, 6), filename=None,
                                    filetype='png', dpi=300, ax=None):
    """
    Counts the number of earthquakes in each magnitude bin and normalises
    the rate to annual rates, taking into account the completeness
    """
    if not end_year:
        end_year = catalogue.end_year
    # Find the natural bin limits
    mag_bins = _get_catalogue_bin_limits(catalogue, dmag)
    obs_time = end_year - completeness[:, 0] + 1.
    obs_rates = np.zeros_like(mag_bins)
    durations = np.zeros_like(mag_bins)
    n_comp = np.shape(completeness)[0]
    for iloc in range(n_comp):
        low_mag = completeness[iloc, 1]
        comp_year = completeness[iloc, 0]
        if iloc == (n_comp - 1):
            idx = np.logical_and(
                catalogue.data['magnitude'] >= low_mag - offset,
                catalogue.data['year'] >= comp_year)
            high_mag = mag_bins[-1]
            obs_idx = mag_bins >= (low_mag - offset)
        else:
            high_mag = completeness[iloc + 1, 1]
            mag_idx = np.logical_and(
                catalogue.data['magnitude'] >= low_mag - offset,
                catalogue.data['magnitude'] < (high_mag - offset))
            idx = np.logical_and(mag_idx,
                                 catalogue.data['year'] >= (comp_year - offset))
            obs_idx = np.logical_and(mag_bins >= (low_mag - offset),
                                     mag_bins < (high_mag + offset))
        temp_rates = np.histogram(catalogue.data['magnitude'][idx],
                                  mag_bins[obs_idx])[0]
        temp_rates = temp_rates.astype(float) / obs_time[iloc]
        obs_rates[obs_idx[:-1]] = temp_rates
        durations[obs_idx[:-1]] = obs_time[iloc]
    selector = np.where(obs_rates > 0.)[0]
    mag_bins = mag_bins[selector]
    obs_rates = obs_rates[selector]
    durations = durations[selector]
    # Get cumulative rates
    cum_rates = np.array([sum(obs_rates[iloc:])
                          for iloc in range(0, len(obs_rates))])
    if plot:
        plt.figure(figsize=figure_size)
        plt.semilogy(mag_bins + dmag / 2., obs_rates, "bo",
                     label="Incremental")
        plt.semilogy(mag_bins + dmag / 2., cum_rates, "rs",
                     label="Cumulative")
        plt.xlabel("Magnitude (M)", fontsize=16)
        plt.ylabel("Annual Rate", fontsize=16)
        plt.grid(True)
        plt.legend(fontsize=16)
        if filename:
            plt.savefig(filename, format=filetype, dpi=dpi,
                        bbox_inches="tight")
    return np.column_stack([mag_bins, durations, obs_rates, cum_rates,
                            np.log10(cum_rates)])
```
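The returned array has one row per non-empty magnitude bin. A sketch reusing the `catalogue` and the hypothetical `completeness` table from the sketches above:

```python
from openquake.hmtk.plotting.seismicity.catalogue_plots import (
    get_completeness_adjusted_table)

# Columns: bin magnitude, observation duration (years), incremental
# annual rate, cumulative annual rate, log10(cumulative annual rate)
table = get_completeness_adjusted_table(catalogue, completeness, dmag=0.1,
                                        end_year=2014)
print(table[:5])
```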
gem/oq-engine | openquake/hmtk/plotting/seismicity/catalogue_plots.py | plot_observed_recurrence | python | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L420-L452

```python
def plot_observed_recurrence(
        catalogue, completeness, dmag, end_year=None, filename=None,
        figure_size=(8, 6), filetype='png', dpi=300, ax=None):
    """
    Plots the observed recurrence taking into account the completeness
    """
    # Get completeness adjusted recurrence table
    if isinstance(completeness, float):
        # Unique completeness
        completeness = np.array([[np.min(catalogue.data['year']),
                                  completeness]])
    if not end_year:
        end_year = catalogue.update_end_year()
    catalogue.data["dtime"] = catalogue.get_decimal_time()
    cent_mag, t_per, n_obs = get_completeness_counts(catalogue,
                                                     completeness,
                                                     dmag)
    obs_rates = n_obs / t_per
    cum_obs_rates = np.array([np.sum(obs_rates[i:])
                              for i in range(len(obs_rates))])
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    ax.semilogy(cent_mag, obs_rates, 'bo', label="Incremental")
    ax.semilogy(cent_mag, cum_obs_rates, 'rs', label="Cumulative")
    ax.set_xlim([cent_mag[0] - 0.1, cent_mag[-1] + 0.1])
    ax.set_xlabel('Magnitude')
    ax.set_ylabel('Annual Rate')
    ax.legend()
    _save_image(fig, filename, filetype, dpi)
```
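A sketch of the recurrence plot; a single float is accepted as the completeness magnitude, applied from the first year of the catalogue onwards (the 4.5 threshold is illustrative):

```python
from openquake.hmtk.plotting.seismicity.catalogue_plots import (
    plot_observed_recurrence)

# Incremental and cumulative annual rates in 0.1 magnitude-unit bins
plot_observed_recurrence(catalogue, completeness=4.5, dmag=0.1)
```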
gem/oq-engine | openquake/hmtk/strain/geodetic_strain.py | GeodeticStrain.get_secondary_strain_data | python | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/strain/geodetic_strain.py#L89-L135

```python
def get_secondary_strain_data(self, strain_data=None):
    '''
    Calculate the following and add to the data dictionary:

    1) 2nd invariant of strain
    2) Dilatation rate
    3) e1h and e2h
    4) err

    :param dict strain_data:
        Strain data dictionary (as described) - will overwrite current
        data if input
    '''
    if strain_data:
        self.data = strain_data
    if not isinstance(self.data, dict):
        raise ValueError('Strain data not input or incorrectly formatted')
    # Check to ensure essential attributes are in data dictionary
    for essential_key in DATA_VARIABLES:
        if essential_key not in self.data:
            print(self.data)
            raise ValueError('Essential strain information %s missing!'
                             % essential_key)
    self.data_variables = deepcopy(DATA_VARIABLES)
    # Second invariant
    self.data['2nd_inv'] = np.sqrt(
        (self.data['exx'] ** 2.) +
        (self.data['eyy'] ** 2.) +
        2.0 * (self.data['exy'] ** 2.))
    # Dilatation
    self.data['dilatation'] = self.data['exx'] + self.data['eyy']
    # err
    self.data['err'] = -1. * self.data['dilatation']
    center_normal_rate = (self.data['exx'] +
                          self.data['eyy']) / 2.
    radius_rate = np.sqrt((self.data['exx'] -
                           center_normal_rate) ** 2. +
                          (self.data['exy'] ** 2.))
    # e1h and e2h
    self.data['e1h'] = center_normal_rate - radius_rate
    self.data['e2h'] = center_normal_rate + radius_rate
    self.data['area'] = np.zeros(self.get_number_observations())
    self.data_variables.extend(['2nd_inv', 'dilatation', 'err', 'e1h',
                                'e2h'])
```
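A standalone sketch, assuming `DATA_VARIABLES` comprises the longitude, latitude and the exx/eyy/exy strain-rate components; all numbers are hypothetical:

```python
import numpy as np
from openquake.hmtk.strain.geodetic_strain import GeodeticStrain

# Two hypothetical grid cells of strain-rate observations
model = GeodeticStrain()
model.get_secondary_strain_data({'longitude': np.array([30.0, 30.5]),
                                 'latitude': np.array([40.0, 40.0]),
                                 'exx': np.array([10.0, 20.0]),
                                 'eyy': np.array([5.0, -4.0]),
                                 'exy': np.array([2.0, 1.0])})
print(model.data['2nd_inv'], model.data['e1h'], model.data['e2h'])
```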
gem/oq-engine | openquake/hmtk/strain/geodetic_strain.py | GeodeticStrain.get_number_observations | python | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/strain/geodetic_strain.py#L137-L144

```python
def get_number_observations(self):
    '''
    Returns the number of observations in the data file
    '''
    if isinstance(self.data, dict) and ('exx' in self.data.keys()):
        return len(self.data['exx'])
    else:
        return 0
```
gem/oq-engine | openquake/commands/plot_lc.py | plot_lc | python | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/plot_lc.py#L41-L52

```python
def plot_lc(calc_id, aid=None):
    """
    Plot loss curves given a calculation id and an asset ordinal.
    """
    # read the hazard data
    dstore = util.read(calc_id)
    dset = dstore['agg_curves-rlzs']
    if aid is None:  # plot the global curves
        plt = make_figure(dset.attrs['return_periods'], dset.value)
    else:
        sys.exit('Not implemented yet')
    plt.show()
```
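This module lives under `openquake/commands`, so it is normally reached through the `oq` command line; calling it directly looks like the sketch below, where the calculation id 42 is a placeholder for an existing datastore:

```python
from openquake.commands.plot_lc import plot_lc

# 42 stands in for the id of a previously completed oq-engine calculation
plot_lc(42)
```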
gem/oq-engine | openquake/hazardlib/gsim/nshmp_2014.py | nga_west2_epistemic_adjustment | python | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nshmp_2014.py#L75-L96

```python
def nga_west2_epistemic_adjustment(magnitude, distance):
    """
    Applies the "average" adjustment factor for epistemic uncertainty
    as defined in Table 17 of Petersen et al., (2014)::

                      | R < 10. | 10.0 <= R < 30.0 | R >= 30.0
        --------------------------------------------------------
        M < 6.0       |  0.37   |       0.22       |   0.22
        6 <= M < 7.0  |  0.25   |       0.23       |   0.23
        M >= 7.0      |  0.40   |       0.36       |   0.33
    """
    if magnitude < 6.0:
        adjustment = 0.22 * np.ones_like(distance)
        adjustment[distance < 10.0] = 0.37
    elif magnitude >= 7.0:
        adjustment = 0.36 * np.ones_like(distance)
        adjustment[distance < 10.0] = 0.40
        adjustment[distance >= 30.0] = 0.33
    else:
        adjustment = 0.23 * np.ones_like(distance)
        adjustment[distance < 10.0] = 0.25
    return adjustment
```
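The branch logic can be checked directly against the table above; the distances below straddle the 10 km and 30 km breaks:

```python
import numpy as np
from openquake.hazardlib.gsim.nshmp_2014 import (
    nga_west2_epistemic_adjustment)

distances = np.array([5.0, 20.0, 50.0])
print(nga_west2_epistemic_adjustment(5.5, distances))  # [0.37 0.22 0.22]
print(nga_west2_epistemic_adjustment(6.5, distances))  # [0.25 0.23 0.23]
print(nga_west2_epistemic_adjustment(7.5, distances))  # [0.4  0.36 0.33]
```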
gem/oq-engine | openquake/hazardlib/gsim/nshmp_2014.py | get_weighted_poes | python | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nshmp_2014.py#L102-L146

```python
def get_weighted_poes(gsim, sctx, rctx, dctx, imt, imls, truncation_level,
                      weighting=DEFAULT_WEIGHTING):
    """
    This function implements the NGA West 2 GMPE epistemic uncertainty
    adjustment factor without re-calculating the actual GMPE each time.

    :param gsim:
        Instance of the GMPE
    :param list weighting:
        Weightings as a list of tuples of (weight, number of standard
        deviations of the epistemic uncertainty adjustment)
    """
    if truncation_level is not None and truncation_level < 0:
        raise ValueError('truncation level must be zero, positive number '
                         'or None')
    gsim._check_imt(imt)
    adjustment = nga_west2_epistemic_adjustment(rctx.mag, dctx.rrup)
    adjustment = adjustment.reshape(adjustment.shape + (1, ))
    if truncation_level == 0:
        # zero truncation mode, just compare imls to mean
        imls = gsim.to_distribution_values(imls)
        mean, _ = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt, [])
        mean = mean.reshape(mean.shape + (1, ))
        output = np.zeros([mean.shape[0], imls.shape[0]])
        for (wgt, fct) in weighting:
            output += (wgt *
                       (imls <= (mean + (fct * adjustment))).astype(float))
        return output
    else:
        # use real normal distribution
        assert (const.StdDev.TOTAL
                in gsim.DEFINED_FOR_STANDARD_DEVIATION_TYPES)
        imls = gsim.to_distribution_values(imls)
        mean, [stddev] = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt,
                                                   [const.StdDev.TOTAL])
        mean = mean.reshape(mean.shape + (1, ))
        stddev = stddev.reshape(stddev.shape + (1, ))
        output = np.zeros([mean.shape[0], imls.shape[0]])
        for (wgt, fct) in weighting:
            values = (imls - (mean + (fct * adjustment))) / stddev
            if truncation_level is None:
                output += (wgt * _norm_sf(values))
            else:
                output += (wgt * _truncnorm_sf(truncation_level, values))
        return output
```
return output | python | def get_weighted_poes(gsim, sctx, rctx, dctx, imt, imls, truncation_level,
weighting=DEFAULT_WEIGHTING):
if truncation_level is not None and truncation_level < 0:
raise ValueError('truncation level must be zero, positive number '
'or None')
gsim._check_imt(imt)
adjustment = nga_west2_epistemic_adjustment(rctx.mag, dctx.rrup)
adjustment = adjustment.reshape(adjustment.shape + (1, ))
if truncation_level == 0:
imls = gsim.to_distribution_values(imls)
mean, _ = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt, [])
mean = mean.reshape(mean.shape + (1, ))
output = np.zeros([mean.shape[0], imls.shape[0]])
for (wgt, fct) in weighting:
output += (wgt *
(imls <= (mean + (fct * adjustment))).astype(float))
return output
else:
assert (const.StdDev.TOTAL
in gsim.DEFINED_FOR_STANDARD_DEVIATION_TYPES)
imls = gsim.to_distribution_values(imls)
mean, [stddev] = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt,
[const.StdDev.TOTAL])
mean = mean.reshape(mean.shape + (1, ))
stddev = stddev.reshape(stddev.shape + (1, ))
output = np.zeros([mean.shape[0], imls.shape[0]])
for (wgt, fct) in weighting:
values = (imls - (mean + (fct * adjustment))) / stddev
if truncation_level is None:
output += (wgt * _norm_sf(values))
else:
output += (wgt * _truncnorm_sf(truncation_level, values))
return output | [
"def",
"get_weighted_poes",
"(",
"gsim",
",",
"sctx",
",",
"rctx",
",",
"dctx",
",",
"imt",
",",
"imls",
",",
"truncation_level",
",",
"weighting",
"=",
"DEFAULT_WEIGHTING",
")",
":",
"if",
"truncation_level",
"is",
"not",
"None",
"and",
"truncation_level",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"'truncation level must be zero, positive number '",
"'or None'",
")",
"gsim",
".",
"_check_imt",
"(",
"imt",
")",
"adjustment",
"=",
"nga_west2_epistemic_adjustment",
"(",
"rctx",
".",
"mag",
",",
"dctx",
".",
"rrup",
")",
"adjustment",
"=",
"adjustment",
".",
"reshape",
"(",
"adjustment",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
"if",
"truncation_level",
"==",
"0",
":",
"# zero truncation mode, just compare imls to mean",
"imls",
"=",
"gsim",
".",
"to_distribution_values",
"(",
"imls",
")",
"mean",
",",
"_",
"=",
"gsim",
".",
"get_mean_and_stddevs",
"(",
"sctx",
",",
"rctx",
",",
"dctx",
",",
"imt",
",",
"[",
"]",
")",
"mean",
"=",
"mean",
".",
"reshape",
"(",
"mean",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
"output",
"=",
"np",
".",
"zeros",
"(",
"[",
"mean",
".",
"shape",
"[",
"0",
"]",
",",
"imls",
".",
"shape",
"[",
"0",
"]",
"]",
")",
"for",
"(",
"wgt",
",",
"fct",
")",
"in",
"weighting",
":",
"output",
"+=",
"(",
"wgt",
"*",
"(",
"imls",
"<=",
"(",
"mean",
"+",
"(",
"fct",
"*",
"adjustment",
")",
")",
")",
".",
"astype",
"(",
"float",
")",
")",
"return",
"output",
"else",
":",
"# use real normal distribution",
"assert",
"(",
"const",
".",
"StdDev",
".",
"TOTAL",
"in",
"gsim",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
")",
"imls",
"=",
"gsim",
".",
"to_distribution_values",
"(",
"imls",
")",
"mean",
",",
"[",
"stddev",
"]",
"=",
"gsim",
".",
"get_mean_and_stddevs",
"(",
"sctx",
",",
"rctx",
",",
"dctx",
",",
"imt",
",",
"[",
"const",
".",
"StdDev",
".",
"TOTAL",
"]",
")",
"mean",
"=",
"mean",
".",
"reshape",
"(",
"mean",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
"stddev",
"=",
"stddev",
".",
"reshape",
"(",
"stddev",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
"output",
"=",
"np",
".",
"zeros",
"(",
"[",
"mean",
".",
"shape",
"[",
"0",
"]",
",",
"imls",
".",
"shape",
"[",
"0",
"]",
"]",
")",
"for",
"(",
"wgt",
",",
"fct",
")",
"in",
"weighting",
":",
"values",
"=",
"(",
"imls",
"-",
"(",
"mean",
"+",
"(",
"fct",
"*",
"adjustment",
")",
")",
")",
"/",
"stddev",
"if",
"truncation_level",
"is",
"None",
":",
"output",
"+=",
"(",
"wgt",
"*",
"_norm_sf",
"(",
"values",
")",
")",
"else",
":",
"output",
"+=",
"(",
"wgt",
"*",
"_truncnorm_sf",
"(",
"truncation_level",
",",
"values",
")",
")",
"return",
"output"
]
| This function implements the NGA West 2 GMPE epistemic uncertainty
adjustment factor without re-calculating the actual GMPE each time.
:param gsim:
Instance of the GMPE
:param list weighting:
Weightings as a list of tuples of (weight, number standard deviations
of the epistemic uncertainty adjustment) | [
"This",
"function",
"implements",
"the",
"NGA",
"West",
"2",
"GMPE",
"epistemic",
"uncertainty",
"adjustment",
"factor",
"without",
"re",
"-",
"calculating",
"the",
"actual",
"GMPE",
"each",
"time",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nshmp_2014.py#L102-L146 |
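The weighted PoE is a three-point mixture of exceedance probabilities over the shifted means. A self-contained sketch of that mixture using scipy's normal survival function; the 0.185/0.63/0.185 weights are an assumption mirroring the module's default weighting, and all numbers are illustrative:

```python
from scipy.stats import norm

# Exceedance probability of one IML as a mixture over
# mean - adj, mean and mean + adj (all in log ground-motion units)
mean, stddev, adjustment, iml = -1.0, 0.6, 0.3, -0.5
weighting = [(0.185, -1.0), (0.63, 0.0), (0.185, 1.0)]
poe = sum(wgt * norm.sf(iml, loc=mean + fct * adjustment, scale=stddev)
          for wgt, fct in weighting)
print(poe)
```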
gem/oq-engine | openquake/hazardlib/gsim/nshmp_2014.py | AtkinsonMacias2009NSHMP2014.get_mean_and_stddevs | python | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nshmp_2014.py#L61-L72

```python
def get_mean_and_stddevs(self, sctx, rctx, dctx, imt, stddev_types):
    """
    See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values.
    """
    # Get original mean and standard deviations
    mean, stddevs = super().get_mean_and_stddevs(
        sctx, rctx, dctx, imt, stddev_types)
    cff = SInterCan15Mid.SITE_COEFFS[imt]
    mean += np.log(cff['mf'])
    return mean, stddevs
```
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
# Get original mean and standard deviations
mean, stddevs = super().get_mean_and_stddevs(
sctx, rctx, dctx, imt, stddev_types)
# Return mean, increased by the adjustment factor,
        # and standard deviation
return mean + nga_west2_epistemic_adjustment(rctx.mag, dctx.rrup),\
stddevs | python | def get_mean_and_stddevs(self, sctx, rctx, dctx, imt, stddev_types):
mean, stddevs = super().get_mean_and_stddevs(
sctx, rctx, dctx, imt, stddev_types)
return mean + nga_west2_epistemic_adjustment(rctx.mag, dctx.rrup),\
stddevs | [
"def",
"get_mean_and_stddevs",
"(",
"self",
",",
"sctx",
",",
"rctx",
",",
"dctx",
",",
"imt",
",",
"stddev_types",
")",
":",
"# Get original mean and standard deviations",
"mean",
",",
"stddevs",
"=",
"super",
"(",
")",
".",
"get_mean_and_stddevs",
"(",
"sctx",
",",
"rctx",
",",
"dctx",
",",
"imt",
",",
"stddev_types",
")",
"# Return mean, increased by the adjustment factor,",
"# and standard devation",
"return",
"mean",
"+",
"nga_west2_epistemic_adjustment",
"(",
"rctx",
".",
"mag",
",",
"dctx",
".",
"rrup",
")",
",",
"stddevs"
]
| See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. | [
"See",
":",
"meth",
":",
"superclass",
"method",
"<",
".",
"base",
".",
"GroundShakingIntensityModel",
".",
"get_mean_and_stddevs",
">",
"for",
"spec",
"of",
"input",
"and",
"result",
"values",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nshmp_2014.py#L154-L166 |
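The Upper variant shifts the mean up by the magnitude- and distance-dependent adjustment. A sketch with a stand-in adjustment function; the real coefficients live in nga_west2_epistemic_adjustment and are not reproduced here:

import numpy as np

def epistemic_adjustment(mag, rrup):
    # Stand-in only: a crude distance-dependent factor (mag is ignored),
    # not the published NGA West 2 coefficients.
    return np.where(rrup < 10.0, 0.4, 0.25)

mean = np.array([-2.0, -1.6, -1.3])
rrup = np.array([5.0, 20.0, 80.0])
upper = mean + epistemic_adjustment(6.5, rrup)  # the "Upper" branch adds
lower = mean - epistemic_adjustment(6.5, rrup)  # a "Lower" sibling would subtract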
gem/oq-engine | openquake/hazardlib/gsim/nshmp_2014.py | AbrahamsonEtAl2014NSHMPMean.get_poes | def get_poes(self, sctx, rctx, dctx, imt, imls, truncation_level):
"""
Adapts the original `get_poes()` from the :class:
        openquake.hazardlib.gsim.base.GMPE to call a function that takes the
weighted sum of the PoEs from the epistemic uncertainty adjustment
"""
return get_weighted_poes(self, sctx, rctx, dctx, imt, imls,
truncation_level) | python | def get_poes(self, sctx, rctx, dctx, imt, imls, truncation_level):
return get_weighted_poes(self, sctx, rctx, dctx, imt, imls,
truncation_level) | [
"def",
"get_poes",
"(",
"self",
",",
"sctx",
",",
"rctx",
",",
"dctx",
",",
"imt",
",",
"imls",
",",
"truncation_level",
")",
":",
"return",
"get_weighted_poes",
"(",
"self",
",",
"sctx",
",",
"rctx",
",",
"dctx",
",",
"imt",
",",
"imls",
",",
"truncation_level",
")"
]
| Adapts the original `get_poes()` from the :class:
openquake.hazardlib.gsim.base.GMPE to call a function that takes the
weighted sum of the PoEs from the epistemic uncertainty adjustment | [
"Adapts",
"the",
"original",
"get_poes",
"()",
"from",
"the",
":",
"class",
":",
"openquake",
".",
"hazardlib",
".",
"gsim",
".",
"base",
".",
"GMPE",
"to",
"call",
"a",
"function",
"that",
"take",
"the",
"weighted",
"sum",
"of",
"the",
"PoEs",
"from",
"the",
"epistemic",
"uncertainty",
"adjustment"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nshmp_2014.py#L194-L201 |
gem/oq-engine | openquake/commonlib/shapefileparser.py | register_fields | def register_fields(w):
"""
Register shapefile fields.
"""
PARAMS_LIST = [BASE_PARAMS, GEOMETRY_PARAMS, MFD_PARAMS]
for PARAMS in PARAMS_LIST:
for _, param, dtype in PARAMS:
w.field(param, fieldType=dtype, size=FIELD_SIZE)
PARAMS_LIST = [
RATE_PARAMS, STRIKE_PARAMS, DIP_PARAMS, RAKE_PARAMS, NPW_PARAMS,
HDEPTH_PARAMS, HDW_PARAMS, PLANES_STRIKES_PARAM, PLANES_DIPS_PARAM]
for PARAMS in PARAMS_LIST:
for param, dtype in PARAMS:
w.field(param, fieldType=dtype, size=FIELD_SIZE)
# source typology
w.field('sourcetype', 'C') | python | def register_fields(w):
PARAMS_LIST = [BASE_PARAMS, GEOMETRY_PARAMS, MFD_PARAMS]
for PARAMS in PARAMS_LIST:
for _, param, dtype in PARAMS:
w.field(param, fieldType=dtype, size=FIELD_SIZE)
PARAMS_LIST = [
RATE_PARAMS, STRIKE_PARAMS, DIP_PARAMS, RAKE_PARAMS, NPW_PARAMS,
HDEPTH_PARAMS, HDW_PARAMS, PLANES_STRIKES_PARAM, PLANES_DIPS_PARAM]
for PARAMS in PARAMS_LIST:
for param, dtype in PARAMS:
w.field(param, fieldType=dtype, size=FIELD_SIZE)
w.field('sourcetype', 'C') | [
"def",
"register_fields",
"(",
"w",
")",
":",
"PARAMS_LIST",
"=",
"[",
"BASE_PARAMS",
",",
"GEOMETRY_PARAMS",
",",
"MFD_PARAMS",
"]",
"for",
"PARAMS",
"in",
"PARAMS_LIST",
":",
"for",
"_",
",",
"param",
",",
"dtype",
"in",
"PARAMS",
":",
"w",
".",
"field",
"(",
"param",
",",
"fieldType",
"=",
"dtype",
",",
"size",
"=",
"FIELD_SIZE",
")",
"PARAMS_LIST",
"=",
"[",
"RATE_PARAMS",
",",
"STRIKE_PARAMS",
",",
"DIP_PARAMS",
",",
"RAKE_PARAMS",
",",
"NPW_PARAMS",
",",
"HDEPTH_PARAMS",
",",
"HDW_PARAMS",
",",
"PLANES_STRIKES_PARAM",
",",
"PLANES_DIPS_PARAM",
"]",
"for",
"PARAMS",
"in",
"PARAMS_LIST",
":",
"for",
"param",
",",
"dtype",
"in",
"PARAMS",
":",
"w",
".",
"field",
"(",
"param",
",",
"fieldType",
"=",
"dtype",
",",
"size",
"=",
"FIELD_SIZE",
")",
"# source typology",
"w",
".",
"field",
"(",
"'sourcetype'",
",",
"'C'",
")"
]
| Register shapefile fields. | [
"Register",
"shapefile",
"fields",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L86-L103 |
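register_fields is a loop of w.field(...) calls over module-level parameter tables. A self-contained sketch with made-up tables, assuming the pyshp 1.x API used throughout the module (Writer takes a shape type, fields take the same one-letter dtype codes the tables use):

import shapefile  # pyshp

FIELD_SIZE = 15
BASE_PARAMS = [("id", "id", "c"), ("name", "name", "c")]  # made-up subset
RATE_PARAMS = [("rate1", "f"), ("rate2", "f")]            # made-up subset

w = shapefile.Writer(shapefile.POINT)
for _, param, dtype in BASE_PARAMS:                  # three-element rows
    w.field(param, fieldType=dtype, size=FIELD_SIZE)
for param, dtype in RATE_PARAMS:                     # two-element rows
    w.field(param, fieldType=dtype, size=FIELD_SIZE)
w.field("sourcetype", "C")                           # source typology column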
gem/oq-engine | openquake/commonlib/shapefileparser.py | expand_src_param | def expand_src_param(values, shp_params):
"""
Expand hazardlib source attribute (defined through list of values)
into dictionary of shapefile parameters.
"""
if values is None:
return dict([(key, None) for key, _ in shp_params])
else:
num_values = len(values)
return dict(
[(key, float(values[i]) if i < num_values else None)
for i, (key, _) in enumerate(shp_params)]) | python | def expand_src_param(values, shp_params):
if values is None:
return dict([(key, None) for key, _ in shp_params])
else:
num_values = len(values)
return dict(
[(key, float(values[i]) if i < num_values else None)
for i, (key, _) in enumerate(shp_params)]) | [
"def",
"expand_src_param",
"(",
"values",
",",
"shp_params",
")",
":",
"if",
"values",
"is",
"None",
":",
"return",
"dict",
"(",
"[",
"(",
"key",
",",
"None",
")",
"for",
"key",
",",
"_",
"in",
"shp_params",
"]",
")",
"else",
":",
"num_values",
"=",
"len",
"(",
"values",
")",
"return",
"dict",
"(",
"[",
"(",
"key",
",",
"float",
"(",
"values",
"[",
"i",
"]",
")",
"if",
"i",
"<",
"num_values",
"else",
"None",
")",
"for",
"i",
",",
"(",
"key",
",",
"_",
")",
"in",
"enumerate",
"(",
"shp_params",
")",
"]",
")"
]
| Expand hazardlib source attribute (defined through list of values)
into dictionary of shapefile parameters. | [
"Expand",
"hazardlib",
"source",
"attribute",
"(",
"defined",
"through",
"list",
"of",
"values",
")",
"into",
"dictionary",
"of",
"shapefile",
"parameters",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L106-L117 |
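A tiny demo of the padding behaviour, with a made-up three-column parameter table; positions beyond the supplied values come back as None:

SHP_PARAMS = [("strike1", "f"), ("strike2", "f"), ("strike3", "f")]

def expand_src_param(values, shp_params):
    if values is None:
        return {key: None for key, _ in shp_params}
    n = len(values)
    return {key: float(values[i]) if i < n else None
            for i, (key, _) in enumerate(shp_params)}

print(expand_src_param([30.0, 120.0], SHP_PARAMS))
# -> {'strike1': 30.0, 'strike2': 120.0, 'strike3': None}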
gem/oq-engine | openquake/commonlib/shapefileparser.py | extract_source_params | def extract_source_params(src):
"""
Extract params from source object.
"""
tags = get_taglist(src)
data = []
for key, param, vtype in BASE_PARAMS:
if key in src.attrib:
if vtype == "c":
data.append((param, src.attrib[key]))
elif vtype == "f":
data.append((param, float(src.attrib[key])))
else:
data.append((param, None))
elif key in tags:
if vtype == "c":
data.append((param, src.nodes[tags.index(key)].text))
elif vtype == "f":
data.append((param, float(src.nodes[tags.index(key)].text)))
else:
data.append((param, None))
else:
data.append((param, None))
return dict(data) | python | def extract_source_params(src):
tags = get_taglist(src)
data = []
for key, param, vtype in BASE_PARAMS:
if key in src.attrib:
if vtype == "c":
data.append((param, src.attrib[key]))
elif vtype == "f":
data.append((param, float(src.attrib[key])))
else:
data.append((param, None))
elif key in tags:
if vtype == "c":
data.append((param, src.nodes[tags.index(key)].text))
elif vtype == "f":
data.append((param, float(src.nodes[tags.index(key)].text)))
else:
data.append((param, None))
else:
data.append((param, None))
return dict(data) | [
"def",
"extract_source_params",
"(",
"src",
")",
":",
"tags",
"=",
"get_taglist",
"(",
"src",
")",
"data",
"=",
"[",
"]",
"for",
"key",
",",
"param",
",",
"vtype",
"in",
"BASE_PARAMS",
":",
"if",
"key",
"in",
"src",
".",
"attrib",
":",
"if",
"vtype",
"==",
"\"c\"",
":",
"data",
".",
"append",
"(",
"(",
"param",
",",
"src",
".",
"attrib",
"[",
"key",
"]",
")",
")",
"elif",
"vtype",
"==",
"\"f\"",
":",
"data",
".",
"append",
"(",
"(",
"param",
",",
"float",
"(",
"src",
".",
"attrib",
"[",
"key",
"]",
")",
")",
")",
"else",
":",
"data",
".",
"append",
"(",
"(",
"param",
",",
"None",
")",
")",
"elif",
"key",
"in",
"tags",
":",
"if",
"vtype",
"==",
"\"c\"",
":",
"data",
".",
"append",
"(",
"(",
"param",
",",
"src",
".",
"nodes",
"[",
"tags",
".",
"index",
"(",
"key",
")",
"]",
".",
"text",
")",
")",
"elif",
"vtype",
"==",
"\"f\"",
":",
"data",
".",
"append",
"(",
"(",
"param",
",",
"float",
"(",
"src",
".",
"nodes",
"[",
"tags",
".",
"index",
"(",
"key",
")",
"]",
".",
"text",
")",
")",
")",
"else",
":",
"data",
".",
"append",
"(",
"(",
"param",
",",
"None",
")",
")",
"else",
":",
"data",
".",
"append",
"(",
"(",
"param",
",",
"None",
")",
")",
"return",
"dict",
"(",
"data",
")"
]
| Extract params from source object. | [
"Extract",
"params",
"from",
"source",
"object",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L120-L143 |
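The lookup order is: source attribute first, then a child node with the matching tag, else None. A stand-in node class makes that order concrete (the real objects are openquake Node instances):

class FakeNode:
    def __init__(self, tag, attrib=None, nodes=(), text=None):
        self.tag, self.attrib = tag, dict(attrib or {})
        self.nodes, self.text = list(nodes), text

src = FakeNode("pointSource", {"id": "1"},
               nodes=[FakeNode("magScaleRel", text="WC1994")])
tags = [n.tag for n in src.nodes]              # what get_taglist() builds

def lookup(src, key, tags):
    if key in src.attrib:
        return src.attrib[key]                 # attribute wins
    if key in tags:
        return src.nodes[tags.index(key)].text
    return None

print(lookup(src, "id", tags), lookup(src, "magScaleRel", tags))  # 1 WC1994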
gem/oq-engine | openquake/commonlib/shapefileparser.py | parse_complex_fault_geometry | def parse_complex_fault_geometry(node):
"""
Parses a complex fault geometry node returning both the attributes and
parameters in a dictionary
"""
assert "complexFaultGeometry" in node.tag
# Get general attributes
geometry = {"intermediateEdges": []}
for subnode in node:
crds = subnode.nodes[0].nodes[0].text
if "faultTopEdge" in subnode.tag:
geometry["faultTopEdge"] = numpy.array(
[[crds[i], crds[i + 1], crds[i + 2]]
for i in range(0, len(crds), 3)])
geometry["upperSeismoDepth"] = numpy.min(
geometry["faultTopEdge"][:, 2])
elif "faultBottomEdge" in subnode.tag:
geometry["faultBottomEdge"] = numpy.array(
[[crds[i], crds[i + 1], crds[i + 2]]
for i in range(0, len(crds), 3)])
geometry["lowerSeismoDepth"] = numpy.max(
geometry["faultBottomEdge"][:, 2])
elif "intermediateEdge" in subnode.tag:
geometry["intermediateEdges"].append(
numpy.array([[crds[i], crds[i + 1], crds[i + 2]]
for i in range(0, len(crds), 3)]))
else:
pass
geometry["dip"] = None
return geometry | python | def parse_complex_fault_geometry(node):
assert "complexFaultGeometry" in node.tag
geometry = {"intermediateEdges": []}
for subnode in node:
crds = subnode.nodes[0].nodes[0].text
if "faultTopEdge" in subnode.tag:
geometry["faultTopEdge"] = numpy.array(
[[crds[i], crds[i + 1], crds[i + 2]]
for i in range(0, len(crds), 3)])
geometry["upperSeismoDepth"] = numpy.min(
geometry["faultTopEdge"][:, 2])
elif "faultBottomEdge" in subnode.tag:
geometry["faultBottomEdge"] = numpy.array(
[[crds[i], crds[i + 1], crds[i + 2]]
for i in range(0, len(crds), 3)])
geometry["lowerSeismoDepth"] = numpy.max(
geometry["faultBottomEdge"][:, 2])
elif "intermediateEdge" in subnode.tag:
geometry["intermediateEdges"].append(
numpy.array([[crds[i], crds[i + 1], crds[i + 2]]
for i in range(0, len(crds), 3)]))
else:
pass
geometry["dip"] = None
return geometry | [
"def",
"parse_complex_fault_geometry",
"(",
"node",
")",
":",
"assert",
"\"complexFaultGeometry\"",
"in",
"node",
".",
"tag",
"# Get general attributes",
"geometry",
"=",
"{",
"\"intermediateEdges\"",
":",
"[",
"]",
"}",
"for",
"subnode",
"in",
"node",
":",
"crds",
"=",
"subnode",
".",
"nodes",
"[",
"0",
"]",
".",
"nodes",
"[",
"0",
"]",
".",
"text",
"if",
"\"faultTopEdge\"",
"in",
"subnode",
".",
"tag",
":",
"geometry",
"[",
"\"faultTopEdge\"",
"]",
"=",
"numpy",
".",
"array",
"(",
"[",
"[",
"crds",
"[",
"i",
"]",
",",
"crds",
"[",
"i",
"+",
"1",
"]",
",",
"crds",
"[",
"i",
"+",
"2",
"]",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"crds",
")",
",",
"3",
")",
"]",
")",
"geometry",
"[",
"\"upperSeismoDepth\"",
"]",
"=",
"numpy",
".",
"min",
"(",
"geometry",
"[",
"\"faultTopEdge\"",
"]",
"[",
":",
",",
"2",
"]",
")",
"elif",
"\"faultBottomEdge\"",
"in",
"subnode",
".",
"tag",
":",
"geometry",
"[",
"\"faultBottomEdge\"",
"]",
"=",
"numpy",
".",
"array",
"(",
"[",
"[",
"crds",
"[",
"i",
"]",
",",
"crds",
"[",
"i",
"+",
"1",
"]",
",",
"crds",
"[",
"i",
"+",
"2",
"]",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"crds",
")",
",",
"3",
")",
"]",
")",
"geometry",
"[",
"\"lowerSeismoDepth\"",
"]",
"=",
"numpy",
".",
"max",
"(",
"geometry",
"[",
"\"faultBottomEdge\"",
"]",
"[",
":",
",",
"2",
"]",
")",
"elif",
"\"intermediateEdge\"",
"in",
"subnode",
".",
"tag",
":",
"geometry",
"[",
"\"intermediateEdges\"",
"]",
".",
"append",
"(",
"numpy",
".",
"array",
"(",
"[",
"[",
"crds",
"[",
"i",
"]",
",",
"crds",
"[",
"i",
"+",
"1",
"]",
",",
"crds",
"[",
"i",
"+",
"2",
"]",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"crds",
")",
",",
"3",
")",
"]",
")",
")",
"else",
":",
"pass",
"geometry",
"[",
"\"dip\"",
"]",
"=",
"None",
"return",
"geometry"
]
| Parses a complex fault geometry node returning both the attributes and
parameters in a dictionary | [
"Parses",
"a",
"complex",
"fault",
"geometry",
"node",
"returning",
"both",
"the",
"attributes",
"and",
"parameters",
"in",
"a",
"dictionary"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L201-L231 |
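The coordinate handling above turns a flat [lon, lat, depth, lon, lat, depth, ...] list into an (n, 3) edge array; numpy's reshape does the same in one call:

import numpy as np

crds = [0.0, 0.0, 5.0, 0.1, 0.0, 5.5, 0.2, 0.0, 6.0]  # toy top edge
edge = np.array([[crds[i], crds[i + 1], crds[i + 2]]
                 for i in range(0, len(crds), 3)])
assert (edge == np.array(crds).reshape(-1, 3)).all()  # equivalent one-liner
upper_seismo_depth = edge[:, 2].min()                 # 5.0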
gem/oq-engine | openquake/commonlib/shapefileparser.py | parse_planar_fault_geometry | def parse_planar_fault_geometry(node):
"""
Parses a planar fault geometry node returning both the attributes and
parameters in a dictionary
"""
assert "planarSurface" in node.tag
geometry = {"strike": node.attrib["strike"],
"dip": node.attrib["dip"]}
upper_depth = numpy.inf
lower_depth = 0.0
tags = get_taglist(node)
corner_points = []
for locn in ["topLeft", "topRight", "bottomRight", "bottomLeft"]:
plane = node.nodes[tags.index(locn)]
upper_depth = plane["depth"] if plane["depth"] < upper_depth else\
upper_depth
lower_depth = plane["depth"] if plane["depth"] > lower_depth else\
lower_depth
corner_points.append([plane["lon"], plane["lat"], plane["depth"]])
geometry["upperSeismoDepth"] = upper_depth
geometry["lowerSeismoDepth"] = lower_depth
geometry["corners"] = numpy.array(corner_points)
return geometry | python | def parse_planar_fault_geometry(node):
assert "planarSurface" in node.tag
geometry = {"strike": node.attrib["strike"],
"dip": node.attrib["dip"]}
upper_depth = numpy.inf
lower_depth = 0.0
tags = get_taglist(node)
corner_points = []
for locn in ["topLeft", "topRight", "bottomRight", "bottomLeft"]:
plane = node.nodes[tags.index(locn)]
upper_depth = plane["depth"] if plane["depth"] < upper_depth else\
upper_depth
lower_depth = plane["depth"] if plane["depth"] > lower_depth else\
lower_depth
corner_points.append([plane["lon"], plane["lat"], plane["depth"]])
geometry["upperSeismoDepth"] = upper_depth
geometry["lowerSeismoDepth"] = lower_depth
geometry["corners"] = numpy.array(corner_points)
return geometry | [
"def",
"parse_planar_fault_geometry",
"(",
"node",
")",
":",
"assert",
"\"planarSurface\"",
"in",
"node",
".",
"tag",
"geometry",
"=",
"{",
"\"strike\"",
":",
"node",
".",
"attrib",
"[",
"\"strike\"",
"]",
",",
"\"dip\"",
":",
"node",
".",
"attrib",
"[",
"\"dip\"",
"]",
"}",
"upper_depth",
"=",
"numpy",
".",
"inf",
"lower_depth",
"=",
"0.0",
"tags",
"=",
"get_taglist",
"(",
"node",
")",
"corner_points",
"=",
"[",
"]",
"for",
"locn",
"in",
"[",
"\"topLeft\"",
",",
"\"topRight\"",
",",
"\"bottomRight\"",
",",
"\"bottomLeft\"",
"]",
":",
"plane",
"=",
"node",
".",
"nodes",
"[",
"tags",
".",
"index",
"(",
"locn",
")",
"]",
"upper_depth",
"=",
"plane",
"[",
"\"depth\"",
"]",
"if",
"plane",
"[",
"\"depth\"",
"]",
"<",
"upper_depth",
"else",
"upper_depth",
"lower_depth",
"=",
"plane",
"[",
"\"depth\"",
"]",
"if",
"plane",
"[",
"\"depth\"",
"]",
">",
"lower_depth",
"else",
"lower_depth",
"corner_points",
".",
"append",
"(",
"[",
"plane",
"[",
"\"lon\"",
"]",
",",
"plane",
"[",
"\"lat\"",
"]",
",",
"plane",
"[",
"\"depth\"",
"]",
"]",
")",
"geometry",
"[",
"\"upperSeismoDepth\"",
"]",
"=",
"upper_depth",
"geometry",
"[",
"\"lowerSeismoDepth\"",
"]",
"=",
"lower_depth",
"geometry",
"[",
"\"corners\"",
"]",
"=",
"numpy",
".",
"array",
"(",
"corner_points",
")",
"return",
"geometry"
]
| Parses a planar fault geometry node returning both the attributes and
parameters in a dictionary | [
"Parses",
"a",
"planar",
"fault",
"geometry",
"node",
"returning",
"both",
"the",
"attributes",
"and",
"parameters",
"in",
"a",
"dictionary"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L234-L256 |
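The running if/else comparisons above track the shallowest and deepest corner; collecting the depths first and calling min()/max() is an equivalent, shorter form:

import numpy as np

corner_depths = np.array([2.0, 2.0, 14.0, 14.0])  # toy topLeft..bottomLeft depths
upper_seismo_depth = corner_depths.min()          # replaces the running scan
lower_seismo_depth = corner_depths.max()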
gem/oq-engine | openquake/commonlib/shapefileparser.py | extract_mfd_params | def extract_mfd_params(src):
"""
Extracts the MFD parameters from an object
"""
tags = get_taglist(src)
if "incrementalMFD" in tags:
mfd_node = src.nodes[tags.index("incrementalMFD")]
elif "truncGutenbergRichterMFD" in tags:
mfd_node = src.nodes[tags.index("truncGutenbergRichterMFD")]
elif "arbitraryMFD" in tags:
mfd_node = src.nodes[tags.index("arbitraryMFD")]
elif "YoungsCoppersmithMFD" in tags:
mfd_node = src.nodes[tags.index("YoungsCoppersmithMFD")]
else:
raise ValueError("Source %s contains no supported MFD type!" % src.tag)
data = []
rates = []
for key, param, vtype in MFD_PARAMS:
if key in mfd_node.attrib and mfd_node.attrib[key] is not None:
data.append((param, mfd_node.attrib[key]))
else:
data.append((param, None))
if ("incrementalMFD" or "arbitraryMFD") in mfd_node.tag:
# Extract Rates
rates = ~mfd_node.occurRates
n_r = len(rates)
if n_r > MAX_RATES:
raise ValueError("Number of rates in source %s too large "
"to be placed into shapefile" % src.tag)
rate_dict = dict([(key, rates[i] if i < n_r else None)
for i, (key, _) in enumerate(RATE_PARAMS)])
elif "YoungsCoppersmithMFD" in mfd_node.tag:
rate_dict = dict([(key, mfd_node.attrib['characteristicRate'])
for i, (key, _) in enumerate(RATE_PARAMS)])
else:
rate_dict = dict([(key, None)
for i, (key, _) in enumerate(RATE_PARAMS)])
return dict(data), rate_dict | python | def extract_mfd_params(src):
tags = get_taglist(src)
if "incrementalMFD" in tags:
mfd_node = src.nodes[tags.index("incrementalMFD")]
elif "truncGutenbergRichterMFD" in tags:
mfd_node = src.nodes[tags.index("truncGutenbergRichterMFD")]
elif "arbitraryMFD" in tags:
mfd_node = src.nodes[tags.index("arbitraryMFD")]
elif "YoungsCoppersmithMFD" in tags:
mfd_node = src.nodes[tags.index("YoungsCoppersmithMFD")]
else:
raise ValueError("Source %s contains no supported MFD type!" % src.tag)
data = []
rates = []
for key, param, vtype in MFD_PARAMS:
if key in mfd_node.attrib and mfd_node.attrib[key] is not None:
data.append((param, mfd_node.attrib[key]))
else:
data.append((param, None))
if ("incrementalMFD" or "arbitraryMFD") in mfd_node.tag:
rates = ~mfd_node.occurRates
n_r = len(rates)
if n_r > MAX_RATES:
raise ValueError("Number of rates in source %s too large "
"to be placed into shapefile" % src.tag)
rate_dict = dict([(key, rates[i] if i < n_r else None)
for i, (key, _) in enumerate(RATE_PARAMS)])
elif "YoungsCoppersmithMFD" in mfd_node.tag:
rate_dict = dict([(key, mfd_node.attrib['characteristicRate'])
for i, (key, _) in enumerate(RATE_PARAMS)])
else:
rate_dict = dict([(key, None)
for i, (key, _) in enumerate(RATE_PARAMS)])
return dict(data), rate_dict | [
"def",
"extract_mfd_params",
"(",
"src",
")",
":",
"tags",
"=",
"get_taglist",
"(",
"src",
")",
"if",
"\"incrementalMFD\"",
"in",
"tags",
":",
"mfd_node",
"=",
"src",
".",
"nodes",
"[",
"tags",
".",
"index",
"(",
"\"incrementalMFD\"",
")",
"]",
"elif",
"\"truncGutenbergRichterMFD\"",
"in",
"tags",
":",
"mfd_node",
"=",
"src",
".",
"nodes",
"[",
"tags",
".",
"index",
"(",
"\"truncGutenbergRichterMFD\"",
")",
"]",
"elif",
"\"arbitraryMFD\"",
"in",
"tags",
":",
"mfd_node",
"=",
"src",
".",
"nodes",
"[",
"tags",
".",
"index",
"(",
"\"arbitraryMFD\"",
")",
"]",
"elif",
"\"YoungsCoppersmithMFD\"",
"in",
"tags",
":",
"mfd_node",
"=",
"src",
".",
"nodes",
"[",
"tags",
".",
"index",
"(",
"\"YoungsCoppersmithMFD\"",
")",
"]",
"else",
":",
"raise",
"ValueError",
"(",
"\"Source %s contains no supported MFD type!\"",
"%",
"src",
".",
"tag",
")",
"data",
"=",
"[",
"]",
"rates",
"=",
"[",
"]",
"for",
"key",
",",
"param",
",",
"vtype",
"in",
"MFD_PARAMS",
":",
"if",
"key",
"in",
"mfd_node",
".",
"attrib",
"and",
"mfd_node",
".",
"attrib",
"[",
"key",
"]",
"is",
"not",
"None",
":",
"data",
".",
"append",
"(",
"(",
"param",
",",
"mfd_node",
".",
"attrib",
"[",
"key",
"]",
")",
")",
"else",
":",
"data",
".",
"append",
"(",
"(",
"param",
",",
"None",
")",
")",
"if",
"(",
"\"incrementalMFD\"",
"or",
"\"arbitraryMFD\"",
")",
"in",
"mfd_node",
".",
"tag",
":",
"# Extract Rates",
"rates",
"=",
"~",
"mfd_node",
".",
"occurRates",
"n_r",
"=",
"len",
"(",
"rates",
")",
"if",
"n_r",
">",
"MAX_RATES",
":",
"raise",
"ValueError",
"(",
"\"Number of rates in source %s too large \"",
"\"to be placed into shapefile\"",
"%",
"src",
".",
"tag",
")",
"rate_dict",
"=",
"dict",
"(",
"[",
"(",
"key",
",",
"rates",
"[",
"i",
"]",
"if",
"i",
"<",
"n_r",
"else",
"None",
")",
"for",
"i",
",",
"(",
"key",
",",
"_",
")",
"in",
"enumerate",
"(",
"RATE_PARAMS",
")",
"]",
")",
"elif",
"\"YoungsCoppersmithMFD\"",
"in",
"mfd_node",
".",
"tag",
":",
"rate_dict",
"=",
"dict",
"(",
"[",
"(",
"key",
",",
"mfd_node",
".",
"attrib",
"[",
"'characteristicRate'",
"]",
")",
"for",
"i",
",",
"(",
"key",
",",
"_",
")",
"in",
"enumerate",
"(",
"RATE_PARAMS",
")",
"]",
")",
"else",
":",
"rate_dict",
"=",
"dict",
"(",
"[",
"(",
"key",
",",
"None",
")",
"for",
"i",
",",
"(",
"key",
",",
"_",
")",
"in",
"enumerate",
"(",
"RATE_PARAMS",
")",
"]",
")",
"return",
"dict",
"(",
"data",
")",
",",
"rate_dict"
]
| Extracts the MFD parameters from an object | [
"Extracts",
"the",
"MFD",
"parameters",
"from",
"an",
"object"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L322-L359 |
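A quick check of why the MFD-type test needs two separate in clauses: a parenthesised ("a" or "b") evaluates to "a", so a single membership test would silently ignore the second tag:

tag = "arbitraryMFD"
print(("incrementalMFD" or "arbitraryMFD") in tag)       # False: only "incrementalMFD" is tested
print("incrementalMFD" in tag or "arbitraryMFD" in tag)  # True: both tags are tested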
gem/oq-engine | openquake/commonlib/shapefileparser.py | extract_source_hypocentral_depths | def extract_source_hypocentral_depths(src):
"""
Extract source hypocentral depths.
"""
if "pointSource" not in src.tag and "areaSource" not in src.tag:
hds = dict([(key, None) for key, _ in HDEPTH_PARAMS])
hdsw = dict([(key, None) for key, _ in HDW_PARAMS])
return hds, hdsw
tags = get_taglist(src)
hdd_nodeset = src.nodes[tags.index("hypoDepthDist")]
if len(hdd_nodeset) > MAX_HYPO_DEPTHS:
raise ValueError("Number of hypocentral depths %s exceeds stated "
"maximum of %s" % (str(len(hdd_nodeset)),
str(MAX_HYPO_DEPTHS)))
if len(hdd_nodeset):
hds = []
hdws = []
for hdd_node in hdd_nodeset:
hds.append(float(hdd_node.attrib["depth"]))
hdws.append(float(hdd_node.attrib["probability"]))
hds = expand_src_param(hds, HDEPTH_PARAMS)
hdsw = expand_src_param(hdws, HDW_PARAMS)
else:
hds = dict([(key, None) for key, _ in HDEPTH_PARAMS])
hdsw = dict([(key, None) for key, _ in HDW_PARAMS])
return hds, hdsw | python | def extract_source_hypocentral_depths(src):
if "pointSource" not in src.tag and "areaSource" not in src.tag:
hds = dict([(key, None) for key, _ in HDEPTH_PARAMS])
hdsw = dict([(key, None) for key, _ in HDW_PARAMS])
return hds, hdsw
tags = get_taglist(src)
hdd_nodeset = src.nodes[tags.index("hypoDepthDist")]
if len(hdd_nodeset) > MAX_HYPO_DEPTHS:
raise ValueError("Number of hypocentral depths %s exceeds stated "
"maximum of %s" % (str(len(hdd_nodeset)),
str(MAX_HYPO_DEPTHS)))
if len(hdd_nodeset):
hds = []
hdws = []
for hdd_node in hdd_nodeset:
hds.append(float(hdd_node.attrib["depth"]))
hdws.append(float(hdd_node.attrib["probability"]))
hds = expand_src_param(hds, HDEPTH_PARAMS)
hdsw = expand_src_param(hdws, HDW_PARAMS)
else:
hds = dict([(key, None) for key, _ in HDEPTH_PARAMS])
hdsw = dict([(key, None) for key, _ in HDW_PARAMS])
return hds, hdsw | [
"def",
"extract_source_hypocentral_depths",
"(",
"src",
")",
":",
"if",
"\"pointSource\"",
"not",
"in",
"src",
".",
"tag",
"and",
"\"areaSource\"",
"not",
"in",
"src",
".",
"tag",
":",
"hds",
"=",
"dict",
"(",
"[",
"(",
"key",
",",
"None",
")",
"for",
"key",
",",
"_",
"in",
"HDEPTH_PARAMS",
"]",
")",
"hdsw",
"=",
"dict",
"(",
"[",
"(",
"key",
",",
"None",
")",
"for",
"key",
",",
"_",
"in",
"HDW_PARAMS",
"]",
")",
"return",
"hds",
",",
"hdsw",
"tags",
"=",
"get_taglist",
"(",
"src",
")",
"hdd_nodeset",
"=",
"src",
".",
"nodes",
"[",
"tags",
".",
"index",
"(",
"\"hypoDepthDist\"",
")",
"]",
"if",
"len",
"(",
"hdd_nodeset",
")",
">",
"MAX_HYPO_DEPTHS",
":",
"raise",
"ValueError",
"(",
"\"Number of hypocentral depths %s exceeds stated \"",
"\"maximum of %s\"",
"%",
"(",
"str",
"(",
"len",
"(",
"hdd_nodeset",
")",
")",
",",
"str",
"(",
"MAX_HYPO_DEPTHS",
")",
")",
")",
"if",
"len",
"(",
"hdd_nodeset",
")",
":",
"hds",
"=",
"[",
"]",
"hdws",
"=",
"[",
"]",
"for",
"hdd_node",
"in",
"hdd_nodeset",
":",
"hds",
".",
"append",
"(",
"float",
"(",
"hdd_node",
".",
"attrib",
"[",
"\"depth\"",
"]",
")",
")",
"hdws",
".",
"append",
"(",
"float",
"(",
"hdd_node",
".",
"attrib",
"[",
"\"probability\"",
"]",
")",
")",
"hds",
"=",
"expand_src_param",
"(",
"hds",
",",
"HDEPTH_PARAMS",
")",
"hdsw",
"=",
"expand_src_param",
"(",
"hdws",
",",
"HDW_PARAMS",
")",
"else",
":",
"hds",
"=",
"dict",
"(",
"[",
"(",
"key",
",",
"None",
")",
"for",
"key",
",",
"_",
"in",
"HDEPTH_PARAMS",
"]",
")",
"hdsw",
"=",
"dict",
"(",
"[",
"(",
"key",
",",
"None",
")",
"for",
"key",
",",
"_",
"in",
"HDW_PARAMS",
"]",
")",
"return",
"hds",
",",
"hdsw"
]
| Extract source hypocentral depths. | [
"Extract",
"source",
"hypocentral",
"depths",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L397-L425 |
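The extraction loop pairs each child node's depth with its probability before padding via expand_src_param; with plain dicts standing in for the hypoDepthDist nodes:

hdd_nodeset = [{"depth": "5.0", "probability": "0.3"},   # stand-ins for the real nodes
               {"depth": "10.0", "probability": "0.7"}]
hds = [float(n["depth"]) for n in hdd_nodeset]           # [5.0, 10.0]
hdws = [float(n["probability"]) for n in hdd_nodeset]    # [0.3, 0.7]
assert abs(sum(hdws) - 1.0) < 1e-9                       # weights should sum to one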
gem/oq-engine | openquake/commonlib/shapefileparser.py | extract_source_planes_strikes_dips | def extract_source_planes_strikes_dips(src):
"""
Extract strike and dip angles for source defined by multiple planes.
"""
if "characteristicFaultSource" not in src.tag:
strikes = dict([(key, None) for key, _ in PLANES_STRIKES_PARAM])
dips = dict([(key, None) for key, _ in PLANES_DIPS_PARAM])
return strikes, dips
tags = get_taglist(src)
surface_set = src.nodes[tags.index("surface")]
strikes = []
dips = []
num_planes = 0
for surface in surface_set:
if "planarSurface" in surface.tag:
strikes.append(float(surface.attrib["strike"]))
dips.append(float(surface.attrib["dip"]))
num_planes += 1
if num_planes > MAX_PLANES:
raise ValueError("Number of planes in sourcs %s exceededs maximum "
"of %s" % (str(num_planes), str(MAX_PLANES)))
if num_planes:
strikes = expand_src_param(strikes, PLANES_STRIKES_PARAM)
dips = expand_src_param(dips, PLANES_DIPS_PARAM)
else:
strikes = dict([(key, None) for key, _ in PLANES_STRIKES_PARAM])
dips = dict([(key, None) for key, _ in PLANES_DIPS_PARAM])
return strikes, dips | python | def extract_source_planes_strikes_dips(src):
if "characteristicFaultSource" not in src.tag:
strikes = dict([(key, None) for key, _ in PLANES_STRIKES_PARAM])
dips = dict([(key, None) for key, _ in PLANES_DIPS_PARAM])
return strikes, dips
tags = get_taglist(src)
surface_set = src.nodes[tags.index("surface")]
strikes = []
dips = []
num_planes = 0
for surface in surface_set:
if "planarSurface" in surface.tag:
strikes.append(float(surface.attrib["strike"]))
dips.append(float(surface.attrib["dip"]))
num_planes += 1
if num_planes > MAX_PLANES:
raise ValueError("Number of planes in sourcs %s exceededs maximum "
"of %s" % (str(num_planes), str(MAX_PLANES)))
if num_planes:
strikes = expand_src_param(strikes, PLANES_STRIKES_PARAM)
dips = expand_src_param(dips, PLANES_DIPS_PARAM)
else:
strikes = dict([(key, None) for key, _ in PLANES_STRIKES_PARAM])
dips = dict([(key, None) for key, _ in PLANES_DIPS_PARAM])
return strikes, dips | [
"def",
"extract_source_planes_strikes_dips",
"(",
"src",
")",
":",
"if",
"\"characteristicFaultSource\"",
"not",
"in",
"src",
".",
"tag",
":",
"strikes",
"=",
"dict",
"(",
"[",
"(",
"key",
",",
"None",
")",
"for",
"key",
",",
"_",
"in",
"PLANES_STRIKES_PARAM",
"]",
")",
"dips",
"=",
"dict",
"(",
"[",
"(",
"key",
",",
"None",
")",
"for",
"key",
",",
"_",
"in",
"PLANES_DIPS_PARAM",
"]",
")",
"return",
"strikes",
",",
"dips",
"tags",
"=",
"get_taglist",
"(",
"src",
")",
"surface_set",
"=",
"src",
".",
"nodes",
"[",
"tags",
".",
"index",
"(",
"\"surface\"",
")",
"]",
"strikes",
"=",
"[",
"]",
"dips",
"=",
"[",
"]",
"num_planes",
"=",
"0",
"for",
"surface",
"in",
"surface_set",
":",
"if",
"\"planarSurface\"",
"in",
"surface",
".",
"tag",
":",
"strikes",
".",
"append",
"(",
"float",
"(",
"surface",
".",
"attrib",
"[",
"\"strike\"",
"]",
")",
")",
"dips",
".",
"append",
"(",
"float",
"(",
"surface",
".",
"attrib",
"[",
"\"dip\"",
"]",
")",
")",
"num_planes",
"+=",
"1",
"if",
"num_planes",
">",
"MAX_PLANES",
":",
"raise",
"ValueError",
"(",
"\"Number of planes in sourcs %s exceededs maximum \"",
"\"of %s\"",
"%",
"(",
"str",
"(",
"num_planes",
")",
",",
"str",
"(",
"MAX_PLANES",
")",
")",
")",
"if",
"num_planes",
":",
"strikes",
"=",
"expand_src_param",
"(",
"strikes",
",",
"PLANES_STRIKES_PARAM",
")",
"dips",
"=",
"expand_src_param",
"(",
"dips",
",",
"PLANES_DIPS_PARAM",
")",
"else",
":",
"strikes",
"=",
"dict",
"(",
"[",
"(",
"key",
",",
"None",
")",
"for",
"key",
",",
"_",
"in",
"PLANES_STRIKES_PARAM",
"]",
")",
"dips",
"=",
"dict",
"(",
"[",
"(",
"key",
",",
"None",
")",
"for",
"key",
",",
"_",
"in",
"PLANES_DIPS_PARAM",
"]",
")",
"return",
"strikes",
",",
"dips"
]
| Extract strike and dip angles for source defined by multiple planes. | [
"Extract",
"strike",
"and",
"dip",
"angles",
"for",
"source",
"defined",
"by",
"multiple",
"planes",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L428-L456 |
gem/oq-engine | openquake/commonlib/shapefileparser.py | set_params | def set_params(w, src):
"""
Set source parameters.
"""
params = extract_source_params(src)
# this is done because for characteristic sources geometry is in
# 'surface' attribute
params.update(extract_geometry_params(src))
mfd_pars, rate_pars = extract_mfd_params(src)
params.update(mfd_pars)
params.update(rate_pars)
strikes, dips, rakes, np_weights = extract_source_nodal_planes(src)
params.update(strikes)
params.update(dips)
params.update(rakes)
params.update(np_weights)
hds, hdsw = extract_source_hypocentral_depths(src)
params.update(hds)
params.update(hdsw)
pstrikes, pdips = extract_source_planes_strikes_dips(src)
params.update(pstrikes)
params.update(pdips)
params['sourcetype'] = striptag(src.tag)
w.record(**params) | python | def set_params(w, src):
params = extract_source_params(src)
params.update(extract_geometry_params(src))
mfd_pars, rate_pars = extract_mfd_params(src)
params.update(mfd_pars)
params.update(rate_pars)
strikes, dips, rakes, np_weights = extract_source_nodal_planes(src)
params.update(strikes)
params.update(dips)
params.update(rakes)
params.update(np_weights)
hds, hdsw = extract_source_hypocentral_depths(src)
params.update(hds)
params.update(hdsw)
pstrikes, pdips = extract_source_planes_strikes_dips(src)
params.update(pstrikes)
params.update(pdips)
params['sourcetype'] = striptag(src.tag)
w.record(**params) | [
"def",
"set_params",
"(",
"w",
",",
"src",
")",
":",
"params",
"=",
"extract_source_params",
"(",
"src",
")",
"# this is done because for characteristic sources geometry is in",
"# 'surface' attribute",
"params",
".",
"update",
"(",
"extract_geometry_params",
"(",
"src",
")",
")",
"mfd_pars",
",",
"rate_pars",
"=",
"extract_mfd_params",
"(",
"src",
")",
"params",
".",
"update",
"(",
"mfd_pars",
")",
"params",
".",
"update",
"(",
"rate_pars",
")",
"strikes",
",",
"dips",
",",
"rakes",
",",
"np_weights",
"=",
"extract_source_nodal_planes",
"(",
"src",
")",
"params",
".",
"update",
"(",
"strikes",
")",
"params",
".",
"update",
"(",
"dips",
")",
"params",
".",
"update",
"(",
"rakes",
")",
"params",
".",
"update",
"(",
"np_weights",
")",
"hds",
",",
"hdsw",
"=",
"extract_source_hypocentral_depths",
"(",
"src",
")",
"params",
".",
"update",
"(",
"hds",
")",
"params",
".",
"update",
"(",
"hdsw",
")",
"pstrikes",
",",
"pdips",
"=",
"extract_source_planes_strikes_dips",
"(",
"src",
")",
"params",
".",
"update",
"(",
"pstrikes",
")",
"params",
".",
"update",
"(",
"pdips",
")",
"params",
"[",
"'sourcetype'",
"]",
"=",
"striptag",
"(",
"src",
".",
"tag",
")",
"w",
".",
"record",
"(",
"*",
"*",
"params",
")"
]
| Set source parameters. | [
"Set",
"source",
"parameters",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L459-L486 |
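set_params is a pipeline of dict.update calls flattening every extractor's output into one shapefile record; merging with unpacking gives the same result when the key sets are disjoint:

base = {"id": "1", "name": "demo"}             # made-up extractor outputs
geom = {"upperdepth": 0.0, "lowerdepth": 20.0}
mfd = {"min_mag": 4.5, "max_mag": 7.5}

params = {**base, **geom, **mfd}               # equivalent to chained .update() calls
params["sourcetype"] = "pointSource"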
gem/oq-engine | openquake/commonlib/shapefileparser.py | set_area_geometry | def set_area_geometry(w, src):
"""
Set area polygon as shapefile geometry
"""
assert "areaSource" in src.tag
geometry_node = src.nodes[get_taglist(src).index("areaGeometry")]
area_attrs = parse_area_geometry(geometry_node)
w.poly(parts=[area_attrs["polygon"].tolist()]) | python | def set_area_geometry(w, src):
assert "areaSource" in src.tag
geometry_node = src.nodes[get_taglist(src).index("areaGeometry")]
area_attrs = parse_area_geometry(geometry_node)
w.poly(parts=[area_attrs["polygon"].tolist()]) | [
"def",
"set_area_geometry",
"(",
"w",
",",
"src",
")",
":",
"assert",
"\"areaSource\"",
"in",
"src",
".",
"tag",
"geometry_node",
"=",
"src",
".",
"nodes",
"[",
"get_taglist",
"(",
"src",
")",
".",
"index",
"(",
"\"areaGeometry\"",
")",
"]",
"area_attrs",
"=",
"parse_area_geometry",
"(",
"geometry_node",
")",
"w",
".",
"poly",
"(",
"parts",
"=",
"[",
"area_attrs",
"[",
"\"polygon\"",
"]",
".",
"tolist",
"(",
")",
"]",
")"
]
| Set area polygon as shapefile geometry | [
"Set",
"area",
"polygon",
"as",
"shapefile",
"geometry"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L489-L496 |
gem/oq-engine | openquake/commonlib/shapefileparser.py | set_point_geometry | def set_point_geometry(w, src):
"""
Set point location as shapefile geometry.
"""
assert "pointSource" in src.tag
geometry_node = src.nodes[get_taglist(src).index("pointGeometry")]
point_attrs = parse_point_geometry(geometry_node)
w.point(point_attrs["point"][0], point_attrs["point"][1]) | python | def set_point_geometry(w, src):
assert "pointSource" in src.tag
geometry_node = src.nodes[get_taglist(src).index("pointGeometry")]
point_attrs = parse_point_geometry(geometry_node)
w.point(point_attrs["point"][0], point_attrs["point"][1]) | [
"def",
"set_point_geometry",
"(",
"w",
",",
"src",
")",
":",
"assert",
"\"pointSource\"",
"in",
"src",
".",
"tag",
"geometry_node",
"=",
"src",
".",
"nodes",
"[",
"get_taglist",
"(",
"src",
")",
".",
"index",
"(",
"\"pointGeometry\"",
")",
"]",
"point_attrs",
"=",
"parse_point_geometry",
"(",
"geometry_node",
")",
"w",
".",
"point",
"(",
"point_attrs",
"[",
"\"point\"",
"]",
"[",
"0",
"]",
",",
"point_attrs",
"[",
"\"point\"",
"]",
"[",
"1",
"]",
")"
]
| Set point location as shapefile geometry. | [
"Set",
"point",
"location",
"as",
"shapefile",
"geometry",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L499-L506 |
gem/oq-engine | openquake/commonlib/shapefileparser.py | set_simple_fault_geometry | def set_simple_fault_geometry(w, src):
"""
Set simple fault trace coordinates as shapefile geometry.
:parameter w:
Writer
:parameter src:
source
"""
assert "simpleFaultSource" in src.tag
geometry_node = src.nodes[get_taglist(src).index("simpleFaultGeometry")]
fault_attrs = parse_simple_fault_geometry(geometry_node)
w.line(parts=[fault_attrs["trace"].tolist()]) | python | def set_simple_fault_geometry(w, src):
assert "simpleFaultSource" in src.tag
geometry_node = src.nodes[get_taglist(src).index("simpleFaultGeometry")]
fault_attrs = parse_simple_fault_geometry(geometry_node)
w.line(parts=[fault_attrs["trace"].tolist()]) | [
"def",
"set_simple_fault_geometry",
"(",
"w",
",",
"src",
")",
":",
"assert",
"\"simpleFaultSource\"",
"in",
"src",
".",
"tag",
"geometry_node",
"=",
"src",
".",
"nodes",
"[",
"get_taglist",
"(",
"src",
")",
".",
"index",
"(",
"\"simpleFaultGeometry\"",
")",
"]",
"fault_attrs",
"=",
"parse_simple_fault_geometry",
"(",
"geometry_node",
")",
"w",
".",
"line",
"(",
"parts",
"=",
"[",
"fault_attrs",
"[",
"\"trace\"",
"]",
".",
"tolist",
"(",
")",
"]",
")"
]
| Set simple fault trace coordinates as shapefile geometry.
:parameter w:
Writer
:parameter src:
source | [
"Set",
"simple",
"fault",
"trace",
"coordinates",
"as",
"shapefile",
"geometry",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L509-L521 |
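The geometry setters map onto pyshp 1.x calls: point() for point sources, line(parts=...) for fault traces, poly(parts=...) for area polygons. A toy trace writer, again assuming the 1.x Writer API used module-wide:

import shapefile  # pyshp 1.x assumed

w = shapefile.Writer(shapefile.POLYLINE)
w.field("sourcetype", "C")
w.line(parts=[[[30.0, 40.0], [30.5, 40.2], [31.0, 40.5]]])  # one trace, three vertices
w.record(sourcetype="simpleFaultSource")
w.save("demo_trace")                                        # writes demo_trace.shp/.shx/.dbf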
gem/oq-engine | openquake/commonlib/shapefileparser.py | set_simple_fault_geometry_3D | def set_simple_fault_geometry_3D(w, src):
"""
Builds a 3D polygon from a node instance
"""
assert "simpleFaultSource" in src.tag
geometry_node = src.nodes[get_taglist(src).index("simpleFaultGeometry")]
fault_attrs = parse_simple_fault_geometry(geometry_node)
build_polygon_from_fault_attrs(w, fault_attrs) | python | def set_simple_fault_geometry_3D(w, src):
assert "simpleFaultSource" in src.tag
geometry_node = src.nodes[get_taglist(src).index("simpleFaultGeometry")]
fault_attrs = parse_simple_fault_geometry(geometry_node)
build_polygon_from_fault_attrs(w, fault_attrs) | [
"def",
"set_simple_fault_geometry_3D",
"(",
"w",
",",
"src",
")",
":",
"assert",
"\"simpleFaultSource\"",
"in",
"src",
".",
"tag",
"geometry_node",
"=",
"src",
".",
"nodes",
"[",
"get_taglist",
"(",
"src",
")",
".",
"index",
"(",
"\"simpleFaultGeometry\"",
")",
"]",
"fault_attrs",
"=",
"parse_simple_fault_geometry",
"(",
"geometry_node",
")",
"build_polygon_from_fault_attrs",
"(",
"w",
",",
"fault_attrs",
")"
]
| Builds a 3D polygon from a node instance | [
"Builds",
"a",
"3D",
"polygon",
"from",
"a",
"node",
"instance"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L543-L550 |
gem/oq-engine | openquake/commonlib/shapefileparser.py | SourceModel.appraise_source_model | def appraise_source_model(self):
"""
Identify parameters defined in NRML source model file, so that
shapefile contains only source model specific fields.
"""
for src in self.sources:
# source params
src_taglist = get_taglist(src)
if "areaSource" in src.tag:
self.has_area_source = True
npd_node = src.nodes[src_taglist.index("nodalPlaneDist")]
npd_size = len(npd_node)
hdd_node = src.nodes[src_taglist.index("hypoDepthDist")]
hdd_size = len(hdd_node)
self.num_np = (npd_size if npd_size > self.num_np
else self.num_np)
self.num_hd = (hdd_size if hdd_size > self.num_hd
else self.num_hd)
elif "pointSource" in src.tag:
self.has_point_source = True
npd_node = src.nodes[src_taglist.index("nodalPlaneDist")]
npd_size = len(npd_node)
hdd_node = src.nodes[src_taglist.index("hypoDepthDist")]
hdd_size = len(hdd_node)
self.num_np = (npd_size if npd_size > self.num_np
else self.num_np)
self.num_hd = (hdd_size if hdd_size > self.num_hd
else self.num_hd)
elif "simpleFaultSource" in src.tag:
self.has_simple_fault_geometry = True
elif "complexFaultSource" in src.tag:
self.has_complex_fault_geometry = True
elif "characteristicFaultSource" in src.tag:
# Get the surface node
surface_node = src.nodes[src_taglist.index("surface")]
p_size = 0
for surface in surface_node.nodes:
if "simpleFaultGeometry" in surface.tag:
self.has_simple_fault_geometry = True
elif "complexFaultGeometry" in surface.tag:
self.has_complex_fault_geometry = True
elif "planarSurface" in surface.tag:
self.has_planar_geometry = True
p_size += 1
self.num_p = p_size if p_size > self.num_p else self.num_p
else:
pass
# MFD params
if "truncGutenbergRichterMFD" in src_taglist:
self.has_mfd_gr = True
elif "incrementalMFD" in src_taglist:
self.has_mfd_incremental = True
# Get rate size
mfd_node = src.nodes[src_taglist.index("incrementalMFD")]
r_size = len(mfd_node.nodes[0].text)
self.num_r = r_size if r_size > self.num_r else self.num_r
else:
pass | python | def appraise_source_model(self):
for src in self.sources:
src_taglist = get_taglist(src)
if "areaSource" in src.tag:
self.has_area_source = True
npd_node = src.nodes[src_taglist.index("nodalPlaneDist")]
npd_size = len(npd_node)
hdd_node = src.nodes[src_taglist.index("hypoDepthDist")]
hdd_size = len(hdd_node)
self.num_np = (npd_size if npd_size > self.num_np
else self.num_np)
self.num_hd = (hdd_size if hdd_size > self.num_hd
else self.num_hd)
elif "pointSource" in src.tag:
self.has_point_source = True
npd_node = src.nodes[src_taglist.index("nodalPlaneDist")]
npd_size = len(npd_node)
hdd_node = src.nodes[src_taglist.index("hypoDepthDist")]
hdd_size = len(hdd_node)
self.num_np = (npd_size if npd_size > self.num_np
else self.num_np)
self.num_hd = (hdd_size if hdd_size > self.num_hd
else self.num_hd)
elif "simpleFaultSource" in src.tag:
self.has_simple_fault_geometry = True
elif "complexFaultSource" in src.tag:
self.has_complex_fault_geometry = True
elif "characteristicFaultSource" in src.tag:
surface_node = src.nodes[src_taglist.index("surface")]
p_size = 0
for surface in surface_node.nodes:
if "simpleFaultGeometry" in surface.tag:
self.has_simple_fault_geometry = True
elif "complexFaultGeometry" in surface.tag:
self.has_complex_fault_geometry = True
elif "planarSurface" in surface.tag:
self.has_planar_geometry = True
p_size += 1
self.num_p = p_size if p_size > self.num_p else self.num_p
else:
pass
if "truncGutenbergRichterMFD" in src_taglist:
self.has_mfd_gr = True
elif "incrementalMFD" in src_taglist:
self.has_mfd_incremental = True
mfd_node = src.nodes[src_taglist.index("incrementalMFD")]
r_size = len(mfd_node.nodes[0].text)
self.num_r = r_size if r_size > self.num_r else self.num_r
else:
pass | [
"def",
"appraise_source_model",
"(",
"self",
")",
":",
"for",
"src",
"in",
"self",
".",
"sources",
":",
"# source params",
"src_taglist",
"=",
"get_taglist",
"(",
"src",
")",
"if",
"\"areaSource\"",
"in",
"src",
".",
"tag",
":",
"self",
".",
"has_area_source",
"=",
"True",
"npd_node",
"=",
"src",
".",
"nodes",
"[",
"src_taglist",
".",
"index",
"(",
"\"nodalPlaneDist\"",
")",
"]",
"npd_size",
"=",
"len",
"(",
"npd_node",
")",
"hdd_node",
"=",
"src",
".",
"nodes",
"[",
"src_taglist",
".",
"index",
"(",
"\"hypoDepthDist\"",
")",
"]",
"hdd_size",
"=",
"len",
"(",
"hdd_node",
")",
"self",
".",
"num_np",
"=",
"(",
"npd_size",
"if",
"npd_size",
">",
"self",
".",
"num_np",
"else",
"self",
".",
"num_np",
")",
"self",
".",
"num_hd",
"=",
"(",
"hdd_size",
"if",
"hdd_size",
">",
"self",
".",
"num_hd",
"else",
"self",
".",
"num_hd",
")",
"elif",
"\"pointSource\"",
"in",
"src",
".",
"tag",
":",
"self",
".",
"has_point_source",
"=",
"True",
"npd_node",
"=",
"src",
".",
"nodes",
"[",
"src_taglist",
".",
"index",
"(",
"\"nodalPlaneDist\"",
")",
"]",
"npd_size",
"=",
"len",
"(",
"npd_node",
")",
"hdd_node",
"=",
"src",
".",
"nodes",
"[",
"src_taglist",
".",
"index",
"(",
"\"hypoDepthDist\"",
")",
"]",
"hdd_size",
"=",
"len",
"(",
"hdd_node",
")",
"self",
".",
"num_np",
"=",
"(",
"npd_size",
"if",
"npd_size",
">",
"self",
".",
"num_np",
"else",
"self",
".",
"num_np",
")",
"self",
".",
"num_hd",
"=",
"(",
"hdd_size",
"if",
"hdd_size",
">",
"self",
".",
"num_hd",
"else",
"self",
".",
"num_hd",
")",
"elif",
"\"simpleFaultSource\"",
"in",
"src",
".",
"tag",
":",
"self",
".",
"has_simple_fault_geometry",
"=",
"True",
"elif",
"\"complexFaultSource\"",
"in",
"src",
".",
"tag",
":",
"self",
".",
"has_complex_fault_geometry",
"=",
"True",
"elif",
"\"characteristicFaultSource\"",
"in",
"src",
".",
"tag",
":",
"# Get the surface node",
"surface_node",
"=",
"src",
".",
"nodes",
"[",
"src_taglist",
".",
"index",
"(",
"\"surface\"",
")",
"]",
"p_size",
"=",
"0",
"for",
"surface",
"in",
"surface_node",
".",
"nodes",
":",
"if",
"\"simpleFaultGeometry\"",
"in",
"surface",
".",
"tag",
":",
"self",
".",
"has_simple_fault_geometry",
"=",
"True",
"elif",
"\"complexFaultGeometry\"",
"in",
"surface",
".",
"tag",
":",
"self",
".",
"has_complex_fault_geometry",
"=",
"True",
"elif",
"\"planarSurface\"",
"in",
"surface",
".",
"tag",
":",
"self",
".",
"has_planar_geometry",
"=",
"True",
"p_size",
"+=",
"1",
"self",
".",
"num_p",
"=",
"p_size",
"if",
"p_size",
">",
"self",
".",
"num_p",
"else",
"self",
".",
"num_p",
"else",
":",
"pass",
"# MFD params",
"if",
"\"truncGutenbergRichterMFD\"",
"in",
"src_taglist",
":",
"self",
".",
"has_mfd_gr",
"=",
"True",
"elif",
"\"incrementalMFD\"",
"in",
"src_taglist",
":",
"self",
".",
"has_mfd_incremental",
"=",
"True",
"# Get rate size",
"mfd_node",
"=",
"src",
".",
"nodes",
"[",
"src_taglist",
".",
"index",
"(",
"\"incrementalMFD\"",
")",
"]",
"r_size",
"=",
"len",
"(",
"mfd_node",
".",
"nodes",
"[",
"0",
"]",
".",
"text",
")",
"self",
".",
"num_r",
"=",
"r_size",
"if",
"r_size",
">",
"self",
".",
"num_r",
"else",
"self",
".",
"num_r",
"else",
":",
"pass"
]
| Identify parameters defined in NRML source model file, so that
shapefile contains only source model specific fields. | [
"Identify",
"parameters",
"defined",
"in",
"NRML",
"source",
"model",
"file",
"so",
"that",
"shapefile",
"contains",
"only",
"source",
"model",
"specific",
"fields",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L826-L884 |
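The repeated size-tracking conditionals in appraise_source_model are running maxima; max() expresses the same update in one call:

num_np = 0
for npd_size in (2, 5, 3):             # sizes seen while scanning sources
    num_np = max(num_np, npd_size)     # same as: npd_size if npd_size > num_np else num_np
print(num_np)                          # 5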
gem/oq-engine | openquake/commonlib/shapefileparser.py | SourceModelParser.read | def read(self, nrml_file, validate=False,
simple_fault_spacing=1.0, complex_mesh_spacing=5.0,
mfd_spacing=0.1):
"""
Build the source model from nrml format
"""
self.source_file = nrml_file
if validate:
converter = SourceConverter(1.0, simple_fault_spacing,
complex_mesh_spacing,
mfd_spacing,
10.0)
converter.fname = nrml_file
root = nrml.read(nrml_file)
if root['xmlns'] == 'http://openquake.org/xmlns/nrml/0.4':
sg_nodes = [root.sourceModel.nodes]
else: # NRML 0.5
sg_nodes = root.sourceModel.nodes
sources = []
for sg_node in sg_nodes:
for no, src_node in enumerate(sg_node, 1):
if validate:
print("Validating Source %s" % src_node.attrib["id"])
converter.convert_node(src_node)
sources.append(src_node)
return SourceModel(sources) | python | def read(self, nrml_file, validate=False,
simple_fault_spacing=1.0, complex_mesh_spacing=5.0,
mfd_spacing=0.1):
self.source_file = nrml_file
if validate:
converter = SourceConverter(1.0, simple_fault_spacing,
complex_mesh_spacing,
mfd_spacing,
10.0)
converter.fname = nrml_file
root = nrml.read(nrml_file)
if root['xmlns'] == 'http://openquake.org/xmlns/nrml/0.4':
sg_nodes = [root.sourceModel.nodes]
else:
sg_nodes = root.sourceModel.nodes
sources = []
for sg_node in sg_nodes:
for no, src_node in enumerate(sg_node, 1):
if validate:
print("Validating Source %s" % src_node.attrib["id"])
converter.convert_node(src_node)
sources.append(src_node)
return SourceModel(sources) | [
"def",
"read",
"(",
"self",
",",
"nrml_file",
",",
"validate",
"=",
"False",
",",
"simple_fault_spacing",
"=",
"1.0",
",",
"complex_mesh_spacing",
"=",
"5.0",
",",
"mfd_spacing",
"=",
"0.1",
")",
":",
"self",
".",
"source_file",
"=",
"nrml_file",
"if",
"validate",
":",
"converter",
"=",
"SourceConverter",
"(",
"1.0",
",",
"simple_fault_spacing",
",",
"complex_mesh_spacing",
",",
"mfd_spacing",
",",
"10.0",
")",
"converter",
".",
"fname",
"=",
"nrml_file",
"root",
"=",
"nrml",
".",
"read",
"(",
"nrml_file",
")",
"if",
"root",
"[",
"'xmlns'",
"]",
"==",
"'http://openquake.org/xmlns/nrml/0.4'",
":",
"sg_nodes",
"=",
"[",
"root",
".",
"sourceModel",
".",
"nodes",
"]",
"else",
":",
"# NRML 0.5",
"sg_nodes",
"=",
"root",
".",
"sourceModel",
".",
"nodes",
"sources",
"=",
"[",
"]",
"for",
"sg_node",
"in",
"sg_nodes",
":",
"for",
"no",
",",
"src_node",
"in",
"enumerate",
"(",
"sg_node",
",",
"1",
")",
":",
"if",
"validate",
":",
"print",
"(",
"\"Validating Source %s\"",
"%",
"src_node",
".",
"attrib",
"[",
"\"id\"",
"]",
")",
"converter",
".",
"convert_node",
"(",
"src_node",
")",
"sources",
".",
"append",
"(",
"src_node",
")",
"return",
"SourceModel",
"(",
"sources",
")"
]
| Build the source model from nrml format | [
"Build",
"the",
"source",
"model",
"from",
"nrml",
"format"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L910-L935 |
gem/oq-engine | openquake/commonlib/shapefileparser.py | SourceModelParser.write | def write(self, destination, source_model, name=None):
"""
Exports to NRML
"""
if os.path.exists(destination):
os.remove(destination)
self.destination = destination
if name:
source_model.name = name
output_source_model = Node("sourceModel", {"name": name})
dic = groupby(source_model.sources,
operator.itemgetter('tectonicRegion'))
for i, (trt, srcs) in enumerate(dic.items(), 1):
output_source_model.append(
Node('sourceGroup',
{'tectonicRegion': trt, 'name': 'group %d' % i},
nodes=srcs))
print("Exporting Source Model to %s" % self.destination)
with open(self.destination, "wb") as f:
nrml.write([output_source_model], f, "%s") | python | def write(self, destination, source_model, name=None):
if os.path.exists(destination):
os.remove(destination)
self.destination = destination
if name:
source_model.name = name
output_source_model = Node("sourceModel", {"name": name})
dic = groupby(source_model.sources,
operator.itemgetter('tectonicRegion'))
for i, (trt, srcs) in enumerate(dic.items(), 1):
output_source_model.append(
Node('sourceGroup',
{'tectonicRegion': trt, 'name': 'group %d' % i},
nodes=srcs))
print("Exporting Source Model to %s" % self.destination)
with open(self.destination, "wb") as f:
nrml.write([output_source_model], f, "%s") | [
"def",
"write",
"(",
"self",
",",
"destination",
",",
"source_model",
",",
"name",
"=",
"None",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"destination",
")",
":",
"os",
".",
"remove",
"(",
"destination",
")",
"self",
".",
"destination",
"=",
"destination",
"if",
"name",
":",
"source_model",
".",
"name",
"=",
"name",
"output_source_model",
"=",
"Node",
"(",
"\"sourceModel\"",
",",
"{",
"\"name\"",
":",
"name",
"}",
")",
"dic",
"=",
"groupby",
"(",
"source_model",
".",
"sources",
",",
"operator",
".",
"itemgetter",
"(",
"'tectonicRegion'",
")",
")",
"for",
"i",
",",
"(",
"trt",
",",
"srcs",
")",
"in",
"enumerate",
"(",
"dic",
".",
"items",
"(",
")",
",",
"1",
")",
":",
"output_source_model",
".",
"append",
"(",
"Node",
"(",
"'sourceGroup'",
",",
"{",
"'tectonicRegion'",
":",
"trt",
",",
"'name'",
":",
"'group %d'",
"%",
"i",
"}",
",",
"nodes",
"=",
"srcs",
")",
")",
"print",
"(",
"\"Exporting Source Model to %s\"",
"%",
"self",
".",
"destination",
")",
"with",
"open",
"(",
"self",
".",
"destination",
",",
"\"wb\"",
")",
"as",
"f",
":",
"nrml",
".",
"write",
"(",
"[",
"output_source_model",
"]",
",",
"f",
",",
"\"%s\"",
")"
]
| Exports to NRML | [
"Exports",
"to",
"NRML"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L937-L956 |
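A hypothetical round trip through the NRML parser, assuming the openquake package is importable and model.xml is an existing NRML source model; the file names are placeholders:

from openquake.commonlib.shapefileparser import SourceModelParser

parser = SourceModelParser()
model = parser.read("model.xml", validate=False)
parser.write("model_out.xml", model, name="demo model")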
gem/oq-engine | openquake/commonlib/shapefileparser.py | ShapefileParser.filter_params | def filter_params(self, src_mod):
"""
        Remove params unneeded by source_model
"""
# point and area related params
STRIKE_PARAMS[src_mod.num_np:] = []
DIP_PARAMS[src_mod.num_np:] = []
RAKE_PARAMS[src_mod.num_np:] = []
NPW_PARAMS[src_mod.num_np:] = []
HDEPTH_PARAMS[src_mod.num_hd:] = []
HDW_PARAMS[src_mod.num_hd:] = []
# planar rupture related params
PLANES_STRIKES_PARAM[src_mod.num_p:] = []
PLANES_DIPS_PARAM[src_mod.num_p:] = []
# rate params
RATE_PARAMS[src_mod.num_r:] = []
if src_mod.has_simple_fault_geometry is False:
GEOMETRY_PARAMS.remove(('dip', 'dip', 'f'))
if (src_mod.has_simple_fault_geometry is False and
src_mod.has_complex_fault_geometry is False and
src_mod.has_planar_geometry is False):
BASE_PARAMS.remove(('rake', 'rake', 'f'))
if (src_mod.has_simple_fault_geometry is False and
src_mod.has_complex_fault_geometry is False and
src_mod.has_area_source is False and
src_mod.has_point_source is False):
GEOMETRY_PARAMS[:] = []
if src_mod.has_mfd_incremental is False:
MFD_PARAMS.remove(('binWidth', 'bin_width', 'f')) | python | def filter_params(self, src_mod):
STRIKE_PARAMS[src_mod.num_np:] = []
DIP_PARAMS[src_mod.num_np:] = []
RAKE_PARAMS[src_mod.num_np:] = []
NPW_PARAMS[src_mod.num_np:] = []
HDEPTH_PARAMS[src_mod.num_hd:] = []
HDW_PARAMS[src_mod.num_hd:] = []
PLANES_STRIKES_PARAM[src_mod.num_p:] = []
PLANES_DIPS_PARAM[src_mod.num_p:] = []
RATE_PARAMS[src_mod.num_r:] = []
if src_mod.has_simple_fault_geometry is False:
GEOMETRY_PARAMS.remove(('dip', 'dip', 'f'))
if (src_mod.has_simple_fault_geometry is False and
src_mod.has_complex_fault_geometry is False and
src_mod.has_planar_geometry is False):
BASE_PARAMS.remove(('rake', 'rake', 'f'))
if (src_mod.has_simple_fault_geometry is False and
src_mod.has_complex_fault_geometry is False and
src_mod.has_area_source is False and
src_mod.has_point_source is False):
GEOMETRY_PARAMS[:] = []
if src_mod.has_mfd_incremental is False:
MFD_PARAMS.remove(('binWidth', 'bin_width', 'f')) | [
"def",
"filter_params",
"(",
"self",
",",
"src_mod",
")",
":",
"# point and area related params",
"STRIKE_PARAMS",
"[",
"src_mod",
".",
"num_np",
":",
"]",
"=",
"[",
"]",
"DIP_PARAMS",
"[",
"src_mod",
".",
"num_np",
":",
"]",
"=",
"[",
"]",
"RAKE_PARAMS",
"[",
"src_mod",
".",
"num_np",
":",
"]",
"=",
"[",
"]",
"NPW_PARAMS",
"[",
"src_mod",
".",
"num_np",
":",
"]",
"=",
"[",
"]",
"HDEPTH_PARAMS",
"[",
"src_mod",
".",
"num_hd",
":",
"]",
"=",
"[",
"]",
"HDW_PARAMS",
"[",
"src_mod",
".",
"num_hd",
":",
"]",
"=",
"[",
"]",
"# planar rupture related params",
"PLANES_STRIKES_PARAM",
"[",
"src_mod",
".",
"num_p",
":",
"]",
"=",
"[",
"]",
"PLANES_DIPS_PARAM",
"[",
"src_mod",
".",
"num_p",
":",
"]",
"=",
"[",
"]",
"# rate params",
"RATE_PARAMS",
"[",
"src_mod",
".",
"num_r",
":",
"]",
"=",
"[",
"]",
"if",
"src_mod",
".",
"has_simple_fault_geometry",
"is",
"False",
":",
"GEOMETRY_PARAMS",
".",
"remove",
"(",
"(",
"'dip'",
",",
"'dip'",
",",
"'f'",
")",
")",
"if",
"(",
"src_mod",
".",
"has_simple_fault_geometry",
"is",
"False",
"and",
"src_mod",
".",
"has_complex_fault_geometry",
"is",
"False",
"and",
"src_mod",
".",
"has_planar_geometry",
"is",
"False",
")",
":",
"BASE_PARAMS",
".",
"remove",
"(",
"(",
"'rake'",
",",
"'rake'",
",",
"'f'",
")",
")",
"if",
"(",
"src_mod",
".",
"has_simple_fault_geometry",
"is",
"False",
"and",
"src_mod",
".",
"has_complex_fault_geometry",
"is",
"False",
"and",
"src_mod",
".",
"has_area_source",
"is",
"False",
"and",
"src_mod",
".",
"has_point_source",
"is",
"False",
")",
":",
"GEOMETRY_PARAMS",
"[",
":",
"]",
"=",
"[",
"]",
"if",
"src_mod",
".",
"has_mfd_incremental",
"is",
"False",
":",
"MFD_PARAMS",
".",
"remove",
"(",
"(",
"'binWidth'",
",",
"'bin_width'",
",",
"'f'",
")",
")"
]
| Remove params unneeded by source_model | [
"Remove",
"params",
"uneeded",
"by",
"source_model"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L960-L992 |
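filter_params truncates with slice assignment (LIST[n:] = []) rather than rebinding the name; that mutates the list object in place, so every other reference to the same parameter table sees the shorter version:

params = [("s1", "f"), ("s2", "f"), ("s3", "f")]
alias = params            # e.g. another module's reference to the table
params[1:] = []           # in-place truncation, as in filter_params
print(alias)              # [('s1', 'f')] -- the alias shrank too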
gem/oq-engine | openquake/commonlib/shapefileparser.py | ShapefileParser.read | def read(self, input_shapefile, validate=False,
simple_fault_spacing=1.0, complex_mesh_spacing=5.0,
mfd_spacing=0.1):
"""
Build the source model from nrml format
"""
reader = shapefile.Reader(input_shapefile)
fields = [field[0] for field in reader.fields[1:]]
shapes = reader.shapes()
records = reader.records()
sources = []
if validate:
converter = SourceConverter(1.0, simple_fault_spacing,
complex_mesh_spacing,
mfd_spacing,
10.0)
for iloc in range(0, reader.numRecords):
# Build record dictionary
record = record_to_dict(records[iloc], fields)
shape = shapes[iloc]
if "pointSource" in record["sourcetype"]:
src = build_point_source_from_shp(shape, record)
elif "areaSource" in record["sourcetype"]:
src = build_area_source_from_shp(shape, record)
elif "simpleFaultSource" in record["sourcetype"]:
src = build_simple_fault_source_from_shp(shape, record)
elif "complexFaultSource" in record["sourcetype"]:
src = build_complex_fault_source_from_shp(shape, record)
elif "characteristicFaultSource" in record["sourcetype"]:
print("Characteristic Fault Source Not Yet Supported - Sorry!")
src = None
if src and validate:
print("Validating Source %s" % src.attrib["id"])
converter.convert_node(src)
if src:
sources.append(src)
return SourceModel(sources) | python | def read(self, input_shapefile, validate=False,
simple_fault_spacing=1.0, complex_mesh_spacing=5.0,
mfd_spacing=0.1):
reader = shapefile.Reader(input_shapefile)
fields = [field[0] for field in reader.fields[1:]]
shapes = reader.shapes()
records = reader.records()
sources = []
if validate:
converter = SourceConverter(1.0, simple_fault_spacing,
complex_mesh_spacing,
mfd_spacing,
10.0)
for iloc in range(0, reader.numRecords):
record = record_to_dict(records[iloc], fields)
shape = shapes[iloc]
if "pointSource" in record["sourcetype"]:
src = build_point_source_from_shp(shape, record)
elif "areaSource" in record["sourcetype"]:
src = build_area_source_from_shp(shape, record)
elif "simpleFaultSource" in record["sourcetype"]:
src = build_simple_fault_source_from_shp(shape, record)
elif "complexFaultSource" in record["sourcetype"]:
src = build_complex_fault_source_from_shp(shape, record)
elif "characteristicFaultSource" in record["sourcetype"]:
print("Characteristic Fault Source Not Yet Supported - Sorry!")
src = None
if src and validate:
print("Validating Source %s" % src.attrib["id"])
converter.convert_node(src)
if src:
sources.append(src)
return SourceModel(sources) | [
"def",
"read",
"(",
"self",
",",
"input_shapefile",
",",
"validate",
"=",
"False",
",",
"simple_fault_spacing",
"=",
"1.0",
",",
"complex_mesh_spacing",
"=",
"5.0",
",",
"mfd_spacing",
"=",
"0.1",
")",
":",
"reader",
"=",
"shapefile",
".",
"Reader",
"(",
"input_shapefile",
")",
"fields",
"=",
"[",
"field",
"[",
"0",
"]",
"for",
"field",
"in",
"reader",
".",
"fields",
"[",
"1",
":",
"]",
"]",
"shapes",
"=",
"reader",
".",
"shapes",
"(",
")",
"records",
"=",
"reader",
".",
"records",
"(",
")",
"sources",
"=",
"[",
"]",
"if",
"validate",
":",
"converter",
"=",
"SourceConverter",
"(",
"1.0",
",",
"simple_fault_spacing",
",",
"complex_mesh_spacing",
",",
"mfd_spacing",
",",
"10.0",
")",
"for",
"iloc",
"in",
"range",
"(",
"0",
",",
"reader",
".",
"numRecords",
")",
":",
"# Build record dictionary",
"record",
"=",
"record_to_dict",
"(",
"records",
"[",
"iloc",
"]",
",",
"fields",
")",
"shape",
"=",
"shapes",
"[",
"iloc",
"]",
"if",
"\"pointSource\"",
"in",
"record",
"[",
"\"sourcetype\"",
"]",
":",
"src",
"=",
"build_point_source_from_shp",
"(",
"shape",
",",
"record",
")",
"elif",
"\"areaSource\"",
"in",
"record",
"[",
"\"sourcetype\"",
"]",
":",
"src",
"=",
"build_area_source_from_shp",
"(",
"shape",
",",
"record",
")",
"elif",
"\"simpleFaultSource\"",
"in",
"record",
"[",
"\"sourcetype\"",
"]",
":",
"src",
"=",
"build_simple_fault_source_from_shp",
"(",
"shape",
",",
"record",
")",
"elif",
"\"complexFaultSource\"",
"in",
"record",
"[",
"\"sourcetype\"",
"]",
":",
"src",
"=",
"build_complex_fault_source_from_shp",
"(",
"shape",
",",
"record",
")",
"elif",
"\"characteristicFaultSource\"",
"in",
"record",
"[",
"\"sourcetype\"",
"]",
":",
"print",
"(",
"\"Characteristic Fault Source Not Yet Supported - Sorry!\"",
")",
"src",
"=",
"None",
"if",
"src",
"and",
"validate",
":",
"print",
"(",
"\"Validating Source %s\"",
"%",
"src",
".",
"attrib",
"[",
"\"id\"",
"]",
")",
"converter",
".",
"convert_node",
"(",
"src",
")",
"if",
"src",
":",
"sources",
".",
"append",
"(",
"src",
")",
"return",
"SourceModel",
"(",
"sources",
")"
]
| Build the source model from nrml format | [
"Build",
"the",
"source",
"model",
"from",
"nrml",
"format"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L994-L1030 |
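A hedged usage sketch for the `read` method above; the import path mirrors the record's `func_path_in_repository`, while the shapefile base name is an assumption:

```python
# Hypothetical call: parse a shapefile-based source model, validating each
# source through a SourceConverter with the given discretisation spacings.
from openquake.commonlib.shapefileparser import ShapefileParser

parser = ShapefileParser()
model = parser.read("sources",            # assumed shapefile base name
                    validate=True,
                    simple_fault_spacing=1.0,
                    complex_mesh_spacing=5.0,
                    mfd_spacing=0.1)
print("read %d sources" % len(model.sources))
```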
gem/oq-engine | openquake/commonlib/shapefileparser.py | ShapefileParser.write | def write(self, destination, source_model, name=None):
"""
Save sources - to multiple
    shapefiles corresponding to different source typologies/geometries
('_point', '_area', '_simple', '_complex', '_planar')
"""
if os.path.exists(destination + ".shp"):
os.system("rm %s.*" % destination)
self.destination = destination
self.filter_params(source_model)
w_area = shapefile.Writer(shapefile.POLYGON)
w_point = shapefile.Writer(shapefile.POINT)
w_simple = shapefile.Writer(shapefile.POLYLINE)
w_simple3d = shapefile.Writer(shapefile.POLYGONZ)
w_complex = shapefile.Writer(shapefile.POLYLINEZ)
w_planar = shapefile.Writer(shapefile.POLYGONZ)
register_fields(w_area)
register_fields(w_point)
register_fields(w_simple)
register_fields(w_simple3d)
register_fields(w_complex)
register_fields(w_planar)
for src in source_model.sources:
# Order is important here
if "areaSource" in src.tag:
set_params(w_area, src)
set_area_geometry(w_area, src)
elif "pointSource" in src.tag:
set_params(w_point, src)
set_point_geometry(w_point, src)
elif "complexFaultSource" in src.tag:
set_params(w_complex, src)
set_complex_fault_geometry(w_complex, src)
elif "simpleFaultSource" in src.tag:
set_params(w_simple, src)
set_simple_fault_geometry(w_simple, src)
# Create the 3D polygon
set_params(w_simple3d, src)
set_simple_fault_geometry_3D(w_simple3d, src)
elif "characteristicFaultSource" in src.tag:
src_taglist = get_taglist(src)
surface_node = src.nodes[src_taglist.index("surface")]
for subnode in surface_node:
if "simpleFaultGeometry" in subnode.tag:
set_params(w_simple, src)
set_params(w_simple3d, src)
elif "complexFaultGeometry" in subnode.tag:
set_params(w_complex, src)
elif "planarSurface" in subnode.tag:
set_params(w_planar, src)
else:
raise ValueError(
'Geometry class %s not recognized'
% subnode.tag)
set_characteristic_geometry(w_simple, w_simple3d,
w_complex, w_planar, src)
else:
raise ValueError('Source type %s not recognized'
% src.tag)
root = self.destination
if len(w_area.shapes()) > 0:
w_area.save('%s_area' % root)
if len(w_point.shapes()) > 0:
w_point.save('%s_point' % root)
if len(w_complex.shapes()) > 0:
w_complex.save('%s_complex' % root)
if len(w_simple.shapes()) > 0:
w_simple.save('%s_simple' % root)
w_simple3d.save('%s_simple3d' % root)
if len(w_planar.shapes()) > 0:
w_planar.save('%s_planar' % root) | python | def write(self, destination, source_model, name=None):
if os.path.exists(destination + ".shp"):
os.system("rm %s.*" % destination)
self.destination = destination
self.filter_params(source_model)
w_area = shapefile.Writer(shapefile.POLYGON)
w_point = shapefile.Writer(shapefile.POINT)
w_simple = shapefile.Writer(shapefile.POLYLINE)
w_simple3d = shapefile.Writer(shapefile.POLYGONZ)
w_complex = shapefile.Writer(shapefile.POLYLINEZ)
w_planar = shapefile.Writer(shapefile.POLYGONZ)
register_fields(w_area)
register_fields(w_point)
register_fields(w_simple)
register_fields(w_simple3d)
register_fields(w_complex)
register_fields(w_planar)
for src in source_model.sources:
if "areaSource" in src.tag:
set_params(w_area, src)
set_area_geometry(w_area, src)
elif "pointSource" in src.tag:
set_params(w_point, src)
set_point_geometry(w_point, src)
elif "complexFaultSource" in src.tag:
set_params(w_complex, src)
set_complex_fault_geometry(w_complex, src)
elif "simpleFaultSource" in src.tag:
set_params(w_simple, src)
set_simple_fault_geometry(w_simple, src)
set_params(w_simple3d, src)
set_simple_fault_geometry_3D(w_simple3d, src)
elif "characteristicFaultSource" in src.tag:
src_taglist = get_taglist(src)
surface_node = src.nodes[src_taglist.index("surface")]
for subnode in surface_node:
if "simpleFaultGeometry" in subnode.tag:
set_params(w_simple, src)
set_params(w_simple3d, src)
elif "complexFaultGeometry" in subnode.tag:
set_params(w_complex, src)
elif "planarSurface" in subnode.tag:
set_params(w_planar, src)
else:
raise ValueError(
'Geometry class %s not recognized'
% subnode.tag)
set_characteristic_geometry(w_simple, w_simple3d,
w_complex, w_planar, src)
else:
raise ValueError('Source type %s not recognized'
% src.tag)
root = self.destination
if len(w_area.shapes()) > 0:
w_area.save('%s_area' % root)
if len(w_point.shapes()) > 0:
w_point.save('%s_point' % root)
if len(w_complex.shapes()) > 0:
w_complex.save('%s_complex' % root)
if len(w_simple.shapes()) > 0:
w_simple.save('%s_simple' % root)
w_simple3d.save('%s_simple3d' % root)
if len(w_planar.shapes()) > 0:
w_planar.save('%s_planar' % root) | [
"def",
"write",
"(",
"self",
",",
"destination",
",",
"source_model",
",",
"name",
"=",
"None",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"destination",
"+",
"\".shp\"",
")",
":",
"os",
".",
"system",
"(",
"\"rm %s.*\"",
"%",
"destination",
")",
"self",
".",
"destination",
"=",
"destination",
"self",
".",
"filter_params",
"(",
"source_model",
")",
"w_area",
"=",
"shapefile",
".",
"Writer",
"(",
"shapefile",
".",
"POLYGON",
")",
"w_point",
"=",
"shapefile",
".",
"Writer",
"(",
"shapefile",
".",
"POINT",
")",
"w_simple",
"=",
"shapefile",
".",
"Writer",
"(",
"shapefile",
".",
"POLYLINE",
")",
"w_simple3d",
"=",
"shapefile",
".",
"Writer",
"(",
"shapefile",
".",
"POLYGONZ",
")",
"w_complex",
"=",
"shapefile",
".",
"Writer",
"(",
"shapefile",
".",
"POLYLINEZ",
")",
"w_planar",
"=",
"shapefile",
".",
"Writer",
"(",
"shapefile",
".",
"POLYGONZ",
")",
"register_fields",
"(",
"w_area",
")",
"register_fields",
"(",
"w_point",
")",
"register_fields",
"(",
"w_simple",
")",
"register_fields",
"(",
"w_simple3d",
")",
"register_fields",
"(",
"w_complex",
")",
"register_fields",
"(",
"w_planar",
")",
"for",
"src",
"in",
"source_model",
".",
"sources",
":",
"# Order is important here",
"if",
"\"areaSource\"",
"in",
"src",
".",
"tag",
":",
"set_params",
"(",
"w_area",
",",
"src",
")",
"set_area_geometry",
"(",
"w_area",
",",
"src",
")",
"elif",
"\"pointSource\"",
"in",
"src",
".",
"tag",
":",
"set_params",
"(",
"w_point",
",",
"src",
")",
"set_point_geometry",
"(",
"w_point",
",",
"src",
")",
"elif",
"\"complexFaultSource\"",
"in",
"src",
".",
"tag",
":",
"set_params",
"(",
"w_complex",
",",
"src",
")",
"set_complex_fault_geometry",
"(",
"w_complex",
",",
"src",
")",
"elif",
"\"simpleFaultSource\"",
"in",
"src",
".",
"tag",
":",
"set_params",
"(",
"w_simple",
",",
"src",
")",
"set_simple_fault_geometry",
"(",
"w_simple",
",",
"src",
")",
"# Create the 3D polygon",
"set_params",
"(",
"w_simple3d",
",",
"src",
")",
"set_simple_fault_geometry_3D",
"(",
"w_simple3d",
",",
"src",
")",
"elif",
"\"characteristicFaultSource\"",
"in",
"src",
".",
"tag",
":",
"src_taglist",
"=",
"get_taglist",
"(",
"src",
")",
"surface_node",
"=",
"src",
".",
"nodes",
"[",
"src_taglist",
".",
"index",
"(",
"\"surface\"",
")",
"]",
"for",
"subnode",
"in",
"surface_node",
":",
"if",
"\"simpleFaultGeometry\"",
"in",
"subnode",
".",
"tag",
":",
"set_params",
"(",
"w_simple",
",",
"src",
")",
"set_params",
"(",
"w_simple3d",
",",
"src",
")",
"elif",
"\"complexFaultGeometry\"",
"in",
"subnode",
".",
"tag",
":",
"set_params",
"(",
"w_complex",
",",
"src",
")",
"elif",
"\"planarSurface\"",
"in",
"subnode",
".",
"tag",
":",
"set_params",
"(",
"w_planar",
",",
"src",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Geometry class %s not recognized'",
"%",
"subnode",
".",
"tag",
")",
"set_characteristic_geometry",
"(",
"w_simple",
",",
"w_simple3d",
",",
"w_complex",
",",
"w_planar",
",",
"src",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Source type %s not recognized'",
"%",
"src",
".",
"tag",
")",
"root",
"=",
"self",
".",
"destination",
"if",
"len",
"(",
"w_area",
".",
"shapes",
"(",
")",
")",
">",
"0",
":",
"w_area",
".",
"save",
"(",
"'%s_area'",
"%",
"root",
")",
"if",
"len",
"(",
"w_point",
".",
"shapes",
"(",
")",
")",
">",
"0",
":",
"w_point",
".",
"save",
"(",
"'%s_point'",
"%",
"root",
")",
"if",
"len",
"(",
"w_complex",
".",
"shapes",
"(",
")",
")",
">",
"0",
":",
"w_complex",
".",
"save",
"(",
"'%s_complex'",
"%",
"root",
")",
"if",
"len",
"(",
"w_simple",
".",
"shapes",
"(",
")",
")",
">",
"0",
":",
"w_simple",
".",
"save",
"(",
"'%s_simple'",
"%",
"root",
")",
"w_simple3d",
".",
"save",
"(",
"'%s_simple3d'",
"%",
"root",
")",
"if",
"len",
"(",
"w_planar",
".",
"shapes",
"(",
")",
")",
">",
"0",
":",
"w_planar",
".",
"save",
"(",
"'%s_planar'",
"%",
"root",
")"
]
| Save sources - to multiple
shapefiles corresponding to different source typologies/geometries
('_point', '_area', '_simple', '_complex', '_planar') | [
"Save",
"sources",
"-",
"to",
"multiple",
"shapefiles",
"corresponding",
"to",
"different",
"source",
"typolgies",
"/",
"geometries",
"(",
"_point",
"_area",
"_simple",
"_complex",
"_planar",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L1032-L1107 |
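A hedged round-trip sketch for `write`; note that only the geometry writers that received at least one shape are saved, so the destination root (assumed here) yields one file set per source typology actually present:

```python
# Hypothetical round trip: read a model, then write it back out as
# per-geometry shapefiles such as "out_area.*", "out_point.*", etc.
from openquake.commonlib.shapefileparser import ShapefileParser

parser = ShapefileParser()
model = parser.read("sources")   # assumed input base name
parser.write("out", model)       # any existing "out.*" files are removed first
```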
gem/oq-engine | openquake/commands/shell.py | shell | def shell(script=None, args=()):
"""
Start an embedded (i)python instance with a global object "o" or
run a Python script in the engine environment.
"""
if script:
sys.argv = sys.argv[2:] # strip ['oq', 'shell']
runpy.run_path(script, run_name='__main__')
return
o = OpenQuake() # noqa
try:
import IPython
IPython.embed(banner1='IPython shell with a global object "o"')
except ImportError:
import code
code.interact(banner='Python shell with a global object "o"',
local=dict(o=o)) | python | def shell(script=None, args=()):
if script:
sys.argv = sys.argv[2:]
runpy.run_path(script, run_name='__main__')
return
o = OpenQuake()
try:
import IPython
IPython.embed(banner1='IPython shell with a global object "o"')
except ImportError:
import code
code.interact(banner='Python shell with a global object "o"',
local=dict(o=o)) | [
"def",
"shell",
"(",
"script",
"=",
"None",
",",
"args",
"=",
"(",
")",
")",
":",
"if",
"script",
":",
"sys",
".",
"argv",
"=",
"sys",
".",
"argv",
"[",
"2",
":",
"]",
"# strip ['oq', 'shell']",
"runpy",
".",
"run_path",
"(",
"script",
",",
"run_name",
"=",
"'__main__'",
")",
"return",
"o",
"=",
"OpenQuake",
"(",
")",
"# noqa",
"try",
":",
"import",
"IPython",
"IPython",
".",
"embed",
"(",
"banner1",
"=",
"'IPython shell with a global object \"o\"'",
")",
"except",
"ImportError",
":",
"import",
"code",
"code",
".",
"interact",
"(",
"banner",
"=",
"'Python shell with a global object \"o\"'",
",",
"local",
"=",
"dict",
"(",
"o",
"=",
"o",
")",
")"
]
| Start an embedded (i)python instance with a global object "o" or
run a Python script in the engine environment. | [
"Start",
"an",
"embedded",
"(",
"i",
")",
"python",
"instance",
"with",
"a",
"global",
"object",
"o",
"or",
"run",
"a",
"Python",
"script",
"in",
"the",
"engine",
"environment",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/shell.py#L57-L73 |
gem/oq-engine | openquake/hmtk/seismicity/completeness/comp_stepp_1971.py | get_bilinear_residuals_stepp | def get_bilinear_residuals_stepp(input_params, xvals, yvals, slope1_fit):
'''
Returns the residual sum-of-squares value of a bilinear fit to a data
set - with a segment - 1 gradient fixed by an input value (slope_1_fit)
:param list input_params:
Input parameters for the bilinear model [slope2, crossover_point,
intercept]
:param numpy.ndarray xvals:
x-values of the data to be fit
:param numpy.ndarray yvals:
y-values of the data to be fit
:param float slope1_fit:
Gradient of the first slope
:returns:
Residual sum-of-squares of fit
'''
params = np.hstack([slope1_fit, input_params])
num_x = len(xvals)
y_model = np.zeros(num_x, dtype=float)
residuals = np.zeros(num_x, dtype=float)
for iloc in range(0, num_x):
y_model[iloc] = piecewise_linear_scalar(params, xvals[iloc])
residuals[iloc] = (yvals[iloc] - y_model[iloc]) ** 2.0
return np.sum(residuals) | python | def get_bilinear_residuals_stepp(input_params, xvals, yvals, slope1_fit):
params = np.hstack([slope1_fit, input_params])
num_x = len(xvals)
y_model = np.zeros(num_x, dtype=float)
residuals = np.zeros(num_x, dtype=float)
for iloc in range(0, num_x):
y_model[iloc] = piecewise_linear_scalar(params, xvals[iloc])
residuals[iloc] = (yvals[iloc] - y_model[iloc]) ** 2.0
return np.sum(residuals) | [
"def",
"get_bilinear_residuals_stepp",
"(",
"input_params",
",",
"xvals",
",",
"yvals",
",",
"slope1_fit",
")",
":",
"params",
"=",
"np",
".",
"hstack",
"(",
"[",
"slope1_fit",
",",
"input_params",
"]",
")",
"num_x",
"=",
"len",
"(",
"xvals",
")",
"y_model",
"=",
"np",
".",
"zeros",
"(",
"num_x",
",",
"dtype",
"=",
"float",
")",
"residuals",
"=",
"np",
".",
"zeros",
"(",
"num_x",
",",
"dtype",
"=",
"float",
")",
"for",
"iloc",
"in",
"range",
"(",
"0",
",",
"num_x",
")",
":",
"y_model",
"[",
"iloc",
"]",
"=",
"piecewise_linear_scalar",
"(",
"params",
",",
"xvals",
"[",
"iloc",
"]",
")",
"residuals",
"[",
"iloc",
"]",
"=",
"(",
"yvals",
"[",
"iloc",
"]",
"-",
"y_model",
"[",
"iloc",
"]",
")",
"**",
"2.0",
"return",
"np",
".",
"sum",
"(",
"residuals",
")"
]
| Returns the residual sum-of-squares value of a bilinear fit to a data
set, with the segment-1 gradient fixed by an input value (slope1_fit)
:param list input_params:
Input parameters for the bilinear model [slope2, crossover_point,
intercept]
:param numpy.ndarray xvals:
x-values of the data to be fit
:param numpy.ndarray yvals:
y-values of the data to be fit
:param float slope1_fit:
Gradient of the first slope
:returns:
Residual sum-of-squares of fit | [
"Returns",
"the",
"residual",
"sum",
"-",
"of",
"-",
"squares",
"value",
"of",
"a",
"bilinear",
"fit",
"to",
"a",
"data",
"set",
"-",
"with",
"a",
"segment",
"-",
"1",
"gradient",
"fixed",
"by",
"an",
"input",
"value",
"(",
"slope_1_fit",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/completeness/comp_stepp_1971.py#L62-L89 |
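The residual function above is shaped for a generic optimiser that varies only the free parameters `[slope2, crossover, intercept]`. Below is a self-contained sketch of such a fit; the bilinear model, the synthetic data, and the use of `scipy.optimize.fmin` are all assumptions, with `bilinear` standing in for `piecewise_linear_scalar`, which is not shown in this record:

```python
import numpy as np
from scipy.optimize import fmin

def bilinear(params, x):
    # params = [slope1, slope2, crossover, intercept]; hypothetical
    # stand-in for piecewise_linear_scalar
    slope1, slope2, xover, icpt = params
    base = icpt + slope1 * min(x, xover)
    return base if x <= xover else base + slope2 * (x - xover)

def residuals(free_params, xvals, yvals, slope1_fit):
    # mirrors get_bilinear_residuals_stepp: fixed slope1, free remainder
    params = np.hstack([slope1_fit, free_params])
    model = np.array([bilinear(params, x) for x in xvals])
    return np.sum((yvals - model) ** 2.0)

xvals = np.linspace(0.0, 10.0, 50)
yvals = np.array([bilinear([1.0, 0.2, 4.0, 0.5], x) for x in xvals])
best = fmin(residuals, x0=[0.0, 5.0, 0.0],   # [slope2, crossover, intercept]
            args=(xvals, yvals, 1.0), disp=False)
print(best)  # should approach [0.2, 4.0, 0.5]
```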
gem/oq-engine | openquake/baselib/node.py | floatformat | def floatformat(fmt_string):
"""
Context manager to change the default format string for the
function :func:`openquake.commonlib.writers.scientificformat`.
:param fmt_string: the format to use; for instance '%13.9E'
"""
fmt_defaults = scientificformat.__defaults__
scientificformat.__defaults__ = (fmt_string,) + fmt_defaults[1:]
try:
yield
finally:
scientificformat.__defaults__ = fmt_defaults | python | def floatformat(fmt_string):
fmt_defaults = scientificformat.__defaults__
scientificformat.__defaults__ = (fmt_string,) + fmt_defaults[1:]
try:
yield
finally:
scientificformat.__defaults__ = fmt_defaults | [
"def",
"floatformat",
"(",
"fmt_string",
")",
":",
"fmt_defaults",
"=",
"scientificformat",
".",
"__defaults__",
"scientificformat",
".",
"__defaults__",
"=",
"(",
"fmt_string",
",",
")",
"+",
"fmt_defaults",
"[",
"1",
":",
"]",
"try",
":",
"yield",
"finally",
":",
"scientificformat",
".",
"__defaults__",
"=",
"fmt_defaults"
]
| Context manager to change the default format string for the
function :func:`openquake.commonlib.writers.scientificformat`.
:param fmt_string: the format to use; for instance '%13.9E' | [
"Context",
"manager",
"to",
"change",
"the",
"default",
"format",
"string",
"for",
"the",
"function",
":",
"func",
":",
"openquake",
".",
"commonlib",
".",
"writers",
".",
"scientificformat",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L160-L172 |
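A usage sketch for the context manager above; it assumes `floatformat` is wrapped with `contextlib.contextmanager` (which its yield/finally body implies) and that it is imported alongside `scientificformat` from the same module:

```python
from openquake.baselib.node import floatformat, scientificformat

print(scientificformat(0.12345678))      # '1.234567800E-01' (13.9E default)
with floatformat('%.5E'):
    print(scientificformat(0.12345678))  # '1.23457E-01'
print(scientificformat(0.12345678))      # default restored on exit
```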
gem/oq-engine | openquake/baselib/node.py | scientificformat | def scientificformat(value, fmt='%13.9E', sep=' ', sep2=':'):
"""
:param value: the value to convert into a string
:param fmt: the formatting string to use for float values
:param sep: separator to use for vector-like values
:param sep2: second separator to use for matrix-like values
Convert a float or an array into a string by using the scientific notation
and a fixed precision (by default 10 decimal digits). For instance:
>>> scientificformat(-0E0)
'0.000000000E+00'
>>> scientificformat(-0.004)
'-4.000000000E-03'
>>> scientificformat([0.004])
'4.000000000E-03'
>>> scientificformat([0.01, 0.02], '%10.6E')
'1.000000E-02 2.000000E-02'
>>> scientificformat([[0.1, 0.2], [0.3, 0.4]], '%4.1E')
'1.0E-01:2.0E-01 3.0E-01:4.0E-01'
"""
if isinstance(value, numpy.bool_):
return '1' if value else '0'
elif isinstance(value, bytes):
return value.decode('utf8')
elif isinstance(value, str):
return value
elif hasattr(value, '__len__'):
return sep.join((scientificformat(f, fmt, sep2) for f in value))
elif isinstance(value, (float, numpy.float64, numpy.float32)):
fmt_value = fmt % value
if set(fmt_value) <= zeroset:
            # '-0.0000000E+00' is converted into '0.0000000E+00'
fmt_value = fmt_value.replace('-', '')
return fmt_value
return str(value) | python | def scientificformat(value, fmt='%13.9E', sep=' ', sep2=':'):
if isinstance(value, numpy.bool_):
return '1' if value else '0'
elif isinstance(value, bytes):
return value.decode('utf8')
elif isinstance(value, str):
return value
elif hasattr(value, '__len__'):
return sep.join((scientificformat(f, fmt, sep2) for f in value))
elif isinstance(value, (float, numpy.float64, numpy.float32)):
fmt_value = fmt % value
if set(fmt_value) <= zeroset:
fmt_value = fmt_value.replace('-', '')
return fmt_value
return str(value) | [
"def",
"scientificformat",
"(",
"value",
",",
"fmt",
"=",
"'%13.9E'",
",",
"sep",
"=",
"' '",
",",
"sep2",
"=",
"':'",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"numpy",
".",
"bool_",
")",
":",
"return",
"'1'",
"if",
"value",
"else",
"'0'",
"elif",
"isinstance",
"(",
"value",
",",
"bytes",
")",
":",
"return",
"value",
".",
"decode",
"(",
"'utf8'",
")",
"elif",
"isinstance",
"(",
"value",
",",
"str",
")",
":",
"return",
"value",
"elif",
"hasattr",
"(",
"value",
",",
"'__len__'",
")",
":",
"return",
"sep",
".",
"join",
"(",
"(",
"scientificformat",
"(",
"f",
",",
"fmt",
",",
"sep2",
")",
"for",
"f",
"in",
"value",
")",
")",
"elif",
"isinstance",
"(",
"value",
",",
"(",
"float",
",",
"numpy",
".",
"float64",
",",
"numpy",
".",
"float32",
")",
")",
":",
"fmt_value",
"=",
"fmt",
"%",
"value",
"if",
"set",
"(",
"fmt_value",
")",
"<=",
"zeroset",
":",
"# '-0.0000000E+00' is converted into '0.0000000E+00",
"fmt_value",
"=",
"fmt_value",
".",
"replace",
"(",
"'-'",
",",
"''",
")",
"return",
"fmt_value",
"return",
"str",
"(",
"value",
")"
]
| :param value: the value to convert into a string
:param fmt: the formatting string to use for float values
:param sep: separator to use for vector-like values
:param sep2: second separator to use for matrix-like values
Convert a float or an array into a string by using the scientific notation
and a fixed precision (by default 10 significant figures). For instance:
>>> scientificformat(-0E0)
'0.000000000E+00'
>>> scientificformat(-0.004)
'-4.000000000E-03'
>>> scientificformat([0.004])
'4.000000000E-03'
>>> scientificformat([0.01, 0.02], '%10.6E')
'1.000000E-02 2.000000E-02'
>>> scientificformat([[0.1, 0.2], [0.3, 0.4]], '%4.1E')
'1.0E-01:2.0E-01 3.0E-01:4.0E-01' | [
":",
"param",
"value",
":",
"the",
"value",
"to",
"convert",
"into",
"a",
"string",
":",
"param",
"fmt",
":",
"the",
"formatting",
"string",
"to",
"use",
"for",
"float",
"values",
":",
"param",
"sep",
":",
"separator",
"to",
"use",
"for",
"vector",
"-",
"like",
"values",
":",
"param",
"sep2",
":",
"second",
"separator",
"to",
"use",
"for",
"matrix",
"-",
"like",
"values"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L178-L213 |
gem/oq-engine | openquake/baselib/node.py | tostring | def tostring(node, indent=4, nsmap=None):
"""
Convert a node into an XML string by using the StreamingXMLWriter.
This is useful for testing purposes.
:param node: a node object (typically an ElementTree object)
:param indent: the indentation to use in the XML (default 4 spaces)
"""
out = io.BytesIO()
writer = StreamingXMLWriter(out, indent, nsmap=nsmap)
writer.serialize(node)
return out.getvalue() | python | def tostring(node, indent=4, nsmap=None):
out = io.BytesIO()
writer = StreamingXMLWriter(out, indent, nsmap=nsmap)
writer.serialize(node)
return out.getvalue() | [
"def",
"tostring",
"(",
"node",
",",
"indent",
"=",
"4",
",",
"nsmap",
"=",
"None",
")",
":",
"out",
"=",
"io",
".",
"BytesIO",
"(",
")",
"writer",
"=",
"StreamingXMLWriter",
"(",
"out",
",",
"indent",
",",
"nsmap",
"=",
"nsmap",
")",
"writer",
".",
"serialize",
"(",
"node",
")",
"return",
"out",
".",
"getvalue",
"(",
")"
]
| Convert a node into an XML string by using the StreamingXMLWriter.
This is useful for testing purposes.
:param node: a node object (typically an ElementTree object)
:param indent: the indentation to use in the XML (default 4 spaces) | [
"Convert",
"a",
"node",
"into",
"an",
"XML",
"string",
"by",
"using",
"the",
"StreamingXMLWriter",
".",
"This",
"is",
"useful",
"for",
"testing",
"purposes",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L216-L227 |
gem/oq-engine | openquake/baselib/node.py | parse | def parse(source, remove_comments=True, **kw):
"""Thin wrapper around ElementTree.parse"""
return ElementTree.parse(source, SourceLineParser(), **kw) | python | def parse(source, remove_comments=True, **kw):
return ElementTree.parse(source, SourceLineParser(), **kw) | [
"def",
"parse",
"(",
"source",
",",
"remove_comments",
"=",
"True",
",",
"*",
"*",
"kw",
")",
":",
"return",
"ElementTree",
".",
"parse",
"(",
"source",
",",
"SourceLineParser",
"(",
")",
",",
"*",
"*",
"kw",
")"
]
| Thin wrapper around ElementTree.parse | [
"Thin",
"wrapper",
"around",
"ElementTree",
".",
"parse"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L350-L352 |
gem/oq-engine | openquake/baselib/node.py | iterparse | def iterparse(source, events=('end',), remove_comments=True, **kw):
"""Thin wrapper around ElementTree.iterparse"""
return ElementTree.iterparse(source, events, SourceLineParser(), **kw) | python | def iterparse(source, events=('end',), remove_comments=True, **kw):
return ElementTree.iterparse(source, events, SourceLineParser(), **kw) | [
"def",
"iterparse",
"(",
"source",
",",
"events",
"=",
"(",
"'end'",
",",
")",
",",
"remove_comments",
"=",
"True",
",",
"*",
"*",
"kw",
")",
":",
"return",
"ElementTree",
".",
"iterparse",
"(",
"source",
",",
"events",
",",
"SourceLineParser",
"(",
")",
",",
"*",
"*",
"kw",
")"
]
| Thin wrapper around ElementTree.iterparse | [
"Thin",
"wrapper",
"around",
"ElementTree",
".",
"iterparse"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L355-L357 |
gem/oq-engine | openquake/baselib/node.py | _displayattrs | def _displayattrs(attrib, expandattrs):
"""
Helper function to display the attributes of a Node object in lexicographic
order.
:param attrib: dictionary with the attributes
:param expandattrs: if True also displays the value of the attributes
"""
if not attrib:
return ''
if expandattrs:
alist = ['%s=%r' % item for item in sorted(attrib.items())]
else:
alist = list(attrib)
return '{%s}' % ', '.join(alist) | python | def _displayattrs(attrib, expandattrs):
if not attrib:
return ''
if expandattrs:
alist = ['%s=%r' % item for item in sorted(attrib.items())]
else:
alist = list(attrib)
return '{%s}' % ', '.join(alist) | [
"def",
"_displayattrs",
"(",
"attrib",
",",
"expandattrs",
")",
":",
"if",
"not",
"attrib",
":",
"return",
"''",
"if",
"expandattrs",
":",
"alist",
"=",
"[",
"'%s=%r'",
"%",
"item",
"for",
"item",
"in",
"sorted",
"(",
"attrib",
".",
"items",
"(",
")",
")",
"]",
"else",
":",
"alist",
"=",
"list",
"(",
"attrib",
")",
"return",
"'{%s}'",
"%",
"', '",
".",
"join",
"(",
"alist",
")"
]
| Helper function to display the attributes of a Node object in lexicographic
order.
:param attrib: dictionary with the attributes
:param expandattrs: if True also displays the value of the attributes | [
"Helper",
"function",
"to",
"display",
"the",
"attributes",
"of",
"a",
"Node",
"object",
"in",
"lexicographic",
"order",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L363-L377 |
gem/oq-engine | openquake/baselib/node.py | _display | def _display(node, indent, expandattrs, expandvals, output):
"""Core function to display a Node object"""
attrs = _displayattrs(node.attrib, expandattrs)
if node.text is None or not expandvals:
val = ''
elif isinstance(node.text, str):
val = ' %s' % repr(node.text.strip())
else:
val = ' %s' % repr(node.text) # node.text can be a tuple
output.write(encode(indent + striptag(node.tag) + attrs + val + '\n'))
for sub_node in node:
_display(sub_node, indent + ' ', expandattrs, expandvals, output) | python | def _display(node, indent, expandattrs, expandvals, output):
attrs = _displayattrs(node.attrib, expandattrs)
if node.text is None or not expandvals:
val = ''
elif isinstance(node.text, str):
val = ' %s' % repr(node.text.strip())
else:
val = ' %s' % repr(node.text)
output.write(encode(indent + striptag(node.tag) + attrs + val + '\n'))
for sub_node in node:
_display(sub_node, indent + ' ', expandattrs, expandvals, output) | [
"def",
"_display",
"(",
"node",
",",
"indent",
",",
"expandattrs",
",",
"expandvals",
",",
"output",
")",
":",
"attrs",
"=",
"_displayattrs",
"(",
"node",
".",
"attrib",
",",
"expandattrs",
")",
"if",
"node",
".",
"text",
"is",
"None",
"or",
"not",
"expandvals",
":",
"val",
"=",
"''",
"elif",
"isinstance",
"(",
"node",
".",
"text",
",",
"str",
")",
":",
"val",
"=",
"' %s'",
"%",
"repr",
"(",
"node",
".",
"text",
".",
"strip",
"(",
")",
")",
"else",
":",
"val",
"=",
"' %s'",
"%",
"repr",
"(",
"node",
".",
"text",
")",
"# node.text can be a tuple",
"output",
".",
"write",
"(",
"encode",
"(",
"indent",
"+",
"striptag",
"(",
"node",
".",
"tag",
")",
"+",
"attrs",
"+",
"val",
"+",
"'\\n'",
")",
")",
"for",
"sub_node",
"in",
"node",
":",
"_display",
"(",
"sub_node",
",",
"indent",
"+",
"' '",
",",
"expandattrs",
",",
"expandvals",
",",
"output",
")"
]
| Core function to display a Node object | [
"Core",
"function",
"to",
"display",
"a",
"Node",
"object"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L380-L391 |
gem/oq-engine | openquake/baselib/node.py | node_display | def node_display(root, expandattrs=False, expandvals=False, output=sys.stdout):
"""
Write an indented representation of the Node object on the output;
this is intended for testing/debugging purposes.
:param root: a Node object
:param bool expandattrs: if True, the values of the attributes are
also printed, not only the names
:param bool expandvals: if True, the values of the tags are also printed,
not only the names.
:param output: stream where to write the string representation of the node
"""
_display(root, '', expandattrs, expandvals, output) | python | def node_display(root, expandattrs=False, expandvals=False, output=sys.stdout):
_display(root, '', expandattrs, expandvals, output) | [
"def",
"node_display",
"(",
"root",
",",
"expandattrs",
"=",
"False",
",",
"expandvals",
"=",
"False",
",",
"output",
"=",
"sys",
".",
"stdout",
")",
":",
"_display",
"(",
"root",
",",
"''",
",",
"expandattrs",
",",
"expandvals",
",",
"output",
")"
]
| Write an indented representation of the Node object on the output;
this is intended for testing/debugging purposes.
:param root: a Node object
:param bool expandattrs: if True, the values of the attributes are
also printed, not only the names
:param bool expandvals: if True, the values of the tags are also printed,
not only the names.
:param output: stream where to write the string representation of the node | [
"Write",
"an",
"indented",
"representation",
"of",
"the",
"Node",
"object",
"on",
"the",
"output",
";",
"this",
"is",
"intended",
"for",
"testing",
"/",
"debugging",
"purposes",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L394-L406 |
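A hedged sketch of `node_display`; a bytes buffer is used because `_display` writes encoded output, and the names `Node` and `node_display` are assumed importable from the same module:

```python
import io
from openquake.baselib.node import Node, node_display

root = Node('sourceModel', {'name': 'demo'},
            nodes=[Node('source', {'id': '1'}, 'some text')])
buf = io.BytesIO()
node_display(root, expandattrs=True, expandvals=True, output=buf)
print(buf.getvalue().decode('utf-8'))
# sourceModel{name='demo'}
#   source{id='1'} 'some text'
```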
gem/oq-engine | openquake/baselib/node.py | to_literal | def to_literal(self):
"""
Convert the node into a literal Python object
"""
if not self.nodes:
return (self.tag, self.attrib, self.text, [])
else:
return (self.tag, self.attrib, self.text,
list(map(to_literal, self.nodes))) | python | def to_literal(self):
if not self.nodes:
return (self.tag, self.attrib, self.text, [])
else:
return (self.tag, self.attrib, self.text,
list(map(to_literal, self.nodes))) | [
"def",
"to_literal",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"nodes",
":",
"return",
"(",
"self",
".",
"tag",
",",
"self",
".",
"attrib",
",",
"self",
".",
"text",
",",
"[",
"]",
")",
"else",
":",
"return",
"(",
"self",
".",
"tag",
",",
"self",
".",
"attrib",
",",
"self",
".",
"text",
",",
"list",
"(",
"map",
"(",
"to_literal",
",",
"self",
".",
"nodes",
")",
")",
")"
]
| Convert the node into a literal Python object | [
"Convert",
"the",
"node",
"into",
"a",
"literal",
"Python",
"object"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L574-L582 |
gem/oq-engine | openquake/baselib/node.py | pprint | def pprint(self, stream=None, indent=1, width=80, depth=None):
"""
Pretty print the underlying literal Python object
"""
pp.pprint(to_literal(self), stream, indent, width, depth) | python | def pprint(self, stream=None, indent=1, width=80, depth=None):
pp.pprint(to_literal(self), stream, indent, width, depth) | [
"def",
"pprint",
"(",
"self",
",",
"stream",
"=",
"None",
",",
"indent",
"=",
"1",
",",
"width",
"=",
"80",
",",
"depth",
"=",
"None",
")",
":",
"pp",
".",
"pprint",
"(",
"to_literal",
"(",
"self",
")",
",",
"stream",
",",
"indent",
",",
"width",
",",
"depth",
")"
]
| Pretty print the underlying literal Python object | [
"Pretty",
"print",
"the",
"underlying",
"literal",
"Python",
"object"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L585-L589 |
gem/oq-engine | openquake/baselib/node.py | node_from_dict | def node_from_dict(dic, nodefactory=Node):
"""
Convert a (nested) dictionary with attributes tag, attrib, text, nodes
into a Node object.
"""
tag = dic['tag']
text = dic.get('text')
attrib = dic.get('attrib', {})
nodes = dic.get('nodes', [])
if not nodes:
return nodefactory(tag, attrib, text)
return nodefactory(tag, attrib, nodes=list(map(node_from_dict, nodes))) | python | def node_from_dict(dic, nodefactory=Node):
tag = dic['tag']
text = dic.get('text')
attrib = dic.get('attrib', {})
nodes = dic.get('nodes', [])
if not nodes:
return nodefactory(tag, attrib, text)
return nodefactory(tag, attrib, nodes=list(map(node_from_dict, nodes))) | [
"def",
"node_from_dict",
"(",
"dic",
",",
"nodefactory",
"=",
"Node",
")",
":",
"tag",
"=",
"dic",
"[",
"'tag'",
"]",
"text",
"=",
"dic",
".",
"get",
"(",
"'text'",
")",
"attrib",
"=",
"dic",
".",
"get",
"(",
"'attrib'",
",",
"{",
"}",
")",
"nodes",
"=",
"dic",
".",
"get",
"(",
"'nodes'",
",",
"[",
"]",
")",
"if",
"not",
"nodes",
":",
"return",
"nodefactory",
"(",
"tag",
",",
"attrib",
",",
"text",
")",
"return",
"nodefactory",
"(",
"tag",
",",
"attrib",
",",
"nodes",
"=",
"list",
"(",
"map",
"(",
"node_from_dict",
",",
"nodes",
")",
")",
")"
]
| Convert a (nested) dictionary with attributes tag, attrib, text, nodes
into a Node object. | [
"Convert",
"a",
"(",
"nested",
")",
"dictionary",
"with",
"attributes",
"tag",
"attrib",
"text",
"nodes",
"into",
"a",
"Node",
"object",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L592-L603 |
gem/oq-engine | openquake/baselib/node.py | node_to_dict | def node_to_dict(node):
"""
Convert a Node object into a (nested) dictionary
with attributes tag, attrib, text, nodes.
:param node: a Node-compatible object
"""
dic = dict(tag=striptag(node.tag))
if node.attrib:
dic['attrib'] = node.attrib
if node.text is not None:
dic['text'] = node.text
if node.nodes:
dic['nodes'] = [node_to_dict(n) for n in node]
return dic | python | def node_to_dict(node):
dic = dict(tag=striptag(node.tag))
if node.attrib:
dic['attrib'] = node.attrib
if node.text is not None:
dic['text'] = node.text
if node.nodes:
dic['nodes'] = [node_to_dict(n) for n in node]
return dic | [
"def",
"node_to_dict",
"(",
"node",
")",
":",
"dic",
"=",
"dict",
"(",
"tag",
"=",
"striptag",
"(",
"node",
".",
"tag",
")",
")",
"if",
"node",
".",
"attrib",
":",
"dic",
"[",
"'attrib'",
"]",
"=",
"node",
".",
"attrib",
"if",
"node",
".",
"text",
"is",
"not",
"None",
":",
"dic",
"[",
"'text'",
"]",
"=",
"node",
".",
"text",
"if",
"node",
".",
"nodes",
":",
"dic",
"[",
"'nodes'",
"]",
"=",
"[",
"node_to_dict",
"(",
"n",
")",
"for",
"n",
"in",
"node",
"]",
"return",
"dic"
]
| Convert a Node object into a (nested) dictionary
with attributes tag, attrib, text, nodes.
:param node: a Node-compatible object | [
"Convert",
"a",
"Node",
"object",
"into",
"a",
"(",
"nested",
")",
"dictionary",
"with",
"attributes",
"tag",
"attrib",
"text",
"nodes",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L606-L620 |
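A hedged round-trip sketch pairing `node_to_dict` with `node_from_dict` above; the tag names are illustrative and `Node` is assumed importable from the same module:

```python
from openquake.baselib.node import Node, node_from_dict, node_to_dict

node = Node('uncertaintyModel', {'gmpe': 'BooreAtkinson2008'},
            nodes=[Node('weight', {}, '0.5')])
dic = node_to_dict(node)
# {'tag': 'uncertaintyModel', 'attrib': {...},
#  'nodes': [{'tag': 'weight', 'text': '0.5'}]}
clone = node_from_dict(dic)
print(clone.tag, clone.nodes[0].text)  # uncertaintyModel 0.5
```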
gem/oq-engine | openquake/baselib/node.py | node_from_elem | def node_from_elem(elem, nodefactory=Node, lazy=()):
"""
Convert (recursively) an ElementTree object into a Node object.
"""
children = list(elem)
lineno = getattr(elem, 'lineno', None)
if not children:
return nodefactory(elem.tag, dict(elem.attrib), elem.text,
lineno=lineno)
if striptag(elem.tag) in lazy:
nodes = (node_from_elem(ch, nodefactory, lazy) for ch in children)
else:
nodes = [node_from_elem(ch, nodefactory, lazy) for ch in children]
return nodefactory(elem.tag, dict(elem.attrib), nodes=nodes, lineno=lineno) | python | def node_from_elem(elem, nodefactory=Node, lazy=()):
children = list(elem)
lineno = getattr(elem, 'lineno', None)
if not children:
return nodefactory(elem.tag, dict(elem.attrib), elem.text,
lineno=lineno)
if striptag(elem.tag) in lazy:
nodes = (node_from_elem(ch, nodefactory, lazy) for ch in children)
else:
nodes = [node_from_elem(ch, nodefactory, lazy) for ch in children]
return nodefactory(elem.tag, dict(elem.attrib), nodes=nodes, lineno=lineno) | [
"def",
"node_from_elem",
"(",
"elem",
",",
"nodefactory",
"=",
"Node",
",",
"lazy",
"=",
"(",
")",
")",
":",
"children",
"=",
"list",
"(",
"elem",
")",
"lineno",
"=",
"getattr",
"(",
"elem",
",",
"'lineno'",
",",
"None",
")",
"if",
"not",
"children",
":",
"return",
"nodefactory",
"(",
"elem",
".",
"tag",
",",
"dict",
"(",
"elem",
".",
"attrib",
")",
",",
"elem",
".",
"text",
",",
"lineno",
"=",
"lineno",
")",
"if",
"striptag",
"(",
"elem",
".",
"tag",
")",
"in",
"lazy",
":",
"nodes",
"=",
"(",
"node_from_elem",
"(",
"ch",
",",
"nodefactory",
",",
"lazy",
")",
"for",
"ch",
"in",
"children",
")",
"else",
":",
"nodes",
"=",
"[",
"node_from_elem",
"(",
"ch",
",",
"nodefactory",
",",
"lazy",
")",
"for",
"ch",
"in",
"children",
"]",
"return",
"nodefactory",
"(",
"elem",
".",
"tag",
",",
"dict",
"(",
"elem",
".",
"attrib",
")",
",",
"nodes",
"=",
"nodes",
",",
"lineno",
"=",
"lineno",
")"
]
| Convert (recursively) an ElementTree object into a Node object. | [
"Convert",
"(",
"recursively",
")",
"an",
"ElementTree",
"object",
"into",
"a",
"Node",
"object",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L623-L636 |
gem/oq-engine | openquake/baselib/node.py | node_to_elem | def node_to_elem(root):
"""
Convert (recursively) a Node object into an ElementTree object.
"""
def generate_elem(append, node, level):
var = "e" + str(level)
arg = repr(node.tag)
if node.attrib:
arg += ", **%r" % node.attrib
if level == 1:
append("e1 = Element(%s)" % arg)
else:
append("%s = SubElement(e%d, %s)" % (var, level - 1, arg))
if not node.nodes:
append("%s.text = %r" % (var, node.text))
for x in node:
generate_elem(append, x, level + 1)
# generate code to create a tree
output = []
generate_elem(output.append, root, 1) # print "\n".join(output)
namespace = {"Element": ElementTree.Element,
"SubElement": ElementTree.SubElement}
exec("\n".join(output), globals(), namespace)
return namespace["e1"] | python | def node_to_elem(root):
def generate_elem(append, node, level):
var = "e" + str(level)
arg = repr(node.tag)
if node.attrib:
arg += ", **%r" % node.attrib
if level == 1:
append("e1 = Element(%s)" % arg)
else:
append("%s = SubElement(e%d, %s)" % (var, level - 1, arg))
if not node.nodes:
append("%s.text = %r" % (var, node.text))
for x in node:
generate_elem(append, x, level + 1)
output = []
generate_elem(output.append, root, 1)
namespace = {"Element": ElementTree.Element,
"SubElement": ElementTree.SubElement}
exec("\n".join(output), globals(), namespace)
return namespace["e1"] | [
"def",
"node_to_elem",
"(",
"root",
")",
":",
"def",
"generate_elem",
"(",
"append",
",",
"node",
",",
"level",
")",
":",
"var",
"=",
"\"e\"",
"+",
"str",
"(",
"level",
")",
"arg",
"=",
"repr",
"(",
"node",
".",
"tag",
")",
"if",
"node",
".",
"attrib",
":",
"arg",
"+=",
"\", **%r\"",
"%",
"node",
".",
"attrib",
"if",
"level",
"==",
"1",
":",
"append",
"(",
"\"e1 = Element(%s)\"",
"%",
"arg",
")",
"else",
":",
"append",
"(",
"\"%s = SubElement(e%d, %s)\"",
"%",
"(",
"var",
",",
"level",
"-",
"1",
",",
"arg",
")",
")",
"if",
"not",
"node",
".",
"nodes",
":",
"append",
"(",
"\"%s.text = %r\"",
"%",
"(",
"var",
",",
"node",
".",
"text",
")",
")",
"for",
"x",
"in",
"node",
":",
"generate_elem",
"(",
"append",
",",
"x",
",",
"level",
"+",
"1",
")",
"# generate code to create a tree",
"output",
"=",
"[",
"]",
"generate_elem",
"(",
"output",
".",
"append",
",",
"root",
",",
"1",
")",
"# print \"\\n\".join(output)",
"namespace",
"=",
"{",
"\"Element\"",
":",
"ElementTree",
".",
"Element",
",",
"\"SubElement\"",
":",
"ElementTree",
".",
"SubElement",
"}",
"exec",
"(",
"\"\\n\"",
".",
"join",
"(",
"output",
")",
",",
"globals",
"(",
")",
",",
"namespace",
")",
"return",
"namespace",
"[",
"\"e1\"",
"]"
]
| Convert (recursively) a Node object into an ElementTree object. | [
"Convert",
"(",
"recursively",
")",
"a",
"Node",
"object",
"into",
"an",
"ElementTree",
"object",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L640-L663 |
gem/oq-engine | openquake/baselib/node.py | read_nodes | def read_nodes(fname, filter_elem, nodefactory=Node, remove_comments=True):
"""
Convert an XML file into a lazy iterator over Node objects
    satisfying the given specification, i.e. a function element -> boolean.
    :param fname: file name or file object
:param filter_elem: element specification
In case of errors, add the file name to the error message.
"""
try:
for _, el in iterparse(fname, remove_comments=remove_comments):
if filter_elem(el):
yield node_from_elem(el, nodefactory)
el.clear() # save memory
except Exception:
etype, exc, tb = sys.exc_info()
msg = str(exc)
if not str(fname) in msg:
msg = '%s in %s' % (msg, fname)
raise_(etype, msg, tb) | python | def read_nodes(fname, filter_elem, nodefactory=Node, remove_comments=True):
try:
for _, el in iterparse(fname, remove_comments=remove_comments):
if filter_elem(el):
yield node_from_elem(el, nodefactory)
el.clear()
except Exception:
etype, exc, tb = sys.exc_info()
msg = str(exc)
if not str(fname) in msg:
msg = '%s in %s' % (msg, fname)
raise_(etype, msg, tb) | [
"def",
"read_nodes",
"(",
"fname",
",",
"filter_elem",
",",
"nodefactory",
"=",
"Node",
",",
"remove_comments",
"=",
"True",
")",
":",
"try",
":",
"for",
"_",
",",
"el",
"in",
"iterparse",
"(",
"fname",
",",
"remove_comments",
"=",
"remove_comments",
")",
":",
"if",
"filter_elem",
"(",
"el",
")",
":",
"yield",
"node_from_elem",
"(",
"el",
",",
"nodefactory",
")",
"el",
".",
"clear",
"(",
")",
"# save memory",
"except",
"Exception",
":",
"etype",
",",
"exc",
",",
"tb",
"=",
"sys",
".",
"exc_info",
"(",
")",
"msg",
"=",
"str",
"(",
"exc",
")",
"if",
"not",
"str",
"(",
"fname",
")",
"in",
"msg",
":",
"msg",
"=",
"'%s in %s'",
"%",
"(",
"msg",
",",
"fname",
")",
"raise_",
"(",
"etype",
",",
"msg",
",",
"tb",
")"
]
| Convert an XML file into a lazy iterator over Node objects
satisfying the given specification, i.e. a function element -> boolean.
:param fname: file name or file object
:param filter_elem: element specification
In case of errors, add the file name to the error message. | [
"Convert",
"an",
"XML",
"file",
"into",
"a",
"lazy",
"iterator",
"over",
"Node",
"objects",
"satifying",
"the",
"given",
"specification",
"i",
".",
"e",
".",
"a",
"function",
"element",
"-",
">",
"boolean",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L666-L686 |
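A hedged sketch of lazily filtering a large XML file with `read_nodes`; the file name and element test are illustrative, and `striptag` is assumed importable from the same module:

```python
from openquake.baselib.node import read_nodes, striptag

# Yield only pointSource elements, one at a time; each element is
# cleared after conversion, keeping memory flat on big files, and any
# error is re-raised with the file name appended to the message.
for src in read_nodes('source_model.xml',
                      lambda elem: striptag(elem.tag) == 'pointSource'):
    print(src.tag, src.attrib.get('id'))
```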
gem/oq-engine | openquake/baselib/node.py | node_from_xml | def node_from_xml(xmlfile, nodefactory=Node):
"""
Convert a .xml file into a Node object.
:param xmlfile: a file name or file object open for reading
"""
root = parse(xmlfile).getroot()
return node_from_elem(root, nodefactory) | python | def node_from_xml(xmlfile, nodefactory=Node):
root = parse(xmlfile).getroot()
return node_from_elem(root, nodefactory) | [
"def",
"node_from_xml",
"(",
"xmlfile",
",",
"nodefactory",
"=",
"Node",
")",
":",
"root",
"=",
"parse",
"(",
"xmlfile",
")",
".",
"getroot",
"(",
")",
"return",
"node_from_elem",
"(",
"root",
",",
"nodefactory",
")"
]
| Convert a .xml file into a Node object.
:param xmlfile: a file name or file object open for reading | [
"Convert",
"a",
".",
"xml",
"file",
"into",
"a",
"Node",
"object",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L689-L696 |
gem/oq-engine | openquake/baselib/node.py | node_to_xml | def node_to_xml(node, output=sys.stdout, nsmap=None):
"""
Convert a Node object into a pretty .xml file without keeping
everything in memory. If you just want the string representation
use tostring(node).
:param node: a Node-compatible object (ElementTree nodes are fine)
:param nsmap: if given, shorten the tags with aliases
"""
if nsmap:
for ns, prefix in nsmap.items():
if prefix:
node['xmlns:' + prefix[:-1]] = ns
else:
node['xmlns'] = ns
with StreamingXMLWriter(output, nsmap=nsmap) as w:
w.serialize(node) | python | def node_to_xml(node, output=sys.stdout, nsmap=None):
if nsmap:
for ns, prefix in nsmap.items():
if prefix:
node['xmlns:' + prefix[:-1]] = ns
else:
node['xmlns'] = ns
with StreamingXMLWriter(output, nsmap=nsmap) as w:
w.serialize(node) | [
"def",
"node_to_xml",
"(",
"node",
",",
"output",
"=",
"sys",
".",
"stdout",
",",
"nsmap",
"=",
"None",
")",
":",
"if",
"nsmap",
":",
"for",
"ns",
",",
"prefix",
"in",
"nsmap",
".",
"items",
"(",
")",
":",
"if",
"prefix",
":",
"node",
"[",
"'xmlns:'",
"+",
"prefix",
"[",
":",
"-",
"1",
"]",
"]",
"=",
"ns",
"else",
":",
"node",
"[",
"'xmlns'",
"]",
"=",
"ns",
"with",
"StreamingXMLWriter",
"(",
"output",
",",
"nsmap",
"=",
"nsmap",
")",
"as",
"w",
":",
"w",
".",
"serialize",
"(",
"node",
")"
]
| Convert a Node object into a pretty .xml file without keeping
everything in memory. If you just want the string representation
use tostring(node).
:param node: a Node-compatible object (ElementTree nodes are fine)
:param nsmap: if given, shorten the tags with aliases | [
"Convert",
"a",
"Node",
"object",
"into",
"a",
"pretty",
".",
"xml",
"file",
"without",
"keeping",
"everything",
"in",
"memory",
".",
"If",
"you",
"just",
"want",
"the",
"string",
"representation",
"use",
"tostring",
"(",
"node",
")",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L699-L716 |
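A hedged sketch of `node_to_xml` with an `nsmap`; the namespace URI is an assumption, and a bytes buffer is used because the underlying StreamingXMLWriter encodes its output:

```python
import io
from openquake.baselib.node import Node, node_to_xml

NRML = 'http://openquake.org/xmlns/nrml/0.5'   # assumed namespace URI
root = Node('nrml', nodes=[Node('sourceModel', {'name': 'demo'})])

buf = io.BytesIO()
# mapping the namespace to the empty prefix adds xmlns="..." to the root
node_to_xml(root, buf, nsmap={NRML: ''})
print(buf.getvalue().decode('utf-8'))
```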
gem/oq-engine | openquake/baselib/node.py | node_from_ini | def node_from_ini(ini_file, nodefactory=Node, root_name='ini'):
"""
Convert a .ini file into a Node object.
:param ini_file: a filename or a file like object in read mode
"""
fileobj = open(ini_file) if isinstance(ini_file, str) else ini_file
cfp = configparser.RawConfigParser()
cfp.read_file(fileobj)
root = nodefactory(root_name)
sections = cfp.sections()
for section in sections:
params = dict(cfp.items(section))
root.append(Node(section, params))
return root | python | def node_from_ini(ini_file, nodefactory=Node, root_name='ini'):
fileobj = open(ini_file) if isinstance(ini_file, str) else ini_file
cfp = configparser.RawConfigParser()
cfp.read_file(fileobj)
root = nodefactory(root_name)
sections = cfp.sections()
for section in sections:
params = dict(cfp.items(section))
root.append(Node(section, params))
return root | [
"def",
"node_from_ini",
"(",
"ini_file",
",",
"nodefactory",
"=",
"Node",
",",
"root_name",
"=",
"'ini'",
")",
":",
"fileobj",
"=",
"open",
"(",
"ini_file",
")",
"if",
"isinstance",
"(",
"ini_file",
",",
"str",
")",
"else",
"ini_file",
"cfp",
"=",
"configparser",
".",
"RawConfigParser",
"(",
")",
"cfp",
".",
"read_file",
"(",
"fileobj",
")",
"root",
"=",
"nodefactory",
"(",
"root_name",
")",
"sections",
"=",
"cfp",
".",
"sections",
"(",
")",
"for",
"section",
"in",
"sections",
":",
"params",
"=",
"dict",
"(",
"cfp",
".",
"items",
"(",
"section",
")",
")",
"root",
".",
"append",
"(",
"Node",
"(",
"section",
",",
"params",
")",
")",
"return",
"root"
]
| Convert a .ini file into a Node object.
:param ini_file: a filename or a file like object in read mode | [
"Convert",
"a",
".",
"ini",
"file",
"into",
"a",
"Node",
"object",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L719-L733 |
gem/oq-engine | openquake/baselib/node.py | node_to_ini | def node_to_ini(node, output=sys.stdout):
"""
Convert a Node object with the right structure into a .ini file.
    :param node: a Node object
    :param output: a file-like object opened in write mode
"""
for subnode in node:
output.write(u'\n[%s]\n' % subnode.tag)
for name, value in sorted(subnode.attrib.items()):
output.write(u'%s=%s\n' % (name, value))
output.flush() | python | def node_to_ini(node, output=sys.stdout):
for subnode in node:
output.write(u'\n[%s]\n' % subnode.tag)
for name, value in sorted(subnode.attrib.items()):
output.write(u'%s=%s\n' % (name, value))
output.flush() | [
"def",
"node_to_ini",
"(",
"node",
",",
"output",
"=",
"sys",
".",
"stdout",
")",
":",
"for",
"subnode",
"in",
"node",
":",
"output",
".",
"write",
"(",
"u'\\n[%s]\\n'",
"%",
"subnode",
".",
"tag",
")",
"for",
"name",
",",
"value",
"in",
"sorted",
"(",
"subnode",
".",
"attrib",
".",
"items",
"(",
")",
")",
":",
"output",
".",
"write",
"(",
"u'%s=%s\\n'",
"%",
"(",
"name",
",",
"value",
")",
")",
"output",
".",
"flush",
"(",
")"
]
| Convert a Node object with the right structure into a .ini file.
:param node: a Node object
:param output: a file-like object opened in write mode | [
"Convert",
"a",
"Node",
"object",
"with",
"the",
"right",
"structure",
"into",
"a",
".",
"ini",
"file",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L736-L747 |
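A hedged round-trip sketch pairing `node_from_ini` with `node_to_ini` above; StringIO objects stand in for real .ini files:

```python
import io
from openquake.baselib.node import node_from_ini, node_to_ini

ini = io.StringIO(u"[general]\ncalculation_mode=classical\n")
node = node_from_ini(ini)    # one subnode per ini section
out = io.StringIO()
node_to_ini(node, out)
print(out.getvalue())
# [general]
# calculation_mode=classical
```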
gem/oq-engine | openquake/baselib/node.py | node_copy | def node_copy(node, nodefactory=Node):
"""Make a deep copy of the node"""
return nodefactory(node.tag, node.attrib.copy(), node.text,
[node_copy(n, nodefactory) for n in node]) | python | def node_copy(node, nodefactory=Node):
return nodefactory(node.tag, node.attrib.copy(), node.text,
[node_copy(n, nodefactory) for n in node]) | [
"def",
"node_copy",
"(",
"node",
",",
"nodefactory",
"=",
"Node",
")",
":",
"return",
"nodefactory",
"(",
"node",
".",
"tag",
",",
"node",
".",
"attrib",
".",
"copy",
"(",
")",
",",
"node",
".",
"text",
",",
"[",
"node_copy",
"(",
"n",
",",
"nodefactory",
")",
"for",
"n",
"in",
"node",
"]",
")"
]
| Make a deep copy of the node | [
"Make",
"a",
"deep",
"copy",
"of",
"the",
"node"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L750-L753 |
gem/oq-engine | openquake/baselib/node.py | context | def context(fname, node):
"""
Context manager managing exceptions and adding line number of the
current node and name of the current file to the error message.
:param fname: the current file being processed
:param node: the current node being processed
"""
try:
yield node
except Exception:
etype, exc, tb = sys.exc_info()
msg = 'node %s: %s, line %s of %s' % (
striptag(node.tag), exc, getattr(node, 'lineno', '?'), fname)
raise_(etype, msg, tb) | python | def context(fname, node):
try:
yield node
except Exception:
etype, exc, tb = sys.exc_info()
msg = 'node %s: %s, line %s of %s' % (
striptag(node.tag), exc, getattr(node, 'lineno', '?'), fname)
raise_(etype, msg, tb) | [
"def",
"context",
"(",
"fname",
",",
"node",
")",
":",
"try",
":",
"yield",
"node",
"except",
"Exception",
":",
"etype",
",",
"exc",
",",
"tb",
"=",
"sys",
".",
"exc_info",
"(",
")",
"msg",
"=",
"'node %s: %s, line %s of %s'",
"%",
"(",
"striptag",
"(",
"node",
".",
"tag",
")",
",",
"exc",
",",
"getattr",
"(",
"node",
",",
"'lineno'",
",",
"'?'",
")",
",",
"fname",
")",
"raise_",
"(",
"etype",
",",
"msg",
",",
"tb",
")"
]
| Context manager managing exceptions and adding line number of the
current node and name of the current file to the error message.
:param fname: the current file being processed
:param node: the current node being processed | [
"Context",
"manager",
"managing",
"exceptions",
"and",
"adding",
"line",
"number",
"of",
"the",
"current",
"node",
"and",
"name",
"of",
"the",
"current",
"file",
"to",
"the",
"error",
"message",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L757-L771 |
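A hedged sketch of the `context` manager above; the file name is illustrative, and the try/except only serves to show the decorated message:

```python
from openquake.baselib.node import Node, context

node = Node('magScaleRel', {}, 'WC1994')
try:
    with context('job.ini', node):
        float(node.text)     # fails: 'WC1994' is not a number
except ValueError as err:
    # message gains the node tag, its line number (empty/None for a
    # hand-built node) and the file name, e.g.:
    # node magScaleRel: could not convert string to float: 'WC1994',
    # line ... of job.ini
    print(err)
```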
gem/oq-engine | openquake/baselib/node.py | StreamingXMLWriter.shorten | def shorten(self, tag):
"""
Get the short representation of a fully qualified tag
:param str tag: a (fully qualified or not) XML tag
"""
if tag.startswith('{'):
ns, _tag = tag.rsplit('}')
tag = self.nsmap.get(ns[1:], '') + _tag
return tag | python | def shorten(self, tag):
if tag.startswith('{'):
ns, _tag = tag.rsplit('}')
tag = self.nsmap.get(ns[1:], '') + _tag
return tag | [
"def",
"shorten",
"(",
"self",
",",
"tag",
")",
":",
"if",
"tag",
".",
"startswith",
"(",
"'{'",
")",
":",
"ns",
",",
"_tag",
"=",
"tag",
".",
"rsplit",
"(",
"'}'",
")",
"tag",
"=",
"self",
".",
"nsmap",
".",
"get",
"(",
"ns",
"[",
"1",
":",
"]",
",",
"''",
")",
"+",
"_tag",
"return",
"tag"
]
| Get the short representation of a fully qualified tag
:param str tag: a (fully qualified or not) XML tag | [
"Get",
"the",
"short",
"representation",
"of",
"a",
"fully",
"qualified",
"tag"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L254-L263 |
gem/oq-engine | openquake/baselib/node.py | StreamingXMLWriter._write | def _write(self, text):
"""Write text by respecting the current indentlevel"""
spaces = ' ' * (self.indent * self.indentlevel)
t = spaces + text.strip() + '\n'
if hasattr(t, 'encode'):
t = t.encode(self.encoding, 'xmlcharrefreplace')
self.stream.write(t) | python | def _write(self, text):
spaces = ' ' * (self.indent * self.indentlevel)
t = spaces + text.strip() + '\n'
if hasattr(t, 'encode'):
t = t.encode(self.encoding, 'xmlcharrefreplace')
self.stream.write(t) | [
"def",
"_write",
"(",
"self",
",",
"text",
")",
":",
"spaces",
"=",
"' '",
"*",
"(",
"self",
".",
"indent",
"*",
"self",
".",
"indentlevel",
")",
"t",
"=",
"spaces",
"+",
"text",
".",
"strip",
"(",
")",
"+",
"'\\n'",
"if",
"hasattr",
"(",
"t",
",",
"'encode'",
")",
":",
"t",
"=",
"t",
".",
"encode",
"(",
"self",
".",
"encoding",
",",
"'xmlcharrefreplace'",
")",
"self",
".",
"stream",
".",
"write",
"(",
"t",
")"
]
| Write text by respecting the current indentlevel | [
"Write",
"text",
"by",
"respecting",
"the",
"current",
"indentlevel"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L265-L271 |
gem/oq-engine | openquake/baselib/node.py | StreamingXMLWriter.emptyElement | def emptyElement(self, name, attrs):
"""Add an empty element (may have attributes)"""
attr = ' '.join('%s=%s' % (n, quoteattr(scientificformat(v)))
for n, v in sorted(attrs.items()))
self._write('<%s %s/>' % (name, attr)) | python | def emptyElement(self, name, attrs):
attr = ' '.join('%s=%s' % (n, quoteattr(scientificformat(v)))
for n, v in sorted(attrs.items()))
self._write('<%s %s/>' % (name, attr)) | [
"def",
"emptyElement",
"(",
"self",
",",
"name",
",",
"attrs",
")",
":",
"attr",
"=",
"' '",
".",
"join",
"(",
"'%s=%s'",
"%",
"(",
"n",
",",
"quoteattr",
"(",
"scientificformat",
"(",
"v",
")",
")",
")",
"for",
"n",
",",
"v",
"in",
"sorted",
"(",
"attrs",
".",
"items",
"(",
")",
")",
")",
"self",
".",
"_write",
"(",
"'<%s %s/>'",
"%",
"(",
"name",
",",
"attr",
")",
")"
]
| Add an empty element (may have attributes) | [
"Add",
"an",
"empty",
"element",
"(",
"may",
"have",
"attributes",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L273-L277 |
gem/oq-engine | openquake/baselib/node.py | StreamingXMLWriter.start_tag | def start_tag(self, name, attrs=None):
"""Open an XML tag"""
if not attrs:
self._write('<%s>' % name)
else:
self._write('<' + name)
for (name, value) in sorted(attrs.items()):
self._write(
' %s=%s' % (name, quoteattr(scientificformat(value))))
self._write('>')
self.indentlevel += 1 | python | def start_tag(self, name, attrs=None):
if not attrs:
self._write('<%s>' % name)
else:
self._write('<' + name)
for (name, value) in sorted(attrs.items()):
self._write(
' %s=%s' % (name, quoteattr(scientificformat(value))))
self._write('>')
self.indentlevel += 1 | [
"def",
"start_tag",
"(",
"self",
",",
"name",
",",
"attrs",
"=",
"None",
")",
":",
"if",
"not",
"attrs",
":",
"self",
".",
"_write",
"(",
"'<%s>'",
"%",
"name",
")",
"else",
":",
"self",
".",
"_write",
"(",
"'<'",
"+",
"name",
")",
"for",
"(",
"name",
",",
"value",
")",
"in",
"sorted",
"(",
"attrs",
".",
"items",
"(",
")",
")",
":",
"self",
".",
"_write",
"(",
"' %s=%s'",
"%",
"(",
"name",
",",
"quoteattr",
"(",
"scientificformat",
"(",
"value",
")",
")",
")",
")",
"self",
".",
"_write",
"(",
"'>'",
")",
"self",
".",
"indentlevel",
"+=",
"1"
]
| Open an XML tag | [
"Open",
"an",
"XML",
"tag"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L279-L289 |
gem/oq-engine | openquake/baselib/node.py | StreamingXMLWriter.serialize | def serialize(self, node):
"""Serialize a node object (typically an ElementTree object)"""
if isinstance(node.tag, types.FunctionType):
# this looks like a bug of ElementTree: comments are stored as
# functions!?? see https://hg.python.org/sandbox/python2.7/file/tip/Lib/xml/etree/ElementTree.py#l458
return
if self.nsmap is not None:
tag = self.shorten(node.tag)
else:
tag = node.tag
with warnings.catch_warnings(): # unwanted ElementTree warning
warnings.simplefilter('ignore')
leafnode = not node
# NB: we cannot use len(node) to identify leaves since nodes containing
# an iterator have no length. They are always True, even if empty :-(
if leafnode and node.text is None:
self.emptyElement(tag, node.attrib)
return
self.start_tag(tag, node.attrib)
if node.text is not None:
txt = escape(scientificformat(node.text).strip())
if txt:
self._write(txt)
for subnode in node:
self.serialize(subnode)
self.end_tag(tag) | python | def serialize(self, node):
if isinstance(node.tag, types.FunctionType):
return
if self.nsmap is not None:
tag = self.shorten(node.tag)
else:
tag = node.tag
with warnings.catch_warnings():
warnings.simplefilter('ignore')
leafnode = not node
if leafnode and node.text is None:
self.emptyElement(tag, node.attrib)
return
self.start_tag(tag, node.attrib)
if node.text is not None:
txt = escape(scientificformat(node.text).strip())
if txt:
self._write(txt)
for subnode in node:
self.serialize(subnode)
self.end_tag(tag) | [
"def",
"serialize",
"(",
"self",
",",
"node",
")",
":",
"if",
"isinstance",
"(",
"node",
".",
"tag",
",",
"types",
".",
"FunctionType",
")",
":",
"# this looks like a bug of ElementTree: comments are stored as",
"# functions!?? see https://hg.python.org/sandbox/python2.7/file/tip/Lib/xml/etree/ElementTree.py#l458",
"return",
"if",
"self",
".",
"nsmap",
"is",
"not",
"None",
":",
"tag",
"=",
"self",
".",
"shorten",
"(",
"node",
".",
"tag",
")",
"else",
":",
"tag",
"=",
"node",
".",
"tag",
"with",
"warnings",
".",
"catch_warnings",
"(",
")",
":",
"# unwanted ElementTree warning",
"warnings",
".",
"simplefilter",
"(",
"'ignore'",
")",
"leafnode",
"=",
"not",
"node",
"# NB: we cannot use len(node) to identify leafs since nodes containing",
"# an iterator have no length. They are always True, even if empty :-(",
"if",
"leafnode",
"and",
"node",
".",
"text",
"is",
"None",
":",
"self",
".",
"emptyElement",
"(",
"tag",
",",
"node",
".",
"attrib",
")",
"return",
"self",
".",
"start_tag",
"(",
"tag",
",",
"node",
".",
"attrib",
")",
"if",
"node",
".",
"text",
"is",
"not",
"None",
":",
"txt",
"=",
"escape",
"(",
"scientificformat",
"(",
"node",
".",
"text",
")",
".",
"strip",
"(",
")",
")",
"if",
"txt",
":",
"self",
".",
"_write",
"(",
"txt",
")",
"for",
"subnode",
"in",
"node",
":",
"self",
".",
"serialize",
"(",
"subnode",
")",
"self",
".",
"end_tag",
"(",
"tag",
")"
]
| Serialize a node object (typically an ElementTree object) | [
"Serialize",
"a",
"node",
"object",
"(",
"typically",
"an",
"ElementTree",
"object",
")"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L296-L321 |
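A minimal usage sketch for `StreamingXMLWriter.serialize`, assuming the class is importable from `openquake.baselib.node` and that its constructor accepts a binary stream (any further constructor arguments are not shown in this record):

import io
from xml.etree import ElementTree as etree
from openquake.baselib.node import StreamingXMLWriter

# Build a small ElementTree and stream it out without materialising
# the whole XML document as a single string
root = etree.Element('root', {'kind': 'demo'})
etree.SubElement(root, 'child').text = 'value'
buf = io.BytesIO()
writer = StreamingXMLWriter(buf)  # binary stream; signature assumed
writer.serialize(root)
print(buf.getvalue().decode('utf-8'))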
gem/oq-engine | openquake/baselib/node.py | Node.getnodes | def getnodes(self, name):
"Return the direct subnodes with name 'name'"
for node in self.nodes:
if striptag(node.tag) == name:
yield node | python | def getnodes(self, name):
"Return the direct subnodes with name 'name'"
for node in self.nodes:
if striptag(node.tag) == name:
yield node | [
"def",
"getnodes",
"(",
"self",
",",
"name",
")",
":",
"for",
"node",
"in",
"self",
".",
"nodes",
":",
"if",
"striptag",
"(",
"node",
".",
"tag",
")",
"==",
"name",
":",
"yield",
"node"
]
| Return the direct subnodes with name 'name' | [
"Return",
"the",
"direct",
"subnodes",
"with",
"name",
"name"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L458-L462 |
gem/oq-engine | openquake/baselib/node.py | Node.append | def append(self, node):
"Append a new subnode"
if not isinstance(node, self.__class__):
raise TypeError('Expected Node instance, got %r' % node)
self.nodes.append(node) | python | def append(self, node):
"Append a new subnode"
if not isinstance(node, self.__class__):
raise TypeError('Expected Node instance, got %r' % node)
self.nodes.append(node) | [
"def",
"append",
"(",
"self",
",",
"node",
")",
":",
"if",
"not",
"isinstance",
"(",
"node",
",",
"self",
".",
"__class__",
")",
":",
"raise",
"TypeError",
"(",
"'Expected Node instance, got %r'",
"%",
"node",
")",
"self",
".",
"nodes",
".",
"append",
"(",
"node",
")"
]
| Append a new subnode | [
"Append",
"a",
"new",
"subnode"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L464-L468 |
gem/oq-engine | openquake/baselib/node.py | Node.to_str | def to_str(self, expandattrs=True, expandvals=True):
"""
Convert the node into a string, intended for testing/debugging purposes
:param expandattrs:
print the values of the attributes if True, else print only the names
:param expandvals:
print the values if True, else print only the tag names
"""
out = io.BytesIO()
node_display(self, expandattrs, expandvals, out)
return decode(out.getvalue()) | python | def to_str(self, expandattrs=True, expandvals=True):
out = io.BytesIO()
node_display(self, expandattrs, expandvals, out)
return decode(out.getvalue()) | [
"def",
"to_str",
"(",
"self",
",",
"expandattrs",
"=",
"True",
",",
"expandvals",
"=",
"True",
")",
":",
"out",
"=",
"io",
".",
"BytesIO",
"(",
")",
"node_display",
"(",
"self",
",",
"expandattrs",
",",
"expandvals",
",",
"out",
")",
"return",
"decode",
"(",
"out",
".",
"getvalue",
"(",
")",
")"
]
| Convert the node into a string, intended for testing/debugging purposes
:param expandattrs:
print the values of the attributes if True, else print only the names
:param expandvals:
print the values if True, else print only the tag names | [
"Convert",
"the",
"node",
"into",
"a",
"string",
"intended",
"for",
"testing",
"/",
"debugging",
"purposes"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L470-L481 |
gem/oq-engine | openquake/baselib/node.py | ValidatingXmlParser.parse_bytes | def parse_bytes(self, bytestr, isfinal=True):
"""
Parse a byte string. If the string is very large, split it in chunks
and parse each chunk with isfinal=False, then parse an empty chunk
with isfinal=True.
"""
with self._context():
self.filename = None
self.p.Parse(bytestr, isfinal)
return self._root | python | def parse_bytes(self, bytestr, isfinal=True):
with self._context():
self.filename = None
self.p.Parse(bytestr, isfinal)
return self._root | [
"def",
"parse_bytes",
"(",
"self",
",",
"bytestr",
",",
"isfinal",
"=",
"True",
")",
":",
"with",
"self",
".",
"_context",
"(",
")",
":",
"self",
".",
"filename",
"=",
"None",
"self",
".",
"p",
".",
"Parse",
"(",
"bytestr",
",",
"isfinal",
")",
"return",
"self",
".",
"_root"
]
| Parse a byte string. If the string is very large, split it in chunks
and parse each chunk with isfinal=False, then parse an empty chunk
with isfinal=True. | [
"Parse",
"a",
"byte",
"string",
".",
"If",
"the",
"string",
"is",
"very",
"large",
"split",
"it",
"in",
"chuncks",
"and",
"parse",
"each",
"chunk",
"with",
"isfinal",
"=",
"False",
"then",
"parse",
"an",
"empty",
"chunk",
"with",
"isfinal",
"=",
"True",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L815-L824 |
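A sketch of the chunked-parsing pattern described in the docstring; `parser` stands in for however a `ValidatingXmlParser` instance is constructed:

def parse_in_chunks(parser, bytestr, chunksize=1024 * 1024):
    # Feed the parser fixed-size slices with isfinal=False, then
    # close the parse with a final empty chunk
    for start in range(0, len(bytestr), chunksize):
        parser.parse_bytes(bytestr[start:start + chunksize], isfinal=False)
    return parser.parse_bytes(b'', isfinal=True)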
gem/oq-engine | openquake/baselib/node.py | ValidatingXmlParser.parse_file | def parse_file(self, file_or_fname):
"""
Parse a file or a filename
"""
with self._context():
if hasattr(file_or_fname, 'read'):
self.filename = getattr(
file_or_fname, 'name', file_or_fname.__class__.__name__)
self.p.ParseFile(file_or_fname)
else:
self.filename = file_or_fname
with open(file_or_fname, 'rb') as f:
self.p.ParseFile(f)
return self._root | python | def parse_file(self, file_or_fname):
with self._context():
if hasattr(file_or_fname, 'read'):
self.filename = getattr(
file_or_fname, 'name', file_or_fname.__class__.__name__)
self.p.ParseFile(file_or_fname)
else:
self.filename = file_or_fname
with open(file_or_fname, 'rb') as f:
self.p.ParseFile(f)
return self._root | [
"def",
"parse_file",
"(",
"self",
",",
"file_or_fname",
")",
":",
"with",
"self",
".",
"_context",
"(",
")",
":",
"if",
"hasattr",
"(",
"file_or_fname",
",",
"'read'",
")",
":",
"self",
".",
"filename",
"=",
"getattr",
"(",
"file_or_fname",
",",
"'name'",
",",
"file_or_fname",
".",
"__class__",
".",
"__name__",
")",
"self",
".",
"p",
".",
"ParseFile",
"(",
"file_or_fname",
")",
"else",
":",
"self",
".",
"filename",
"=",
"file_or_fname",
"with",
"open",
"(",
"file_or_fname",
",",
"'rb'",
")",
"as",
"f",
":",
"self",
".",
"p",
".",
"ParseFile",
"(",
"f",
")",
"return",
"self",
".",
"_root"
]
| Parse a file or a filename | [
"Parse",
"a",
"file",
"or",
"a",
"filename"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L826-L839 |
gem/oq-engine | openquake/hmtk/plotting/seismicity/completeness/cumulative_rate_analysis.py | SimpleCumulativeRate.completeness | def completeness(self, catalogue, config, saveplot=False, filetype='png',
timeout=120):
'''
:param catalogue:
Earthquake catalogue as instance of
:class:`openquake.hmtk.seismicity.catalogue.Catalogue`
:param dict config:
Configuration parameters of the algorithm, containing the
following information:
'magnitude_bin' Size of magnitude bin (non-negative float)
'time_bin' Size (in dec. years) of the time window (non-negative
float)
'increment_lock' Boolean to indicate whether to ensure
completeness magnitudes always decrease with more
recent bins
:returns:
2-column table indicating year of completeness and corresponding
magnitude numpy.ndarray
'''
if saveplot and not isinstance(saveplot, str):
raise ValueError('To save the figures enter a filename: ')
# Get magnitude bins
magnitude_bins = self._get_magnitudes_from_spacing(
catalogue.data['magnitude'],
config['magnitude_bin'])
dec_time = catalogue.get_decimal_time()
completeness_table = np.zeros([len(magnitude_bins) - 1, 2],
dtype=float)
min_year = float(np.min(catalogue.data['year']))
max_year = float(np.max(catalogue.data['year'])) + 1.0
has_completeness = np.zeros(len(magnitude_bins) - 1, dtype=bool)
for iloc in range(0, len(magnitude_bins) - 1):
lower_mag = magnitude_bins[iloc]
upper_mag = magnitude_bins[iloc + 1]
idx = np.logical_and(catalogue.data['magnitude'] >= lower_mag,
catalogue.data['magnitude'] < upper_mag)
cumvals = np.cumsum(np.ones(np.sum(idx)))
plt.plot(dec_time[idx], cumvals, '.')
plt.xlim(min_year, max_year + 5)
title_string = 'Magnitude %5.2f to %5.2f' % (lower_mag, upper_mag)
plt.title(title_string)
pts = pylab.ginput(1, timeout=timeout)[0]
if pts[0] <= max_year:
# Magnitude bin has a completeness point
has_completeness[iloc] = True
completeness_table[iloc, 0] = np.floor(pts[0])
completeness_table[iloc, 1] = magnitude_bins[iloc]
print(completeness_table[iloc, :], has_completeness[iloc])
if config['increment_lock'] and (iloc > 0) and \
(completeness_table[iloc, 0] > completeness_table[iloc - 1, 0]):
completeness_table[iloc, 0] = \
completeness_table[iloc - 1, 0]
# Add marker line to indicate completeness point
marker_line = np.array([
[0., completeness_table[iloc, 0]],
[cumvals[-1], completeness_table[iloc, 0]]])
plt.plot(marker_line[:, 0], marker_line[:, 1], 'r-')
if saveplot:
filename = saveplot + '_' + ('%5.2f' % lower_mag) + (
'%5.2f' % upper_mag) + '.' + filetype
plt.savefig(filename, format=filetype)
plt.close()
return completeness_table[has_completeness, :] | python | def completeness(self, catalogue, config, saveplot=False, filetype='png',
timeout=120):
if saveplot and not isinstance(saveplot, str):
raise ValueError('To save the figures enter a filename: ')
magnitude_bins = self._get_magnitudes_from_spacing(
catalogue.data['magnitude'],
config['magnitude_bin'])
dec_time = catalogue.get_decimal_time()
completeness_table = np.zeros([len(magnitude_bins) - 1, 2],
dtype=float)
min_year = float(np.min(catalogue.data['year']))
max_year = float(np.max(catalogue.data['year'])) + 1.0
has_completeness = np.zeros(len(magnitude_bins) - 1, dtype=bool)
for iloc in range(0, len(magnitude_bins) - 1):
lower_mag = magnitude_bins[iloc]
upper_mag = magnitude_bins[iloc + 1]
idx = np.logical_and(catalogue.data['magnitude'] >= lower_mag,
catalogue.data['magnitude'] < upper_mag)
cumvals = np.cumsum(np.ones(np.sum(idx)))
plt.plot(dec_time[idx], cumvals, '.')
plt.xlim(min_year, max_year + 5)
title_string = 'Magnitude %5.2f to %5.2f' % (lower_mag, upper_mag)
plt.title(title_string)
pts = pylab.ginput(1, timeout=timeout)[0]
if pts[0] <= max_year:
has_completeness[iloc] = True
completeness_table[iloc, 0] = np.floor(pts[0])
completeness_table[iloc, 1] = magnitude_bins[iloc]
print(completeness_table[iloc, :], has_completeness[iloc])
if config['increment_lock'] and (iloc > 0) and \
(completeness_table[iloc, 0] > completeness_table[iloc - 1, 0]):
completeness_table[iloc, 0] = \
completeness_table[iloc - 1, 0]
marker_line = np.array([
[0., completeness_table[iloc, 0]],
[cumvals[-1], completeness_table[iloc, 0]]])
plt.plot(marker_line[:, 0], marker_line[:, 1], 'r-')
if saveplot:
filename = saveplot + '_' + ('%5.2f' % lower_mag) + (
'%5.2f' % upper_mag) + '.' + filetype
plt.savefig(filename, format=filetype)
plt.close()
return completeness_table[has_completeness, :] | [
"def",
"completeness",
"(",
"self",
",",
"catalogue",
",",
"config",
",",
"saveplot",
"=",
"False",
",",
"filetype",
"=",
"'png'",
",",
"timeout",
"=",
"120",
")",
":",
"if",
"saveplot",
"and",
"not",
"isinstance",
"(",
"saveplot",
",",
"str",
")",
":",
"raise",
"ValueError",
"(",
"'To save the figures enter a filename: '",
")",
"# Get magntitude bins",
"magnitude_bins",
"=",
"self",
".",
"_get_magnitudes_from_spacing",
"(",
"catalogue",
".",
"data",
"[",
"'magnitude'",
"]",
",",
"config",
"[",
"'magnitude_bin'",
"]",
")",
"dec_time",
"=",
"catalogue",
".",
"get_decimal_time",
"(",
")",
"completeness_table",
"=",
"np",
".",
"zeros",
"(",
"[",
"len",
"(",
"magnitude_bins",
")",
"-",
"1",
",",
"2",
"]",
",",
"dtype",
"=",
"float",
")",
"min_year",
"=",
"float",
"(",
"np",
".",
"min",
"(",
"catalogue",
".",
"data",
"[",
"'year'",
"]",
")",
")",
"max_year",
"=",
"float",
"(",
"np",
".",
"max",
"(",
"catalogue",
".",
"data",
"[",
"'year'",
"]",
")",
")",
"+",
"1.0",
"has_completeness",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"magnitude_bins",
")",
"-",
"1",
",",
"dtype",
"=",
"bool",
")",
"for",
"iloc",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"magnitude_bins",
")",
"-",
"1",
")",
":",
"lower_mag",
"=",
"magnitude_bins",
"[",
"iloc",
"]",
"upper_mag",
"=",
"magnitude_bins",
"[",
"iloc",
"+",
"1",
"]",
"idx",
"=",
"np",
".",
"logical_and",
"(",
"catalogue",
".",
"data",
"[",
"'magnitude'",
"]",
">=",
"lower_mag",
",",
"catalogue",
".",
"data",
"[",
"'magnitude'",
"]",
"<",
"upper_mag",
")",
"cumvals",
"=",
"np",
".",
"cumsum",
"(",
"np",
".",
"ones",
"(",
"np",
".",
"sum",
"(",
"idx",
")",
")",
")",
"plt",
".",
"plot",
"(",
"dec_time",
"[",
"idx",
"]",
",",
"cumvals",
",",
"'.'",
")",
"plt",
".",
"xlim",
"(",
"min_year",
",",
"max_year",
"+",
"5",
")",
"title_string",
"=",
"'Magnitude %5.2f to %5.2f'",
"%",
"(",
"lower_mag",
",",
"upper_mag",
")",
"plt",
".",
"title",
"(",
"title_string",
")",
"pts",
"=",
"pylab",
".",
"ginput",
"(",
"1",
",",
"timeout",
"=",
"timeout",
")",
"[",
"0",
"]",
"if",
"pts",
"[",
"0",
"]",
"<=",
"max_year",
":",
"# Magnitude bin has no completeness!",
"has_completeness",
"[",
"iloc",
"]",
"=",
"True",
"completeness_table",
"[",
"iloc",
",",
"0",
"]",
"=",
"np",
".",
"floor",
"(",
"pts",
"[",
"0",
"]",
")",
"completeness_table",
"[",
"iloc",
",",
"1",
"]",
"=",
"magnitude_bins",
"[",
"iloc",
"]",
"print",
"(",
"completeness_table",
"[",
"iloc",
",",
":",
"]",
",",
"has_completeness",
"[",
"iloc",
"]",
")",
"if",
"config",
"[",
"'increment_lock'",
"]",
"and",
"(",
"iloc",
">",
"0",
")",
"and",
"(",
"completeness_table",
"[",
"iloc",
",",
"0",
"]",
">",
"completeness_table",
"[",
"iloc",
"-",
"1",
",",
"0",
"]",
")",
":",
"completeness_table",
"[",
"iloc",
",",
"0",
"]",
"=",
"completeness_table",
"[",
"iloc",
"-",
"1",
",",
"0",
"]",
"# Add marker line to indicate completeness point",
"marker_line",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"0.",
",",
"completeness_table",
"[",
"iloc",
",",
"0",
"]",
"]",
",",
"[",
"cumvals",
"[",
"-",
"1",
"]",
",",
"completeness_table",
"[",
"iloc",
",",
"0",
"]",
"]",
"]",
")",
"plt",
".",
"plot",
"(",
"marker_line",
"[",
":",
",",
"0",
"]",
",",
"marker_line",
"[",
":",
",",
"1",
"]",
",",
"'r-'",
")",
"if",
"saveplot",
":",
"filename",
"=",
"saveplot",
"+",
"'_'",
"+",
"(",
"'%5.2f'",
"%",
"lower_mag",
")",
"+",
"(",
"'%5.2f'",
"%",
"upper_mag",
")",
"+",
"'.'",
"+",
"filetype",
"plt",
".",
"savefig",
"(",
"filename",
",",
"format",
"=",
"filetype",
")",
"plt",
".",
"close",
"(",
")",
"return",
"completeness_table",
"[",
"has_completeness",
",",
":",
"]"
]
| :param catalogue:
Earthquake catalogue as instance of
:class:`openquake.hmtk.seismicity.catalogue.Catalogue`
:param dict config:
Configuration parameters of the algorithm, containing the
following information:
'magnitude_bin' Size of magnitude bin (non-negative float)
'time_bin' Size (in dec. years) of the time window (non-negative
float)
'increment_lock' Boolean to indicate whether to ensure
completeness magnitudes always decrease with more
recent bins
:returns:
2-column table indicating year of completeness and corresponding
magnitude numpy.ndarray | [
":",
"param",
"catalogue",
":",
"Earthquake",
"catalogue",
"as",
"instance",
"of",
":",
"class",
":",
"openquake",
".",
"hmtk",
".",
"seismicity",
".",
"catalogue",
".",
"Catalogue",
":",
"param",
"dict",
"config",
":",
"Configuration",
"parameters",
"of",
"the",
"algorithm",
"containing",
"the",
"following",
"information",
":",
"magnitude_bin",
"Size",
"of",
"magnitude",
"bin",
"(",
"non",
"-",
"negative",
"float",
")",
"time_bin",
"Size",
"(",
"in",
"dec",
".",
"years",
")",
"of",
"the",
"time",
"window",
"(",
"non",
"-",
"negative",
"float",
")",
"increment_lock",
"Boolean",
"to",
"indicate",
"whether",
"to",
"ensure",
"completeness",
"magnitudes",
"always",
"decrease",
"with",
"more",
"recent",
"bins",
":",
"returns",
":",
"2",
"-",
"column",
"table",
"indicating",
"year",
"of",
"completeness",
"and",
"corresponding",
"magnitude",
"numpy",
".",
"ndarray"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/completeness/cumulative_rate_analysis.py#L67-L130 |
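An illustrative configuration for the interactive completeness picker above, assuming `catalogue` is an already-loaded hmtk `Catalogue` instance; note that 'time_bin' is listed in the docstring but not referenced in the code shown:

config = {
    'magnitude_bin': 0.5,    # width of each magnitude bin
    'time_bin': 5.0,         # declared by the docstring, unused here
    'increment_lock': True,  # completeness years may not increase with magnitude
}
# Opens one plot per magnitude bin and waits for a mouse click on each:
# completeness_table = SimpleCumulativeRate().completeness(catalogue, config)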
gem/oq-engine | openquake/hmtk/plotting/seismicity/completeness/cumulative_rate_analysis.py | SimpleCumulativeRate._get_magnitudes_from_spacing | def _get_magnitudes_from_spacing(self, magnitudes, delta_m):
'''If a single magnitude spacing is input then create the bins
:param numpy.ndarray magnitudes:
Vector of earthquake magnitudes
:param float delta_m:
Magnitude bin width
:returns: Vector of magnitude bin edges (numpy.ndarray)
'''
min_mag = np.min(magnitudes)
max_mag = np.max(magnitudes)
if (max_mag - min_mag) < delta_m:
raise ValueError('Bin width greater than magnitude range!')
mag_bins = np.arange(np.floor(min_mag), np.ceil(max_mag), delta_m)
# Check to see if there are magnitudes in lower and upper bins
is_mag = np.logical_and(mag_bins - max_mag < delta_m,
min_mag - mag_bins < delta_m)
mag_bins = mag_bins[is_mag]
return mag_bins | python | def _get_magnitudes_from_spacing(self, magnitudes, delta_m):
min_mag = np.min(magnitudes)
max_mag = np.max(magnitudes)
if (max_mag - min_mag) < delta_m:
raise ValueError('Bin width greater than magnitude range!')
mag_bins = np.arange(np.floor(min_mag), np.ceil(max_mag), delta_m)
is_mag = np.logical_and(mag_bins - max_mag < delta_m,
min_mag - mag_bins < delta_m)
mag_bins = mag_bins[is_mag]
return mag_bins | [
"def",
"_get_magnitudes_from_spacing",
"(",
"self",
",",
"magnitudes",
",",
"delta_m",
")",
":",
"min_mag",
"=",
"np",
".",
"min",
"(",
"magnitudes",
")",
"max_mag",
"=",
"np",
".",
"max",
"(",
"magnitudes",
")",
"if",
"(",
"max_mag",
"-",
"min_mag",
")",
"<",
"delta_m",
":",
"raise",
"ValueError",
"(",
"'Bin width greater than magnitude range!'",
")",
"mag_bins",
"=",
"np",
".",
"arange",
"(",
"np",
".",
"floor",
"(",
"min_mag",
")",
",",
"np",
".",
"ceil",
"(",
"max_mag",
")",
",",
"delta_m",
")",
"# Check to see if there are magnitudes in lower and upper bins",
"is_mag",
"=",
"np",
".",
"logical_and",
"(",
"mag_bins",
"-",
"max_mag",
"<",
"delta_m",
",",
"min_mag",
"-",
"mag_bins",
"<",
"delta_m",
")",
"mag_bins",
"=",
"mag_bins",
"[",
"is_mag",
"]",
"return",
"mag_bins"
]
| If a single magnitude spacing is input then create the bins
:param numpy.ndarray magnitudes:
Vector of earthquake magnitudes
:param float delta_m:
Magnitude bin width
:returns: Vector of magnitude bin edges (numpy.ndarray) | [
"If",
"a",
"single",
"magnitude",
"spacing",
"is",
"input",
"then",
"create",
"the",
"bins"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/completeness/cumulative_rate_analysis.py#L132-L152 |
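A standalone re-run of the binning logic above, showing how the edges are anchored to the floor/ceiling of the observed magnitude range:

import numpy as np

magnitudes = np.array([4.2, 5.1, 6.7])
delta_m = 0.5
mag_bins = np.arange(np.floor(magnitudes.min()),
                     np.ceil(magnitudes.max()), delta_m)
# Keep only edges within delta_m of the observed range
is_mag = np.logical_and(mag_bins - magnitudes.max() < delta_m,
                        magnitudes.min() - mag_bins < delta_m)
print(mag_bins[is_mag])  # [4.  4.5 5.  5.5 6.  6.5]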
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | _merge_data | def _merge_data(dat1, dat2):
"""
Merge two data dictionaries containing catalogue data
:parameter dictionary dat1:
Catalogue data dictionary
:parameter dictionary dat2:
Catalogue data dictionary
:returns:
A catalogue data dictionary containing the information originally
included in dat1 and dat2
"""
cnt = 0
for key in dat1:
flg1 = len(dat1[key]) > 0
flg2 = len(dat2[key]) > 0
if flg1 != flg2:
cnt += 1
if cnt:
raise Warning('Cannot merge catalogues with different' +
' attributes')
return None
else:
for key in dat1:
if isinstance(dat1[key], np.ndarray):
dat1[key] = np.concatenate((dat1[key], dat2[key]), axis=0)
elif isinstance(dat1[key], list):
dat1[key] += dat2[key]
else:
raise ValueError('Unknown type')
return dat1 | python | def _merge_data(dat1, dat2):
cnt = 0
for key in dat1:
flg1 = len(dat1[key]) > 0
flg2 = len(dat2[key]) > 0
if flg1 != flg2:
cnt += 1
if cnt:
raise Warning('Cannot merge catalogues with different' +
' attributes')
return None
else:
for key in dat1:
if isinstance(dat1[key], np.ndarray):
dat1[key] = np.concatenate((dat1[key], dat2[key]), axis=0)
elif isinstance(dat1[key], list):
dat1[key] += dat2[key]
else:
raise ValueError('Unknown type')
return dat1 | [
"def",
"_merge_data",
"(",
"dat1",
",",
"dat2",
")",
":",
"cnt",
"=",
"0",
"for",
"key",
"in",
"dat1",
":",
"flg1",
"=",
"len",
"(",
"dat1",
"[",
"key",
"]",
")",
">",
"0",
"flg2",
"=",
"len",
"(",
"dat2",
"[",
"key",
"]",
")",
">",
"0",
"if",
"flg1",
"!=",
"flg2",
":",
"cnt",
"+=",
"1",
"if",
"cnt",
":",
"raise",
"Warning",
"(",
"'Cannot merge catalogues with different'",
"+",
"' attributes'",
")",
"return",
"None",
"else",
":",
"for",
"key",
"in",
"dat1",
":",
"if",
"isinstance",
"(",
"dat1",
"[",
"key",
"]",
",",
"np",
".",
"ndarray",
")",
":",
"dat1",
"[",
"key",
"]",
"=",
"np",
".",
"concatenate",
"(",
"(",
"dat1",
"[",
"key",
"]",
",",
"dat2",
"[",
"key",
"]",
")",
",",
"axis",
"=",
"0",
")",
"elif",
"isinstance",
"(",
"dat1",
"[",
"key",
"]",
",",
"list",
")",
":",
"dat1",
"[",
"key",
"]",
"+=",
"dat2",
"[",
"key",
"]",
"else",
":",
"raise",
"ValueError",
"(",
"'Unknown type'",
")",
"return",
"dat1"
]
| Merge two data dictionaries containing catalogue data
:parameter dictionary dat1:
Catalogue data dictionary
:parameter dictionary dat2:
Catalogue data dictionary
:returns:
A catalogue data dictionary containing the information originally
included in dat1 and dat2 | [
"Merge",
"two",
"data",
"dictionaries",
"containing",
"catalogue",
"data"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L566-L600 |
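A toy merge, assuming `_merge_data` can be imported from `openquake.hmtk.seismicity.catalogue`; arrays are concatenated, lists are extended, and a mismatch in which attributes are populated raises a Warning:

import numpy as np
from openquake.hmtk.seismicity.catalogue import _merge_data  # assumed import

dat1 = {'magnitude': np.array([5.0]), 'Agency': ['ISC']}
dat2 = {'magnitude': np.array([6.1]), 'Agency': ['GCMT']}
merged = _merge_data(dat1, dat2)
print(merged['magnitude'], merged['Agency'])  # [5.  6.1] ['ISC', 'GCMT']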
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue._get_row_str | def _get_row_str(self, i):
"""
Returns a string representation of the key information in a row
"""
row_data = ["{:s}".format(self.data['eventID'][i]),
"{:g}".format(self.data['year'][i]),
"{:g}".format(self.data['month'][i]),
"{:g}".format(self.data['day'][i]),
"{:g}".format(self.data['hour'][i]),
"{:g}".format(self.data['minute'][i]),
"{:.1f}".format(self.data['second'][i]),
"{:.3f}".format(self.data['longitude'][i]),
"{:.3f}".format(self.data['latitude'][i]),
"{:.1f}".format(self.data['depth'][i]),
"{:.1f}".format(self.data['magnitude'][i])]
return " ".join(row_data) | python | def _get_row_str(self, i):
row_data = ["{:s}".format(self.data['eventID'][i]),
"{:g}".format(self.data['year'][i]),
"{:g}".format(self.data['month'][i]),
"{:g}".format(self.data['day'][i]),
"{:g}".format(self.data['hour'][i]),
"{:g}".format(self.data['minute'][i]),
"{:.1f}".format(self.data['second'][i]),
"{:.3f}".format(self.data['longitude'][i]),
"{:.3f}".format(self.data['latitude'][i]),
"{:.1f}".format(self.data['depth'][i]),
"{:.1f}".format(self.data['magnitude'][i])]
return " ".join(row_data) | [
"def",
"_get_row_str",
"(",
"self",
",",
"i",
")",
":",
"row_data",
"=",
"[",
"\"{:s}\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"'eventID'",
"]",
"[",
"i",
"]",
")",
",",
"\"{:g}\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"'year'",
"]",
"[",
"i",
"]",
")",
",",
"\"{:g}\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"'month'",
"]",
"[",
"i",
"]",
")",
",",
"\"{:g}\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"'day'",
"]",
"[",
"i",
"]",
")",
",",
"\"{:g}\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"'hour'",
"]",
"[",
"i",
"]",
")",
",",
"\"{:g}\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"'minute'",
"]",
"[",
"i",
"]",
")",
",",
"\"{:.1f}\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"'second'",
"]",
"[",
"i",
"]",
")",
",",
"\"{:.3f}\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"'longitude'",
"]",
"[",
"i",
"]",
")",
",",
"\"{:.3f}\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"'latitude'",
"]",
"[",
"i",
"]",
")",
",",
"\"{:.1f}\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"'depth'",
"]",
"[",
"i",
"]",
")",
",",
"\"{:.1f}\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"'magnitude'",
"]",
"[",
"i",
"]",
")",
"]",
"return",
"\" \"",
".",
"join",
"(",
"row_data",
")"
]
| Returns a string representation of the key information in a row | [
"Returns",
"a",
"string",
"representation",
"of",
"the",
"key",
"information",
"in",
"a",
"row"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L138-L153 |
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.write_catalogue | def write_catalogue(self, output_file, key_list=SORTED_ATTRIBUTE_LIST):
"""
Writes the catalogue to file using HMTK format (CSV).
:param output_file:
Name of the output file
:param key_list:
Optional list of attribute keys to be exported
"""
with open(output_file, 'w') as of:
writer = csv.DictWriter(of, fieldnames=key_list)
writer.writeheader()
for i in range(self.get_number_events()):
row_dict = {}
for key in key_list:
if len(self.data[key]) > 0:
data = self.data[key][i]
if key in self.INT_ATTRIBUTE_LIST:
if np.isnan(data):
data = ''
else:
data = int(data)
if key in self.FLOAT_ATTRIBUTE_LIST:
if np.isnan(data):
data = ''
else:
data = float(data)
row_dict[key] = data
writer.writerow(row_dict) | python | def write_catalogue(self, output_file, key_list=SORTED_ATTRIBUTE_LIST):
with open(output_file, 'w') as of:
writer = csv.DictWriter(of, fieldnames=key_list)
writer.writeheader()
for i in range(self.get_number_events()):
row_dict = {}
for key in key_list:
if len(self.data[key]) > 0:
data = self.data[key][i]
if key in self.INT_ATTRIBUTE_LIST:
if np.isnan(data):
data = ''
else:
data = int(data)
if key in self.FLOAT_ATTRIBUTE_LIST:
if np.isnan(data):
data = ''
else:
data = float(data)
row_dict[key] = data
writer.writerow(row_dict) | [
"def",
"write_catalogue",
"(",
"self",
",",
"output_file",
",",
"key_list",
"=",
"SORTED_ATTRIBUTE_LIST",
")",
":",
"with",
"open",
"(",
"output_file",
",",
"'w'",
")",
"as",
"of",
":",
"writer",
"=",
"csv",
".",
"DictWriter",
"(",
"of",
",",
"fieldnames",
"=",
"key_list",
")",
"writer",
".",
"writeheader",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"get_number_events",
"(",
")",
")",
":",
"row_dict",
"=",
"{",
"}",
"for",
"key",
"in",
"key_list",
":",
"if",
"len",
"(",
"self",
".",
"data",
"[",
"key",
"]",
")",
">",
"0",
":",
"data",
"=",
"self",
".",
"data",
"[",
"key",
"]",
"[",
"i",
"]",
"if",
"key",
"in",
"self",
".",
"INT_ATTRIBUTE_LIST",
":",
"if",
"np",
".",
"isnan",
"(",
"data",
")",
":",
"data",
"=",
"''",
"else",
":",
"data",
"=",
"int",
"(",
"data",
")",
"if",
"key",
"in",
"self",
".",
"FLOAT_ATTRIBUTE_LIST",
":",
"if",
"np",
".",
"isnan",
"(",
"data",
")",
":",
"data",
"=",
"''",
"else",
":",
"data",
"=",
"float",
"(",
"data",
")",
"row_dict",
"[",
"key",
"]",
"=",
"data",
"writer",
".",
"writerow",
"(",
"row_dict",
")"
]
| Writes the catalogue to file using HMTK format (CSV).
:param output_file:
Name of the output file
:param key_list:
Optional list of attribute keys to be exported | [
"Writes",
"the",
"catalogue",
"to",
"file",
"using",
"HTMK",
"format",
"(",
"CSV",
")",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L192-L221 |
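A usage sketch exporting a subset of columns; the key names below are drawn from the row formatter earlier in this file and must belong to the catalogue's attribute lists:

keys = ['eventID', 'year', 'month', 'day',
        'longitude', 'latitude', 'depth', 'magnitude']
# catalogue.write_catalogue('catalogue_subset.csv', key_list=keys)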
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.load_to_array | def load_to_array(self, keys):
"""
This loads the data contained in the catalogue into a numpy array. The
method works only for float data
:param keys:
A list of keys to be uploaded into the array
:type list:
"""
# Preallocate the numpy array
data = np.empty((len(self.data[keys[0]]), len(keys)))
for i in range(0, len(self.data[keys[0]])):
for j, key in enumerate(keys):
data[i, j] = self.data[key][i]
return data | python | def load_to_array(self, keys):
data = np.empty((len(self.data[keys[0]]), len(keys)))
for i in range(0, len(self.data[keys[0]])):
for j, key in enumerate(keys):
data[i, j] = self.data[key][i]
return data | [
"def",
"load_to_array",
"(",
"self",
",",
"keys",
")",
":",
"# Preallocate the numpy array",
"data",
"=",
"np",
".",
"empty",
"(",
"(",
"len",
"(",
"self",
".",
"data",
"[",
"keys",
"[",
"0",
"]",
"]",
")",
",",
"len",
"(",
"keys",
")",
")",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"self",
".",
"data",
"[",
"keys",
"[",
"0",
"]",
"]",
")",
")",
":",
"for",
"j",
",",
"key",
"in",
"enumerate",
"(",
"keys",
")",
":",
"data",
"[",
"i",
",",
"j",
"]",
"=",
"self",
".",
"data",
"[",
"key",
"]",
"[",
"i",
"]",
"return",
"data"
]
| This loads the data contained in the catalogue into a numpy array. The
method works only for float data
:param keys:
A list of keys to be uploaded into the array
:type list: | [
"This",
"loads",
"the",
"data",
"contained",
"in",
"the",
"catalogue",
"into",
"a",
"numpy",
"array",
".",
"The",
"method",
"works",
"only",
"for",
"float",
"data"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L223-L237 |
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.load_from_array | def load_from_array(self, keys, data_array):
"""
This loads the data contained in an array into the catalogue object
:param keys:
A list of keys explaining the content of the columns in the array
:type list:
"""
if len(keys) != np.shape(data_array)[1]:
raise ValueError('Key list does not match shape of array!')
for i, key in enumerate(keys):
if key in self.INT_ATTRIBUTE_LIST:
self.data[key] = data_array[:, i].astype(int)
else:
self.data[key] = data_array[:, i]
if key not in self.TOTAL_ATTRIBUTE_LIST:
print('Key %s not a recognised catalogue attribute' % key)
self.update_end_year() | python | def load_from_array(self, keys, data_array):
if len(keys) != np.shape(data_array)[1]:
raise ValueError('Key list does not match shape of array!')
for i, key in enumerate(keys):
if key in self.INT_ATTRIBUTE_LIST:
self.data[key] = data_array[:, i].astype(int)
else:
self.data[key] = data_array[:, i]
if key not in self.TOTAL_ATTRIBUTE_LIST:
print('Key %s not a recognised catalogue attribute' % key)
self.update_end_year() | [
"def",
"load_from_array",
"(",
"self",
",",
"keys",
",",
"data_array",
")",
":",
"if",
"len",
"(",
"keys",
")",
"!=",
"np",
".",
"shape",
"(",
"data_array",
")",
"[",
"1",
"]",
":",
"raise",
"ValueError",
"(",
"'Key list does not match shape of array!'",
")",
"for",
"i",
",",
"key",
"in",
"enumerate",
"(",
"keys",
")",
":",
"if",
"key",
"in",
"self",
".",
"INT_ATTRIBUTE_LIST",
":",
"self",
".",
"data",
"[",
"key",
"]",
"=",
"data_array",
"[",
":",
",",
"i",
"]",
".",
"astype",
"(",
"int",
")",
"else",
":",
"self",
".",
"data",
"[",
"key",
"]",
"=",
"data_array",
"[",
":",
",",
"i",
"]",
"if",
"key",
"not",
"in",
"self",
".",
"TOTAL_ATTRIBUTE_LIST",
":",
"print",
"(",
"'Key %s not a recognised catalogue attribute'",
"%",
"key",
")",
"self",
".",
"update_end_year",
"(",
")"
]
| This loads the data contained in an array into the catalogue object
:param keys:
A list of keys explaining the content of the columns in the array
:type list: | [
"This",
"loads",
"the",
"data",
"contained",
"in",
"an",
"array",
"into",
"the",
"catalogue",
"object"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L239-L259 |
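A round-trip sketch: `load_from_array` and `load_to_array` share the same key list, so a float array can be rebuilt into a fresh catalogue (the `Catalogue` class and its no-argument constructor are assumed importable from `openquake.hmtk.seismicity.catalogue`):

import numpy as np
from openquake.hmtk.seismicity.catalogue import Catalogue  # assumed import

keys = ['year', 'longitude', 'latitude', 'depth', 'magnitude']
data = np.array([[1995., 13.2, 42.1, 10.0, 5.4],
                 [2003., 13.9, 41.7,  8.5, 4.8]])
cat = Catalogue()
cat.load_from_array(keys, data)  # 'year' is cast to int internally
print(cat.load_to_array(keys).shape)  # (2, 5)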
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.catalogue_mt_filter | def catalogue_mt_filter(self, mt_table, flag=None):
"""
Filter the catalogue using a magnitude-time table. The table has
two columns and n-rows.
:param numpy.ndarray mt_table:
Magnitude time table with n-rows where column 1 is year and column
2 is magnitude
"""
if flag is None:
# No flag defined, therefore all events are initially valid
flag = np.ones(self.get_number_events(), dtype=bool)
for comp_val in mt_table:
id0 = np.logical_and(self.data['year'].astype(float) < comp_val[0],
self.data['magnitude'] < comp_val[1])
print(id0)
flag[id0] = False
if not np.all(flag):
self.purge_catalogue(flag) | python | def catalogue_mt_filter(self, mt_table, flag=None):
if flag is None:
flag = np.ones(self.get_number_events(), dtype=bool)
for comp_val in mt_table:
id0 = np.logical_and(self.data['year'].astype(float) < comp_val[0],
self.data['magnitude'] < comp_val[1])
print(id0)
flag[id0] = False
if not np.all(flag):
self.purge_catalogue(flag) | [
"def",
"catalogue_mt_filter",
"(",
"self",
",",
"mt_table",
",",
"flag",
"=",
"None",
")",
":",
"if",
"flag",
"is",
"None",
":",
"# No flag defined, therefore all events are initially valid",
"flag",
"=",
"np",
".",
"ones",
"(",
"self",
".",
"get_number_events",
"(",
")",
",",
"dtype",
"=",
"bool",
")",
"for",
"comp_val",
"in",
"mt_table",
":",
"id0",
"=",
"np",
".",
"logical_and",
"(",
"self",
".",
"data",
"[",
"'year'",
"]",
".",
"astype",
"(",
"float",
")",
"<",
"comp_val",
"[",
"0",
"]",
",",
"self",
".",
"data",
"[",
"'magnitude'",
"]",
"<",
"comp_val",
"[",
"1",
"]",
")",
"print",
"(",
"id0",
")",
"flag",
"[",
"id0",
"]",
"=",
"False",
"if",
"not",
"np",
".",
"all",
"(",
"flag",
")",
":",
"self",
".",
"purge_catalogue",
"(",
"flag",
")"
]
| Filter the catalogue using a magnitude-time table. The table has
two columns and n-rows.
:param nump.ndarray mt_table:
Magnitude time table with n-rows where column 1 is year and column
2 is magnitude | [
"Filter",
"the",
"catalogue",
"using",
"a",
"magnitude",
"-",
"time",
"table",
".",
"The",
"table",
"has",
"two",
"columns",
"and",
"n",
"-",
"rows",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L282-L302 |
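An example magnitude-time completeness table for the filter above; events earlier than the year in column 1 and smaller than the magnitude in column 2 are removed:

import numpy as np

mt_table = np.array([[1990., 4.0],   # M < 4.0 incomplete before 1990
                     [1960., 5.0],   # M < 5.0 incomplete before 1960
                     [1900., 6.5]])  # M < 6.5 incomplete before 1900
# catalogue.catalogue_mt_filter(mt_table)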
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.get_bounding_box | def get_bounding_box(self):
"""
Returns the bounding box of the catalogue
:returns: (West, East, South, North)
"""
return (np.min(self.data["longitude"]),
np.max(self.data["longitude"]),
np.min(self.data["latitude"]),
np.max(self.data["latitude"])) | python | def get_bounding_box(self):
return (np.min(self.data["longitude"]),
np.max(self.data["longitude"]),
np.min(self.data["latitude"]),
np.max(self.data["latitude"])) | [
"def",
"get_bounding_box",
"(",
"self",
")",
":",
"return",
"(",
"np",
".",
"min",
"(",
"self",
".",
"data",
"[",
"\"longitude\"",
"]",
")",
",",
"np",
".",
"max",
"(",
"self",
".",
"data",
"[",
"\"longitude\"",
"]",
")",
",",
"np",
".",
"min",
"(",
"self",
".",
"data",
"[",
"\"latitude\"",
"]",
")",
",",
"np",
".",
"max",
"(",
"self",
".",
"data",
"[",
"\"latitude\"",
"]",
")",
")"
]
| Returns the bounding box of the catalogue
:returns: (West, East, South, North) | [
"Returns",
"the",
"bounding",
"box",
"of",
"the",
"catalogue"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L304-L313 |
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.get_observed_mmax_sigma | def get_observed_mmax_sigma(self, default=None):
"""
:returns: the sigma for the maximum observed magnitude
"""
if not isinstance(self.data['sigmaMagnitude'], np.ndarray):
obsmaxsig = default
else:
obsmaxsig = self.data['sigmaMagnitude'][
np.argmax(self.data['magnitude'])]
return obsmaxsig | python | def get_observed_mmax_sigma(self, default=None):
if not isinstance(self.data['sigmaMagnitude'], np.ndarray):
obsmaxsig = default
else:
obsmaxsig = self.data['sigmaMagnitude'][
np.argmax(self.data['magnitude'])]
return obsmaxsig | [
"def",
"get_observed_mmax_sigma",
"(",
"self",
",",
"default",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"data",
"[",
"'sigmaMagnitude'",
"]",
",",
"np",
".",
"ndarray",
")",
":",
"obsmaxsig",
"=",
"default",
"else",
":",
"obsmaxsig",
"=",
"self",
".",
"data",
"[",
"'sigmaMagnitude'",
"]",
"[",
"np",
".",
"argmax",
"(",
"self",
".",
"data",
"[",
"'magnitude'",
"]",
")",
"]",
"return",
"obsmaxsig"
]
| :returns: the sigma for the maximum observed magnitude | [
":",
"returns",
":",
"the",
"sigma",
"for",
"the",
"maximum",
"observed",
"magnitude"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L315-L324 |
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.get_decimal_time | def get_decimal_time(self):
'''
Returns the time of the catalogue as a decimal
'''
return decimal_time(self.data['year'],
self.data['month'],
self.data['day'],
self.data['hour'],
self.data['minute'],
self.data['second']) | python | def get_decimal_time(self):
return decimal_time(self.data['year'],
self.data['month'],
self.data['day'],
self.data['hour'],
self.data['minute'],
self.data['second']) | [
"def",
"get_decimal_time",
"(",
"self",
")",
":",
"return",
"decimal_time",
"(",
"self",
".",
"data",
"[",
"'year'",
"]",
",",
"self",
".",
"data",
"[",
"'month'",
"]",
",",
"self",
".",
"data",
"[",
"'day'",
"]",
",",
"self",
".",
"data",
"[",
"'hour'",
"]",
",",
"self",
".",
"data",
"[",
"'minute'",
"]",
",",
"self",
".",
"data",
"[",
"'second'",
"]",
")"
]
| Returns the time of the catalogue as a decimal | [
"Returns",
"the",
"time",
"of",
"the",
"catalogue",
"as",
"a",
"decimal"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L326-L335 |
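A sketch of the underlying `decimal_time` helper (import location assumed to be `openquake.hmtk.seismicity.utils`); noon on 1 July 2000 maps to roughly 2000.5:

import numpy as np
from openquake.hmtk.seismicity.utils import decimal_time  # assumed import

print(decimal_time(np.array([2000]), np.array([7]), np.array([1]),
                   np.array([12]), np.array([0]), np.array([0.0])))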
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.sort_catalogue_chronologically | def sort_catalogue_chronologically(self):
'''
Sorts the catalogue into chronological order
'''
dec_time = self.get_decimal_time()
idx = np.argsort(dec_time)
if np.all((idx[1:] - idx[:-1]) > 0.):
# Catalogue was already in chronological order
return
self.select_catalogue_events(idx) | python | def sort_catalogue_chronologically(self):
dec_time = self.get_decimal_time()
idx = np.argsort(dec_time)
if np.all((idx[1:] - idx[:-1]) > 0.):
return
self.select_catalogue_events(idx) | [
"def",
"sort_catalogue_chronologically",
"(",
"self",
")",
":",
"dec_time",
"=",
"self",
".",
"get_decimal_time",
"(",
")",
"idx",
"=",
"np",
".",
"argsort",
"(",
"dec_time",
")",
"if",
"np",
".",
"all",
"(",
"(",
"idx",
"[",
"1",
":",
"]",
"-",
"idx",
"[",
":",
"-",
"1",
"]",
")",
">",
"0.",
")",
":",
"# Catalogue was already in chronological order",
"return",
"self",
".",
"select_catalogue_events",
"(",
"idx",
")"
]
| Sorts the catalogue into chronological order | [
"Sorts",
"the",
"catalogue",
"into",
"chronological",
"order"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L353-L362 |
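The already-sorted shortcut above in isolation: argsort of a sorted vector is the identity permutation, so all consecutive index differences are positive:

import numpy as np

dec_time = np.array([1901.2, 1954.7, 1999.1])
idx = np.argsort(dec_time)
print(np.all((idx[1:] - idx[:-1]) > 0))  # True -> no reordering needed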
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.purge_catalogue | def purge_catalogue(self, flag_vector):
'''
Purges present catalogue with invalid events defined by flag_vector
:param numpy.ndarray flag_vector:
Boolean vector showing if events are selected (True) or not (False)
'''
id0 = np.where(flag_vector)[0]
self.select_catalogue_events(id0)
self.get_number_events() | python | def purge_catalogue(self, flag_vector):
id0 = np.where(flag_vector)[0]
self.select_catalogue_events(id0)
self.get_number_events() | [
"def",
"purge_catalogue",
"(",
"self",
",",
"flag_vector",
")",
":",
"id0",
"=",
"np",
".",
"where",
"(",
"flag_vector",
")",
"[",
"0",
"]",
"self",
".",
"select_catalogue_events",
"(",
"id0",
")",
"self",
".",
"get_number_events",
"(",
")"
]
| Purges present catalogue with invalid events defined by flag_vector
:param numpy.ndarray flag_vector:
Boolean vector showing if events are selected (True) or not (False) | [
"Purges",
"present",
"catalogue",
"with",
"invalid",
"events",
"defined",
"by",
"flag_vector"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L364-L374 |
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.select_catalogue_events | def select_catalogue_events(self, id0):
'''
Orders the events in the catalogue according to an indexing vector.
:param np.ndarray id0:
Pointer array indicating the locations of selected events
'''
for key in self.data:
if isinstance(
self.data[key], np.ndarray) and len(self.data[key]) > 0:
# Dictionary element is numpy array - use logical indexing
self.data[key] = self.data[key][id0]
elif isinstance(
self.data[key], list) and len(self.data[key]) > 0:
# Dictionary element is list
self.data[key] = [self.data[key][iloc] for iloc in id0]
else:
continue | python | def select_catalogue_events(self, id0):
for key in self.data:
if isinstance(
self.data[key], np.ndarray) and len(self.data[key]) > 0:
self.data[key] = self.data[key][id0]
elif isinstance(
self.data[key], list) and len(self.data[key]) > 0:
self.data[key] = [self.data[key][iloc] for iloc in id0]
else:
continue | [
"def",
"select_catalogue_events",
"(",
"self",
",",
"id0",
")",
":",
"for",
"key",
"in",
"self",
".",
"data",
":",
"if",
"isinstance",
"(",
"self",
".",
"data",
"[",
"key",
"]",
",",
"np",
".",
"ndarray",
")",
"and",
"len",
"(",
"self",
".",
"data",
"[",
"key",
"]",
")",
">",
"0",
":",
"# Dictionary element is numpy array - use logical indexing",
"self",
".",
"data",
"[",
"key",
"]",
"=",
"self",
".",
"data",
"[",
"key",
"]",
"[",
"id0",
"]",
"elif",
"isinstance",
"(",
"self",
".",
"data",
"[",
"key",
"]",
",",
"list",
")",
"and",
"len",
"(",
"self",
".",
"data",
"[",
"key",
"]",
")",
">",
"0",
":",
"# Dictionary element is list",
"self",
".",
"data",
"[",
"key",
"]",
"=",
"[",
"self",
".",
"data",
"[",
"key",
"]",
"[",
"iloc",
"]",
"for",
"iloc",
"in",
"id0",
"]",
"else",
":",
"continue"
]
| Orders the events in the catalogue according to an indexing vector.
:param np.ndarray id0:
Pointer array indicating the locations of selected events | [
"Orders",
"the",
"events",
"in",
"the",
"catalogue",
"according",
"to",
"an",
"indexing",
"vector",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L376-L393 |
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.get_depth_distribution | def get_depth_distribution(self, depth_bins, normalisation=False,
bootstrap=None):
'''
Gets the depth distribution of the earthquake catalogue to return a
single histogram. Depths may be normalised. If uncertainties are found
in the catalogue the distribution may be bootstrap sampled
:param numpy.ndarray depth_bins:
Bin edges for the depths
:param bool normalisation:
Choose to normalise the results such that the total contributions
sum to 1.0 (True) or not (False)
:param int bootstrap:
Number of bootstrap samples
:returns:
Histogram of depth values
'''
if len(self.data['depth']) == 0:
# If depth information is missing
raise ValueError('Depths missing in catalogue')
if len(self.data['depthError']) == 0:
self.data['depthError'] = np.zeros(self.get_number_events(),
dtype=float)
return bootstrap_histogram_1D(self.data['depth'],
depth_bins,
self.data['depthError'],
normalisation=normalisation,
number_bootstraps=bootstrap,
boundaries=(0., None)) | python | def get_depth_distribution(self, depth_bins, normalisation=False,
bootstrap=None):
if len(self.data['depth']) == 0:
raise ValueError('Depths missing in catalogue')
if len(self.data['depthError']) == 0:
self.data['depthError'] = np.zeros(self.get_number_events(),
dtype=float)
return bootstrap_histogram_1D(self.data['depth'],
depth_bins,
self.data['depthError'],
normalisation=normalisation,
number_bootstraps=bootstrap,
boundaries=(0., None)) | [
"def",
"get_depth_distribution",
"(",
"self",
",",
"depth_bins",
",",
"normalisation",
"=",
"False",
",",
"bootstrap",
"=",
"None",
")",
":",
"if",
"len",
"(",
"self",
".",
"data",
"[",
"'depth'",
"]",
")",
"==",
"0",
":",
"# If depth information is missing",
"raise",
"ValueError",
"(",
"'Depths missing in catalogue'",
")",
"if",
"len",
"(",
"self",
".",
"data",
"[",
"'depthError'",
"]",
")",
"==",
"0",
":",
"self",
".",
"data",
"[",
"'depthError'",
"]",
"=",
"np",
".",
"zeros",
"(",
"self",
".",
"get_number_events",
"(",
")",
",",
"dtype",
"=",
"float",
")",
"return",
"bootstrap_histogram_1D",
"(",
"self",
".",
"data",
"[",
"'depth'",
"]",
",",
"depth_bins",
",",
"self",
".",
"data",
"[",
"'depthError'",
"]",
",",
"normalisation",
"=",
"normalisation",
",",
"number_bootstraps",
"=",
"bootstrap",
",",
"boundaries",
"=",
"(",
"0.",
",",
"None",
")",
")"
]
| Gets the depth distribution of the earthquake catalogue to return a
single histogram. Depths may be normalised. If uncertainties are found
in the catalogue the distribution may be bootstrap sampled
:param numpy.ndarray depth_bins:
Bin edges for the depths
:param bool normalisation:
Choose to normalise the results such that the total contributions
sum to 1.0 (True) or not (False)
:param int bootstrap:
Number of bootstrap samples
:returns:
Histogram of depth values | [
"Gets",
"the",
"depth",
"distribution",
"of",
"the",
"earthquake",
"catalogue",
"to",
"return",
"a",
"single",
"histogram",
".",
"Depths",
"may",
"be",
"normalised",
".",
"If",
"uncertainties",
"are",
"found",
"in",
"the",
"catalogue",
"the",
"distrbution",
"may",
"be",
"bootstrap",
"sampled"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L395-L429 |
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.get_depth_pmf | def get_depth_pmf(self, depth_bins, default_depth=5.0, bootstrap=None):
"""
Returns the depth distribution of the catalogue as a probability mass
function
"""
if len(self.data['depth']) == 0:
# If depth information is missing
return PMF([(1.0, default_depth)])
# Get the depth distribution
depth_hist = self.get_depth_distribution(depth_bins,
normalisation=True,
bootstrap=bootstrap)
# If the histogram does not sum to 1.0 then remove the difference
# from the lowest bin
depth_hist = np.around(depth_hist, 3)
while depth_hist.sum() - 1.0:
depth_hist[-1] -= depth_hist.sum() - 1.0
depth_hist = np.around(depth_hist, 3)
pmf_list = []
for iloc, prob in enumerate(depth_hist):
pmf_list.append((prob,
(depth_bins[iloc] + depth_bins[iloc + 1]) / 2.0))
return PMF(pmf_list) | python | def get_depth_pmf(self, depth_bins, default_depth=5.0, bootstrap=None):
if len(self.data['depth']) == 0:
return PMF([(1.0, default_depth)])
depth_hist = self.get_depth_distribution(depth_bins,
normalisation=True,
bootstrap=bootstrap)
depth_hist = np.around(depth_hist, 3)
while depth_hist.sum() - 1.0:
depth_hist[-1] -= depth_hist.sum() - 1.0
depth_hist = np.around(depth_hist, 3)
pmf_list = []
for iloc, prob in enumerate(depth_hist):
pmf_list.append((prob,
(depth_bins[iloc] + depth_bins[iloc + 1]) / 2.0))
return PMF(pmf_list) | [
"def",
"get_depth_pmf",
"(",
"self",
",",
"depth_bins",
",",
"default_depth",
"=",
"5.0",
",",
"bootstrap",
"=",
"None",
")",
":",
"if",
"len",
"(",
"self",
".",
"data",
"[",
"'depth'",
"]",
")",
"==",
"0",
":",
"# If depth information is missing",
"return",
"PMF",
"(",
"[",
"(",
"1.0",
",",
"default_depth",
")",
"]",
")",
"# Get the depth distribution",
"depth_hist",
"=",
"self",
".",
"get_depth_distribution",
"(",
"depth_bins",
",",
"normalisation",
"=",
"True",
",",
"bootstrap",
"=",
"bootstrap",
")",
"# If the histogram does not sum to 1.0 then remove the difference",
"# from the lowest bin",
"depth_hist",
"=",
"np",
".",
"around",
"(",
"depth_hist",
",",
"3",
")",
"while",
"depth_hist",
".",
"sum",
"(",
")",
"-",
"1.0",
":",
"depth_hist",
"[",
"-",
"1",
"]",
"-=",
"depth_hist",
".",
"sum",
"(",
")",
"-",
"1.0",
"depth_hist",
"=",
"np",
".",
"around",
"(",
"depth_hist",
",",
"3",
")",
"pmf_list",
"=",
"[",
"]",
"for",
"iloc",
",",
"prob",
"in",
"enumerate",
"(",
"depth_hist",
")",
":",
"pmf_list",
".",
"append",
"(",
"(",
"prob",
",",
"(",
"depth_bins",
"[",
"iloc",
"]",
"+",
"depth_bins",
"[",
"iloc",
"+",
"1",
"]",
")",
"/",
"2.0",
")",
")",
"return",
"PMF",
"(",
"pmf_list",
")"
]
| Returns the depth distribution of the catalogue as a probability mass
function | [
"Returns",
"the",
"depth",
"distribution",
"of",
"the",
"catalogue",
"as",
"a",
"probability",
"mass",
"function"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L431-L454 |
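The renormalisation step above in isolation: after rounding to three decimals the histogram may not sum to exactly 1.0, so the residual is folded into the last (deepest) bin until the rounded sum is exact:

import numpy as np

depth_hist = np.around(np.array([1., 1., 1.]) / 3.0, 3)  # [0.333 0.333 0.333]
while depth_hist.sum() - 1.0:
    depth_hist[-1] -= depth_hist.sum() - 1.0
    depth_hist = np.around(depth_hist, 3)
print(depth_hist)  # [0.333 0.333 0.334]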
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.get_magnitude_depth_distribution | def get_magnitude_depth_distribution(self, magnitude_bins, depth_bins,
normalisation=False, bootstrap=None):
'''
Returns a 2-D magnitude-depth histogram for the catalogue
:param numpy.ndarray magnitude_bins:
Bin edges for the magnitudes
:param numpy.ndarray depth_bins:
Bin edges for the depths
:param bool normalisation:
Choose to normalise the results such that the total contributions
sum to 1.0 (True) or not (False)
:param int bootstrap:
Number of bootstrap samples
:returns:
2D histogram of events in magnitude-depth bins
'''
if len(self.data['depth']) == 0:
# If depth information is missing
raise ValueError('Depths missing in catalogue')
if len(self.data['depthError']) == 0:
self.data['depthError'] = np.zeros(self.get_number_events(),
dtype=float)
if len(self.data['sigmaMagnitude']) == 0:
self.data['sigmaMagnitude'] = np.zeros(self.get_number_events(),
dtype=float)
return bootstrap_histogram_2D(self.data['magnitude'],
self.data['depth'],
magnitude_bins,
depth_bins,
boundaries=[(0., None), (None, None)],
xsigma=self.data['sigmaMagnitude'],
ysigma=self.data['depthError'],
normalisation=normalisation,
number_bootstraps=bootstrap) | python | def get_magnitude_depth_distribution(self, magnitude_bins, depth_bins,
normalisation=False, bootstrap=None):
if len(self.data['depth']) == 0:
raise ValueError('Depths missing in catalogue')
if len(self.data['depthError']) == 0:
self.data['depthError'] = np.zeros(self.get_number_events(),
dtype=float)
if len(self.data['sigmaMagnitude']) == 0:
self.data['sigmaMagnitude'] = np.zeros(self.get_number_events(),
dtype=float)
return bootstrap_histogram_2D(self.data['magnitude'],
self.data['depth'],
magnitude_bins,
depth_bins,
boundaries=[(0., None), (None, None)],
xsigma=self.data['sigmaMagnitude'],
ysigma=self.data['depthError'],
normalisation=normalisation,
number_bootstraps=bootstrap) | [
"def",
"get_magnitude_depth_distribution",
"(",
"self",
",",
"magnitude_bins",
",",
"depth_bins",
",",
"normalisation",
"=",
"False",
",",
"bootstrap",
"=",
"None",
")",
":",
"if",
"len",
"(",
"self",
".",
"data",
"[",
"'depth'",
"]",
")",
"==",
"0",
":",
"# If depth information is missing",
"raise",
"ValueError",
"(",
"'Depths missing in catalogue'",
")",
"if",
"len",
"(",
"self",
".",
"data",
"[",
"'depthError'",
"]",
")",
"==",
"0",
":",
"self",
".",
"data",
"[",
"'depthError'",
"]",
"=",
"np",
".",
"zeros",
"(",
"self",
".",
"get_number_events",
"(",
")",
",",
"dtype",
"=",
"float",
")",
"if",
"len",
"(",
"self",
".",
"data",
"[",
"'sigmaMagnitude'",
"]",
")",
"==",
"0",
":",
"self",
".",
"data",
"[",
"'sigmaMagnitude'",
"]",
"=",
"np",
".",
"zeros",
"(",
"self",
".",
"get_number_events",
"(",
")",
",",
"dtype",
"=",
"float",
")",
"return",
"bootstrap_histogram_2D",
"(",
"self",
".",
"data",
"[",
"'magnitude'",
"]",
",",
"self",
".",
"data",
"[",
"'depth'",
"]",
",",
"magnitude_bins",
",",
"depth_bins",
",",
"boundaries",
"=",
"[",
"(",
"0.",
",",
"None",
")",
",",
"(",
"None",
",",
"None",
")",
"]",
",",
"xsigma",
"=",
"self",
".",
"data",
"[",
"'sigmaMagnitude'",
"]",
",",
"ysigma",
"=",
"self",
".",
"data",
"[",
"'depthError'",
"]",
",",
"normalisation",
"=",
"normalisation",
",",
"number_bootstraps",
"=",
"bootstrap",
")"
]
| Returns a 2-D magnitude-depth histogram for the catalogue
:param numpy.ndarray magnitude_bins:
Bin edges for the magnitudes
:param numpy.ndarray depth_bins:
Bin edges for the depths
:param bool normalisation:
Choose to normalise the results such that the total contributions
sum to 1.0 (True) or not (False)
:param int bootstrap:
Number of bootstrap samples
:returns:
2D histogram of events in magnitude-depth bins | [
"Returns",
"a",
"2",
"-",
"D",
"magnitude",
"-",
"depth",
"histogram",
"for",
"the",
"catalogue"
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L456-L497 |
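Typical bin edges for the 2-D magnitude-depth histogram, assuming `catalogue` is a populated `Catalogue` with depth information:

import numpy as np

magnitude_bins = np.arange(4.0, 8.1, 0.5)   # M 4.0 to 8.0 in half-unit bins
depth_bins = np.arange(0.0, 101.0, 10.0)    # 0-100 km in 10 km bins
# hist = catalogue.get_magnitude_depth_distribution(magnitude_bins, depth_bins,
#                                                   normalisation=True)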
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.get_magnitude_time_distribution | def get_magnitude_time_distribution(self, magnitude_bins, time_bins,
normalisation=False, bootstrap=None):
'''
Returns a 2-D histogram indicating the number of earthquakes in a
set of time-magnitude bins. Time is in decimal years!
:param numpy.ndarray magnitude_bins:
Bin edges for the magnitudes
:param numpy.ndarray time_bins:
Bin edges for the times
:param bool normalisation:
Choose to normalise the results such that the total contributions
sum to 1.0 (True) or not (False)
:param int bootstrap:
Number of bootstrap samples
:returns:
2D histogram of events in magnitude-year bins
'''
return bootstrap_histogram_2D(
self.get_decimal_time(),
self.data['magnitude'],
time_bins,
magnitude_bins,
xsigma=np.zeros(self.get_number_events()),
ysigma=self.data['sigmaMagnitude'],
normalisation=normalisation,
number_bootstraps=bootstrap) | python | def get_magnitude_time_distribution(self, magnitude_bins, time_bins,
normalisation=False, bootstrap=None):
return bootstrap_histogram_2D(
self.get_decimal_time(),
self.data['magnitude'],
time_bins,
magnitude_bins,
xsigma=np.zeros(self.get_number_events()),
ysigma=self.data['sigmaMagnitude'],
normalisation=normalisation,
number_bootstraps=bootstrap) | [
"def",
"get_magnitude_time_distribution",
"(",
"self",
",",
"magnitude_bins",
",",
"time_bins",
",",
"normalisation",
"=",
"False",
",",
"bootstrap",
"=",
"None",
")",
":",
"return",
"bootstrap_histogram_2D",
"(",
"self",
".",
"get_decimal_time",
"(",
")",
",",
"self",
".",
"data",
"[",
"'magnitude'",
"]",
",",
"time_bins",
",",
"magnitude_bins",
",",
"xsigma",
"=",
"np",
".",
"zeros",
"(",
"self",
".",
"get_number_events",
"(",
")",
")",
",",
"ysigma",
"=",
"self",
".",
"data",
"[",
"'sigmaMagnitude'",
"]",
",",
"normalisation",
"=",
"normalisation",
",",
"number_bootstraps",
"=",
"bootstrap",
")"
]
| Returns a 2-D histogram indicating the number of earthquakes in a
set of time-magnitude bins. Time is in decimal years!
:param numpy.ndarray magnitude_bins:
Bin edges for the magnitudes
:param numpy.ndarray time_bins:
Bin edges for the times
:param bool normalisation:
Choose to normalise the results such that the total contributions
sum to 1.0 (True) or not (False)
:param int bootstrap:
Number of bootstrap samples
:returns:
2D histogram of events in magnitude-year bins | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L499-L529 |
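With bootstrap=None and xsigma all zeros, the magnitude-time distribution above reduces to a plain 2-D histogram over decimal years and magnitudes. A hedged numpy-only equivalent (all values invented):

import numpy as np

decimal_times = np.array([1995.4, 1997.1, 1997.9, 2003.2, 2010.6])
magnitudes = np.array([4.1, 5.3, 4.7, 6.0, 4.9])
time_bins = np.arange(1990., 2016., 5.)        # 5-year bin edges
magnitude_bins = np.arange(4.0, 6.6, 0.5)      # 0.5-unit bin edges

counts, _, _ = np.histogram2d(decimal_times, magnitudes,
                              bins=(time_bins, magnitude_bins))
pmf = counts / counts.sum()  # the normalisation=True variant sums to 1.0
print(counts)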
gem/oq-engine | openquake/hmtk/seismicity/catalogue.py | Catalogue.concatenate | def concatenate(self, catalogue):
"""
This method attaches one catalogue to the current one
:parameter catalogue:
An instance of :class:`hmtk.seismicity.catalogue.Catalogue`
"""
atts = getattr(self, 'data')
attn = getattr(catalogue, 'data')
data = _merge_data(atts, attn)
if data is not None:
setattr(self, 'data', data)
for attrib in vars(self):
atts = getattr(self, attrib)
attn = getattr(catalogue, attrib)
if attrib == 'end_year':
setattr(self, attrib, max(atts, attn))
elif attrib == 'start_year':
setattr(self, attrib, min(atts, attn))
elif attrib == 'data':
pass
elif attrib == 'number_earthquakes':
setattr(self, attrib, atts + attn)
elif attrib == 'processes':
if atts != attn:
raise ValueError('The catalogues cannot be merged' +
' since they have' +
' a different processing history')
else:
raise ValueError('unknown attribute: %s' % attrib)
self.sort_catalogue_chronologically() | python | def concatenate(self, catalogue):
atts = getattr(self, 'data')
attn = getattr(catalogue, 'data')
data = _merge_data(atts, attn)
if data is not None:
setattr(self, 'data', data)
for attrib in vars(self):
atts = getattr(self, attrib)
attn = getattr(catalogue, attrib)
if attrib == 'end_year':
setattr(self, attrib, max(atts, attn))
elif attrib == 'start_year':
setattr(self, attrib, min(atts, attn))
elif attrib == 'data':
pass
elif attrib == 'number_earthquakes':
setattr(self, attrib, atts + attn)
elif attrib == 'processes':
if atts != attn:
raise ValueError('The catalogues cannot be merged' +
' since they have' +
' a different processing history')
else:
raise ValueError('unknown attribute: %s' % attrib)
self.sort_catalogue_chronologically() | This method attaches one catalogue to the current one
:parameter catalogue:
An instance of :class:`hmtk.seismicity.catalogue.Catalogue` | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L531-L563 |
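The attribute dispatch in concatenate boils down to three merge rules plus a consistency check on the processing history. A plain-Python stand-in (the class name and field values below are hypothetical, not the real Catalogue API):

class FakeCatalogue:
    def __init__(self, start_year, end_year, number_earthquakes, processes):
        self.start_year = start_year
        self.end_year = end_year
        self.number_earthquakes = number_earthquakes
        self.processes = processes

def merge(this, other):
    # mirror of the elif chain above: min/max the year span, sum the counts
    if this.processes != other.processes:
        raise ValueError('different processing history')
    this.start_year = min(this.start_year, other.start_year)
    this.end_year = max(this.end_year, other.end_year)
    this.number_earthquakes += other.number_earthquakes

a = FakeCatalogue(1900, 1980, 120, ['decluster'])
b = FakeCatalogue(1981, 2010, 75, ['decluster'])
merge(a, b)
print(a.start_year, a.end_year, a.number_earthquakes)  # 1900 2010 195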
gem/oq-engine | openquake/calculators/classical_bcr.py | classical_bcr | def classical_bcr(riskinputs, riskmodel, param, monitor):
"""
Compute and return the average losses for each asset.
:param riskinputs:
:class:`openquake.risklib.riskinput.RiskInput` objects
:param riskmodel:
a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
:param param:
dictionary of extra parameters
:param monitor:
:class:`openquake.baselib.performance.Monitor` instance
"""
R = riskinputs[0].hazard_getter.num_rlzs
result = AccumDict(accum=numpy.zeros((R, 3), F32))
for ri in riskinputs:
for out in riskmodel.gen_outputs(ri, monitor):
for asset, (eal_orig, eal_retro, bcr) in zip(
ri.assets, out['structural']):
aval = asset['value-structural']
result[asset['ordinal']][out.rlzi] = numpy.array([
eal_orig * aval, eal_retro * aval, bcr])
return {'bcr_data': result} | python | def classical_bcr(riskinputs, riskmodel, param, monitor):
R = riskinputs[0].hazard_getter.num_rlzs
result = AccumDict(accum=numpy.zeros((R, 3), F32))
for ri in riskinputs:
for out in riskmodel.gen_outputs(ri, monitor):
for asset, (eal_orig, eal_retro, bcr) in zip(
ri.assets, out['structural']):
aval = asset['value-structural']
result[asset['ordinal']][out.rlzi] = numpy.array([
eal_orig * aval, eal_retro * aval, bcr])
return {'bcr_data': result} | Compute and return the average losses for each asset.
:param riskinputs:
:class:`openquake.risklib.riskinput.RiskInput` objects
:param riskmodel:
a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
:param param:
dictionary of extra parameters
:param monitor:
:class:`openquake.baselib.performance.Monitor` instance | [
"Compute",
"and",
"return",
"the",
"average",
"losses",
"for",
"each",
"asset",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/classical_bcr.py#L31-L53 |
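The accumulation above keeps one (R, 3) block per asset: absolute expected annual loss before retrofit, after retrofit, and the benefit-cost ratio. A stand-in using collections.defaultdict in place of AccumDict; all numbers are invented:

import numpy as np
from collections import defaultdict

R = 2  # hypothetical number of realizations
result = defaultdict(lambda: np.zeros((R, 3), np.float32))

asset_value = 250000.0
eal_orig, eal_retro, bcr = 0.004, 0.0015, 1.8  # loss ratios and BCR
result[0][1] = np.array([eal_orig * asset_value,
                         eal_retro * asset_value, bcr])
print(result[0])  # row for realization 1 is filled, row 0 stays zero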
gem/oq-engine | openquake/engine/engine.py | expose_outputs | def expose_outputs(dstore, owner=getpass.getuser(), status='complete'):
"""
Build a correspondence between the outputs in the datastore and the
ones in the database.
:param dstore: datastore
"""
oq = dstore['oqparam']
exportable = set(ekey[0] for ekey in export.export)
calcmode = oq.calculation_mode
dskeys = set(dstore) & exportable # exportable datastore keys
dskeys.add('fullreport')
rlzs = dstore['csm_info'].rlzs
if len(rlzs) > 1:
dskeys.add('realizations')
if len(dstore['csm_info/sg_data']) > 1: # export sourcegroups.csv
dskeys.add('sourcegroups')
hdf5 = dstore.hdf5
if 'hcurves-stats' in hdf5 or 'hcurves-rlzs' in hdf5:
if oq.hazard_stats() or oq.individual_curves or len(rlzs) == 1:
dskeys.add('hcurves')
if oq.uniform_hazard_spectra:
dskeys.add('uhs') # export them
if oq.hazard_maps:
dskeys.add('hmaps') # export them
if 'avg_losses-stats' in dstore or (
'avg_losses-rlzs' in dstore and len(rlzs)):
dskeys.add('avg_losses-stats')
if 'curves-rlzs' in dstore and len(rlzs) == 1:
dskeys.add('loss_curves-rlzs')
if 'curves-stats' in dstore and len(rlzs) > 1:
dskeys.add('loss_curves-stats')
if oq.conditional_loss_poes: # expose loss_maps outputs
if 'loss_curves-stats' in dstore:
dskeys.add('loss_maps-stats')
if 'all_loss_ratios' in dskeys:
dskeys.remove('all_loss_ratios') # export only specific IDs
if 'ruptures' in dskeys and 'scenario' in calcmode:
exportable.remove('ruptures') # do not export, as requested by Vitor
if 'rup_loss_table' in dskeys: # keep it hidden for the moment
dskeys.remove('rup_loss_table')
if 'hmaps' in dskeys and not oq.hazard_maps:
dskeys.remove('hmaps') # do not export the hazard maps
if logs.dbcmd('get_job', dstore.calc_id) is None:
# the calculation has not been imported in the db yet
logs.dbcmd('import_job', dstore.calc_id, oq.calculation_mode,
oq.description + ' [parent]', owner, status,
oq.hazard_calculation_id, dstore.datadir)
keysize = []
for key in sorted(dskeys & exportable):
try:
size_mb = dstore.get_attr(key, 'nbytes') / MB
except (KeyError, AttributeError):
size_mb = None
keysize.append((key, size_mb))
ds_size = os.path.getsize(dstore.filename) / MB
logs.dbcmd('create_outputs', dstore.calc_id, keysize, ds_size) | python | def expose_outputs(dstore, owner=getpass.getuser(), status='complete'):
oq = dstore['oqparam']
exportable = set(ekey[0] for ekey in export.export)
calcmode = oq.calculation_mode
dskeys = set(dstore) & exportable
dskeys.add('fullreport')
rlzs = dstore['csm_info'].rlzs
if len(rlzs) > 1:
dskeys.add('realizations')
if len(dstore['csm_info/sg_data']) > 1:
dskeys.add('sourcegroups')
hdf5 = dstore.hdf5
if 'hcurves-stats' in hdf5 or 'hcurves-rlzs' in hdf5:
if oq.hazard_stats() or oq.individual_curves or len(rlzs) == 1:
dskeys.add('hcurves')
if oq.uniform_hazard_spectra:
dskeys.add('uhs')
if oq.hazard_maps:
dskeys.add('hmaps')
if 'avg_losses-stats' in dstore or (
'avg_losses-rlzs' in dstore and len(rlzs)):
dskeys.add('avg_losses-stats')
if 'curves-rlzs' in dstore and len(rlzs) == 1:
dskeys.add('loss_curves-rlzs')
if 'curves-stats' in dstore and len(rlzs) > 1:
dskeys.add('loss_curves-stats')
if oq.conditional_loss_poes:
if 'loss_curves-stats' in dstore:
dskeys.add('loss_maps-stats')
if 'all_loss_ratios' in dskeys:
dskeys.remove('all_loss_ratios')
if 'ruptures' in dskeys and 'scenario' in calcmode:
exportable.remove('ruptures')
if 'rup_loss_table' in dskeys:
dskeys.remove('rup_loss_table')
if 'hmaps' in dskeys and not oq.hazard_maps:
dskeys.remove('hmaps')
if logs.dbcmd('get_job', dstore.calc_id) is None:
logs.dbcmd('import_job', dstore.calc_id, oq.calculation_mode,
oq.description + ' [parent]', owner, status,
oq.hazard_calculation_id, dstore.datadir)
keysize = []
for key in sorted(dskeys & exportable):
try:
size_mb = dstore.get_attr(key, 'nbytes') / MB
except (KeyError, AttributeError):
size_mb = None
keysize.append((key, size_mb))
ds_size = os.path.getsize(dstore.filename) / MB
logs.dbcmd('create_outputs', dstore.calc_id, keysize, ds_size) | Build a correspondence between the outputs in the datastore and the
ones in the database.
:param dstore: datastore | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/engine.py#L119-L175 |
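Most of expose_outputs is set algebra between the datastore keys and the keys the export machinery understands; the add/remove calls then encode per-mode exceptions. A toy reduction of that logic:

exportable = {'hcurves', 'hmaps', 'uhs', 'realizations', 'fullreport'}
in_datastore = {'hcurves', 'hmaps', 'sitecol', 'oqparam'}

dskeys = in_datastore & exportable   # only what both sides know about
dskeys.add('fullreport')             # always exposed
hazard_maps = False
if 'hmaps' in dskeys and not hazard_maps:
    dskeys.remove('hmaps')           # suppressed when maps were not requested
print(sorted(dskeys))                # ['fullreport', 'hcurves']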
gem/oq-engine | openquake/engine/engine.py | raiseMasterKilled | def raiseMasterKilled(signum, _stack):
"""
When a SIGTERM is received, raise the MasterKilled
exception with an appropriate error message.
:param int signum: the number of the received signal
:param _stack: the current frame object, ignored
"""
# Disable further CTRL-C to allow tasks revocation when Celery is used
if OQ_DISTRIBUTE.startswith('celery'):
signal.signal(signal.SIGINT, inhibitSigInt)
msg = 'Received a signal %d' % signum
if signum in (signal.SIGTERM, signal.SIGINT):
msg = 'The openquake master process was killed manually'
# kill the calculation only if os.getppid() != _PPID, i.e. the controlling
# terminal died; in the workers, do nothing
# NB: there is no SIGHUP on Windows
if hasattr(signal, 'SIGHUP'):
if signum == signal.SIGHUP:
if os.getppid() == _PPID:
return
else:
msg = 'The openquake master lost its controlling terminal'
raise MasterKilled(msg) | python | def raiseMasterKilled(signum, _stack):
if OQ_DISTRIBUTE.startswith('celery'):
signal.signal(signal.SIGINT, inhibitSigInt)
msg = 'Received a signal %d' % signum
if signum in (signal.SIGTERM, signal.SIGINT):
msg = 'The openquake master process was killed manually'
if hasattr(signal, 'SIGHUP'):
if signum == signal.SIGHUP:
if os.getppid() == _PPID:
return
else:
msg = 'The openquake master lost its controlling terminal'
raise MasterKilled(msg) | When a SIGTERM is received, raise the MasterKilled
exception with an appropriate error message.
:param int signum: the number of the received signal
:param _stack: the current frame object, ignored | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/engine.py#L186-L212 |
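For context, this is roughly how such a handler gets wired up: only the handler shape comes from the record above, the registration lines below are our sketch.

import signal

class MasterKilled(BaseException):
    """Raised when the master process is killed."""

def on_signal(signum, _stack):
    raise MasterKilled('Received a signal %d' % signum)

signal.signal(signal.SIGTERM, on_signal)
signal.signal(signal.SIGINT, on_signal)
# sending SIGTERM to this process would now raise MasterKilled in the main thread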
gem/oq-engine | openquake/engine/engine.py | job_from_file | def job_from_file(job_ini, job_id, username, **kw):
"""
Create a full job profile from a job config file.
:param job_ini:
Path to a job.ini file
:param job_id:
ID of the created job
:param username:
The user who will own this job profile and all results
:param kw:
Extra parameters including `calculation_mode` and `exposure_file`
:returns:
an oqparam instance
"""
hc_id = kw.get('hazard_calculation_id')
try:
oq = readinput.get_oqparam(job_ini, hc_id=hc_id)
except Exception:
logs.dbcmd('finish', job_id, 'failed')
raise
if 'calculation_mode' in kw:
oq.calculation_mode = kw.pop('calculation_mode')
if 'description' in kw:
oq.description = kw.pop('description')
if 'exposure_file' in kw: # hack used in commands.engine
fnames = kw.pop('exposure_file').split()
if fnames:
oq.inputs['exposure'] = fnames
elif 'exposure' in oq.inputs:
del oq.inputs['exposure']
logs.dbcmd('update_job', job_id,
dict(calculation_mode=oq.calculation_mode,
description=oq.description,
user_name=username,
hazard_calculation_id=hc_id))
return oq | python | def job_from_file(job_ini, job_id, username, **kw):
hc_id = kw.get('hazard_calculation_id')
try:
oq = readinput.get_oqparam(job_ini, hc_id=hc_id)
except Exception:
logs.dbcmd('finish', job_id, 'failed')
raise
if 'calculation_mode' in kw:
oq.calculation_mode = kw.pop('calculation_mode')
if 'description' in kw:
oq.description = kw.pop('description')
if 'exposure_file' in kw:
fnames = kw.pop('exposure_file').split()
if fnames:
oq.inputs['exposure'] = fnames
elif 'exposure' in oq.inputs:
del oq.inputs['exposure']
logs.dbcmd('update_job', job_id,
dict(calculation_mode=oq.calculation_mode,
description=oq.description,
user_name=username,
hazard_calculation_id=hc_id))
return oq | Create a full job profile from a job config file.
:param job_ini:
Path to a job.ini file
:param job_id:
ID of the created job
:param username:
The user who will own this job profile and all results
:param kw:
Extra parameters including `calculation_mode` and `exposure_file`
:returns:
an oqparam instance | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/engine.py#L230-L266 |
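The exposure_file handling above is a pop-and-override pattern: a non-empty string replaces the input, an empty one removes it. A dict-based stand-in of just that branch (names below are illustrative):

def apply_exposure(inputs, exposure_file):
    fnames = exposure_file.split()
    if fnames:
        inputs['exposure'] = fnames      # replace with the new file list
    elif 'exposure' in inputs:
        del inputs['exposure']           # empty string clears the input
    return inputs

print(apply_exposure({'exposure': ['old.xml']}, 'new1.xml new2.xml'))
print(apply_exposure({'exposure': ['old.xml']}, ''))  # exposure key removed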
gem/oq-engine | openquake/engine/engine.py | poll_queue | def poll_queue(job_id, pid, poll_time):
"""
Check the queue of executing/submitted jobs and exit when there is
a free slot.
"""
if config.distribution.serialize_jobs:
first_time = True
while True:
jobs = logs.dbcmd(GET_JOBS)
failed = [job.id for job in jobs if not psutil.pid_exists(job.pid)]
if failed:
logs.dbcmd("UPDATE job SET status='failed', is_running=0 "
"WHERE id in (?X)", failed)
elif any(job.id < job_id for job in jobs):
if first_time:
logs.LOG.warn('Waiting for jobs %s', [j.id for j in jobs])
logs.dbcmd('update_job', job_id,
{'status': 'submitted', 'pid': pid})
first_time = False
time.sleep(poll_time)
else:
break
logs.dbcmd('update_job', job_id, {'status': 'executing', 'pid': _PID}) | python | def poll_queue(job_id, pid, poll_time):
if config.distribution.serialize_jobs:
first_time = True
while True:
jobs = logs.dbcmd(GET_JOBS)
failed = [job.id for job in jobs if not psutil.pid_exists(job.pid)]
if failed:
logs.dbcmd("UPDATE job SET status='failed', is_running=0 "
"WHERE id in (?X)", failed)
elif any(job.id < job_id for job in jobs):
if first_time:
logs.LOG.warn('Waiting for jobs %s', [j.id for j in jobs])
logs.dbcmd('update_job', job_id,
{'status': 'submitted', 'pid': pid})
first_time = False
time.sleep(poll_time)
else:
break
logs.dbcmd('update_job', job_id, {'status': 'executing', 'pid': _PID}) | Check the queue of executing/submitted jobs and exit when there is
a free slot. | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/engine.py#L269-L291 |
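The queue discipline above is: a job may run only when no lower-id job is still alive, and the wait is announced once. A self-contained sketch of the same loop shape, with an in-memory list standing in for the jobs table:

import time

def wait_for_slot(job_id, pending, poll_time=0.01):
    first_time = True
    while any(jid < job_id for jid in pending):
        if first_time:
            print('Waiting for jobs', [j for j in pending if j < job_id])
            first_time = False
        time.sleep(poll_time)
        pending.pop(0)  # pretend the oldest job finished

wait_for_slot(5, [3, 4])
print('job 5 may start executing')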
gem/oq-engine | openquake/engine/engine.py | run_calc | def run_calc(job_id, oqparam, exports, hazard_calculation_id=None, **kw):
"""
Run a calculation.
:param job_id:
ID of the current job
:param oqparam:
:class:`openquake.commonlib.oqvalidation.OqParam` instance
:param exports:
A comma-separated string of export types.
"""
setproctitle('oq-job-%d' % job_id)
calc = base.calculators(oqparam, calc_id=job_id)
logging.info('%s running %s [--hc=%s]',
getpass.getuser(),
calc.oqparam.inputs['job_ini'],
calc.oqparam.hazard_calculation_id)
logging.info('Using engine version %s', __version__)
msg = check_obsolete_version(oqparam.calculation_mode)
if msg:
logs.LOG.warn(msg)
if OQ_DISTRIBUTE.startswith(('celery', 'zmq')):
set_concurrent_tasks_default(job_id)
calc.from_engine = True
tb = 'None\n'
try:
if not oqparam.hazard_calculation_id:
if 'input_zip' in oqparam.inputs: # starting from an archive
with open(oqparam.inputs['input_zip'], 'rb') as arch:
data = numpy.array(arch.read())
else:
logs.LOG.info('zipping the input files')
bio = io.BytesIO()
oqzip.zip_job(oqparam.inputs['job_ini'], bio, (), oqparam,
logging.debug)
data = numpy.array(bio.getvalue())
del bio
calc.datastore['input/zip'] = data
calc.datastore.set_attrs('input/zip', nbytes=data.nbytes)
del data # save memory
poll_queue(job_id, _PID, poll_time=15)
t0 = time.time()
calc.run(exports=exports,
hazard_calculation_id=hazard_calculation_id,
close=False, **kw)
logs.LOG.info('Exposing the outputs to the database')
expose_outputs(calc.datastore)
duration = time.time() - t0
calc._monitor.flush()
records = views.performance_view(calc.datastore)
logs.dbcmd('save_performance', job_id, records)
calc.datastore.close()
logs.LOG.info('Calculation %d finished correctly in %d seconds',
job_id, duration)
logs.dbcmd('finish', job_id, 'complete')
except BaseException as exc:
if isinstance(exc, MasterKilled):
msg = 'aborted'
else:
msg = 'failed'
tb = traceback.format_exc()
try:
logs.LOG.critical(tb)
logs.dbcmd('finish', job_id, msg)
except BaseException: # an OperationalError may always happen
sys.stderr.write(tb)
raise
finally:
# if there was an error in the calculation, this part may fail;
# in such a situation, we simply log the cleanup error without
# taking further action, so that the real error can propagate
try:
if OQ_DISTRIBUTE.startswith('celery'):
celery_cleanup(TERMINATE)
except BaseException:
# log the finalization error only if there is no real error
if tb == 'None\n':
logs.LOG.error('finalizing', exc_info=True)
return calc | python | def run_calc(job_id, oqparam, exports, hazard_calculation_id=None, **kw):
setproctitle('oq-job-%d' % job_id)
calc = base.calculators(oqparam, calc_id=job_id)
logging.info('%s running %s [--hc=%s]',
getpass.getuser(),
calc.oqparam.inputs['job_ini'],
calc.oqparam.hazard_calculation_id)
logging.info('Using engine version %s', __version__)
msg = check_obsolete_version(oqparam.calculation_mode)
if msg:
logs.LOG.warn(msg)
if OQ_DISTRIBUTE.startswith(('celery', 'zmq')):
set_concurrent_tasks_default(job_id)
calc.from_engine = True
tb = 'None\n'
try:
if not oqparam.hazard_calculation_id:
if 'input_zip' in oqparam.inputs:
with open(oqparam.inputs['input_zip'], 'rb') as arch:
data = numpy.array(arch.read())
else:
logs.LOG.info('zipping the input files')
bio = io.BytesIO()
oqzip.zip_job(oqparam.inputs['job_ini'], bio, (), oqparam,
logging.debug)
data = numpy.array(bio.getvalue())
del bio
calc.datastore['input/zip'] = data
calc.datastore.set_attrs('input/zip', nbytes=data.nbytes)
del data
poll_queue(job_id, _PID, poll_time=15)
t0 = time.time()
calc.run(exports=exports,
hazard_calculation_id=hazard_calculation_id,
close=False, **kw)
logs.LOG.info('Exposing the outputs to the database')
expose_outputs(calc.datastore)
duration = time.time() - t0
calc._monitor.flush()
records = views.performance_view(calc.datastore)
logs.dbcmd('save_performance', job_id, records)
calc.datastore.close()
logs.LOG.info('Calculation %d finished correctly in %d seconds',
job_id, duration)
logs.dbcmd('finish', job_id, 'complete')
except BaseException as exc:
if isinstance(exc, MasterKilled):
msg = 'aborted'
else:
msg = 'failed'
tb = traceback.format_exc()
try:
logs.LOG.critical(tb)
logs.dbcmd('finish', job_id, msg)
except BaseException:
sys.stderr.write(tb)
raise
finally:
try:
if OQ_DISTRIBUTE.startswith('celery'):
celery_cleanup(TERMINATE)
except BaseException:
if tb == 'None\n':
logs.LOG.error('finalizing', exc_info=True)
return calc | Run a calculation.
:param job_id:
ID of the current job
:param oqparam:
:class:`openquake.commonlib.oqvalidation.OqParam` instance
:param exports:
A comma-separated string of export types. | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/engine.py#L294-L373 |
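Stripped of engine specifics, run_calc is a status machine: mark the job complete on success, aborted on MasterKilled, failed otherwise, and never let a cleanup error mask the real one. A reduced skeleton where finish stands in for logs.dbcmd('finish', ...):

import traceback

class MasterKilled(BaseException):
    pass

def finish(job_id, status):
    print('job %d -> %s' % (job_id, status))

def run(job_id, task):
    tb = 'None\n'
    try:
        task()
        finish(job_id, 'complete')
    except BaseException as exc:
        msg = 'aborted' if isinstance(exc, MasterKilled) else 'failed'
        tb = traceback.format_exc()
        finish(job_id, msg)
        raise
    finally:
        # cleanup failures would be logged only when tb == 'None\n',
        # i.e. when there is no more important error to propagate
        pass

run(1, lambda: None)  # prints: job 1 -> complete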
gem/oq-engine | openquake/engine/engine.py | version_triple | def version_triple(tag):
"""
returns: a triple of integers from a version tag
"""
groups = re.match(r'v?(\d+)\.(\d+)\.(\d+)', tag).groups()
return tuple(int(n) for n in groups) | python | def version_triple(tag):
groups = re.match(r'v?(\d+)\.(\d+)\.(\d+)', tag).groups()
return tuple(int(n) for n in groups) | returns: a triple of integers from a version tag | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/engine.py#L376-L381 |
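The point of the triple: tuples of ints compare element-wise, while the raw strings compare lexically and get multi-digit components wrong. A quick check reusing the function above:

assert '10.2.0' < '9.0.0'                                   # string comparison: wrong order
assert version_triple('v10.2.0') > version_triple('9.0.0')  # integer tuples: correct
assert version_triple('3.11.2') == (3, 11, 2)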
gem/oq-engine | openquake/engine/engine.py | check_obsolete_version | def check_obsolete_version(calculation_mode='WebUI'):
"""
Check if there is a newer version of the engine.
:param calculation_mode:
- the calculation mode when called from the engine
- an empty string when called from the WebUI
:returns:
- a message if the running version of the engine is obsolete
- the empty string if the engine is updated
- None if the check could not be performed (i.e. github is down)
"""
if os.environ.get('JENKINS_URL') or os.environ.get('TRAVIS'):
# avoid flooding our API server with requests from CI systems
return
headers = {'User-Agent': 'OpenQuake Engine %s;%s;%s;%s' %
(__version__, calculation_mode, platform.platform(),
config.distribution.oq_distribute)}
try:
req = Request(OQ_API + '/engine/latest', headers=headers)
# NB: a timeout < 1 does not work
data = urlopen(req, timeout=1).read() # bytes
tag_name = json.loads(decode(data))['tag_name']
current = version_triple(__version__)
latest = version_triple(tag_name)
except Exception: # page not available or wrong version tag
return
if current < latest:
return ('Version %s of the engine is available, but you are '
'still using version %s' % (tag_name, __version__))
else:
return '' | python | def check_obsolete_version(calculation_mode='WebUI'):
if os.environ.get('JENKINS_URL') or os.environ.get('TRAVIS'):
return
headers = {'User-Agent': 'OpenQuake Engine %s;%s;%s;%s' %
(__version__, calculation_mode, platform.platform(),
config.distribution.oq_distribute)}
try:
req = Request(OQ_API + '/engine/latest', headers=headers)
data = urlopen(req, timeout=1).read()
tag_name = json.loads(decode(data))['tag_name']
current = version_triple(__version__)
latest = version_triple(tag_name)
except Exception:
return
if current < latest:
return ('Version %s of the engine is available, but you are '
'still using version %s' % (tag_name, __version__))
else:
return '' | Check if there is a newer version of the engine.
:param calculation_mode:
- the calculation mode when called from the engine
- an empty string when called from the WebUI
:returns:
- a message if the running version of the engine is obsolete
- the empty string if the engine is updated
- None if the check could not be performed (i.e. github is down) | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/engine.py#L384-L416 |
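The contract above is "return None on any network or parsing problem". A hedged sketch of the same request shape against a hypothetical endpoint; the URL and payload layout below are made up for illustration.

import json
from urllib.request import Request, urlopen

def latest_tag(url='https://example.org/engine/latest'):
    try:
        req = Request(url, headers={'User-Agent': 'version-check-sketch'})
        payload = urlopen(req, timeout=1).read()
        return json.loads(payload.decode('utf-8'))['tag_name']
    except Exception:
        return None  # page unavailable or malformed: the check is skipped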
gem/oq-engine | openquake/baselib/python3compat.py | encode | def encode(val):
"""
Encode a string assuming the encoding is UTF-8.
:param val: a unicode or bytes object
:returns: bytes
"""
if isinstance(val, (list, tuple)): # encode a list or tuple of strings
return [encode(v) for v in val]
elif isinstance(val, str):
return val.encode('utf-8')
else:
# assume it was an already encoded object
return val | python | def encode(val):
if isinstance(val, (list, tuple)):
return [encode(v) for v in val]
elif isinstance(val, str):
return val.encode('utf-8')
else:
return val | Encode a string assuming the encoding is UTF-8.
:param val: a unicode or bytes object
:returns: bytes | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/python3compat.py#L28-L41 |
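A few usage checks for the helper above (encode as defined in this record): strings become UTF-8 bytes, bytes pass through, and containers are mapped element-wise.

assert encode('caf\xe9') == b'caf\xc3\xa9'          # é -> 0xC3 0xA9 in UTF-8
assert encode(b'already-bytes') == b'already-bytes'
assert encode(['a', 'b']) == [b'a', b'b']
assert encode(('x',)) == [b'x']                     # note: tuples come back as lists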
gem/oq-engine | openquake/baselib/python3compat.py | raise_ | def raise_(tp, value=None, tb=None):
"""
A function that matches the Python 2.x ``raise`` statement. This
allows re-raising exceptions with the cls value and traceback on
Python 2 and 3.
"""
if value is not None and isinstance(tp, Exception):
raise TypeError("instance exception may not have a separate value")
if value is not None:
exc = tp(value)
else:
exc = tp
if exc.__traceback__ is not tb:
raise exc.with_traceback(tb)
raise exc | python | def raise_(tp, value=None, tb=None):
if value is not None and isinstance(tp, Exception):
raise TypeError("instance exception may not have a separate value")
if value is not None:
exc = tp(value)
else:
exc = tp
if exc.__traceback__ is not tb:
raise exc.with_traceback(tb)
raise exc | A function that matches the Python 2.x ``raise`` statement. This
Python 2 and 3. | [
"A",
"function",
"that",
"matches",
"the",
"Python",
"2",
".",
"x",
"raise",
"statement",
".",
"This",
"allows",
"re",
"-",
"raising",
"exceptions",
"with",
"the",
"cls",
"value",
"and",
"traceback",
"on",
"Python",
"2",
"and",
"3",
"."
]
| train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/python3compat.py#L70-L84 |
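Typical call site: capture sys.exc_info() in an except block and hand the pieces to raise_, so the re-raised error keeps the original traceback. A minimal sketch; the re-raise itself is left commented so the snippet runs through.

import sys

def might_fail():
    return 1 / 0

try:
    might_fail()
except ZeroDivisionError:
    tp, value, tb = sys.exc_info()
    # raise_(tp, value, tb)  # would re-raise with the traceback of might_fail
    print(tp.__name__, value)  # ZeroDivisionError division by zero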
gem/oq-engine | openquake/commands/dbserver.py | dbserver | def dbserver(cmd, dbhostport=None,
dbpath=os.path.expanduser(config.dbserver.file)):
"""
start/stop/restart the database server, or return its status
"""
if config.dbserver.multi_user and getpass.getuser() != 'openquake':
sys.exit('oq dbserver only works in single user mode')
status = dbs.get_status()
if cmd == 'status':
print('dbserver ' + status)
elif cmd == 'stop':
if status == 'running':
pid = logs.dbcmd('getpid')
os.kill(pid, signal.SIGINT) # this is trapped by the DbServer
else:
print('dbserver already stopped')
elif cmd == 'start':
if status == 'not-running':
dbs.run_server(dbpath, dbhostport)
else:
print('dbserver already running')
elif cmd == 'restart':
if status == 'running':
pid = logs.dbcmd('getpid')
os.kill(pid, signal.SIGINT)
dbs.run_server(dbpath, dbhostport) | python | def dbserver(cmd, dbhostport=None,
dbpath=os.path.expanduser(config.dbserver.file)):
if config.dbserver.multi_user and getpass.getuser() != 'openquake':
sys.exit('oq dbserver only works in single user mode')
status = dbs.get_status()
if cmd == 'status':
print('dbserver ' + status)
elif cmd == 'stop':
if status == 'running':
pid = logs.dbcmd('getpid')
os.kill(pid, signal.SIGINT)
else:
print('dbserver already stopped')
elif cmd == 'start':
if status == 'not-running':
dbs.run_server(dbpath, dbhostport)
else:
print('dbserver already running')
elif cmd == 'restart':
if status == 'running':
pid = logs.dbcmd('getpid')
os.kill(pid, signal.SIGINT)
dbs.run_server(dbpath, dbhostport) | start/stop/restart the database server, or return its status | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/dbserver.py#L28-L54 |
gem/oq-engine | openquake/commands/plot_pyro.py | plot_pyro | def plot_pyro(calc_id=-1):
"""
Plot the pyroclastic cloud and the assets
"""
# NB: matplotlib is imported inside since it is a costly import
import matplotlib.pyplot as p
dstore = util.read(calc_id)
sitecol = dstore['sitecol']
asset_risk = dstore['asset_risk'].value
pyro, = numpy.where(dstore['multi_peril']['PYRO'] == 1)
lons = sitecol.lons[pyro]
lats = sitecol.lats[pyro]
p.scatter(lons, lats, marker='o', color='red')
building_pyro, = numpy.where(asset_risk['building-PYRO'] == 1)
lons = sitecol.lons[building_pyro]
lats = sitecol.lats[building_pyro]
p.scatter(lons, lats, marker='.', color='green')
p.show() | python | def plot_pyro(calc_id=-1):
import matplotlib.pyplot as p
dstore = util.read(calc_id)
sitecol = dstore['sitecol']
asset_risk = dstore['asset_risk'].value
pyro, = numpy.where(dstore['multi_peril']['PYRO'] == 1)
lons = sitecol.lons[pyro]
lats = sitecol.lats[pyro]
p.scatter(lons, lats, marker='o', color='red')
building_pyro, = numpy.where(asset_risk['building-PYRO'] == 1)
lons = sitecol.lons[building_pyro]
lats = sitecol.lats[building_pyro]
p.scatter(lons, lats, marker='.', color='green')
p.show() | Plot the pyroclastic cloud and the assets | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/plot_pyro.py#L24-L42 |
gem/oq-engine | openquake/hazardlib/geo/polygon.py | get_resampled_coordinates | def get_resampled_coordinates(lons, lats):
"""
Resample polygon line segments and return the coordinates of the new
vertices. This limits distortions when projecting a polygon onto a
spherical surface.
Parameters define longitudes and latitudes of a point collection in the
form of lists or numpy arrays.
:return:
A tuple of two numpy arrays: longitudes and latitudes
of resampled vertices.
"""
num_coords = len(lons)
assert num_coords == len(lats)
lons1 = numpy.array(lons)
lats1 = numpy.array(lats)
lons2 = numpy.concatenate((lons1[1:], lons1[:1]))
lats2 = numpy.concatenate((lats1[1:], lats1[:1]))
distances = geodetic.geodetic_distance(lons1, lats1, lons2, lats2)
resampled_lons = [lons[0]]
resampled_lats = [lats[0]]
for i in range(num_coords):
next_point = (i + 1) % num_coords
lon1, lat1 = lons[i], lats[i]
lon2, lat2 = lons[next_point], lats[next_point]
distance = distances[i]
num_points = int(distance / UPSAMPLING_STEP_KM) + 1
if num_points >= 2:
# We need to increase the resolution of this arc by adding new
# points.
new_lons, new_lats, _ = geodetic.npoints_between(
lon1, lat1, 0, lon2, lat2, 0, num_points)
resampled_lons.extend(new_lons[1:])
resampled_lats.extend(new_lats[1:])
else:
resampled_lons.append(lon2)
resampled_lats.append(lat2)
# NB: we cut off the last point because it repeats the first one
return numpy.array(resampled_lons[:-1]), numpy.array(resampled_lats[:-1]) | python | def get_resampled_coordinates(lons, lats):
num_coords = len(lons)
assert num_coords == len(lats)
lons1 = numpy.array(lons)
lats1 = numpy.array(lats)
lons2 = numpy.concatenate((lons1[1:], lons1[:1]))
lats2 = numpy.concatenate((lats1[1:], lats1[:1]))
distances = geodetic.geodetic_distance(lons1, lats1, lons2, lats2)
resampled_lons = [lons[0]]
resampled_lats = [lats[0]]
for i in range(num_coords):
next_point = (i + 1) % num_coords
lon1, lat1 = lons[i], lats[i]
lon2, lat2 = lons[next_point], lats[next_point]
distance = distances[i]
num_points = int(distance / UPSAMPLING_STEP_KM) + 1
if num_points >= 2:
new_lons, new_lats, _ = geodetic.npoints_between(
lon1, lat1, 0, lon2, lat2, 0, num_points)
resampled_lons.extend(new_lons[1:])
resampled_lats.extend(new_lats[1:])
else:
resampled_lons.append(lon2)
resampled_lats.append(lat2)
return numpy.array(resampled_lons[:-1]), numpy.array(resampled_lats[:-1]) | Resample polygon line segments and return the coordinates of the new
vertices. This limits distortions when projecting a polygon onto a
spherical surface.
Parameters define longitudes and latitudes of a point collection in the
form of lists or numpy arrays.
:return:
A tuple of two numpy arrays: longitudes and latitudes
of resampled vertices. | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/polygon.py#L249-L291 |
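A planar stand-in for the per-segment logic above: the number of inserted points grows with segment length over the step, and segments shorter than one step keep only their far endpoint. The real code does the same along geodesics; everything below is flat 2-D for illustration, with STEP standing in for UPSAMPLING_STEP_KM.

import numpy as np

STEP = 1.0  # stand-in for UPSAMPLING_STEP_KM

def resample_segment(p1, p2, step=STEP):
    dist = float(np.hypot(p2[0] - p1[0], p2[1] - p1[1]))
    num_points = int(dist / step) + 1
    if num_points < 2:
        return [p2]  # short segment: keep the far endpoint only
    ts = np.linspace(0., 1., num_points)[1:]  # skip p1, as in new_lons[1:]
    return [(p1[0] + t * (p2[0] - p1[0]),
             p1[1] + t * (p2[1] - p1[1])) for t in ts]

print(resample_segment((0., 0.), (3.5, 0.)))  # three points inserted up to (3.5, 0.0)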
gem/oq-engine | openquake/hazardlib/geo/surface/gridded.py | GriddedSurface.surface_nodes | def surface_nodes(self):
"""
:param points: a list of Point objects
:returns: a Node of kind 'griddedSurface'
"""
line = []
for point in self.mesh:
line.append(point.longitude)
line.append(point.latitude)
line.append(point.depth)
return [Node('griddedSurface', nodes=[Node('gml:posList', {}, line)])] | python | def surface_nodes(self):
line = []
for point in self.mesh:
line.append(point.longitude)
line.append(point.latitude)
line.append(point.depth)
return [Node('griddedSurface', nodes=[Node('gml:posList', {}, line)])] | :param points: a list of Point objects
:returns: a Node of kind 'griddedSurface' | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/gridded.py#L46-L56 |
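The node body is just the mesh flattened into lon, lat, depth triples. A plain-list equivalent of the loop above, with hypothetical mesh vertices:

points = [(10.0, 45.0, 5.0), (10.5, 45.2, 5.0)]  # hypothetical mesh vertices
pos_list = [coord for lon, lat, depth in points for coord in (lon, lat, depth)]
print(pos_list)  # [10.0, 45.0, 5.0, 10.5, 45.2, 5.0]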