Dataset columns (field type and observed min–max sizes):

    repository_name              stringlengths    5 – 67
    func_path_in_repository      stringlengths    4 – 234
    func_name                    stringlengths    0 – 314
    whole_func_string            stringlengths    52 – 3.87M
    language                     stringclasses    6 values
    func_code_string             stringlengths    39 – 1.84M
    func_code_tokens             listlengths      15 – 672k
    func_documentation_string    stringlengths    1 – 47.2k
    func_documentation_tokens    listlengths      1 – 3.92k
    split_name                   stringclasses    1 value
    func_code_url                stringlengths    85 – 339
gem/oq-engine
openquake/server/db/actions.py
get_executing_jobs
def get_executing_jobs(db):
    """
    :param db: a :class:`openquake.server.dbapi.Db` instance
    :returns: (id, pid, user_name, start_time) tuples
    """
    fields = 'id,pid,user_name,start_time'
    running = List()
    running._fields = fields.split(',')
    query = ('''-- executing jobs
SELECT %s FROM job WHERE status='executing' ORDER BY id desc''' % fields)
    rows = db(query)
    for r in rows:
        # if r.pid is 0 it means that such information
        # is not available in the database
        if r.pid and psutil.pid_exists(r.pid):
            running.append(r)
    return running
python
def get_executing_jobs(db):
    fields = 'id,pid,user_name,start_time'
    running = List()
    running._fields = fields.split(',')
    query = ('''-- executing jobs
SELECT %s FROM job WHERE status='executing' ORDER BY id desc''' % fields)
    rows = db(query)
    for r in rows:
        if r.pid and psutil.pid_exists(r.pid):
            running.append(r)
    return running
[ "def", "get_executing_jobs", "(", "db", ")", ":", "fields", "=", "'id,pid,user_name,start_time'", "running", "=", "List", "(", ")", "running", ".", "_fields", "=", "fields", ".", "split", "(", "','", ")", "query", "=", "(", "'''-- executing jobs\nSELECT %s FROM job WHERE status='executing' ORDER BY id desc'''", "%", "fields", ")", "rows", "=", "db", "(", "query", ")", "for", "r", "in", "rows", ":", "# if r.pid is 0 it means that such information", "# is not available in the database", "if", "r", ".", "pid", "and", "psutil", ".", "pid_exists", "(", "r", ".", "pid", ")", ":", "running", ".", "append", "(", "r", ")", "return", "running" ]
:param db: a :class:`openquake.server.dbapi.Db` instance :returns: (id, pid, user_name, start_time) tuples
[ ":", "param", "db", ":", "a", ":", "class", ":", "openquake", ".", "server", ".", "dbapi", ".", "Db", "instance", ":", "returns", ":", "(", "id", "pid", "user_name", "start_time", ")", "tuples" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L679-L698
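A minimal usage sketch, assuming a connected openquake.server.dbapi.Db instance (the `db` object below is hypothetical); jobs whose recorded pid no longer exists are filtered out by the psutil.pid_exists check:

    from openquake.server.db import actions

    # db = Db(...)  # hypothetical: in the engine this connection is
    # managed by the dbserver process
    for job in actions.get_executing_jobs(db):
        print(job.id, job.pid, job.user_name, job.start_time)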
gem/oq-engine
openquake/commands/prepare_site_model.py
calculate_z1pt0
def calculate_z1pt0(vs30):
    '''
    Reads an array of vs30 values (in m/s) and
    returns the depth to the 1.0 km/s velocity horizon (in m)
    Ref: Chiou & Youngs (2014) California model

    :param vs30: the shear wave velocity (in m/s) at a depth of 30m
    '''
    c1 = 571 ** 4.
    c2 = 1360.0 ** 4.
    return numpy.exp((-7.15 / 4.0) * numpy.log((vs30 ** 4. + c1) / (c2 + c1)))
python
def calculate_z1pt0(vs30):
    c1 = 571 ** 4.
    c2 = 1360.0 ** 4.
    return numpy.exp((-7.15 / 4.0) * numpy.log((vs30 ** 4. + c1) / (c2 + c1)))
[ "def", "calculate_z1pt0", "(", "vs30", ")", ":", "c1", "=", "571", "**", "4.", "c2", "=", "1360.0", "**", "4.", "return", "numpy", ".", "exp", "(", "(", "-", "7.15", "/", "4.0", ")", "*", "numpy", ".", "log", "(", "(", "vs30", "**", "4.", "+", "c1", ")", "/", "(", "c2", "+", "c1", ")", ")", ")" ]
Reads an array of vs30 values (in m/s) and returns the depth to the 1.0 km/s velocity horizon (in m) Ref: Chiou & Youngs (2014) California model :param vs30: the shear wave velocity (in m/s) at a depth of 30m
[ "Reads", "an", "array", "of", "vs30", "values", "(", "in", "m", "/", "s", ")", "and", "returns", "the", "depth", "to", "the", "1", ".", "0", "km", "/", "s", "velocity", "horizon", "(", "in", "m", ")", "Ref", ":", "Chiou", "&", "Youngs", "(", "2014", ")", "California", "model", ":", "param", "vs30", ":", "the", "shear", "wave", "velocity", "(", "in", "m", "/", "s", ")", "at", "a", "depth", "of", "30m" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/prepare_site_model.py#L33-L42
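A quick numerical sanity check of the formula (approximate values, assuming calculate_z1pt0 is importable from openquake.commands.prepare_site_model):

    import numpy
    # z1pt0 = exp(-7.15/4 * ln((vs30**4 + 571**4) / (1360**4 + 571**4)))
    print(calculate_z1pt0(numpy.array([200., 760.])))
    # -> roughly [509., 41.] metres: soft sites have a much deeper
    #    1.0 km/s horizon than rock-like sites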
gem/oq-engine
openquake/commands/prepare_site_model.py
calculate_z2pt5_ngaw2
def calculate_z2pt5_ngaw2(vs30):
    '''
    Reads an array of vs30 values (in m/s) and
    returns the depth to the 2.5 km/s velocity horizon (in km)
    Ref: Campbell, K.W. & Bozorgnia, Y., 2014.
    'NGA-West2 ground motion model for the average horizontal components of
    PGA, PGV, and 5pct damped linear acceleration response spectra.'
    Earthquake Spectra, 30(3), pp.1087–1114.

    :param vs30: the shear wave velocity (in m/s) at a depth of 30 m
    '''
    c1 = 7.089
    c2 = -1.144
    z2pt5 = numpy.exp(c1 + numpy.log(vs30) * c2)
    return z2pt5
python
def calculate_z2pt5_ngaw2(vs30):
    c1 = 7.089
    c2 = -1.144
    z2pt5 = numpy.exp(c1 + numpy.log(vs30) * c2)
    return z2pt5
[ "def", "calculate_z2pt5_ngaw2", "(", "vs30", ")", ":", "c1", "=", "7.089", "c2", "=", "-", "1.144", "z2pt5", "=", "numpy", ".", "exp", "(", "c1", "+", "numpy", ".", "log", "(", "vs30", ")", "*", "c2", ")", "return", "z2pt5" ]
Reads an array of vs30 values (in m/s) and returns the depth to the 2.5 km/s velocity horizon (in km) Ref: Campbell, K.W. & Bozorgnia, Y., 2014. 'NGA-West2 ground motion model for the average horizontal components of PGA, PGV, and 5pct damped linear acceleration response spectra.' Earthquake Spectra, 30(3), pp.1087–1114. :param vs30: the shear wave velocity (in m/s) at a depth of 30 m
[ "Reads", "an", "array", "of", "vs30", "values", "(", "in", "m", "/", "s", ")", "and", "returns", "the", "depth", "to", "the", "2", ".", "5", "km", "/", "s", "velocity", "horizon", "(", "in", "km", ")", "Ref", ":", "Campbell", "K", ".", "W", ".", "&", "Bozorgnia", "Y", ".", "2014", ".", "NGA", "-", "West2", "ground", "motion", "model", "for", "the", "average", "horizontal", "components", "of", "PGA", "PGV", "and", "5pct", "damped", "linear", "acceleration", "response", "spectra", ".", "Earthquake", "Spectra", "30", "(", "3", ")", "pp", ".", "1087–1114", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/prepare_site_model.py#L45-L59
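The model is a simple power law, z2pt5 = exp(7.089 - 1.144 ln(vs30)). A quick check under the same assumption as above:

    import numpy
    print(calculate_z2pt5_ngaw2(numpy.array([200., 760.])))
    # -> roughly [2.79, 0.61] km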
gem/oq-engine
openquake/commands/prepare_site_model.py
read_vs30
def read_vs30(fnames):
    """
    :param fnames: a list of CSV files with fields lon,lat,vs30
    :returns: a vs30 array of dtype vs30dt
    """
    data = []
    for fname in fnames:
        for line in open(fname, encoding='utf-8-sig'):
            data.append(tuple(line.split(',')))
    return numpy.array(data, vs30_dt)
python
def read_vs30(fnames):
    data = []
    for fname in fnames:
        for line in open(fname, encoding='utf-8-sig'):
            data.append(tuple(line.split(',')))
    return numpy.array(data, vs30_dt)
[ "def", "read_vs30", "(", "fnames", ")", ":", "data", "=", "[", "]", "for", "fname", "in", "fnames", ":", "for", "line", "in", "open", "(", "fname", ",", "encoding", "=", "'utf-8-sig'", ")", ":", "data", ".", "append", "(", "tuple", "(", "line", ".", "split", "(", "','", ")", ")", ")", "return", "numpy", ".", "array", "(", "data", ",", "vs30_dt", ")" ]
:param fnames: a list of CSV files with fields lon,lat,vs30 :returns: a vs30 array of dtype vs30dt
[ ":", "param", "fnames", ":", "a", "list", "of", "CSV", "files", "with", "fields", "lon", "lat", "vs30", ":", "returns", ":", "a", "vs30", "array", "of", "dtype", "vs30dt" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/prepare_site_model.py#L62-L71
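A minimal input sketch; the file name is hypothetical and vs30_dt is the structured lon/lat/vs30 dtype defined elsewhere in prepare_site_model.py. Note that the parser expects no header row, since every line is split and converted:

    # vs30_grid.csv (hypothetical contents):
    # 9.1500,45.0700,660
    # 9.2000,45.1000,520
    sites = read_vs30(['vs30_grid.csv'])
    print(sites['vs30'])  # field access on the structured array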
gem/oq-engine
openquake/commands/prepare_site_model.py
prepare_site_model
def prepare_site_model(exposure_xml, sites_csv, vs30_csv,
                       z1pt0, z2pt5, vs30measured,
                       grid_spacing=0, assoc_distance=5,
                       output='site_model.csv'):
    """
    Prepare a site_model.csv file from exposure xml files/site csv files,
    vs30 csv files and a grid spacing which can be 0 (meaning no grid).
    For each site the closest vs30 parameter is used. The command can also
    generate (on demand) the additional fields z1pt0, z2pt5 and vs30measured
    which may be needed by your hazard model, depending on the required GSIMs.
    """
    hdf5 = datastore.hdf5new()
    req_site_params = {'vs30'}
    fields = ['lon', 'lat', 'vs30']
    if z1pt0:
        req_site_params.add('z1pt0')
        fields.append('z1pt0')
    if z2pt5:
        req_site_params.add('z2pt5')
        fields.append('z2pt5')
    if vs30measured:
        req_site_params.add('vs30measured')
        fields.append('vs30measured')
    with performance.Monitor(hdf5.path, hdf5, measuremem=True) as mon:
        if exposure_xml:
            mesh, assets_by_site = Exposure.read(
                exposure_xml, check_dupl=False).get_mesh_assets_by_site()
            mon.hdf5['assetcol'] = assetcol = site.SiteCollection.from_points(
                mesh.lons, mesh.lats, req_site_params=req_site_params)
            if grid_spacing:
                grid = mesh.get_convex_hull().dilate(
                    grid_spacing).discretize(grid_spacing)
                haz_sitecol = site.SiteCollection.from_points(
                    grid.lons, grid.lats, req_site_params=req_site_params)
                logging.info(
                    'Associating exposure grid with %d locations to %d '
                    'exposure sites', len(haz_sitecol), len(assets_by_site))
                haz_sitecol, assets_by, discarded = assoc(
                    assets_by_site, haz_sitecol, grid_spacing * SQRT2,
                    'filter')
                if len(discarded):
                    logging.info('Discarded %d sites with assets '
                                 '[use oq plot_assets]', len(discarded))
                    mon.hdf5['discarded'] = numpy.array(discarded)
                haz_sitecol.make_complete()
            else:
                haz_sitecol = assetcol
                discarded = []
        elif sites_csv:
            lons, lats = [], []
            for fname in sites_csv:
                with open(fname) as csv:
                    for line in csv:
                        if line.startswith('lon,lat'):  # possible header
                            continue
                        lon, lat = line.split(',')[:2]
                        lons.append(valid.longitude(lon))
                        lats.append(valid.latitude(lat))
            haz_sitecol = site.SiteCollection.from_points(
                lons, lats, req_site_params=req_site_params)
            if grid_spacing:
                grid = mesh.get_convex_hull().dilate(
                    grid_spacing).discretize(grid_spacing)
                haz_sitecol = site.SiteCollection.from_points(
                    grid.lons, grid.lats, req_site_params=req_site_params)
        else:
            raise RuntimeError('Missing exposures or missing sites')
        vs30orig = read_vs30(vs30_csv)
        logging.info('Associating %d hazard sites to %d site parameters',
                     len(haz_sitecol), len(vs30orig))
        sitecol, vs30, _ = assoc(
            vs30orig, haz_sitecol, assoc_distance, 'warn')
        sitecol.array['vs30'] = vs30['vs30']
        if z1pt0:
            sitecol.array['z1pt0'] = calculate_z1pt0(vs30['vs30'])
        if z2pt5:
            sitecol.array['z2pt5'] = calculate_z2pt5_ngaw2(vs30['vs30'])
        if vs30measured:
            sitecol.array['vs30measured'] = False  # it is inferred
        mon.hdf5['sitecol'] = sitecol
        write_csv(output, sitecol.array[fields])
    logging.info('Saved %d rows in %s' % (len(sitecol), output))
    logging.info(mon)
    return sitecol
python
def prepare_site_model(exposure_xml, sites_csv, vs30_csv,
                       z1pt0, z2pt5, vs30measured,
                       grid_spacing=0, assoc_distance=5,
                       output='site_model.csv'):
    hdf5 = datastore.hdf5new()
    req_site_params = {'vs30'}
    fields = ['lon', 'lat', 'vs30']
    if z1pt0:
        req_site_params.add('z1pt0')
        fields.append('z1pt0')
    if z2pt5:
        req_site_params.add('z2pt5')
        fields.append('z2pt5')
    if vs30measured:
        req_site_params.add('vs30measured')
        fields.append('vs30measured')
    with performance.Monitor(hdf5.path, hdf5, measuremem=True) as mon:
        if exposure_xml:
            mesh, assets_by_site = Exposure.read(
                exposure_xml, check_dupl=False).get_mesh_assets_by_site()
            mon.hdf5['assetcol'] = assetcol = site.SiteCollection.from_points(
                mesh.lons, mesh.lats, req_site_params=req_site_params)
            if grid_spacing:
                grid = mesh.get_convex_hull().dilate(
                    grid_spacing).discretize(grid_spacing)
                haz_sitecol = site.SiteCollection.from_points(
                    grid.lons, grid.lats, req_site_params=req_site_params)
                logging.info(
                    'Associating exposure grid with %d locations to %d '
                    'exposure sites', len(haz_sitecol), len(assets_by_site))
                haz_sitecol, assets_by, discarded = assoc(
                    assets_by_site, haz_sitecol, grid_spacing * SQRT2,
                    'filter')
                if len(discarded):
                    logging.info('Discarded %d sites with assets '
                                 '[use oq plot_assets]', len(discarded))
                    mon.hdf5['discarded'] = numpy.array(discarded)
                haz_sitecol.make_complete()
            else:
                haz_sitecol = assetcol
                discarded = []
        elif sites_csv:
            lons, lats = [], []
            for fname in sites_csv:
                with open(fname) as csv:
                    for line in csv:
                        if line.startswith('lon,lat'):
                            continue
                        lon, lat = line.split(',')[:2]
                        lons.append(valid.longitude(lon))
                        lats.append(valid.latitude(lat))
            haz_sitecol = site.SiteCollection.from_points(
                lons, lats, req_site_params=req_site_params)
            if grid_spacing:
                grid = mesh.get_convex_hull().dilate(
                    grid_spacing).discretize(grid_spacing)
                haz_sitecol = site.SiteCollection.from_points(
                    grid.lons, grid.lats, req_site_params=req_site_params)
        else:
            raise RuntimeError('Missing exposures or missing sites')
        vs30orig = read_vs30(vs30_csv)
        logging.info('Associating %d hazard sites to %d site parameters',
                     len(haz_sitecol), len(vs30orig))
        sitecol, vs30, _ = assoc(
            vs30orig, haz_sitecol, assoc_distance, 'warn')
        sitecol.array['vs30'] = vs30['vs30']
        if z1pt0:
            sitecol.array['z1pt0'] = calculate_z1pt0(vs30['vs30'])
        if z2pt5:
            sitecol.array['z2pt5'] = calculate_z2pt5_ngaw2(vs30['vs30'])
        if vs30measured:
            sitecol.array['vs30measured'] = False
        mon.hdf5['sitecol'] = sitecol
        write_csv(output, sitecol.array[fields])
    logging.info('Saved %d rows in %s' % (len(sitecol), output))
    logging.info(mon)
    return sitecol
[ "def", "prepare_site_model", "(", "exposure_xml", ",", "sites_csv", ",", "vs30_csv", ",", "z1pt0", ",", "z2pt5", ",", "vs30measured", ",", "grid_spacing", "=", "0", ",", "assoc_distance", "=", "5", ",", "output", "=", "'site_model.csv'", ")", ":", "hdf5", "=", "datastore", ".", "hdf5new", "(", ")", "req_site_params", "=", "{", "'vs30'", "}", "fields", "=", "[", "'lon'", ",", "'lat'", ",", "'vs30'", "]", "if", "z1pt0", ":", "req_site_params", ".", "add", "(", "'z1pt0'", ")", "fields", ".", "append", "(", "'z1pt0'", ")", "if", "z2pt5", ":", "req_site_params", ".", "add", "(", "'z2pt5'", ")", "fields", ".", "append", "(", "'z2pt5'", ")", "if", "vs30measured", ":", "req_site_params", ".", "add", "(", "'vs30measured'", ")", "fields", ".", "append", "(", "'vs30measured'", ")", "with", "performance", ".", "Monitor", "(", "hdf5", ".", "path", ",", "hdf5", ",", "measuremem", "=", "True", ")", "as", "mon", ":", "if", "exposure_xml", ":", "mesh", ",", "assets_by_site", "=", "Exposure", ".", "read", "(", "exposure_xml", ",", "check_dupl", "=", "False", ")", ".", "get_mesh_assets_by_site", "(", ")", "mon", ".", "hdf5", "[", "'assetcol'", "]", "=", "assetcol", "=", "site", ".", "SiteCollection", ".", "from_points", "(", "mesh", ".", "lons", ",", "mesh", ".", "lats", ",", "req_site_params", "=", "req_site_params", ")", "if", "grid_spacing", ":", "grid", "=", "mesh", ".", "get_convex_hull", "(", ")", ".", "dilate", "(", "grid_spacing", ")", ".", "discretize", "(", "grid_spacing", ")", "haz_sitecol", "=", "site", ".", "SiteCollection", ".", "from_points", "(", "grid", ".", "lons", ",", "grid", ".", "lats", ",", "req_site_params", "=", "req_site_params", ")", "logging", ".", "info", "(", "'Associating exposure grid with %d locations to %d '", "'exposure sites'", ",", "len", "(", "haz_sitecol", ")", ",", "len", "(", "assets_by_site", ")", ")", "haz_sitecol", ",", "assets_by", ",", "discarded", "=", "assoc", "(", "assets_by_site", ",", "haz_sitecol", ",", "grid_spacing", "*", "SQRT2", ",", "'filter'", ")", "if", "len", "(", "discarded", ")", ":", "logging", ".", "info", "(", "'Discarded %d sites with assets '", "'[use oq plot_assets]'", ",", "len", "(", "discarded", ")", ")", "mon", ".", "hdf5", "[", "'discarded'", "]", "=", "numpy", ".", "array", "(", "discarded", ")", "haz_sitecol", ".", "make_complete", "(", ")", "else", ":", "haz_sitecol", "=", "assetcol", "discarded", "=", "[", "]", "elif", "sites_csv", ":", "lons", ",", "lats", "=", "[", "]", ",", "[", "]", "for", "fname", "in", "sites_csv", ":", "with", "open", "(", "fname", ")", "as", "csv", ":", "for", "line", "in", "csv", ":", "if", "line", ".", "startswith", "(", "'lon,lat'", ")", ":", "# possible header", "continue", "lon", ",", "lat", "=", "line", ".", "split", "(", "','", ")", "[", ":", "2", "]", "lons", ".", "append", "(", "valid", ".", "longitude", "(", "lon", ")", ")", "lats", ".", "append", "(", "valid", ".", "latitude", "(", "lat", ")", ")", "haz_sitecol", "=", "site", ".", "SiteCollection", ".", "from_points", "(", "lons", ",", "lats", ",", "req_site_params", "=", "req_site_params", ")", "if", "grid_spacing", ":", "grid", "=", "mesh", ".", "get_convex_hull", "(", ")", ".", "dilate", "(", "grid_spacing", ")", ".", "discretize", "(", "grid_spacing", ")", "haz_sitecol", "=", "site", ".", "SiteCollection", ".", "from_points", "(", "grid", ".", "lons", ",", "grid", ".", "lats", ",", "req_site_params", "=", "req_site_params", ")", "else", ":", "raise", "RuntimeError", "(", "'Missing exposures or missing sites'", ")", "vs30orig", "=", 
"read_vs30", "(", "vs30_csv", ")", "logging", ".", "info", "(", "'Associating %d hazard sites to %d site parameters'", ",", "len", "(", "haz_sitecol", ")", ",", "len", "(", "vs30orig", ")", ")", "sitecol", ",", "vs30", ",", "_", "=", "assoc", "(", "vs30orig", ",", "haz_sitecol", ",", "assoc_distance", ",", "'warn'", ")", "sitecol", ".", "array", "[", "'vs30'", "]", "=", "vs30", "[", "'vs30'", "]", "if", "z1pt0", ":", "sitecol", ".", "array", "[", "'z1pt0'", "]", "=", "calculate_z1pt0", "(", "vs30", "[", "'vs30'", "]", ")", "if", "z2pt5", ":", "sitecol", ".", "array", "[", "'z2pt5'", "]", "=", "calculate_z2pt5_ngaw2", "(", "vs30", "[", "'vs30'", "]", ")", "if", "vs30measured", ":", "sitecol", ".", "array", "[", "'vs30measured'", "]", "=", "False", "# it is inferred", "mon", ".", "hdf5", "[", "'sitecol'", "]", "=", "sitecol", "write_csv", "(", "output", ",", "sitecol", ".", "array", "[", "fields", "]", ")", "logging", ".", "info", "(", "'Saved %d rows in %s'", "%", "(", "len", "(", "sitecol", ")", ",", "output", ")", ")", "logging", ".", "info", "(", "mon", ")", "return", "sitecol" ]
Prepare a site_model.csv file from exposure xml files/site csv files, vs30 csv files and a grid spacing which can be 0 (meaning no grid). For each site the closest vs30 parameter is used. The command can also generate (on demand) the additional fields z1pt0, z2pt5 and vs30measured which may be needed by your hazard model, depending on the required GSIMs.
[ "Prepare", "a", "site_model", ".", "csv", "file", "from", "exposure", "xml", "files", "/", "site", "csv", "files", "vs30", "csv", "files", "and", "a", "grid", "spacing", "which", "can", "be", "0", "(", "meaning", "no", "grid", ")", ".", "For", "each", "site", "the", "closest", "vs30", "parameter", "is", "used", ".", "The", "command", "can", "also", "generate", "(", "on", "demand", ")", "the", "additional", "fields", "z1pt0", "z2pt5", "and", "vs30measured", "which", "may", "be", "needed", "by", "your", "hazard", "model", "depending", "on", "the", "required", "GSIMs", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/prepare_site_model.py#L75-L157
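A hedged invocation sketch based on the signature above; the input file names are hypothetical:

    # build a site model from an exposure and a vs30 grid, also filling
    # the z1pt0 and z2pt5 fields required by some GSIMs
    sitecol = prepare_site_model(
        exposure_xml=['exposure.xml'], sites_csv=[],
        vs30_csv=['vs30_grid.csv'], z1pt0=True, z2pt5=True,
        vs30measured=False, grid_spacing=10, output='site_model.csv')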
gem/oq-engine
openquake/commands/webui.py
webui
def webui(cmd, hostport='127.0.0.1:8800', skip_browser=False):
    """
    start the webui server in the foreground or perform other operations
    on the django application
    """
    dbpath = os.path.realpath(os.path.expanduser(config.dbserver.file))
    if os.path.isfile(dbpath) and not os.access(dbpath, os.W_OK):
        sys.exit('This command must be run by the proper user: '
                 'see the documentation for details')
    if cmd == 'start':
        dbserver.ensure_on()  # start the dbserver in a subprocess
        rundjango('runserver', hostport, skip_browser)
    elif cmd in commands:
        rundjango(cmd)
python
def webui(cmd, hostport='127.0.0.1:8800', skip_browser=False):
    dbpath = os.path.realpath(os.path.expanduser(config.dbserver.file))
    if os.path.isfile(dbpath) and not os.access(dbpath, os.W_OK):
        sys.exit('This command must be run by the proper user: '
                 'see the documentation for details')
    if cmd == 'start':
        dbserver.ensure_on()
        rundjango('runserver', hostport, skip_browser)
    elif cmd in commands:
        rundjango(cmd)
[ "def", "webui", "(", "cmd", ",", "hostport", "=", "'127.0.0.1:8800'", ",", "skip_browser", "=", "False", ")", ":", "dbpath", "=", "os", ".", "path", ".", "realpath", "(", "os", ".", "path", ".", "expanduser", "(", "config", ".", "dbserver", ".", "file", ")", ")", "if", "os", ".", "path", ".", "isfile", "(", "dbpath", ")", "and", "not", "os", ".", "access", "(", "dbpath", ",", "os", ".", "W_OK", ")", ":", "sys", ".", "exit", "(", "'This command must be run by the proper user: '", "'see the documentation for details'", ")", "if", "cmd", "==", "'start'", ":", "dbserver", ".", "ensure_on", "(", ")", "# start the dbserver in a subprocess", "rundjango", "(", "'runserver'", ",", "hostport", ",", "skip_browser", ")", "elif", "cmd", "in", "commands", ":", "rundjango", "(", "cmd", ")" ]
start the webui server in the foreground or perform other operations on the django application
[ "start", "the", "webui", "server", "in", "foreground", "or", "perform", "other", "operation", "on", "the", "django", "application" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/webui.py#L51-L64
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """
    See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values.
    """
    # get the necessary set of coefficients
    C = self.COEFFS[imt]
    # compute median sa on rock (vs30=1180m/s). Used for site response
    # term calculation
    sa1180 = np.exp(self._get_sa_at_1180(C, imt, sites, rup, dists))
    # get the mean value
    mean = (self._get_basic_term(C, rup, dists) +
            self._get_faulting_style_term(C, rup) +
            self._get_site_response_term(C, imt, sites.vs30, sa1180) +
            self._get_hanging_wall_term(C, dists, rup) +
            self._get_top_of_rupture_depth_term(C, imt, rup) +
            self._get_soil_depth_term(C, sites.z1pt0 / METRES_PER_KM,
                                      sites.vs30)
            )
    mean += self._get_regional_term(C, imt, sites.vs30, dists.rrup)
    # get standard deviations
    stddevs = self._get_stddevs(C, imt, rup, sites, stddev_types, sa1180,
                                dists)
    return mean, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    C = self.COEFFS[imt]
    sa1180 = np.exp(self._get_sa_at_1180(C, imt, sites, rup, dists))
    mean = (self._get_basic_term(C, rup, dists) +
            self._get_faulting_style_term(C, rup) +
            self._get_site_response_term(C, imt, sites.vs30, sa1180) +
            self._get_hanging_wall_term(C, dists, rup) +
            self._get_top_of_rupture_depth_term(C, imt, rup) +
            self._get_soil_depth_term(C, sites.z1pt0 / METRES_PER_KM,
                                      sites.vs30)
            )
    mean += self._get_regional_term(C, imt, sites.vs30, dists.rrup)
    stddevs = self._get_stddevs(C, imt, rup, sites, stddev_types, sa1180,
                                dists)
    return mean, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "# get the necessary set of coefficients", "C", "=", "self", ".", "COEFFS", "[", "imt", "]", "# compute median sa on rock (vs30=1180m/s). Used for site response", "# term calculation", "sa1180", "=", "np", ".", "exp", "(", "self", ".", "_get_sa_at_1180", "(", "C", ",", "imt", ",", "sites", ",", "rup", ",", "dists", ")", ")", "# get the mean value", "mean", "=", "(", "self", ".", "_get_basic_term", "(", "C", ",", "rup", ",", "dists", ")", "+", "self", ".", "_get_faulting_style_term", "(", "C", ",", "rup", ")", "+", "self", ".", "_get_site_response_term", "(", "C", ",", "imt", ",", "sites", ".", "vs30", ",", "sa1180", ")", "+", "self", ".", "_get_hanging_wall_term", "(", "C", ",", "dists", ",", "rup", ")", "+", "self", ".", "_get_top_of_rupture_depth_term", "(", "C", ",", "imt", ",", "rup", ")", "+", "self", ".", "_get_soil_depth_term", "(", "C", ",", "sites", ".", "z1pt0", "/", "METRES_PER_KM", ",", "sites", ".", "vs30", ")", ")", "mean", "+=", "self", ".", "_get_regional_term", "(", "C", ",", "imt", ",", "sites", ".", "vs30", ",", "dists", ".", "rrup", ")", "# get standard deviations", "stddevs", "=", "self", ".", "_get_stddevs", "(", "C", ",", "imt", ",", "rup", ",", "sites", ",", "stddev_types", ",", "sa1180", ",", "dists", ")", "return", "mean", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L82-L107
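A compact summary of the mean model as assembled by this method (the notation is ours and maps one-to-one onto the helper methods called above):

\ln Sa = f_{basic}(M, R_{rup}) + f_{flt}(M, \mathrm{rake}) + f_{site}(V_{S30}, Sa_{1180}) + f_{hw}(R_x, R_{y0}, \mathrm{dip}, M, Z_{tor}) + f_{ztor}(Z_{tor}) + f_{soil}(Z_{1.0}, V_{S30}) + f_{reg}(V_{S30}, R_{rup})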
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014._get_sa_at_1180
def _get_sa_at_1180(self, C, imt, sites, rup, dists):
    """
    Compute and return mean imt value for rock conditions
    (vs30 = 1180 m/s)
    """
    # reference vs30 = 1180 m/s
    vs30_1180 = np.ones_like(sites.vs30) * 1180.
    # reference shaking intensity = 0
    ref_iml = np.zeros_like(sites.vs30)
    # fake Z1.0 - Since negative it will be replaced by the default Z1.0
    # for the corresponding region
    fake_z1pt0 = np.ones_like(sites.vs30) * -1
    return (self._get_basic_term(C, rup, dists) +
            self._get_faulting_style_term(C, rup) +
            self._get_site_response_term(C, imt, vs30_1180, ref_iml) +
            self._get_hanging_wall_term(C, dists, rup) +
            self._get_top_of_rupture_depth_term(C, imt, rup) +
            self._get_soil_depth_term(C, fake_z1pt0, vs30_1180) +
            self._get_regional_term(C, imt, vs30_1180, dists.rrup)
            )
python
def _get_sa_at_1180(self, C, imt, sites, rup, dists):
    vs30_1180 = np.ones_like(sites.vs30) * 1180.
    ref_iml = np.zeros_like(sites.vs30)
    fake_z1pt0 = np.ones_like(sites.vs30) * -1
    return (self._get_basic_term(C, rup, dists) +
            self._get_faulting_style_term(C, rup) +
            self._get_site_response_term(C, imt, vs30_1180, ref_iml) +
            self._get_hanging_wall_term(C, dists, rup) +
            self._get_top_of_rupture_depth_term(C, imt, rup) +
            self._get_soil_depth_term(C, fake_z1pt0, vs30_1180) +
            self._get_regional_term(C, imt, vs30_1180, dists.rrup)
            )
[ "def", "_get_sa_at_1180", "(", "self", ",", "C", ",", "imt", ",", "sites", ",", "rup", ",", "dists", ")", ":", "# reference vs30 = 1180 m/s", "vs30_1180", "=", "np", ".", "ones_like", "(", "sites", ".", "vs30", ")", "*", "1180.", "# reference shaking intensity = 0", "ref_iml", "=", "np", ".", "zeros_like", "(", "sites", ".", "vs30", ")", "# fake Z1.0 - Since negative it will be replaced by the default Z1.0", "# for the corresponding region", "fake_z1pt0", "=", "np", ".", "ones_like", "(", "sites", ".", "vs30", ")", "*", "-", "1", "return", "(", "self", ".", "_get_basic_term", "(", "C", ",", "rup", ",", "dists", ")", "+", "self", ".", "_get_faulting_style_term", "(", "C", ",", "rup", ")", "+", "self", ".", "_get_site_response_term", "(", "C", ",", "imt", ",", "vs30_1180", ",", "ref_iml", ")", "+", "self", ".", "_get_hanging_wall_term", "(", "C", ",", "dists", ",", "rup", ")", "+", "self", ".", "_get_top_of_rupture_depth_term", "(", "C", ",", "imt", ",", "rup", ")", "+", "self", ".", "_get_soil_depth_term", "(", "C", ",", "fake_z1pt0", ",", "vs30_1180", ")", "+", "self", ".", "_get_regional_term", "(", "C", ",", "imt", ",", "vs30_1180", ",", "dists", ".", "rrup", ")", ")" ]
Compute and return mean imt value for rock conditions (vs30 = 1180 m/s)
[ "Compute", "and", "return", "mean", "imt", "value", "for", "rock", "conditions", "(", "vs30", "=", "1100", "m", "/", "s", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L109-L128
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014._get_basic_term
def _get_basic_term(self, C, rup, dists):
    """
    Compute and return basic form, see page 1030.
    """
    # Fictitious depth calculation
    if rup.mag > 5.:
        c4m = C['c4']
    elif rup.mag > 4.:
        c4m = C['c4'] - (C['c4']-1.) * (5. - rup.mag)
    else:
        c4m = 1.
    R = np.sqrt(dists.rrup**2. + c4m**2.)
    # basic form
    base_term = C['a1'] * np.ones_like(dists.rrup) + C['a17'] * dists.rrup
    # equation 2 at page 1030
    if rup.mag >= C['m1']:
        base_term += (C['a5'] * (rup.mag - C['m1']) +
                      C['a8'] * (8.5 - rup.mag)**2. +
                      (C['a2'] + C['a3'] * (rup.mag - C['m1'])) *
                      np.log(R))
    elif rup.mag >= self.CONSTS['m2']:
        base_term += (C['a4'] * (rup.mag - C['m1']) +
                      C['a8'] * (8.5 - rup.mag)**2. +
                      (C['a2'] + C['a3'] * (rup.mag - C['m1'])) *
                      np.log(R))
    else:
        base_term += (C['a4'] * (self.CONSTS['m2'] - C['m1']) +
                      C['a8'] * (8.5 - self.CONSTS['m2'])**2. +
                      C['a6'] * (rup.mag - self.CONSTS['m2']) +
                      C['a7'] * (rup.mag - self.CONSTS['m2'])**2. +
                      (C['a2'] + C['a3'] * (self.CONSTS['m2'] - C['m1'])) *
                      np.log(R))
    return base_term
python
def _get_basic_term(self, C, rup, dists):
    if rup.mag > 5.:
        c4m = C['c4']
    elif rup.mag > 4.:
        c4m = C['c4'] - (C['c4']-1.) * (5. - rup.mag)
    else:
        c4m = 1.
    R = np.sqrt(dists.rrup**2. + c4m**2.)
    base_term = C['a1'] * np.ones_like(dists.rrup) + C['a17'] * dists.rrup
    if rup.mag >= C['m1']:
        base_term += (C['a5'] * (rup.mag - C['m1']) +
                      C['a8'] * (8.5 - rup.mag)**2. +
                      (C['a2'] + C['a3'] * (rup.mag - C['m1'])) *
                      np.log(R))
    elif rup.mag >= self.CONSTS['m2']:
        base_term += (C['a4'] * (rup.mag - C['m1']) +
                      C['a8'] * (8.5 - rup.mag)**2. +
                      (C['a2'] + C['a3'] * (rup.mag - C['m1'])) *
                      np.log(R))
    else:
        base_term += (C['a4'] * (self.CONSTS['m2'] - C['m1']) +
                      C['a8'] * (8.5 - self.CONSTS['m2'])**2. +
                      C['a6'] * (rup.mag - self.CONSTS['m2']) +
                      C['a7'] * (rup.mag - self.CONSTS['m2'])**2. +
                      (C['a2'] + C['a3'] * (self.CONSTS['m2'] - C['m1'])) *
                      np.log(R))
    return base_term
[ "def", "_get_basic_term", "(", "self", ",", "C", ",", "rup", ",", "dists", ")", ":", "# Fictitious depth calculation", "if", "rup", ".", "mag", ">", "5.", ":", "c4m", "=", "C", "[", "'c4'", "]", "elif", "rup", ".", "mag", ">", "4.", ":", "c4m", "=", "C", "[", "'c4'", "]", "-", "(", "C", "[", "'c4'", "]", "-", "1.", ")", "*", "(", "5.", "-", "rup", ".", "mag", ")", "else", ":", "c4m", "=", "1.", "R", "=", "np", ".", "sqrt", "(", "dists", ".", "rrup", "**", "2.", "+", "c4m", "**", "2.", ")", "# basic form", "base_term", "=", "C", "[", "'a1'", "]", "*", "np", ".", "ones_like", "(", "dists", ".", "rrup", ")", "+", "C", "[", "'a17'", "]", "*", "dists", ".", "rrup", "# equation 2 at page 1030", "if", "rup", ".", "mag", ">=", "C", "[", "'m1'", "]", ":", "base_term", "+=", "(", "C", "[", "'a5'", "]", "*", "(", "rup", ".", "mag", "-", "C", "[", "'m1'", "]", ")", "+", "C", "[", "'a8'", "]", "*", "(", "8.5", "-", "rup", ".", "mag", ")", "**", "2.", "+", "(", "C", "[", "'a2'", "]", "+", "C", "[", "'a3'", "]", "*", "(", "rup", ".", "mag", "-", "C", "[", "'m1'", "]", ")", ")", "*", "np", ".", "log", "(", "R", ")", ")", "elif", "rup", ".", "mag", ">=", "self", ".", "CONSTS", "[", "'m2'", "]", ":", "base_term", "+=", "(", "C", "[", "'a4'", "]", "*", "(", "rup", ".", "mag", "-", "C", "[", "'m1'", "]", ")", "+", "C", "[", "'a8'", "]", "*", "(", "8.5", "-", "rup", ".", "mag", ")", "**", "2.", "+", "(", "C", "[", "'a2'", "]", "+", "C", "[", "'a3'", "]", "*", "(", "rup", ".", "mag", "-", "C", "[", "'m1'", "]", ")", ")", "*", "np", ".", "log", "(", "R", ")", ")", "else", ":", "base_term", "+=", "(", "C", "[", "'a4'", "]", "*", "(", "self", ".", "CONSTS", "[", "'m2'", "]", "-", "C", "[", "'m1'", "]", ")", "+", "C", "[", "'a8'", "]", "*", "(", "8.5", "-", "self", ".", "CONSTS", "[", "'m2'", "]", ")", "**", "2.", "+", "C", "[", "'a6'", "]", "*", "(", "rup", ".", "mag", "-", "self", ".", "CONSTS", "[", "'m2'", "]", ")", "+", "C", "[", "'a7'", "]", "*", "(", "rup", ".", "mag", "-", "self", ".", "CONSTS", "[", "'m2'", "]", ")", "**", "2.", "+", "(", "C", "[", "'a2'", "]", "+", "C", "[", "'a3'", "]", "*", "(", "self", ".", "CONSTS", "[", "'m2'", "]", "-", "C", "[", "'m1'", "]", ")", ")", "*", "np", ".", "log", "(", "R", ")", ")", "return", "base_term" ]
Compute and return basic form, see page 1030.
[ "Compute", "and", "return", "basic", "form", "see", "page", "1030", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L130-L162
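The fictitious-depth smoothing at the top of the method reads, in equation form (directly transcribing the code):

R = \sqrt{R_{rup}^2 + c_{4M}^2}, \qquad c_{4M} = \begin{cases} c_4 & M > 5 \\ c_4 - (c_4 - 1)(5 - M) & 4 < M \le 5 \\ 1 & M \le 4 \end{cases}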
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014._get_faulting_style_term
def _get_faulting_style_term(self, C, rup):
    """
    Compute and return faulting style term, that is the sum of the second
    and third terms in equation 1, page 74.
    """
    # this implements equations 5 and 6 at page 1032. f7 is the
    # coefficient for reverse mechanisms while f8 is the correction
    # factor for normal ruptures
    if rup.mag > 5.0:
        f7 = C['a11']
        f8 = C['a12']
    elif rup.mag >= 4:
        f7 = C['a11'] * (rup.mag - 4.)
        f8 = C['a12'] * (rup.mag - 4.)
    else:
        f7 = 0.0
        f8 = 0.0
    # ranges of rake values for each faulting mechanism are specified in
    # table 2, page 1031
    return (f7 * float(rup.rake > 30 and rup.rake < 150) +
            f8 * float(rup.rake > -150 and rup.rake < -30))
python
def _get_faulting_style_term(self, C, rup):
    if rup.mag > 5.0:
        f7 = C['a11']
        f8 = C['a12']
    elif rup.mag >= 4:
        f7 = C['a11'] * (rup.mag - 4.)
        f8 = C['a12'] * (rup.mag - 4.)
    else:
        f7 = 0.0
        f8 = 0.0
    return (f7 * float(rup.rake > 30 and rup.rake < 150) +
            f8 * float(rup.rake > -150 and rup.rake < -30))
[ "def", "_get_faulting_style_term", "(", "self", ",", "C", ",", "rup", ")", ":", "# this implements equations 5 and 6 at page 1032. f7 is the", "# coefficient for reverse mechanisms while f8 is the correction", "# factor for normal ruptures", "if", "rup", ".", "mag", ">", "5.0", ":", "f7", "=", "C", "[", "'a11'", "]", "f8", "=", "C", "[", "'a12'", "]", "elif", "rup", ".", "mag", ">=", "4", ":", "f7", "=", "C", "[", "'a11'", "]", "*", "(", "rup", ".", "mag", "-", "4.", ")", "f8", "=", "C", "[", "'a12'", "]", "*", "(", "rup", ".", "mag", "-", "4.", ")", "else", ":", "f7", "=", "0.0", "f8", "=", "0.0", "# ranges of rake values for each faulting mechanism are specified in", "# table 2, page 1031", "return", "(", "f7", "*", "float", "(", "rup", ".", "rake", ">", "30", "and", "rup", ".", "rake", "<", "150", ")", "+", "f8", "*", "float", "(", "rup", ".", "rake", ">", "-", "150", "and", "rup", ".", "rake", "<", "-", "30", ")", ")" ]
Compute and return faulting style term, that is the sum of the second and third terms in equation 1, page 74.
[ "Compute", "and", "return", "faulting", "style", "term", "that", "is", "the", "sum", "of", "the", "second", "and", "third", "terms", "in", "equation", "1", "page", "74", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L164-L184
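Using indicator functions for the rake windows of table 2, the return value can be written as:

f_{flt} = f_7 \,\mathbb{1}[30^\circ < \mathrm{rake} < 150^\circ] + f_8 \,\mathbb{1}[-150^\circ < \mathrm{rake} < -30^\circ]

so strike-slip ruptures (rake near 0° or 180°) receive neither correction.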
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014._get_vs30star
def _get_vs30star(self, vs30, imt):
    """
    This computes equations 8 and 9 at page 1034
    """
    # compute the v1 value (see eq. 9, page 1034)
    if imt.name == "SA":
        t = imt.period
        if t <= 0.50:
            v1 = 1500.0
        elif t < 3.0:
            v1 = np.exp(-0.35 * np.log(t / 0.5) + np.log(1500.))
        else:
            v1 = 800.0
    elif imt.name == "PGA":
        v1 = 1500.0
    else:
        # This covers the PGV case
        v1 = 1500.0
    # set the vs30 star value (see eq. 8, page 1034)
    vs30_star = np.ones_like(vs30) * vs30
    vs30_star[vs30 >= v1] = v1
    return vs30_star
python
def _get_vs30star(self, vs30, imt):
    if imt.name == "SA":
        t = imt.period
        if t <= 0.50:
            v1 = 1500.0
        elif t < 3.0:
            v1 = np.exp(-0.35 * np.log(t / 0.5) + np.log(1500.))
        else:
            v1 = 800.0
    elif imt.name == "PGA":
        v1 = 1500.0
    else:
        v1 = 1500.0
    vs30_star = np.ones_like(vs30) * vs30
    vs30_star[vs30 >= v1] = v1
    return vs30_star
[ "def", "_get_vs30star", "(", "self", ",", "vs30", ",", "imt", ")", ":", "# compute the v1 value (see eq. 9, page 1034)", "if", "imt", ".", "name", "==", "\"SA\"", ":", "t", "=", "imt", ".", "period", "if", "t", "<=", "0.50", ":", "v1", "=", "1500.0", "elif", "t", "<", "3.0", ":", "v1", "=", "np", ".", "exp", "(", "-", "0.35", "*", "np", ".", "log", "(", "t", "/", "0.5", ")", "+", "np", ".", "log", "(", "1500.", ")", ")", "else", ":", "v1", "=", "800.0", "elif", "imt", ".", "name", "==", "\"PGA\"", ":", "v1", "=", "1500.0", "else", ":", "# This covers the PGV case", "v1", "=", "1500.0", "# set the vs30 star value (see eq. 8, page 1034)", "vs30_star", "=", "np", ".", "ones_like", "(", "vs30", ")", "*", "vs30", "vs30_star", "[", "vs30", ">=", "v1", "]", "=", "v1", "return", "vs30_star" ]
This computes equations 8 and 9 at page 1034
[ "This", "computes", "equations", "8", "and", "9", "at", "page", "1034" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L186-L207
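The cap v1 of equation 9, as implemented, is:

v_1 = \begin{cases} 1500 & T \le 0.5\,\mathrm{s\ (also\ PGA,\ PGV)} \\ \exp(-0.35\,\ln(T/0.5) + \ln 1500) & 0.5\,\mathrm{s} < T < 3\,\mathrm{s} \\ 800 & T \ge 3\,\mathrm{s} \end{cases}

For example, at T = 1 s, v1 = 1500 · 2^(-0.35) ≈ 1177 m/s; vs30* is then vs30 clipped from above at v1.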
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014._get_site_response_term
def _get_site_response_term(self, C, imt, vs30, sa1180):
    """
    Compute and return site response model term see page 1033
    """
    # vs30 star
    vs30_star = self._get_vs30star(vs30, imt)
    # compute the site term
    site_resp_term = np.zeros_like(vs30)
    gt_vlin = vs30 >= C['vlin']
    lw_vlin = vs30 < C['vlin']
    # compute site response term for sites with vs30 greater than vlin
    vs30_rat = vs30_star / C['vlin']
    site_resp_term[gt_vlin] = ((C['a10'] + C['b'] * self.CONSTS['n']) *
                               np.log(vs30_rat[gt_vlin]))
    # compute site response term for sites with vs30 lower than vlin
    site_resp_term[lw_vlin] = (C['a10'] * np.log(vs30_rat[lw_vlin]) -
                               C['b'] * np.log(sa1180[lw_vlin] + C['c']) +
                               C['b'] * np.log(sa1180[lw_vlin] + C['c'] *
                                               vs30_rat[lw_vlin] **
                                               self.CONSTS['n']))
    return site_resp_term
python
def _get_site_response_term(self, C, imt, vs30, sa1180):
    vs30_star = self._get_vs30star(vs30, imt)
    site_resp_term = np.zeros_like(vs30)
    gt_vlin = vs30 >= C['vlin']
    lw_vlin = vs30 < C['vlin']
    vs30_rat = vs30_star / C['vlin']
    site_resp_term[gt_vlin] = ((C['a10'] + C['b'] * self.CONSTS['n']) *
                               np.log(vs30_rat[gt_vlin]))
    site_resp_term[lw_vlin] = (C['a10'] * np.log(vs30_rat[lw_vlin]) -
                               C['b'] * np.log(sa1180[lw_vlin] + C['c']) +
                               C['b'] * np.log(sa1180[lw_vlin] + C['c'] *
                                               vs30_rat[lw_vlin] **
                                               self.CONSTS['n']))
    return site_resp_term
[ "def", "_get_site_response_term", "(", "self", ",", "C", ",", "imt", ",", "vs30", ",", "sa1180", ")", ":", "# vs30 star", "vs30_star", "=", "self", ".", "_get_vs30star", "(", "vs30", ",", "imt", ")", "# compute the site term", "site_resp_term", "=", "np", ".", "zeros_like", "(", "vs30", ")", "gt_vlin", "=", "vs30", ">=", "C", "[", "'vlin'", "]", "lw_vlin", "=", "vs30", "<", "C", "[", "'vlin'", "]", "# compute site response term for sites with vs30 greater than vlin", "vs30_rat", "=", "vs30_star", "/", "C", "[", "'vlin'", "]", "site_resp_term", "[", "gt_vlin", "]", "=", "(", "(", "C", "[", "'a10'", "]", "+", "C", "[", "'b'", "]", "*", "self", ".", "CONSTS", "[", "'n'", "]", ")", "*", "np", ".", "log", "(", "vs30_rat", "[", "gt_vlin", "]", ")", ")", "# compute site response term for sites with vs30 lower than vlin", "site_resp_term", "[", "lw_vlin", "]", "=", "(", "C", "[", "'a10'", "]", "*", "np", ".", "log", "(", "vs30_rat", "[", "lw_vlin", "]", ")", "-", "C", "[", "'b'", "]", "*", "np", ".", "log", "(", "sa1180", "[", "lw_vlin", "]", "+", "C", "[", "'c'", "]", ")", "+", "C", "[", "'b'", "]", "*", "np", ".", "log", "(", "sa1180", "[", "lw_vlin", "]", "+", "C", "[", "'c'", "]", "*", "vs30_rat", "[", "lw_vlin", "]", "**", "self", ".", "CONSTS", "[", "'n'", "]", ")", ")", "return", "site_resp_term" ]
Compute and return site response model term see page 1033
[ "Compute", "and", "return", "site", "response", "model", "term", "see", "page", "1033" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L209-L229
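Written out, the two branches above are:

f_{site} = \begin{cases} (a_{10} + b\,n)\,\ln(v^*_{S30}/V_{lin}) & V_{S30} \ge V_{lin} \\ a_{10}\,\ln(v^*_{S30}/V_{lin}) - b\,\ln(Sa_{1180} + c) + b\,\ln\big(Sa_{1180} + c\,(v^*_{S30}/V_{lin})^n\big) & V_{S30} < V_{lin} \end{cases}

i.e. linear amplification above Vlin and amplitude-dependent (nonlinear) amplification below it.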
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014._get_hanging_wall_term
def _get_hanging_wall_term(self, C, dists, rup):
    """
    Compute and return hanging wall model term, see page 1038.
    """
    if rup.dip == 90.0:
        return np.zeros_like(dists.rx)
    else:
        Fhw = np.zeros_like(dists.rx)
        Fhw[dists.rx > 0] = 1.
        # Compute taper t1
        T1 = np.ones_like(dists.rx)
        T1 *= 60./45. if rup.dip <= 30. else (90.-rup.dip)/45.0
        # Compute taper t2 (eq 12 at page 1039) - a2hw set to 0.2 as
        # indicated at page 1041
        T2 = np.zeros_like(dists.rx)
        a2hw = 0.2
        if rup.mag > 6.5:
            T2 += (1. + a2hw * (rup.mag - 6.5))
        elif rup.mag > 5.5:
            T2 += (1. + a2hw * (rup.mag - 6.5) - (1. - a2hw) *
                   (rup.mag - 6.5)**2)
        else:
            T2 *= 0.
        # Compute taper t3 (eq. 13 at page 1039) - r1 and r2 specified at
        # page 1040
        T3 = np.zeros_like(dists.rx)
        r1 = rup.width * np.cos(np.radians(rup.dip))
        r2 = 3. * r1
        #
        idx = dists.rx < r1
        T3[idx] = (np.ones_like(dists.rx)[idx] * self.CONSTS['h1'] +
                   self.CONSTS['h2'] * (dists.rx[idx] / r1) +
                   self.CONSTS['h3'] * (dists.rx[idx] / r1)**2)
        #
        idx = ((dists.rx >= r1) & (dists.rx <= r2))
        T3[idx] = 1. - (dists.rx[idx] - r1) / (r2 - r1)
        # Compute taper t4 (eq. 14 at page 1040)
        T4 = np.zeros_like(dists.rx)
        #
        if rup.ztor <= 10.:
            T4 += (1. - rup.ztor**2. / 100.)
        # Compute T5 (eq 15a at page 1040) - ry1 computed according to
        # suggestions provided at page 1040
        T5 = np.zeros_like(dists.rx)
        ry1 = dists.rx * np.tan(np.radians(20.))
        #
        idx = (dists.ry0 - ry1) <= 0.0
        T5[idx] = 1.
        #
        idx = (((dists.ry0 - ry1) > 0.0) & ((dists.ry0 - ry1) < 5.0))
        T5[idx] = 1. - (dists.ry0[idx] - ry1[idx]) / 5.0
        # Finally, compute the hanging wall term
        return Fhw*C['a13']*T1*T2*T3*T4*T5
python
def _get_hanging_wall_term(self, C, dists, rup):
    if rup.dip == 90.0:
        return np.zeros_like(dists.rx)
    else:
        Fhw = np.zeros_like(dists.rx)
        Fhw[dists.rx > 0] = 1.
        T1 = np.ones_like(dists.rx)
        T1 *= 60./45. if rup.dip <= 30. else (90.-rup.dip)/45.0
        T2 = np.zeros_like(dists.rx)
        a2hw = 0.2
        if rup.mag > 6.5:
            T2 += (1. + a2hw * (rup.mag - 6.5))
        elif rup.mag > 5.5:
            T2 += (1. + a2hw * (rup.mag - 6.5) - (1. - a2hw) *
                   (rup.mag - 6.5)**2)
        else:
            T2 *= 0.
        T3 = np.zeros_like(dists.rx)
        r1 = rup.width * np.cos(np.radians(rup.dip))
        r2 = 3. * r1
        idx = dists.rx < r1
        T3[idx] = (np.ones_like(dists.rx)[idx] * self.CONSTS['h1'] +
                   self.CONSTS['h2'] * (dists.rx[idx] / r1) +
                   self.CONSTS['h3'] * (dists.rx[idx] / r1)**2)
        idx = ((dists.rx >= r1) & (dists.rx <= r2))
        T3[idx] = 1. - (dists.rx[idx] - r1) / (r2 - r1)
        T4 = np.zeros_like(dists.rx)
        if rup.ztor <= 10.:
            T4 += (1. - rup.ztor**2. / 100.)
        T5 = np.zeros_like(dists.rx)
        ry1 = dists.rx * np.tan(np.radians(20.))
        idx = (dists.ry0 - ry1) <= 0.0
        T5[idx] = 1.
        idx = (((dists.ry0 - ry1) > 0.0) & ((dists.ry0 - ry1) < 5.0))
        T5[idx] = 1. - (dists.ry0[idx] - ry1[idx]) / 5.0
        return Fhw*C['a13']*T1*T2*T3*T4*T5
[ "def", "_get_hanging_wall_term", "(", "self", ",", "C", ",", "dists", ",", "rup", ")", ":", "if", "rup", ".", "dip", "==", "90.0", ":", "return", "np", ".", "zeros_like", "(", "dists", ".", "rx", ")", "else", ":", "Fhw", "=", "np", ".", "zeros_like", "(", "dists", ".", "rx", ")", "Fhw", "[", "dists", ".", "rx", ">", "0", "]", "=", "1.", "# Compute taper t1", "T1", "=", "np", ".", "ones_like", "(", "dists", ".", "rx", ")", "T1", "*=", "60.", "/", "45.", "if", "rup", ".", "dip", "<=", "30.", "else", "(", "90.", "-", "rup", ".", "dip", ")", "/", "45.0", "# Compute taper t2 (eq 12 at page 1039) - a2hw set to 0.2 as", "# indicated at page 1041", "T2", "=", "np", ".", "zeros_like", "(", "dists", ".", "rx", ")", "a2hw", "=", "0.2", "if", "rup", ".", "mag", ">", "6.5", ":", "T2", "+=", "(", "1.", "+", "a2hw", "*", "(", "rup", ".", "mag", "-", "6.5", ")", ")", "elif", "rup", ".", "mag", ">", "5.5", ":", "T2", "+=", "(", "1.", "+", "a2hw", "*", "(", "rup", ".", "mag", "-", "6.5", ")", "-", "(", "1.", "-", "a2hw", ")", "*", "(", "rup", ".", "mag", "-", "6.5", ")", "**", "2", ")", "else", ":", "T2", "*=", "0.", "# Compute taper t3 (eq. 13 at page 1039) - r1 and r2 specified at", "# page 1040", "T3", "=", "np", ".", "zeros_like", "(", "dists", ".", "rx", ")", "r1", "=", "rup", ".", "width", "*", "np", ".", "cos", "(", "np", ".", "radians", "(", "rup", ".", "dip", ")", ")", "r2", "=", "3.", "*", "r1", "#", "idx", "=", "dists", ".", "rx", "<", "r1", "T3", "[", "idx", "]", "=", "(", "np", ".", "ones_like", "(", "dists", ".", "rx", ")", "[", "idx", "]", "*", "self", ".", "CONSTS", "[", "'h1'", "]", "+", "self", ".", "CONSTS", "[", "'h2'", "]", "*", "(", "dists", ".", "rx", "[", "idx", "]", "/", "r1", ")", "+", "self", ".", "CONSTS", "[", "'h3'", "]", "*", "(", "dists", ".", "rx", "[", "idx", "]", "/", "r1", ")", "**", "2", ")", "#", "idx", "=", "(", "(", "dists", ".", "rx", ">=", "r1", ")", "&", "(", "dists", ".", "rx", "<=", "r2", ")", ")", "T3", "[", "idx", "]", "=", "1.", "-", "(", "dists", ".", "rx", "[", "idx", "]", "-", "r1", ")", "/", "(", "r2", "-", "r1", ")", "# Compute taper t4 (eq. 14 at page 1040)", "T4", "=", "np", ".", "zeros_like", "(", "dists", ".", "rx", ")", "#", "if", "rup", ".", "ztor", "<=", "10.", ":", "T4", "+=", "(", "1.", "-", "rup", ".", "ztor", "**", "2.", "/", "100.", ")", "# Compute T5 (eq 15a at page 1040) - ry1 computed according to", "# suggestions provided at page 1040", "T5", "=", "np", ".", "zeros_like", "(", "dists", ".", "rx", ")", "ry1", "=", "dists", ".", "rx", "*", "np", ".", "tan", "(", "np", ".", "radians", "(", "20.", ")", ")", "#", "idx", "=", "(", "dists", ".", "ry0", "-", "ry1", ")", "<=", "0.0", "T5", "[", "idx", "]", "=", "1.", "#", "idx", "=", "(", "(", "(", "dists", ".", "ry0", "-", "ry1", ")", ">", "0.0", ")", "&", "(", "(", "dists", ".", "ry0", "-", "ry1", ")", "<", "5.0", ")", ")", "T5", "[", "idx", "]", "=", "1.", "-", "(", "dists", ".", "ry0", "[", "idx", "]", "-", "ry1", "[", "idx", "]", ")", "/", "5.0", "# Finally, compute the hanging wall term", "return", "Fhw", "*", "C", "[", "'a13'", "]", "*", "T1", "*", "T2", "*", "T3", "*", "T4", "*", "T5" ]
Compute and return hanging wall model term, see page 1038.
[ "Compute", "and", "return", "hanging", "wall", "model", "term", "see", "page", "1038", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L231-L283
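The hanging wall adjustment is therefore a flag times five tapers:

f_{hw} = F_{hw}\, a_{13}\, T_1 T_2 T_3 T_4 T_5

with F_hw = 1 only on the hanging wall side (Rx > 0); the tapers switch the term off smoothly for steep dips, small magnitudes, large Rx, deep tops of rupture and off-end sites respectively.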
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014._get_top_of_rupture_depth_term
def _get_top_of_rupture_depth_term(self, C, imt, rup):
    """
    Compute and return top of rupture depth term. See paragraph
    'Depth-to-Top of Rupture Model', page 1042.
    """
    if rup.ztor >= 20.0:
        return C['a15']
    else:
        return C['a15'] * rup.ztor / 20.0
python
def _get_top_of_rupture_depth_term(self, C, imt, rup):
    if rup.ztor >= 20.0:
        return C['a15']
    else:
        return C['a15'] * rup.ztor / 20.0
[ "def", "_get_top_of_rupture_depth_term", "(", "self", ",", "C", ",", "imt", ",", "rup", ")", ":", "if", "rup", ".", "ztor", ">=", "20.0", ":", "return", "C", "[", "'a15'", "]", "else", ":", "return", "C", "[", "'a15'", "]", "*", "rup", ".", "ztor", "/", "20.0" ]
Compute and return top of rupture depth term. See paragraph 'Depth-to-Top of Rupture Model', page 1042.
[ "Compute", "and", "return", "top", "of", "rupture", "depth", "term", ".", "See", "paragraph", "Depth", "-", "to", "-", "Top", "of", "Rupture", "Model", "page", "1042", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L285-L293
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014._get_soil_depth_term
def _get_soil_depth_term(self, C, z1pt0, vs30):
    """
    Compute and return soil depth term. See page 1042.
    """
    # Get reference z1pt0
    z1ref = self._get_z1pt0ref(vs30)
    # Get z1pt0
    z10 = copy.deepcopy(z1pt0)
    # This is used for the calculation of the motion on reference rock
    idx = z1pt0 < 0
    z10[idx] = z1ref[idx]
    factor = np.log((z10 + 0.01) / (z1ref + 0.01))
    # Here we use a linear interpolation as suggested in the 'Application
    # guidelines' at page 1044
    # Above 700 m/s the trend is flat, but we extend the Vs30 range to
    # 6,000 m/s (basically the upper limit for mantle shear wave velocity
    # on earth) to allow extrapolation without throwing an error.
    f2 = interpolate.interp1d(
        [0.0, 150, 250, 400, 700, 1000, 6000],
        [C['a43'], C['a43'], C['a44'], C['a45'], C['a46'], C['a46'],
         C['a46']],
        kind='linear')
    return f2(vs30) * factor
python
def _get_soil_depth_term(self, C, z1pt0, vs30):
    z1ref = self._get_z1pt0ref(vs30)
    z10 = copy.deepcopy(z1pt0)
    idx = z1pt0 < 0
    z10[idx] = z1ref[idx]
    factor = np.log((z10 + 0.01) / (z1ref + 0.01))
    f2 = interpolate.interp1d(
        [0.0, 150, 250, 400, 700, 1000, 6000],
        [C['a43'], C['a43'], C['a44'], C['a45'], C['a46'], C['a46'],
         C['a46']],
        kind='linear')
    return f2(vs30) * factor
[ "def", "_get_soil_depth_term", "(", "self", ",", "C", ",", "z1pt0", ",", "vs30", ")", ":", "# Get reference z1pt0", "z1ref", "=", "self", ".", "_get_z1pt0ref", "(", "vs30", ")", "# Get z1pt0", "z10", "=", "copy", ".", "deepcopy", "(", "z1pt0", ")", "# This is used for the calculation of the motion on reference rock", "idx", "=", "z1pt0", "<", "0", "z10", "[", "idx", "]", "=", "z1ref", "[", "idx", "]", "factor", "=", "np", ".", "log", "(", "(", "z10", "+", "0.01", ")", "/", "(", "z1ref", "+", "0.01", ")", ")", "# Here we use a linear interpolation as suggested in the 'Application", "# guidelines' at page 1044", "# Above 700 m/s the trend is flat, but we extend the Vs30 range to", "# 6,000 m/s (basically the upper limit for mantle shear wave velocity", "# on earth) to allow extrapolation without throwing an error.", "f2", "=", "interpolate", ".", "interp1d", "(", "[", "0.0", ",", "150", ",", "250", ",", "400", ",", "700", ",", "1000", ",", "6000", "]", ",", "[", "C", "[", "'a43'", "]", ",", "C", "[", "'a43'", "]", ",", "C", "[", "'a44'", "]", ",", "C", "[", "'a45'", "]", ",", "C", "[", "'a46'", "]", ",", "C", "[", "'a46'", "]", ",", "C", "[", "'a46'", "]", "]", ",", "kind", "=", "'linear'", ")", "return", "f2", "(", "vs30", ")", "*", "factor" ]
Compute and return soil depth term. See page 1042.
[ "Compute", "and", "return", "soil", "depth", "term", ".", "See", "page", "1042", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L303-L325
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014._get_stddevs
def _get_stddevs(self, C, imt, rup, sites, stddev_types, sa1180, dists):
    """
    Return standard deviations as described in paragraph 'Equations for
    standard deviation', page 1046.
    """
    std_intra = self._get_intra_event_std(C, rup.mag, sa1180, sites.vs30,
                                          sites.vs30measured, dists.rrup)
    std_inter = self._get_inter_event_std(C, rup.mag, sa1180, sites.vs30)
    stddevs = []
    for stddev_type in stddev_types:
        assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
        if stddev_type == const.StdDev.TOTAL:
            stddevs.append(np.sqrt(std_intra ** 2 + std_inter ** 2))
        elif stddev_type == const.StdDev.INTRA_EVENT:
            stddevs.append(std_intra)
        elif stddev_type == const.StdDev.INTER_EVENT:
            stddevs.append(std_inter)
    return stddevs
python
def _get_stddevs(self, C, imt, rup, sites, stddev_types, sa1180, dists):
    std_intra = self._get_intra_event_std(C, rup.mag, sa1180, sites.vs30,
                                          sites.vs30measured, dists.rrup)
    std_inter = self._get_inter_event_std(C, rup.mag, sa1180, sites.vs30)
    stddevs = []
    for stddev_type in stddev_types:
        assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
        if stddev_type == const.StdDev.TOTAL:
            stddevs.append(np.sqrt(std_intra ** 2 + std_inter ** 2))
        elif stddev_type == const.StdDev.INTRA_EVENT:
            stddevs.append(std_intra)
        elif stddev_type == const.StdDev.INTER_EVENT:
            stddevs.append(std_inter)
    return stddevs
[ "def", "_get_stddevs", "(", "self", ",", "C", ",", "imt", ",", "rup", ",", "sites", ",", "stddev_types", ",", "sa1180", ",", "dists", ")", ":", "std_intra", "=", "self", ".", "_get_intra_event_std", "(", "C", ",", "rup", ".", "mag", ",", "sa1180", ",", "sites", ".", "vs30", ",", "sites", ".", "vs30measured", ",", "dists", ".", "rrup", ")", "std_inter", "=", "self", ".", "_get_inter_event_std", "(", "C", ",", "rup", ".", "mag", ",", "sa1180", ",", "sites", ".", "vs30", ")", "stddevs", "=", "[", "]", "for", "stddev_type", "in", "stddev_types", ":", "assert", "stddev_type", "in", "self", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", "if", "stddev_type", "==", "const", ".", "StdDev", ".", "TOTAL", ":", "stddevs", ".", "append", "(", "np", ".", "sqrt", "(", "std_intra", "**", "2", "+", "std_inter", "**", "2", ")", ")", "elif", "stddev_type", "==", "const", ".", "StdDev", ".", "INTRA_EVENT", ":", "stddevs", ".", "append", "(", "std_intra", ")", "elif", "stddev_type", "==", "const", ".", "StdDev", ".", "INTER_EVENT", ":", "stddevs", ".", "append", "(", "std_inter", ")", "return", "stddevs" ]
Return standard deviations as described in paragraph 'Equations for standard deviation', page 1046.
[ "Return", "standard", "deviations", "as", "described", "in", "paragraph", "Equations", "for", "standard", "deviation", "page", "1046", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L334-L352
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014._get_intra_event_std
def _get_intra_event_std(self, C, mag, sa1180, vs30, vs30measured,
                         rrup):
    """
    Returns Phi as described at pages 1046 and 1047
    """
    phi_al = self._get_phi_al_regional(C, mag, vs30measured, rrup)
    derAmp = self._get_derivative(C, sa1180, vs30)
    phi_amp = 0.4
    idx = phi_al < phi_amp
    if np.any(idx):
        # In the case of small magnitudes and long periods it is possible
        # for phi_al to take a value less than phi_amp, which would return
        # a complex value. According to the GMPE authors in this case
        # phi_amp should be reduced such that it is fractionally smaller
        # than phi_al
        phi_amp = 0.4 * np.ones_like(phi_al)
        phi_amp[idx] = 0.99 * phi_al[idx]
    phi_b = np.sqrt(phi_al**2 - phi_amp**2)
    phi = np.sqrt(phi_b**2 * (1 + derAmp)**2 + phi_amp**2)
    return phi
python
def _get_intra_event_std(self, C, mag, sa1180, vs30, vs30measured,
                         rrup):
    phi_al = self._get_phi_al_regional(C, mag, vs30measured, rrup)
    derAmp = self._get_derivative(C, sa1180, vs30)
    phi_amp = 0.4
    idx = phi_al < phi_amp
    if np.any(idx):
        phi_amp = 0.4 * np.ones_like(phi_al)
        phi_amp[idx] = 0.99 * phi_al[idx]
    phi_b = np.sqrt(phi_al**2 - phi_amp**2)
    phi = np.sqrt(phi_b**2 * (1 + derAmp)**2 + phi_amp**2)
    return phi
[ "def", "_get_intra_event_std", "(", "self", ",", "C", ",", "mag", ",", "sa1180", ",", "vs30", ",", "vs30measured", ",", "rrup", ")", ":", "phi_al", "=", "self", ".", "_get_phi_al_regional", "(", "C", ",", "mag", ",", "vs30measured", ",", "rrup", ")", "derAmp", "=", "self", ".", "_get_derivative", "(", "C", ",", "sa1180", ",", "vs30", ")", "phi_amp", "=", "0.4", "idx", "=", "phi_al", "<", "phi_amp", "if", "np", ".", "any", "(", "idx", ")", ":", "# In the case of small magnitudes and long periods it is possible", "# for phi_al to take a value less than phi_amp, which would return", "# a complex value. According to the GMPE authors in this case", "# phi_amp should be reduced such that it is fractionally smaller", "# than phi_al", "phi_amp", "=", "0.4", "*", "np", ".", "ones_like", "(", "phi_al", ")", "phi_amp", "[", "idx", "]", "=", "0.99", "*", "phi_al", "[", "idx", "]", "phi_b", "=", "np", ".", "sqrt", "(", "phi_al", "**", "2", "-", "phi_amp", "**", "2", ")", "phi", "=", "np", ".", "sqrt", "(", "phi_b", "**", "2", "*", "(", "1", "+", "derAmp", ")", "**", "2", "+", "phi_amp", "**", "2", ")", "return", "phi" ]
Returns Phi as described at pages 1046 and 1047
[ "Returns", "Phi", "as", "described", "at", "pages", "1046", "and", "1047" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L354-L373
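The combination implemented above is:

\phi_B = \sqrt{\phi_{AL}^2 - \phi_{amp}^2}, \qquad \phi = \sqrt{\phi_B^2\,(1 + \partial)^2 + \phi_{amp}^2}

with phi_amp = 0.4 (reduced to 0.99 phi_AL where needed to keep phi_B real) and \partial the amplification derivative returned by _get_derivative (equation 30).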
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014._get_derivative
def _get_derivative(self, C, sa1180, vs30):
    """
    Returns equation 30 page 1047
    """
    derAmp = np.zeros_like(vs30)
    n = self.CONSTS['n']
    c = C['c']
    b = C['b']
    idx = vs30 < C['vlin']
    derAmp[idx] = (b * sa1180[idx] *
                   (-1./(sa1180[idx]+c) +
                    1./(sa1180[idx] + c*(vs30[idx]/C['vlin'])**n)))
    return derAmp
python
def _get_derivative(self, C, sa1180, vs30):
    derAmp = np.zeros_like(vs30)
    n = self.CONSTS['n']
    c = C['c']
    b = C['b']
    idx = vs30 < C['vlin']
    derAmp[idx] = (b * sa1180[idx] *
                   (-1./(sa1180[idx]+c) +
                    1./(sa1180[idx] + c*(vs30[idx]/C['vlin'])**n)))
    return derAmp
[ "def", "_get_derivative", "(", "self", ",", "C", ",", "sa1180", ",", "vs30", ")", ":", "derAmp", "=", "np", ".", "zeros_like", "(", "vs30", ")", "n", "=", "self", ".", "CONSTS", "[", "'n'", "]", "c", "=", "C", "[", "'c'", "]", "b", "=", "C", "[", "'b'", "]", "idx", "=", "vs30", "<", "C", "[", "'vlin'", "]", "derAmp", "[", "idx", "]", "=", "(", "b", "*", "sa1180", "[", "idx", "]", "*", "(", "-", "1.", "/", "(", "sa1180", "[", "idx", "]", "+", "c", ")", "+", "1.", "/", "(", "sa1180", "[", "idx", "]", "+", "c", "*", "(", "vs30", "[", "idx", "]", "/", "C", "[", "'vlin'", "]", ")", "**", "n", ")", ")", ")", "return", "derAmp" ]
Returns equation 30 page 1047
[ "Returns", "equation", "30", "page", "1047" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L375-L386
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014._get_phi_al_regional
def _get_phi_al_regional(self, C, mag, vs30measured, rrup):
    """
    Returns intra-event (Phi) standard deviation (equation 24, page 1046)
    """
    phi_al = np.ones((len(vs30measured)))
    s1 = np.ones_like(phi_al) * C['s1e']
    s2 = np.ones_like(phi_al) * C['s2e']
    s1[vs30measured] = C['s1m']
    s2[vs30measured] = C['s2m']
    if mag < 4:
        phi_al *= s1
    elif mag <= 6:
        phi_al *= s1 + (s2 - s1) / 2. * (mag - 4.)
    else:
        phi_al *= s2
    return phi_al
python
def _get_phi_al_regional(self, C, mag, vs30measured, rrup): phi_al = np.ones((len(vs30measured))) s1 = np.ones_like(phi_al) * C['s1e'] s2 = np.ones_like(phi_al) * C['s2e'] s1[vs30measured] = C['s1m'] s2[vs30measured] = C['s2m'] if mag < 4: phi_al *= s1 elif mag <= 6: phi_al *= s1 + (s2 - s1) / 2. * (mag - 4.) else: phi_al *= s2 return phi_al
[ "def", "_get_phi_al_regional", "(", "self", ",", "C", ",", "mag", ",", "vs30measured", ",", "rrup", ")", ":", "phi_al", "=", "np", ".", "ones", "(", "(", "len", "(", "vs30measured", ")", ")", ")", "s1", "=", "np", ".", "ones_like", "(", "phi_al", ")", "*", "C", "[", "'s1e'", "]", "s2", "=", "np", ".", "ones_like", "(", "phi_al", ")", "*", "C", "[", "'s2e'", "]", "s1", "[", "vs30measured", "]", "=", "C", "[", "'s1m'", "]", "s2", "[", "vs30measured", "]", "=", "C", "[", "'s2m'", "]", "if", "mag", "<", "4", ":", "phi_al", "*=", "s1", "elif", "mag", "<=", "6", ":", "phi_al", "*=", "s1", "+", "(", "s2", "-", "s1", ")", "/", "2.", "*", "(", "mag", "-", "4.", ")", "else", ":", "phi_al", "*=", "s2", "return", "phi_al" ]
Returns intra-event (Phi) standard deviation (equation 24, page 1046)
[ "Returns", "intra", "-", "event", "(", "Phi", ")", "standard", "deviation", "(", "equation", "24", "page", "1046", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L388-L403
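Note: a standalone sketch of the magnitude taper in equation 24; the s1/s2 levels below are placeholders (in the GMPE they come from the s1e/s2e or s1m/s2m coefficients, selected per site by vs30measured):

def phi_al_sketch(mag, s1, s2):
    # constant below M4, linear blend on [4, 6], constant above M6
    if mag < 4:
        return s1
    elif mag <= 6:
        return s1 + (s2 - s1) / 2. * (mag - 4.)
    return s2

for m in (3.5, 5.0, 6.5):
    print(m, phi_al_sketch(m, s1=0.75, s2=0.52))   # 0.75, 0.635, 0.52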
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014._get_inter_event_std
def _get_inter_event_std(self, C, mag, sa1180, vs30): """ Returns inter event (tau) standard deviation (equation 25, page 1046) """ if mag < 5: tau_al = C['s3'] elif mag <= 7: tau_al = C['s3'] + (C['s4'] - C['s3']) / 2. * (mag - 5.) else: tau_al = C['s4'] tau_b = tau_al tau = tau_b * (1 + self._get_derivative(C, sa1180, vs30)) return tau
python
def _get_inter_event_std(self, C, mag, sa1180, vs30): if mag < 5: tau_al = C['s3'] elif mag <= 7: tau_al = C['s3'] + (C['s4'] - C['s3']) / 2. * (mag - 5.) else: tau_al = C['s4'] tau_b = tau_al tau = tau_b * (1 + self._get_derivative(C, sa1180, vs30)) return tau
[ "def", "_get_inter_event_std", "(", "self", ",", "C", ",", "mag", ",", "sa1180", ",", "vs30", ")", ":", "if", "mag", "<", "5", ":", "tau_al", "=", "C", "[", "'s3'", "]", "elif", "mag", "<=", "7", ":", "tau_al", "=", "C", "[", "'s3'", "]", "+", "(", "C", "[", "'s4'", "]", "-", "C", "[", "'s3'", "]", ")", "/", "2.", "*", "(", "mag", "-", "5.", ")", "else", ":", "tau_al", "=", "C", "[", "'s4'", "]", "tau_b", "=", "tau_al", "tau", "=", "tau_b", "*", "(", "1", "+", "self", ".", "_get_derivative", "(", "C", ",", "sa1180", ",", "vs30", ")", ")", "return", "tau" ]
Returns inter-event (tau) standard deviation (equation 25, page 1046)
[ "Returns", "inter", "-", "event", "(", "tau", ")", "standard", "deviation", "(", "equation", "25", "page", "1046", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L405-L417
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014RegTWN._get_regional_term
def _get_regional_term(self, C, imt, vs30, rrup): """ In accordance with Abrahamson et al. (2014), we assume California as the default region """ vs30star = self._get_vs30star(vs30, imt) return C['a31'] * np.log(vs30star/C['vlin']) + C['a25'] * rrup
python
def _get_regional_term(self, C, imt, vs30, rrup): vs30star = self._get_vs30star(vs30, imt) return C['a31'] * np.log(vs30star/C['vlin']) + C['a25'] * rrup
[ "def", "_get_regional_term", "(", "self", ",", "C", ",", "imt", ",", "vs30", ",", "rrup", ")", ":", "vs30star", "=", "self", ".", "_get_vs30star", "(", "vs30", ",", "imt", ")", "return", "C", "[", "'a31'", "]", "*", "np", ".", "log", "(", "vs30star", "/", "C", "[", "'vlin'", "]", ")", "+", "C", "[", "'a25'", "]", "*", "rrup" ]
In accordance with Abrahamson et al. (2014), we assume California as the default region
[ "In", "accordance", "with", "Abrahamson", "et", "al", ".", "(", "2014", ")", ",", "we", "assume", "California", "as", "the", "default", "region" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L469-L475
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014RegJPN._get_regional_term
def _get_regional_term(self, C, imt, vs30, rrup): """ Compute regional term for Japan. See page 1043 """ f3 = interpolate.interp1d( [150, 250, 350, 450, 600, 850, 1150, 2000], [C['a36'], C['a37'], C['a38'], C['a39'], C['a40'], C['a41'], C['a42'], C['a42']], kind='linear') return f3(vs30) + C['a29'] * rrup
python
def _get_regional_term(self, C, imt, vs30, rrup): f3 = interpolate.interp1d( [150, 250, 350, 450, 600, 850, 1150, 2000], [C['a36'], C['a37'], C['a38'], C['a39'], C['a40'], C['a41'], C['a42'], C['a42']], kind='linear') return f3(vs30) + C['a29'] * rrup
[ "def", "_get_regional_term", "(", "self", ",", "C", ",", "imt", ",", "vs30", ",", "rrup", ")", ":", "f3", "=", "interpolate", ".", "interp1d", "(", "[", "150", ",", "250", ",", "350", ",", "450", ",", "600", ",", "850", ",", "1150", ",", "2000", "]", ",", "[", "C", "[", "'a36'", "]", ",", "C", "[", "'a37'", "]", ",", "C", "[", "'a38'", "]", ",", "C", "[", "'a39'", "]", ",", "C", "[", "'a40'", "]", ",", "C", "[", "'a41'", "]", ",", "C", "[", "'a42'", "]", ",", "C", "[", "'a42'", "]", "]", ",", "kind", "=", "'linear'", ")", "return", "f3", "(", "vs30", ")", "+", "C", "[", "'a29'", "]", "*", "rrup" ]
Compute regional term for Japan. See page 1043
[ "Compute", "regional", "term", "for", "Japan", ".", "See", "page", "1043" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L511-L520
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2014.py
AbrahamsonEtAl2014RegJPN._get_phi_al_regional
def _get_phi_al_regional(self, C, mag, vs30measured, rrup): """ Returns intra-event (Tau) standard deviation (equation 26, page 1046) """ phi_al = np.ones((len(vs30measured))) idx = rrup < 30 phi_al[idx] *= C['s5'] idx = ((rrup <= 80) & (rrup >= 30.)) phi_al[idx] *= C['s5'] + (C['s6'] - C['s5']) / 50. * (rrup[idx] - 30.) idx = rrup > 80 phi_al[idx] *= C['s6'] return phi_al
python
def _get_phi_al_regional(self, C, mag, vs30measured, rrup): phi_al = np.ones((len(vs30measured))) idx = rrup < 30 phi_al[idx] *= C['s5'] idx = ((rrup <= 80) & (rrup >= 30.)) phi_al[idx] *= C['s5'] + (C['s6'] - C['s5']) / 50. * (rrup[idx] - 30.) idx = rrup > 80 phi_al[idx] *= C['s6'] return phi_al
[ "def", "_get_phi_al_regional", "(", "self", ",", "C", ",", "mag", ",", "vs30measured", ",", "rrup", ")", ":", "phi_al", "=", "np", ".", "ones", "(", "(", "len", "(", "vs30measured", ")", ")", ")", "idx", "=", "rrup", "<", "30", "phi_al", "[", "idx", "]", "*=", "C", "[", "'s5'", "]", "idx", "=", "(", "(", "rrup", "<=", "80", ")", "&", "(", "rrup", ">=", "30.", ")", ")", "phi_al", "[", "idx", "]", "*=", "C", "[", "'s5'", "]", "+", "(", "C", "[", "'s6'", "]", "-", "C", "[", "'s5'", "]", ")", "/", "50.", "*", "(", "rrup", "[", "idx", "]", "-", "30.", ")", "idx", "=", "rrup", ">", "80", "phi_al", "[", "idx", "]", "*=", "C", "[", "'s6'", "]", "return", "phi_al" ]
Returns intra-event (Phi) standard deviation (equation 26, page 1046)
[ "Returns", "intra", "-", "event", "(", "Phi", ")", "standard", "deviation", "(", "equation", "26", "page", "1046", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2014.py#L522-L537
gem/oq-engine
openquake/hazardlib/gsim/yu_2013.py
gc
def gc(coeff, mag): """ Returns the set of coefficients to be used for the calculation of GM as a function of earthquake magnitude :param coeff: A dictionary of parameters for the selected IMT :param mag: Magnitude value :returns: The set of coefficients """ if mag > 6.5: a1ca = coeff['ua'] a1cb = coeff['ub'] a1cc = coeff['uc'] a1cd = coeff['ud'] a1ce = coeff['ue'] a2ca = coeff['ia'] a2cb = coeff['ib'] a2cc = coeff['ic'] a2cd = coeff['id'] a2ce = coeff['ie'] else: a1ca = coeff['a'] a1cb = coeff['b'] a1cc = coeff['c'] a1cd = coeff['d'] a1ce = coeff['e'] a2ca = coeff['ma'] a2cb = coeff['mb'] a2cc = coeff['mc'] a2cd = coeff['md'] a2ce = coeff['me'] return a1ca, a1cb, a1cc, a1cd, a1ce, a2ca, a2cb, a2cc, a2cd, a2ce
python
def gc(coeff, mag): if mag > 6.5: a1ca = coeff['ua'] a1cb = coeff['ub'] a1cc = coeff['uc'] a1cd = coeff['ud'] a1ce = coeff['ue'] a2ca = coeff['ia'] a2cb = coeff['ib'] a2cc = coeff['ic'] a2cd = coeff['id'] a2ce = coeff['ie'] else: a1ca = coeff['a'] a1cb = coeff['b'] a1cc = coeff['c'] a1cd = coeff['d'] a1ce = coeff['e'] a2ca = coeff['ma'] a2cb = coeff['mb'] a2cc = coeff['mc'] a2cd = coeff['md'] a2ce = coeff['me'] return a1ca, a1cb, a1cc, a1cd, a1ce, a2ca, a2cb, a2cc, a2cd, a2ce
[ "def", "gc", "(", "coeff", ",", "mag", ")", ":", "if", "mag", ">", "6.5", ":", "a1ca", "=", "coeff", "[", "'ua'", "]", "a1cb", "=", "coeff", "[", "'ub'", "]", "a1cc", "=", "coeff", "[", "'uc'", "]", "a1cd", "=", "coeff", "[", "'ud'", "]", "a1ce", "=", "coeff", "[", "'ue'", "]", "a2ca", "=", "coeff", "[", "'ia'", "]", "a2cb", "=", "coeff", "[", "'ib'", "]", "a2cc", "=", "coeff", "[", "'ic'", "]", "a2cd", "=", "coeff", "[", "'id'", "]", "a2ce", "=", "coeff", "[", "'ie'", "]", "else", ":", "a1ca", "=", "coeff", "[", "'a'", "]", "a1cb", "=", "coeff", "[", "'b'", "]", "a1cc", "=", "coeff", "[", "'c'", "]", "a1cd", "=", "coeff", "[", "'d'", "]", "a1ce", "=", "coeff", "[", "'e'", "]", "a2ca", "=", "coeff", "[", "'ma'", "]", "a2cb", "=", "coeff", "[", "'mb'", "]", "a2cc", "=", "coeff", "[", "'mc'", "]", "a2cd", "=", "coeff", "[", "'md'", "]", "a2ce", "=", "coeff", "[", "'me'", "]", "return", "a1ca", ",", "a1cb", ",", "a1cc", ",", "a1cd", ",", "a1ce", ",", "a2ca", ",", "a2cb", ",", "a2cc", ",", "a2cd", ",", "a2ce" ]
Returns the set of coefficients to be used for the calculation of GM as a function of earthquake magnitude :param coeff: A dictionary of parameters for the selected IMT :param mag: Magnitude value :returns: The set of coefficients
[ "Returns", "the", "set", "of", "coefficients", "to", "be", "used", "for", "the", "calculation", "of", "GM", "as", "a", "function", "of", "earthquake", "magnitude" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/yu_2013.py#L34-L68
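Note: gc is a module-level helper, so it can be exercised directly, assuming the module is importable; the sketch below only checks the branch selection at Ms 6.5 (all coefficient values are dummy placeholders keyed like the COEFFS table):

from openquake.hazardlib.gsim.yu_2013 import gc

keys = 'a b c d e ma mb mc md me ua ub uc ud ue ia ib ic id ie'.split()
coeff = {k: float(i) for i, k in enumerate(keys)}   # dummy values 0..19
low = gc(coeff, 6.0)    # picks a..e and ma..me
high = gc(coeff, 7.0)   # picks ua..ue and ia..ie
print(low[:5], high[:5])   # (0.0..4.0) vs (10.0..14.0)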
gem/oq-engine
openquake/hazardlib/gsim/yu_2013.py
rbf
def rbf(ra, coeff, mag): """ Calculate the median ground motion for a given magnitude and distance :param ra: Distance value [km] :param coeff: The set of coefficients :param mag: Magnitude value :returns: The companion ellipse semi-axis yielding the same median ground motion """ a1ca, a1cb, a1cc, a1cd, a1ce, a2ca, a2cb, a2cc, a2cd, a2ce = gc(coeff, mag) term1 = a1ca + a1cb * mag + a1cc * np.log(ra + a1cd*np.exp(a1ce*mag)) term2 = a2ca + a2cb * mag term3 = a2cd*np.exp(a2ce*mag) return np.exp((term1 - term2) / a2cc) - term3
python
def rbf(ra, coeff, mag): a1ca, a1cb, a1cc, a1cd, a1ce, a2ca, a2cb, a2cc, a2cd, a2ce = gc(coeff, mag) term1 = a1ca + a1cb * mag + a1cc * np.log(ra + a1cd*np.exp(a1ce*mag)) term2 = a2ca + a2cb * mag term3 = a2cd*np.exp(a2ce*mag) return np.exp((term1 - term2) / a2cc) - term3
[ "def", "rbf", "(", "ra", ",", "coeff", ",", "mag", ")", ":", "a1ca", ",", "a1cb", ",", "a1cc", ",", "a1cd", ",", "a1ce", ",", "a2ca", ",", "a2cb", ",", "a2cc", ",", "a2cd", ",", "a2ce", "=", "gc", "(", "coeff", ",", "mag", ")", "term1", "=", "a1ca", "+", "a1cb", "*", "mag", "+", "a1cc", "*", "np", ".", "log", "(", "ra", "+", "a1cd", "*", "np", ".", "exp", "(", "a1ce", "*", "mag", ")", ")", "term2", "=", "a2ca", "+", "a2cb", "*", "mag", "term3", "=", "a2cd", "*", "np", ".", "exp", "(", "a2ce", "*", "mag", ")", "return", "np", ".", "exp", "(", "(", "term1", "-", "term2", ")", "/", "a2cc", ")", "-", "term3" ]
Calculate the median ground motion for a given magnitude and distance :param ra: Distance value [km] :param coeff: The set of coefficients :param mag: Magnitude value :returns: The companion ellipse semi-axis yielding the same median ground motion
[ "Calculate", "the", "median", "ground", "motion", "for", "a", "given", "magnitude", "and", "distance" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/yu_2013.py#L71-L88
gem/oq-engine
openquake/hazardlib/gsim/yu_2013.py
fnc
def fnc(ra, *args): """ Function used in the minimisation problem. :param ra: Semi-axis of the ellipses used in the Yu et al. (2013) model :returns: The difference between the epicentral distance and the adjusted distance """ # # epicentral distance repi = args[0] # # azimuth theta = args[1] # # magnitude mag = args[2] # # coefficients coeff = args[3] # # compute the difference between epicentral distances rb = rbf(ra, coeff, mag) t1 = ra**2 * (np.sin(np.radians(theta)))**2 t2 = rb**2 * (np.cos(np.radians(theta)))**2 xx = ra * rb / (t1+t2)**0.5 return xx-repi
python
def fnc(ra, *args): repi = args[0] theta = args[1] mag = args[2] coeff = args[3] rb = rbf(ra, coeff, mag) t1 = ra**2 * (np.sin(np.radians(theta)))**2 t2 = rb**2 * (np.cos(np.radians(theta)))**2 xx = ra * rb / (t1+t2)**0.5 return xx-repi
[ "def", "fnc", "(", "ra", ",", "*", "args", ")", ":", "#", "# epicentral distance", "repi", "=", "args", "[", "0", "]", "#", "# azimuth", "theta", "=", "args", "[", "1", "]", "#", "# magnitude", "mag", "=", "args", "[", "2", "]", "#", "# coefficients", "coeff", "=", "args", "[", "3", "]", "#", "# compute the difference between epicentral distances", "rb", "=", "rbf", "(", "ra", ",", "coeff", ",", "mag", ")", "t1", "=", "ra", "**", "2", "*", "(", "np", ".", "sin", "(", "np", ".", "radians", "(", "theta", ")", ")", ")", "**", "2", "t2", "=", "rb", "**", "2", "*", "(", "np", ".", "cos", "(", "np", ".", "radians", "(", "theta", ")", ")", ")", "**", "2", "xx", "=", "ra", "*", "rb", "/", "(", "t1", "+", "t2", ")", "**", "0.5", "return", "xx", "-", "repi" ]
Function used in the minimisation problem. :param ra: Semi-axis of the ellipses used in the Yu et al. (2013) model :returns: The difference between the epicentral distance and the adjusted distance
[ "Function", "used", "in", "the", "minimisation", "problem", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/yu_2013.py#L91-L119
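Note: the xx expression in fnc is the polar radius of an ellipse with semi-axes ra and rb, r(theta) = ra*rb / sqrt(ra^2 sin^2(theta) + rb^2 cos^2(theta)), so fnc returns how far a trial ellipse misses the observed epicentral distance. A self-contained check with fixed semi-axes:

import numpy as np

def ellipse_radius(a, b, theta_deg):
    t = np.radians(theta_deg)
    return a * b / np.sqrt((a * np.sin(t)) ** 2 + (b * np.cos(t)) ** 2)

print(ellipse_radius(50., 30., 0.))    # 50.0 -> the "a" semi-axis at theta=0
print(ellipse_radius(50., 30., 90.))   # 30.0 -> the "b" semi-axis at theta=90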
gem/oq-engine
openquake/hazardlib/gsim/yu_2013.py
get_ras
def get_ras(repi, theta, mag, coeff): """ Computes equivalent distance :param repi: Epicentral distance :param theta: Azimuth value :param mag: Magnitude :param coeff: GMPE coefficients """ rx = 100. ras = 200. # # calculate the difference between epicentral distances dff = fnc(ras, repi, theta, mag, coeff) while abs(dff) > 1e-3: # update the value of distance computed if dff > 0.: ras = ras - rx else: ras = ras + rx dff = fnc(ras, repi, theta, mag, coeff) rx = rx / 2. if rx < 1e-3: break return ras
python
def get_ras(repi, theta, mag, coeff): rx = 100. ras = 200. dff = fnc(ras, repi, theta, mag, coeff) while abs(dff) > 1e-3: if dff > 0.: ras = ras - rx else: ras = ras + rx dff = fnc(ras, repi, theta, mag, coeff) rx = rx / 2. if rx < 1e-3: break return ras
[ "def", "get_ras", "(", "repi", ",", "theta", ",", "mag", ",", "coeff", ")", ":", "rx", "=", "100.", "ras", "=", "200.", "#", "# calculate the difference between epicentral distances", "dff", "=", "fnc", "(", "ras", ",", "repi", ",", "theta", ",", "mag", ",", "coeff", ")", "while", "abs", "(", "dff", ")", ">", "1e-3", ":", "# update the value of distance computed", "if", "dff", ">", "0.", ":", "ras", "=", "ras", "-", "rx", "else", ":", "ras", "=", "ras", "+", "rx", "dff", "=", "fnc", "(", "ras", ",", "repi", ",", "theta", ",", "mag", ",", "coeff", ")", "rx", "=", "rx", "/", "2.", "if", "rx", "<", "1e-3", ":", "break", "return", "ras" ]
Computes equivalent distance :param repi: Epicentral distance :param theta: Azimuth value :param mag: Magnitude :param coeff: GMPE coefficients
[ "Computes", "equivalent", "distance" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/yu_2013.py#L122-L150
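Note: get_ras is a hand-rolled root search with a halving step. Assuming fnc is continuous and changes sign inside a known bracket, a library bracketing solver would do the same job; a sketch with a stand-in function, not the shipped implementation:

from scipy.optimize import brentq

def f(x):
    return x * x - 2.0   # stand-in for fnc(ras, repi, theta, mag, coeff)

root = brentq(f, 0., 2.)   # the bracket must enclose a sign change
print(round(root, 4))      # 1.4142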
gem/oq-engine
openquake/hazardlib/gsim/yu_2013.py
YuEtAl2013Mw.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ # Check that the requested standard deviation type is available assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types) # # Set parameters magn = rup.mag epi = dists.repi theta = dists.azimuth # # Convert Mw into Ms if magn < 6.58: mag = (magn - 0.59) / 0.86 else: mag = (magn + 2.42) / 1.28 # # Set coefficients coeff = self.COEFFS[imt] a1ca, a1cb, a1cc, a1cd, a1ce, a2ca, a2cb, a2cc, a2cd, a2ce = \ gc(coeff, mag) # # Get correction coefficients. Here for each site we find the # the geometry of the ellipses ras = [] for epi, theta in zip(dists.repi, dists.azimuth): res = get_ras(epi, theta, mag, coeff) ras.append(res) ras = np.array(ras) rbs = rbf(ras, coeff, mag) # # Compute values of ground motion for the two cases. The value of # 225 is hardcoded under the assumption that the hypocentral depth # corresponds to 15 km (i.e. 15**2) mean1 = (a1ca + a1cb * mag + a1cc * np.log((ras**2+225)**0.5 + a1cd * np.exp(a1ce * mag))) mean2 = (a2ca + a2cb * mag + a2cc * np.log((rbs**2+225)**0.5 + a2cd * np.exp(a2ce * mag))) # # Get distances x = (mean1 * np.sin(np.radians(dists.azimuth)))**2 y = (mean2 * np.cos(np.radians(dists.azimuth)))**2 mean = mean1 * mean2 / np.sqrt(x+y) if imt.name == "PGA": mean = np.exp(mean)/g/100 elif imt.name == "PGV": mean = np.exp(mean) else: raise ValueError('Unsupported IMT') # # Get the standard deviation stddevs = self._compute_std(coeff, stddev_types, len(dists.repi)) # # Return results return np.log(mean), stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types) magn = rup.mag epi = dists.repi theta = dists.azimuth if magn < 6.58: mag = (magn - 0.59) / 0.86 else: mag = (magn + 2.42) / 1.28 coeff = self.COEFFS[imt] a1ca, a1cb, a1cc, a1cd, a1ce, a2ca, a2cb, a2cc, a2cd, a2ce = \ gc(coeff, mag) ras = [] for epi, theta in zip(dists.repi, dists.azimuth): res = get_ras(epi, theta, mag, coeff) ras.append(res) ras = np.array(ras) rbs = rbf(ras, coeff, mag) mean1 = (a1ca + a1cb * mag + a1cc * np.log((ras**2+225)**0.5 + a1cd * np.exp(a1ce * mag))) mean2 = (a2ca + a2cb * mag + a2cc * np.log((rbs**2+225)**0.5 + a2cd * np.exp(a2ce * mag))) x = (mean1 * np.sin(np.radians(dists.azimuth)))**2 y = (mean2 * np.cos(np.radians(dists.azimuth)))**2 mean = mean1 * mean2 / np.sqrt(x+y) if imt.name == "PGA": mean = np.exp(mean)/g/100 elif imt.name == "PGV": mean = np.exp(mean) else: raise ValueError('Unsupported IMT') stddevs = self._compute_std(coeff, stddev_types, len(dists.repi)) return np.log(mean), stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "# Check that the requested standard deviation type is available", "assert", "all", "(", "stddev_type", "in", "self", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", "for", "stddev_type", "in", "stddev_types", ")", "#", "# Set parameters", "magn", "=", "rup", ".", "mag", "epi", "=", "dists", ".", "repi", "theta", "=", "dists", ".", "azimuth", "#", "# Convert Mw into Ms", "if", "magn", "<", "6.58", ":", "mag", "=", "(", "magn", "-", "0.59", ")", "/", "0.86", "else", ":", "mag", "=", "(", "magn", "+", "2.42", ")", "/", "1.28", "#", "# Set coefficients", "coeff", "=", "self", ".", "COEFFS", "[", "imt", "]", "a1ca", ",", "a1cb", ",", "a1cc", ",", "a1cd", ",", "a1ce", ",", "a2ca", ",", "a2cb", ",", "a2cc", ",", "a2cd", ",", "a2ce", "=", "gc", "(", "coeff", ",", "mag", ")", "#", "# Get correction coefficients. Here for each site we find the", "# the geometry of the ellipses", "ras", "=", "[", "]", "for", "epi", ",", "theta", "in", "zip", "(", "dists", ".", "repi", ",", "dists", ".", "azimuth", ")", ":", "res", "=", "get_ras", "(", "epi", ",", "theta", ",", "mag", ",", "coeff", ")", "ras", ".", "append", "(", "res", ")", "ras", "=", "np", ".", "array", "(", "ras", ")", "rbs", "=", "rbf", "(", "ras", ",", "coeff", ",", "mag", ")", "#", "# Compute values of ground motion for the two cases. The value of", "# 225 is hardcoded under the assumption that the hypocentral depth", "# corresponds to 15 km (i.e. 15**2)", "mean1", "=", "(", "a1ca", "+", "a1cb", "*", "mag", "+", "a1cc", "*", "np", ".", "log", "(", "(", "ras", "**", "2", "+", "225", ")", "**", "0.5", "+", "a1cd", "*", "np", ".", "exp", "(", "a1ce", "*", "mag", ")", ")", ")", "mean2", "=", "(", "a2ca", "+", "a2cb", "*", "mag", "+", "a2cc", "*", "np", ".", "log", "(", "(", "rbs", "**", "2", "+", "225", ")", "**", "0.5", "+", "a2cd", "*", "np", ".", "exp", "(", "a2ce", "*", "mag", ")", ")", ")", "#", "# Get distances", "x", "=", "(", "mean1", "*", "np", ".", "sin", "(", "np", ".", "radians", "(", "dists", ".", "azimuth", ")", ")", ")", "**", "2", "y", "=", "(", "mean2", "*", "np", ".", "cos", "(", "np", ".", "radians", "(", "dists", ".", "azimuth", ")", ")", ")", "**", "2", "mean", "=", "mean1", "*", "mean2", "/", "np", ".", "sqrt", "(", "x", "+", "y", ")", "if", "imt", ".", "name", "==", "\"PGA\"", ":", "mean", "=", "np", ".", "exp", "(", "mean", ")", "/", "g", "/", "100", "elif", "imt", ".", "name", "==", "\"PGV\"", ":", "mean", "=", "np", ".", "exp", "(", "mean", ")", "else", ":", "raise", "ValueError", "(", "'Unsupported IMT'", ")", "#", "# Get the standard deviation", "stddevs", "=", "self", ".", "_compute_std", "(", "coeff", ",", "stddev_types", ",", "len", "(", "dists", ".", "repi", ")", ")", "#", "# Return results", "return", "np", ".", "log", "(", "mean", ")", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/yu_2013.py#L296-L356
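Note: the magnitude conversion at the top of get_mean_and_stddevs inverts a piecewise Ms-to-Mw regression; a quick numeric check of the two branches:

for mw in (5.0, 7.0):
    if mw < 6.58:
        ms = (mw - 0.59) / 0.86
    else:
        ms = (mw + 2.42) / 1.28
    print(mw, round(ms, 2))   # 5.0 -> 5.13, 7.0 -> 7.36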
gem/oq-engine
openquake/hazardlib/gsim/shahjouei_pezeshk_2016.py
ShahjoueiPezeshk2016._get_stddevs
def _get_stddevs(self, C, stddev_types, rup, imt, num_sites): """ Return standard deviations as defined in eq. 4 and 5, page 744, based on table 8, page 744. Eq. 5 yields std dev in natural log, so convert to log10 """ stddevs = [] for stddev_type in stddev_types: sigma_mean = self._compute_standard_dev(rup, imt, C) sigma_tot = np.sqrt((sigma_mean ** 2) + (C['SigmaReg'] ** 2)) sigma_tot = np.log10(np.exp(sigma_tot)) stddevs.append(sigma_tot + np.zeros(num_sites)) return stddevs
python
def _get_stddevs(self, C, stddev_types, rup, imt, num_sites): stddevs = [] for stddev_type in stddev_types: sigma_mean = self._compute_standard_dev(rup, imt, C) sigma_tot = np.sqrt((sigma_mean ** 2) + (C['SigmaReg'] ** 2)) sigma_tot = np.log10(np.exp(sigma_tot)) stddevs.append(sigma_tot + np.zeros(num_sites)) return stddevs
[ "def", "_get_stddevs", "(", "self", ",", "C", ",", "stddev_types", ",", "rup", ",", "imt", ",", "num_sites", ")", ":", "stddevs", "=", "[", "]", "for", "stddev_type", "in", "stddev_types", ":", "sigma_mean", "=", "self", ".", "_compute_standard_dev", "(", "rup", ",", "imt", ",", "C", ")", "sigma_tot", "=", "np", ".", "sqrt", "(", "(", "sigma_mean", "**", "2", ")", "+", "(", "C", "[", "'SigmaReg'", "]", "**", "2", ")", ")", "sigma_tot", "=", "np", ".", "log10", "(", "np", ".", "exp", "(", "sigma_tot", ")", ")", "stddevs", ".", "append", "(", "sigma_tot", "+", "np", ".", "zeros", "(", "num_sites", ")", ")", "return", "stddevs" ]
Return standard deviations as defined in eq. 4 and 5, page 744, based on table 8, page 744. Eq. 5 yields std dev in natural log, so convert to log10
[ "Return", "standard", "deviations", "as", "defined", "in", "eq", ".", "4", "and", "5", "page", "744", "based", "on", "table", "8", "page", "744", ".", "Eq", ".", "5", "yields", "std", "dev", "in", "natural", "log", "so", "convert", "to", "log10" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/shahjouei_pezeshk_2016.py#L103-L115
gem/oq-engine
openquake/hazardlib/gsim/shahjouei_pezeshk_2016.py
ShahjoueiPezeshk2016._compute_standard_dev
def _compute_standard_dev(self, rup, imt, C): """ Compute the standard deviation in terms of magnitude described on page 744, eq. 4 """ sigma_mean = 0. if imt.name in "SA PGA": psi = -6.898E-3 else: psi = -3.054E-5 if rup.mag <= 6.5: sigma_mean = (C['c12'] * rup.mag) + C['c13'] elif rup.mag > 6.5: sigma_mean = (psi * rup.mag) + C['c14'] return sigma_mean
python
def _compute_standard_dev(self, rup, imt, C): sigma_mean = 0. if imt.name in "SA PGA": psi = -6.898E-3 else: psi = -3.054E-5 if rup.mag <= 6.5: sigma_mean = (C['c12'] * rup.mag) + C['c13'] elif rup.mag > 6.5: sigma_mean = (psi * rup.mag) + C['c14'] return sigma_mean
[ "def", "_compute_standard_dev", "(", "self", ",", "rup", ",", "imt", ",", "C", ")", ":", "sigma_mean", "=", "0.", "if", "imt", ".", "name", "in", "\"SA PGA\"", ":", "psi", "=", "-", "6.898E-3", "else", ":", "psi", "=", "-", "3.054E-5", "if", "rup", ".", "mag", "<=", "6.5", ":", "sigma_mean", "=", "(", "C", "[", "'c12'", "]", "*", "rup", ".", "mag", ")", "+", "C", "[", "'c13'", "]", "elif", "rup", ".", "mag", ">", "6.5", ":", "sigma_mean", "=", "(", "psi", "*", "rup", ".", "mag", ")", "+", "C", "[", "'c14'", "]", "return", "sigma_mean" ]
Compute the standard deviation in terms of magnitude described on page 744, eq. 4
[ "Compute", "the", "standard", "deviation", "in", "terms", "of", "magnitude", "described", "on", "page", "744", "eq", ".", "4" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/shahjouei_pezeshk_2016.py#L164-L178
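Note: equation 4 is piecewise in magnitude, with an IMT-dependent slope psi above M6.5; a sketch with placeholder c12/c13/c14 (the published Table 8 values are not reproduced here):

def sigma_mean_sketch(mag, c12, c13, c14, sa_or_pga=True):
    psi = -6.898e-3 if sa_or_pga else -3.054e-5   # second value is for PGV
    if mag <= 6.5:
        return c12 * mag + c13
    return psi * mag + c14

print(sigma_mean_sketch(6.0, c12=-0.03, c13=0.65, c14=0.60))   # 0.47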
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2015.py
AbrahamsonEtAl2015SInter._compute_pga_rock
def _compute_pga_rock(self, C, dc1, sites, rup, dists): """ Compute and return mean imt value for rock conditions (vs30 = 1000 m/s) """ mean = (self._compute_magnitude_term(C, dc1, rup.mag) + self._compute_distance_term(C, rup.mag, dists) + self._compute_focal_depth_term(C, rup) + self._compute_forearc_backarc_term(C, sites, dists)) # Apply linear site term site_response = ((C['theta12'] + C['b'] * self.CONSTS['n']) * np.log(1000. / C['vlin'])) return mean + site_response
python
def _compute_pga_rock(self, C, dc1, sites, rup, dists): mean = (self._compute_magnitude_term(C, dc1, rup.mag) + self._compute_distance_term(C, rup.mag, dists) + self._compute_focal_depth_term(C, rup) + self._compute_forearc_backarc_term(C, sites, dists)) site_response = ((C['theta12'] + C['b'] * self.CONSTS['n']) * np.log(1000. / C['vlin'])) return mean + site_response
[ "def", "_compute_pga_rock", "(", "self", ",", "C", ",", "dc1", ",", "sites", ",", "rup", ",", "dists", ")", ":", "mean", "=", "(", "self", ".", "_compute_magnitude_term", "(", "C", ",", "dc1", ",", "rup", ".", "mag", ")", "+", "self", ".", "_compute_distance_term", "(", "C", ",", "rup", ".", "mag", ",", "dists", ")", "+", "self", ".", "_compute_focal_depth_term", "(", "C", ",", "rup", ")", "+", "self", ".", "_compute_forearc_backarc_term", "(", "C", ",", "sites", ",", "dists", ")", ")", "# Apply linear site term", "site_response", "=", "(", "(", "C", "[", "'theta12'", "]", "+", "C", "[", "'b'", "]", "*", "self", ".", "CONSTS", "[", "'n'", "]", ")", "*", "np", ".", "log", "(", "1000.", "/", "C", "[", "'vlin'", "]", ")", ")", "return", "mean", "+", "site_response" ]
Compute and return mean imt value for rock conditions (vs30 = 1000 m/s)
[ "Compute", "and", "return", "mean", "imt", "value", "for", "rock", "conditions", "(", "vs30", "=", "1000", "m", "/", "s", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2015.py#L120-L132
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2015.py
AbrahamsonEtAl2015SInter._compute_magnitude_term
def _compute_magnitude_term(self, C, dc1, mag): """ Computes the magnitude scaling term given by equation (2) """ base = C['theta1'] + (self.CONSTS['theta4'] * dc1) dmag = self.CONSTS["C1"] + dc1 if mag > dmag: f_mag = (self.CONSTS['theta5'] * (mag - dmag)) +\ C['theta13'] * ((10. - mag) ** 2.) else: f_mag = (self.CONSTS['theta4'] * (mag - dmag)) +\ C['theta13'] * ((10. - mag) ** 2.) return base + f_mag
python
def _compute_magnitude_term(self, C, dc1, mag): base = C['theta1'] + (self.CONSTS['theta4'] * dc1) dmag = self.CONSTS["C1"] + dc1 if mag > dmag: f_mag = (self.CONSTS['theta5'] * (mag - dmag)) +\ C['theta13'] * ((10. - mag) ** 2.) else: f_mag = (self.CONSTS['theta4'] * (mag - dmag)) +\ C['theta13'] * ((10. - mag) ** 2.) return base + f_mag
[ "def", "_compute_magnitude_term", "(", "self", ",", "C", ",", "dc1", ",", "mag", ")", ":", "base", "=", "C", "[", "'theta1'", "]", "+", "(", "self", ".", "CONSTS", "[", "'theta4'", "]", "*", "dc1", ")", "dmag", "=", "self", ".", "CONSTS", "[", "\"C1\"", "]", "+", "dc1", "if", "mag", ">", "dmag", ":", "f_mag", "=", "(", "self", ".", "CONSTS", "[", "'theta5'", "]", "*", "(", "mag", "-", "dmag", ")", ")", "+", "C", "[", "'theta13'", "]", "*", "(", "(", "10.", "-", "mag", ")", "**", "2.", ")", "else", ":", "f_mag", "=", "(", "self", ".", "CONSTS", "[", "'theta4'", "]", "*", "(", "mag", "-", "dmag", ")", ")", "+", "C", "[", "'theta13'", "]", "*", "(", "(", "10.", "-", "mag", ")", "**", "2.", ")", "return", "base", "+", "f_mag" ]
Computes the magnitude scaling term given by equation (2)
[ "Computes", "the", "magnitude", "scaling", "term", "given", "by", "equation", "(", "2", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2015.py#L134-L148
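Note: a sketch of equation (2): the scaling is bilinear around the hinge dmag = C1 + dc1 with an added quadratic taper in (10 - M). The theta values below are placeholders, and C1 = 7.8 is assumed from the interface-model constants, so treat both as assumptions:

def f_mag_sketch(mag, dc1, theta1, theta4, theta5, theta13, C1=7.8):
    dmag = C1 + dc1
    slope = theta5 if mag > dmag else theta4   # slope change at the hinge
    return (theta1 + theta4 * dc1
            + slope * (mag - dmag)
            + theta13 * (10. - mag) ** 2)

print(f_mag_sketch(8.5, dc1=0.2, theta1=4.2, theta4=0.9,
                   theta5=0.0, theta13=-0.01))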
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2015.py
AbrahamsonEtAl2015SInter._compute_site_response_term
def _compute_site_response_term(self, C, sites, pga1000): """ Compute and return site response model term This GMPE adopts the same site response scaling model of Walling et al (2008) as implemented in the Abrahamson & Silva (2008) GMPE. The functional form is retained here. """ vs_star = sites.vs30.copy() vs_star[vs_star > 1000.0] = 1000. arg = vs_star / C["vlin"] site_resp_term = C["theta12"] * np.log(arg) # Get linear scaling term idx = sites.vs30 >= C["vlin"] site_resp_term[idx] += (C["b"] * self.CONSTS["n"] * np.log(arg[idx])) # Get nonlinear scaling term idx = np.logical_not(idx) site_resp_term[idx] += ( -C["b"] * np.log(pga1000[idx] + self.CONSTS["c"]) + C["b"] * np.log(pga1000[idx] + self.CONSTS["c"] * (arg[idx] ** self.CONSTS["n"]))) return site_resp_term
python
def _compute_site_response_term(self, C, sites, pga1000): vs_star = sites.vs30.copy() vs_star[vs_star > 1000.0] = 1000. arg = vs_star / C["vlin"] site_resp_term = C["theta12"] * np.log(arg) idx = sites.vs30 >= C["vlin"] site_resp_term[idx] += (C["b"] * self.CONSTS["n"] * np.log(arg[idx])) idx = np.logical_not(idx) site_resp_term[idx] += ( -C["b"] * np.log(pga1000[idx] + self.CONSTS["c"]) + C["b"] * np.log(pga1000[idx] + self.CONSTS["c"] * (arg[idx] ** self.CONSTS["n"]))) return site_resp_term
[ "def", "_compute_site_response_term", "(", "self", ",", "C", ",", "sites", ",", "pga1000", ")", ":", "vs_star", "=", "sites", ".", "vs30", ".", "copy", "(", ")", "vs_star", "[", "vs_star", ">", "1000.0", "]", "=", "1000.", "arg", "=", "vs_star", "/", "C", "[", "\"vlin\"", "]", "site_resp_term", "=", "C", "[", "\"theta12\"", "]", "*", "np", ".", "log", "(", "arg", ")", "# Get linear scaling term", "idx", "=", "sites", ".", "vs30", ">=", "C", "[", "\"vlin\"", "]", "site_resp_term", "[", "idx", "]", "+=", "(", "C", "[", "\"b\"", "]", "*", "self", ".", "CONSTS", "[", "\"n\"", "]", "*", "np", ".", "log", "(", "arg", "[", "idx", "]", ")", ")", "# Get nonlinear scaling term", "idx", "=", "np", ".", "logical_not", "(", "idx", ")", "site_resp_term", "[", "idx", "]", "+=", "(", "-", "C", "[", "\"b\"", "]", "*", "np", ".", "log", "(", "pga1000", "[", "idx", "]", "+", "self", ".", "CONSTS", "[", "\"c\"", "]", ")", "+", "C", "[", "\"b\"", "]", "*", "np", ".", "log", "(", "pga1000", "[", "idx", "]", "+", "self", ".", "CONSTS", "[", "\"c\"", "]", "*", "(", "arg", "[", "idx", "]", "**", "self", ".", "CONSTS", "[", "\"n\"", "]", ")", ")", ")", "return", "site_resp_term" ]
Compute and return site response model term This GMPE adopts the same site response scaling model of Walling et al (2008) as implemented in the Abrahamson & Silva (2008) GMPE. The functional form is retained here.
[ "Compute", "and", "return", "site", "response", "model", "term", "This", "GMPE", "adopts", "the", "same", "site", "response", "scaling", "model", "of", "Walling", "et", "al", "(", "2008", ")", "as", "implemented", "in", "the", "Abrahamson", "&", "Silva", "(", "2008", ")", "GMPE", ".", "The", "functional", "form", "is", "retained", "here", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2015.py#L178-L198
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2015.py
AbrahamsonEtAl2015SSlab._compute_focal_depth_term
def _compute_focal_depth_term(self, C, rup): """ Computes the hypocentral depth scaling term - as indicated by equation (3) """ if rup.hypo_depth > 120.0: z_h = 120.0 else: z_h = rup.hypo_depth return C['theta11'] * (z_h - 60.)
python
def _compute_focal_depth_term(self, C, rup): if rup.hypo_depth > 120.0: z_h = 120.0 else: z_h = rup.hypo_depth return C['theta11'] * (z_h - 60.)
[ "def", "_compute_focal_depth_term", "(", "self", ",", "C", ",", "rup", ")", ":", "if", "rup", ".", "hypo_depth", ">", "120.0", ":", "z_h", "=", "120.0", "else", ":", "z_h", "=", "rup", ".", "hypo_depth", "return", "C", "[", "'theta11'", "]", "*", "(", "z_h", "-", "60.", ")" ]
Computes the hypocentral depth scaling term - as indicated by equation (3)
[ "Computes", "the", "hypocentral", "depth", "scaling", "term", "-", "as", "indicated", "by", "equation", "(", "3", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2015.py#L335-L344
gem/oq-engine
openquake/hazardlib/gsim/abrahamson_2015.py
AbrahamsonEtAl2015SSlab._compute_forearc_backarc_term
def _compute_forearc_backarc_term(self, C, sites, dists): """ Computes the forearc/backarc scaling term given by equation (4). """ f_faba = np.zeros_like(dists.rhypo) # Term only applies to backarc sites (F_FABA = 0. for forearc) max_dist = dists.rhypo[sites.backarc] max_dist[max_dist < 85.0] = 85.0 f_faba[sites.backarc] = C['theta7'] +\ (C['theta8'] * np.log(max_dist / 40.0)) return f_faba
python
def _compute_forearc_backarc_term(self, C, sites, dists): f_faba = np.zeros_like(dists.rhypo) max_dist = dists.rhypo[sites.backarc] max_dist[max_dist < 85.0] = 85.0 f_faba[sites.backarc] = C['theta7'] +\ (C['theta8'] * np.log(max_dist / 40.0)) return f_faba
[ "def", "_compute_forearc_backarc_term", "(", "self", ",", "C", ",", "sites", ",", "dists", ")", ":", "f_faba", "=", "np", ".", "zeros_like", "(", "dists", ".", "rhypo", ")", "# Term only applies to backarc sites (F_FABA = 0. for forearc)", "max_dist", "=", "dists", ".", "rhypo", "[", "sites", ".", "backarc", "]", "max_dist", "[", "max_dist", "<", "85.0", "]", "=", "85.0", "f_faba", "[", "sites", ".", "backarc", "]", "=", "C", "[", "'theta7'", "]", "+", "(", "C", "[", "'theta8'", "]", "*", "np", ".", "log", "(", "max_dist", "/", "40.0", ")", ")", "return", "f_faba" ]
Computes the forearc/backarc scaling term given by equation (4).
[ "Computes", "the", "forearc", "/", "backarc", "scaling", "term", "given", "by", "equation", "(", "4", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_2015.py#L355-L365
gem/oq-engine
openquake/server/dbapi.py
match
def match(m_templ, *m_args): """ :param m_templ: a meta template string :param m_args: all arguments :returns: template, args Here is an example of usage: >>> match('SELECT * FROM job WHERE id=?x', 1) ('SELECT * FROM job WHERE id=?', (1,)) """ # strip commented lines m_templ = '\n'.join(line for line in m_templ.splitlines() if not line.lstrip().startswith('--')) if not m_args: return m_templ, () try: return _Replacer(m_args).match(m_templ) except IndexError: raise ValueError('Incorrect number of ?-parameters in %s, expected %s' % (m_templ, len(m_args)))
python
def match(m_templ, *m_args): m_templ = '\n'.join(line for line in m_templ.splitlines() if not line.lstrip().startswith('--')) if not m_args: return m_templ, () try: return _Replacer(m_args).match(m_templ) except IndexError: raise ValueError('Incorrect number of ?-parameters in %s, expected %s' % (m_templ, len(m_args)))
[ "def", "match", "(", "m_templ", ",", "*", "m_args", ")", ":", "# strip commented lines", "m_templ", "=", "'\\n'", ".", "join", "(", "line", "for", "line", "in", "m_templ", ".", "splitlines", "(", ")", "if", "not", "line", ".", "lstrip", "(", ")", ".", "startswith", "(", "'--'", ")", ")", "if", "not", "m_args", ":", "return", "m_templ", ",", "(", ")", "try", ":", "return", "_Replacer", "(", "m_args", ")", ".", "match", "(", "m_templ", ")", "except", "IndexError", ":", "raise", "ValueError", "(", "'Incorrect number of ?-parameters in %s, expected %s'", "%", "(", "m_templ", ",", "len", "(", "m_args", ")", ")", ")" ]
:param m_templ: a meta template string :param m_args: all arguments :returns: template, args Here is an example of usage: >>> match('SELECT * FROM job WHERE id=?x', 1) ('SELECT * FROM job WHERE id=?', (1,))
[ ":", "param", "m_templ", ":", "a", "meta", "template", "string", ":", "param", "m_args", ":", "all", "arguments", ":", "returns", ":", "template", "args" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/dbapi.py#L259-L279
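Note: two more behaviours of match that follow directly from the code above, as a usage sketch (assuming openquake.server.dbapi is importable): lines starting with -- are stripped before substitution, and a call without arguments returns the stripped template unchanged:

from openquake.server.dbapi import match

templ, args = match('SELECT * FROM job WHERE id=?x', 1)
print(templ, args)        # SELECT * FROM job WHERE id=? (1,)

templ, args = match('-- a comment\nSELECT 1')
print(repr(templ), args)  # 'SELECT 1' ()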
gem/oq-engine
openquake/server/dbapi.py
Db.insert
def insert(self, table, columns, rows): """ Insert several rows with executemany. Return a cursor. """ cursor = self.conn.cursor() if len(rows): templ, _args = match('INSERT INTO ?s (?S) VALUES (?X)', table, columns, rows[0]) cursor.executemany(templ, rows) return cursor
python
def insert(self, table, columns, rows): cursor = self.conn.cursor() if len(rows): templ, _args = match('INSERT INTO ?s (?S) VALUES (?X)', table, columns, rows[0]) cursor.executemany(templ, rows) return cursor
[ "def", "insert", "(", "self", ",", "table", ",", "columns", ",", "rows", ")", ":", "cursor", "=", "self", ".", "conn", ".", "cursor", "(", ")", "if", "len", "(", "rows", ")", ":", "templ", ",", "_args", "=", "match", "(", "'INSERT INTO ?s (?S) VALUES (?X)'", ",", "table", ",", "columns", ",", "rows", "[", "0", "]", ")", "cursor", ".", "executemany", "(", "templ", ",", "rows", ")", "return", "cursor" ]
Insert several rows with executemany. Return a cursor.
[ "Insert", "several", "rows", "with", "executemany", ".", "Return", "a", "cursor", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/dbapi.py#L362-L371
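Note: insert builds its SQL through the match meta-template shown earlier; a hedged sketch of the expansion, mirroring insert's own call pattern (the exact output of ?s/?S/?X is inferred from this usage, not verified here):

from openquake.server.dbapi import match

templ, _ = match('INSERT INTO ?s (?S) VALUES (?X)',
                 'job', ['user_name', 'status'], ('alice', 'created'))
# expected to expand to something like:
#   INSERT INTO job (user_name, status) VALUES (?, ?)
print(templ)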
gem/oq-engine
openquake/hazardlib/calc/hazard_curve.py
_cluster
def _cluster(param, tom, imtls, gsims, grp_ids, pmap): """ Computes the probability map in case of a cluster group """ pmapclu = AccumDict({grp_id: ProbabilityMap(len(imtls.array), len(gsims)) for grp_id in grp_ids}) # Get temporal occurrence model # Number of occurrences for the cluster first = True for nocc in range(0, 50): # TODO fix this once the occurrence rate will be used just as # an object attribute ocr = tom.occurrence_rate prob_n_occ = tom.get_probability_n_occurrences(ocr, nocc) if first: pmapclu = prob_n_occ * (~pmap)**nocc first = False else: pmapclu += prob_n_occ * (~pmap)**nocc pmap = ~pmapclu return pmap
python
def _cluster(param, tom, imtls, gsims, grp_ids, pmap): pmapclu = AccumDict({grp_id: ProbabilityMap(len(imtls.array), len(gsims)) for grp_id in grp_ids}) first = True for nocc in range(0, 50): ocr = tom.occurrence_rate prob_n_occ = tom.get_probability_n_occurrences(ocr, nocc) if first: pmapclu = prob_n_occ * (~pmap)**nocc first = False else: pmapclu += prob_n_occ * (~pmap)**nocc pmap = ~pmapclu return pmap
[ "def", "_cluster", "(", "param", ",", "tom", ",", "imtls", ",", "gsims", ",", "grp_ids", ",", "pmap", ")", ":", "pmapclu", "=", "AccumDict", "(", "{", "grp_id", ":", "ProbabilityMap", "(", "len", "(", "imtls", ".", "array", ")", ",", "len", "(", "gsims", ")", ")", "for", "grp_id", "in", "grp_ids", "}", ")", "# Get temporal occurrence model", "# Number of occurrences for the cluster", "first", "=", "True", "for", "nocc", "in", "range", "(", "0", ",", "50", ")", ":", "# TODO fix this once the occurrence rate will be used just as", "# an object attribute", "ocr", "=", "tom", ".", "occurrence_rate", "prob_n_occ", "=", "tom", ".", "get_probability_n_occurrences", "(", "ocr", ",", "nocc", ")", "if", "first", ":", "pmapclu", "=", "prob_n_occ", "*", "(", "~", "pmap", ")", "**", "nocc", "first", "=", "False", "else", ":", "pmapclu", "+=", "prob_n_occ", "*", "(", "~", "pmap", ")", "**", "nocc", "pmap", "=", "~", "pmapclu", "return", "pmap" ]
Computes the probability map in case of a cluster group
[ "Computes", "the", "probability", "map", "in", "case", "of", "a", "cluster", "group" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/hazard_curve.py#L71-L91
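Note: the loop is a total-probability mixture over the number of cluster occurrences, poe = 1 - sum_n P(N=n) * (1 - poe_1)^n, truncated at n = 49, where (1 - poe_1) is the per-occurrence non-exceedance (the ~pmap inversion in the code); a scalar sketch with a Poisson occurrence model:

import math

rate, poe1 = 2.0, 0.1          # cluster rate and single-occurrence PoE
non_exc = sum(math.exp(-rate) * rate ** n / math.factorial(n)
              * (1 - poe1) ** n for n in range(50))
print(1 - non_exc)             # ~0.1813, equal to 1 - exp(-rate * poe1)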
gem/oq-engine
openquake/hazardlib/calc/hazard_curve.py
classical
def classical(group, src_filter, gsims, param, monitor=Monitor()): """ Compute the hazard curves for a set of sources belonging to the same tectonic region type for all the GSIMs associated to that TRT. The arguments are the same as in :func:`calc_hazard_curves`, except for ``gsims``, which is a list of GSIM instances. :returns: a dictionary {grp_id: pmap} with attributes .grp_ids, .calc_times, .eff_ruptures """ if not hasattr(src_filter, 'sitecol'): # a sitecol was passed src_filter = SourceFilter(src_filter, {}) # Get the parameters assigned to the group src_mutex = getattr(group, 'src_interdep', None) == 'mutex' rup_mutex = getattr(group, 'rup_interdep', None) == 'mutex' cluster = getattr(group, 'cluster', None) # Compute the number of ruptures grp_ids = set() for src in group: if not src.num_ruptures: # src.num_ruptures is set when parsing the XML, but not when # the source is instantiated manually, so it is set here src.num_ruptures = src.count_ruptures() # This sets the proper TOM in case of a cluster if cluster: src.temporal_occurrence_model = FatedTOM(time_span=1) # Updating IDs grp_ids.update(src.src_group_ids) # Now preparing context maxdist = src_filter.integration_distance imtls = param['imtls'] trunclevel = param.get('truncation_level') cmaker = ContextMaker( src.tectonic_region_type, gsims, maxdist, param, monitor) # Prepare the accumulator for the probability maps pmap = AccumDict({grp_id: ProbabilityMap(len(imtls.array), len(gsims)) for grp_id in grp_ids}) rupdata = {grp_id: [] for grp_id in grp_ids} # AccumDict of arrays with 3 elements weight, nsites, calc_time calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32)) eff_ruptures = AccumDict(accum=0) # grp_id -> num_ruptures # Computing hazard for src, s_sites in src_filter(group): # filter now t0 = time.time() try: poemap = cmaker.poe_map(src, s_sites, imtls, trunclevel, rup_indep=not rup_mutex) except Exception as err: etype, err, tb = sys.exc_info() msg = '%s (source id=%s)' % (str(err), src.source_id) raise etype(msg).with_traceback(tb) if src_mutex: # mutex sources, there is a single group for sid in poemap: pcurve = pmap[src.src_group_id].setdefault(sid, 0) pcurve += poemap[sid] * src.mutex_weight elif poemap: for gid in src.src_group_ids: pmap[gid] |= poemap if len(cmaker.rupdata): for gid in src.src_group_ids: rupdata[gid].append(cmaker.rupdata) calc_times[src.id] += numpy.array( [src.weight, len(s_sites), time.time() - t0]) # storing the number of contributing ruptures too eff_ruptures += {gid: getattr(poemap, 'eff_ruptures', 0) for gid in src.src_group_ids} # Updating the probability map in the case of mutually exclusive # sources group_probability = getattr(group, 'grp_probability', None) if src_mutex and group_probability: pmap[src.src_group_id] *= group_probability # Processing cluster if cluster: tom = getattr(group, 'temporal_occurrence_model') pmap = _cluster(param, tom, imtls, gsims, grp_ids, pmap) # Return results for gid, data in rupdata.items(): if len(data): rupdata[gid] = numpy.concatenate(data) return dict(pmap=pmap, calc_times=calc_times, eff_ruptures=eff_ruptures, rup_data=rupdata)
python
def classical(group, src_filter, gsims, param, monitor=Monitor()): if not hasattr(src_filter, 'sitecol'): src_filter = SourceFilter(src_filter, {}) src_mutex = getattr(group, 'src_interdep', None) == 'mutex' rup_mutex = getattr(group, 'rup_interdep', None) == 'mutex' cluster = getattr(group, 'cluster', None) grp_ids = set() for src in group: if not src.num_ruptures: src.num_ruptures = src.count_ruptures() if cluster: src.temporal_occurrence_model = FatedTOM(time_span=1) grp_ids.update(src.src_group_ids) maxdist = src_filter.integration_distance imtls = param['imtls'] trunclevel = param.get('truncation_level') cmaker = ContextMaker( src.tectonic_region_type, gsims, maxdist, param, monitor) pmap = AccumDict({grp_id: ProbabilityMap(len(imtls.array), len(gsims)) for grp_id in grp_ids}) rupdata = {grp_id: [] for grp_id in grp_ids} calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32)) eff_ruptures = AccumDict(accum=0) for src, s_sites in src_filter(group): t0 = time.time() try: poemap = cmaker.poe_map(src, s_sites, imtls, trunclevel, rup_indep=not rup_mutex) except Exception as err: etype, err, tb = sys.exc_info() msg = '%s (source id=%s)' % (str(err), src.source_id) raise etype(msg).with_traceback(tb) if src_mutex: for sid in poemap: pcurve = pmap[src.src_group_id].setdefault(sid, 0) pcurve += poemap[sid] * src.mutex_weight elif poemap: for gid in src.src_group_ids: pmap[gid] |= poemap if len(cmaker.rupdata): for gid in src.src_group_ids: rupdata[gid].append(cmaker.rupdata) calc_times[src.id] += numpy.array( [src.weight, len(s_sites), time.time() - t0]) eff_ruptures += {gid: getattr(poemap, 'eff_ruptures', 0) for gid in src.src_group_ids} group_probability = getattr(group, 'grp_probability', None) if src_mutex and group_probability: pmap[src.src_group_id] *= group_probability if cluster: tom = getattr(group, 'temporal_occurrence_model') pmap = _cluster(param, tom, imtls, gsims, grp_ids, pmap) for gid, data in rupdata.items(): if len(data): rupdata[gid] = numpy.concatenate(data) return dict(pmap=pmap, calc_times=calc_times, eff_ruptures=eff_ruptures, rup_data=rupdata)
[ "def", "classical", "(", "group", ",", "src_filter", ",", "gsims", ",", "param", ",", "monitor", "=", "Monitor", "(", ")", ")", ":", "if", "not", "hasattr", "(", "src_filter", ",", "'sitecol'", ")", ":", "# a sitecol was passed", "src_filter", "=", "SourceFilter", "(", "src_filter", ",", "{", "}", ")", "# Get the parameters assigned to the group", "src_mutex", "=", "getattr", "(", "group", ",", "'src_interdep'", ",", "None", ")", "==", "'mutex'", "rup_mutex", "=", "getattr", "(", "group", ",", "'rup_interdep'", ",", "None", ")", "==", "'mutex'", "cluster", "=", "getattr", "(", "group", ",", "'cluster'", ",", "None", ")", "# Compute the number of ruptures", "grp_ids", "=", "set", "(", ")", "for", "src", "in", "group", ":", "if", "not", "src", ".", "num_ruptures", ":", "# src.num_ruptures is set when parsing the XML, but not when", "# the source is instantiated manually, so it is set here", "src", ".", "num_ruptures", "=", "src", ".", "count_ruptures", "(", ")", "# This sets the proper TOM in case of a cluster", "if", "cluster", ":", "src", ".", "temporal_occurrence_model", "=", "FatedTOM", "(", "time_span", "=", "1", ")", "# Updating IDs", "grp_ids", ".", "update", "(", "src", ".", "src_group_ids", ")", "# Now preparing context", "maxdist", "=", "src_filter", ".", "integration_distance", "imtls", "=", "param", "[", "'imtls'", "]", "trunclevel", "=", "param", ".", "get", "(", "'truncation_level'", ")", "cmaker", "=", "ContextMaker", "(", "src", ".", "tectonic_region_type", ",", "gsims", ",", "maxdist", ",", "param", ",", "monitor", ")", "# Prepare the accumulator for the probability maps", "pmap", "=", "AccumDict", "(", "{", "grp_id", ":", "ProbabilityMap", "(", "len", "(", "imtls", ".", "array", ")", ",", "len", "(", "gsims", ")", ")", "for", "grp_id", "in", "grp_ids", "}", ")", "rupdata", "=", "{", "grp_id", ":", "[", "]", "for", "grp_id", "in", "grp_ids", "}", "# AccumDict of arrays with 3 elements weight, nsites, calc_time", "calc_times", "=", "AccumDict", "(", "accum", "=", "numpy", ".", "zeros", "(", "3", ",", "numpy", ".", "float32", ")", ")", "eff_ruptures", "=", "AccumDict", "(", "accum", "=", "0", ")", "# grp_id -> num_ruptures", "# Computing hazard", "for", "src", ",", "s_sites", "in", "src_filter", "(", "group", ")", ":", "# filter now", "t0", "=", "time", ".", "time", "(", ")", "try", ":", "poemap", "=", "cmaker", ".", "poe_map", "(", "src", ",", "s_sites", ",", "imtls", ",", "trunclevel", ",", "rup_indep", "=", "not", "rup_mutex", ")", "except", "Exception", "as", "err", ":", "etype", ",", "err", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'%s (source id=%s)'", "%", "(", "str", "(", "err", ")", ",", "src", ".", "source_id", ")", "raise", "etype", "(", "msg", ")", ".", "with_traceback", "(", "tb", ")", "if", "src_mutex", ":", "# mutex sources, there is a single group", "for", "sid", "in", "poemap", ":", "pcurve", "=", "pmap", "[", "src", ".", "src_group_id", "]", ".", "setdefault", "(", "sid", ",", "0", ")", "pcurve", "+=", "poemap", "[", "sid", "]", "*", "src", ".", "mutex_weight", "elif", "poemap", ":", "for", "gid", "in", "src", ".", "src_group_ids", ":", "pmap", "[", "gid", "]", "|=", "poemap", "if", "len", "(", "cmaker", ".", "rupdata", ")", ":", "for", "gid", "in", "src", ".", "src_group_ids", ":", "rupdata", "[", "gid", "]", ".", "append", "(", "cmaker", ".", "rupdata", ")", "calc_times", "[", "src", ".", "id", "]", "+=", "numpy", ".", "array", "(", "[", "src", ".", "weight", ",", "len", "(", "s_sites", ")", ",", "time", ".", "time", "(", ")", "-", 
"t0", "]", ")", "# storing the number of contributing ruptures too", "eff_ruptures", "+=", "{", "gid", ":", "getattr", "(", "poemap", ",", "'eff_ruptures'", ",", "0", ")", "for", "gid", "in", "src", ".", "src_group_ids", "}", "# Updating the probability map in the case of mutually exclusive", "# sources", "group_probability", "=", "getattr", "(", "group", ",", "'grp_probability'", ",", "None", ")", "if", "src_mutex", "and", "group_probability", ":", "pmap", "[", "src", ".", "src_group_id", "]", "*=", "group_probability", "# Processing cluster", "if", "cluster", ":", "tom", "=", "getattr", "(", "group", ",", "'temporal_occurrence_model'", ")", "pmap", "=", "_cluster", "(", "param", ",", "tom", ",", "imtls", ",", "gsims", ",", "grp_ids", ",", "pmap", ")", "# Return results", "for", "gid", ",", "data", "in", "rupdata", ".", "items", "(", ")", ":", "if", "len", "(", "data", ")", ":", "rupdata", "[", "gid", "]", "=", "numpy", ".", "concatenate", "(", "data", ")", "return", "dict", "(", "pmap", "=", "pmap", ",", "calc_times", "=", "calc_times", ",", "eff_ruptures", "=", "eff_ruptures", ",", "rup_data", "=", "rupdata", ")" ]
Compute the hazard curves for a set of sources belonging to the same tectonic region type for all the GSIMs associated to that TRT. The arguments are the same as in :func:`calc_hazard_curves`, except for ``gsims``, which is a list of GSIM instances. :returns: a dictionary {grp_id: pmap} with attributes .grp_ids, .calc_times, .eff_ruptures
[ "Compute", "the", "hazard", "curves", "for", "a", "set", "of", "sources", "belonging", "to", "the", "same", "tectonic", "region", "type", "for", "all", "the", "GSIMs", "associated", "to", "that", "TRT", ".", "The", "arguments", "are", "the", "same", "as", "in", ":", "func", ":", "calc_hazard_curves", "except", "for", "gsims", "which", "is", "a", "list", "of", "GSIM", "instances", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/hazard_curve.py#L94-L176
gem/oq-engine
openquake/hazardlib/calc/hazard_curve.py
calc_hazard_curves
def calc_hazard_curves( groups, ss_filter, imtls, gsim_by_trt, truncation_level=None, apply=sequential_apply, filter_distance='rjb', reqv=None): """ Compute hazard curves on a list of sites, given a set of seismic source groups and a dictionary of ground shaking intensity models (one per tectonic region type). Probability of ground motion exceedance is computed in different ways depending on whether the sources are independent or mutually exclusive. :param groups: A sequence of groups of seismic source objects (instances of :class:`~openquake.hazardlib.source.base.BaseSeismicSource`). :param ss_filter: A source filter over the site collection or the site collection itself :param imtls: Dictionary mapping intensity measure type strings to lists of intensity measure levels. :param gsim_by_trt: Dictionary mapping tectonic region types (members of :class:`openquake.hazardlib.const.TRT`) to :class:`~openquake.hazardlib.gsim.base.GMPE` or :class:`~openquake.hazardlib.gsim.base.IPE` objects. :param truncation_level: Float, number of standard deviations for truncation of the intensity distribution. :param apply: apply function to use (default sequential_apply) :param filter_distance: The distance used to filter the ruptures (default rjb) :param reqv: If not None, an instance of RjbEquivalent :returns: An array of size N, where N is the number of sites, whose elements are records with fields given by the intensity measure types; the size of each field is given by the number of levels in ``imtls``. """ # This is ensuring backward compatibility i.e. processing a list of # sources if not isinstance(groups[0], SourceGroup): # sent a list of sources odic = groupby(groups, operator.attrgetter('tectonic_region_type')) groups = [SourceGroup(trt, odic[trt], 'src_group', 'indep', 'indep') for trt in odic] # ensure the sources have the right src_group_id for i, grp in enumerate(groups): for src in grp: if src.src_group_id is None: src.src_group_id = i imtls = DictArray(imtls) param = dict(imtls=imtls, truncation_level=truncation_level, filter_distance=filter_distance, reqv=reqv, cluster=grp.cluster) pmap = ProbabilityMap(len(imtls.array), 1) # Processing groups with homogeneous tectonic region gsim = gsim_by_trt[groups[0][0].tectonic_region_type] mon = Monitor() for group in groups: if group.atomic: # do not split it = [classical(group, ss_filter, [gsim], param, mon)] else: # split the group and apply `classical` in parallel it = apply( classical, (group.sources, ss_filter, [gsim], param, mon), weight=operator.attrgetter('weight')) for dic in it: for grp_id, pval in dic['pmap'].items(): pmap |= pval sitecol = getattr(ss_filter, 'sitecol', ss_filter) return pmap.convert(imtls, len(sitecol.complete))
python
def calc_hazard_curves( groups, ss_filter, imtls, gsim_by_trt, truncation_level=None, apply=sequential_apply, filter_distance='rjb', reqv=None): if not isinstance(groups[0], SourceGroup): odic = groupby(groups, operator.attrgetter('tectonic_region_type')) groups = [SourceGroup(trt, odic[trt], 'src_group', 'indep', 'indep') for trt in odic] for i, grp in enumerate(groups): for src in grp: if src.src_group_id is None: src.src_group_id = i imtls = DictArray(imtls) param = dict(imtls=imtls, truncation_level=truncation_level, filter_distance=filter_distance, reqv=reqv, cluster=grp.cluster) pmap = ProbabilityMap(len(imtls.array), 1) gsim = gsim_by_trt[groups[0][0].tectonic_region_type] mon = Monitor() for group in groups: if group.atomic: it = [classical(group, ss_filter, [gsim], param, mon)] else: it = apply( classical, (group.sources, ss_filter, [gsim], param, mon), weight=operator.attrgetter('weight')) for dic in it: for grp_id, pval in dic['pmap'].items(): pmap |= pval sitecol = getattr(ss_filter, 'sitecol', ss_filter) return pmap.convert(imtls, len(sitecol.complete))
[ "def", "calc_hazard_curves", "(", "groups", ",", "ss_filter", ",", "imtls", ",", "gsim_by_trt", ",", "truncation_level", "=", "None", ",", "apply", "=", "sequential_apply", ",", "filter_distance", "=", "'rjb'", ",", "reqv", "=", "None", ")", ":", "# This is ensuring backward compatibility i.e. processing a list of", "# sources", "if", "not", "isinstance", "(", "groups", "[", "0", "]", ",", "SourceGroup", ")", ":", "# sent a list of sources", "odic", "=", "groupby", "(", "groups", ",", "operator", ".", "attrgetter", "(", "'tectonic_region_type'", ")", ")", "groups", "=", "[", "SourceGroup", "(", "trt", ",", "odic", "[", "trt", "]", ",", "'src_group'", ",", "'indep'", ",", "'indep'", ")", "for", "trt", "in", "odic", "]", "# ensure the sources have the right src_group_id", "for", "i", ",", "grp", "in", "enumerate", "(", "groups", ")", ":", "for", "src", "in", "grp", ":", "if", "src", ".", "src_group_id", "is", "None", ":", "src", ".", "src_group_id", "=", "i", "imtls", "=", "DictArray", "(", "imtls", ")", "param", "=", "dict", "(", "imtls", "=", "imtls", ",", "truncation_level", "=", "truncation_level", ",", "filter_distance", "=", "filter_distance", ",", "reqv", "=", "reqv", ",", "cluster", "=", "grp", ".", "cluster", ")", "pmap", "=", "ProbabilityMap", "(", "len", "(", "imtls", ".", "array", ")", ",", "1", ")", "# Processing groups with homogeneous tectonic region", "gsim", "=", "gsim_by_trt", "[", "groups", "[", "0", "]", "[", "0", "]", ".", "tectonic_region_type", "]", "mon", "=", "Monitor", "(", ")", "for", "group", "in", "groups", ":", "if", "group", ".", "atomic", ":", "# do not split", "it", "=", "[", "classical", "(", "group", ",", "ss_filter", ",", "[", "gsim", "]", ",", "param", ",", "mon", ")", "]", "else", ":", "# split the group and apply `classical` in parallel", "it", "=", "apply", "(", "classical", ",", "(", "group", ".", "sources", ",", "ss_filter", ",", "[", "gsim", "]", ",", "param", ",", "mon", ")", ",", "weight", "=", "operator", ".", "attrgetter", "(", "'weight'", ")", ")", "for", "dic", "in", "it", ":", "for", "grp_id", ",", "pval", "in", "dic", "[", "'pmap'", "]", ".", "items", "(", ")", ":", "pmap", "|=", "pval", "sitecol", "=", "getattr", "(", "ss_filter", ",", "'sitecol'", ",", "ss_filter", ")", "return", "pmap", ".", "convert", "(", "imtls", ",", "len", "(", "sitecol", ".", "complete", ")", ")" ]
Compute hazard curves on a list of sites, given a set of seismic source groups and a dictionary of ground shaking intensity models (one per tectonic region type). Probability of ground motion exceedance is computed in different ways depending if the sources are independent or mutually exclusive. :param groups: A sequence of groups of seismic source objects (instances of :class:`~openquake.hazardlib.source.base.BaseSeismicSource`). :param ss_filter: A source filter over the site collection or the site collection itself :param imtls: Dictionary mapping intensity measure type strings to lists of intensity measure levels. :param gsim_by_trt: Dictionary mapping tectonic region types (members of :class:`openquake.hazardlib.const.TRT`) to :class:`~openquake.hazardlib.gsim.base.GMPE` or :class:`~openquake.hazardlib.gsim.base.IPE` objects. :param truncation_level: Float, number of standard deviations for truncation of the intensity distribution. :param apply: apply function to use (default sequential_apply) :param filter_distance: The distance used to filter the ruptures (default rjb) :param reqv: If not None, an instance of RjbEquivalent :returns: An array of size N, where N is the number of sites, which elements are records with fields given by the intensity measure types; the size of each field is given by the number of levels in ``imtls``.
[ "Compute", "hazard", "curves", "on", "a", "list", "of", "sites", "given", "a", "set", "of", "seismic", "source", "groups", "and", "a", "dictionary", "of", "ground", "shaking", "intensity", "models", "(", "one", "per", "tectonic", "region", "type", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/hazard_curve.py#L179-L247
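A note on the regrouping step in the function above: when `calc_hazard_curves` receives a flat list of sources, it first buckets them by tectonic region type. The following is a minimal, self-contained sketch of that bucketing; the `Source` class and `group_by_trt` helper are illustrative stand-ins, not the hazardlib API.

import operator

class Source:
    # hypothetical stand-in for a hazardlib seismic source
    def __init__(self, name, trt):
        self.name = name
        self.tectonic_region_type = trt

def group_by_trt(sources):
    # bucket sources by tectonic region type, as the backward-compatibility
    # branch of calc_hazard_curves does via openquake's groupby helper
    key = operator.attrgetter('tectonic_region_type')
    out = {}
    for src in sources:
        out.setdefault(key(src), []).append(src)
    return out

srcs = [Source('a', 'Active Shallow Crust'),
        Source('b', 'Stable Continental'),
        Source('c', 'Active Shallow Crust')]
print({trt: [s.name for s in grp] for trt, grp in group_by_trt(srcs).items()})
# {'Active Shallow Crust': ['a', 'c'], 'Stable Continental': ['b']}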
gem/oq-engine
openquake/hazardlib/gsim/akkar_2014.py
AkkarEtAlRjb2014.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. Implement equation 1, page 20. """ # compute median PGA on rock, needed to compute non-linear site # amplification C_pga = self.COEFFS[PGA()] median_pga = np.exp( self._compute_mean(C_pga, rup.mag, dists, rup.rake) ) # compute full mean value by adding nonlinear site amplification terms C = self.COEFFS[imt] mean = (self._compute_mean(C, rup.mag, dists, rup.rake) + self._compute_non_linear_term(C, median_pga, sites)) stddevs = self._get_stddevs(C, stddev_types, num_sites=sites.vs30.size) return mean + self.adjustment_factor, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): C_pga = self.COEFFS[PGA()] median_pga = np.exp( self._compute_mean(C_pga, rup.mag, dists, rup.rake) ) C = self.COEFFS[imt] mean = (self._compute_mean(C, rup.mag, dists, rup.rake) + self._compute_non_linear_term(C, median_pga, sites)) stddevs = self._get_stddevs(C, stddev_types, num_sites=sites.vs30.size) return mean + self.adjustment_factor, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "# compute median PGA on rock, needed to compute non-linear site", "# amplification", "C_pga", "=", "self", ".", "COEFFS", "[", "PGA", "(", ")", "]", "median_pga", "=", "np", ".", "exp", "(", "self", ".", "_compute_mean", "(", "C_pga", ",", "rup", ".", "mag", ",", "dists", ",", "rup", ".", "rake", ")", ")", "# compute full mean value by adding nonlinear site amplification terms", "C", "=", "self", ".", "COEFFS", "[", "imt", "]", "mean", "=", "(", "self", ".", "_compute_mean", "(", "C", ",", "rup", ".", "mag", ",", "dists", ",", "rup", ".", "rake", ")", "+", "self", ".", "_compute_non_linear_term", "(", "C", ",", "median_pga", ",", "sites", ")", ")", "stddevs", "=", "self", ".", "_get_stddevs", "(", "C", ",", "stddev_types", ",", "num_sites", "=", "sites", ".", "vs30", ".", "size", ")", "return", "mean", "+", "self", ".", "adjustment_factor", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. Implement equation 1, page 20.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/akkar_2014.py#L80-L102
gem/oq-engine
openquake/hazardlib/gsim/akkar_2014.py
AkkarEtAlRjb2014._compute_faulting_style_term
def _compute_faulting_style_term(self, C, rake): """ Compute and return fifth and sixth terms in equations (2a) and (2b), page 20. """ Fn = float(rake > -135.0 and rake < -45.0) Fr = float(rake > 45.0 and rake < 135.0) return C['a8'] * Fn + C['a9'] * Fr
python
def _compute_faulting_style_term(self, C, rake): Fn = float(rake > -135.0 and rake < -45.0) Fr = float(rake > 45.0 and rake < 135.0) return C['a8'] * Fn + C['a9'] * Fr
[ "def", "_compute_faulting_style_term", "(", "self", ",", "C", ",", "rake", ")", ":", "Fn", "=", "float", "(", "rake", ">", "-", "135.0", "and", "rake", "<", "-", "45.0", ")", "Fr", "=", "float", "(", "rake", ">", "45.0", "and", "rake", "<", "135.0", ")", "return", "C", "[", "'a8'", "]", "*", "Fn", "+", "C", "[", "'a9'", "]", "*", "Fr" ]
Compute and return fifth and sixth terms in equations (2a) and (2b), page 20.
[ "Compute", "and", "return", "fifth", "and", "sixth", "terms", "in", "equations", "(", "2a", ")", "and", "(", "2b", ")", "pages", "20", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/akkar_2014.py#L149-L157
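As a quick illustration of the indicator logic above: Fn is 1 only for normal-faulting rakes in (-135, -45) degrees and Fr only for reverse rakes in (45, 135) degrees. A standalone sketch with made-up coefficient values:

def faulting_style_term(a8, a9, rake):
    # Fn/Fr are 0/1 indicators for normal and reverse faulting
    Fn = float(-135.0 < rake < -45.0)
    Fr = float(45.0 < rake < 135.0)
    return a8 * Fn + a9 * Fr

print(faulting_style_term(-0.1, 0.1, -90.0))  # pure normal  -> -0.1
print(faulting_style_term(-0.1, 0.1, 90.0))   # pure reverse ->  0.1
print(faulting_style_term(-0.1, 0.1, 0.0))    # strike-slip  ->  0.0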
gem/oq-engine
openquake/hazardlib/gsim/akkar_2014.py
AkkarEtAlRjb2014._compute_non_linear_term
def _compute_non_linear_term(self, C, pga_only, sites): """ Compute non-linear term, equation (3a) to (3c), page 20. """ Vref = 750.0 Vcon = 1000.0 lnS = np.zeros_like(sites.vs30) # equation (3a) idx = sites.vs30 < Vref lnS[idx] = ( C['b1'] * np.log(sites.vs30[idx] / Vref) + C['b2'] * np.log( (pga_only[idx] + C['c'] * (sites.vs30[idx] / Vref) ** C['n']) / ((pga_only[idx] + C['c']) * (sites.vs30[idx] / Vref) ** C['n']) ) ) # equation (3b) idx = (sites.vs30 >= Vref) & (sites.vs30 <= Vcon) lnS[idx] = C['b1'] * np.log(sites.vs30[idx]/Vref) # equation (3c) idx = sites.vs30 > Vcon lnS[idx] = C['b1'] * np.log(Vcon/Vref) return lnS
python
def _compute_non_linear_term(self, C, pga_only, sites): Vref = 750.0 Vcon = 1000.0 lnS = np.zeros_like(sites.vs30) idx = sites.vs30 < Vref lnS[idx] = ( C['b1'] * np.log(sites.vs30[idx] / Vref) + C['b2'] * np.log( (pga_only[idx] + C['c'] * (sites.vs30[idx] / Vref) ** C['n']) / ((pga_only[idx] + C['c']) * (sites.vs30[idx] / Vref) ** C['n']) ) ) idx = (sites.vs30 >= Vref) & (sites.vs30 <= Vcon) lnS[idx] = C['b1'] * np.log(sites.vs30[idx]/Vref) idx = sites.vs30 > Vcon lnS[idx] = C['b1'] * np.log(Vcon/Vref) return lnS
[ "def", "_compute_non_linear_term", "(", "self", ",", "C", ",", "pga_only", ",", "sites", ")", ":", "Vref", "=", "750.0", "Vcon", "=", "1000.0", "lnS", "=", "np", ".", "zeros_like", "(", "sites", ".", "vs30", ")", "# equation (3a)", "idx", "=", "sites", ".", "vs30", "<", "Vref", "lnS", "[", "idx", "]", "=", "(", "C", "[", "'b1'", "]", "*", "np", ".", "log", "(", "sites", ".", "vs30", "[", "idx", "]", "/", "Vref", ")", "+", "C", "[", "'b2'", "]", "*", "np", ".", "log", "(", "(", "pga_only", "[", "idx", "]", "+", "C", "[", "'c'", "]", "*", "(", "sites", ".", "vs30", "[", "idx", "]", "/", "Vref", ")", "**", "C", "[", "'n'", "]", ")", "/", "(", "(", "pga_only", "[", "idx", "]", "+", "C", "[", "'c'", "]", ")", "*", "(", "sites", ".", "vs30", "[", "idx", "]", "/", "Vref", ")", "**", "C", "[", "'n'", "]", ")", ")", ")", "# equation (3b)", "idx", "=", "(", "sites", ".", "vs30", ">=", "Vref", ")", "&", "(", "sites", ".", "vs30", "<=", "Vcon", ")", "lnS", "[", "idx", "]", "=", "C", "[", "'b1'", "]", "*", "np", ".", "log", "(", "sites", ".", "vs30", "[", "idx", "]", "/", "Vref", ")", "# equation (3c)", "idx", "=", "sites", ".", "vs30", ">", "Vcon", "lnS", "[", "idx", "]", "=", "C", "[", "'b1'", "]", "*", "np", ".", "log", "(", "Vcon", "/", "Vref", ")", "return", "lnS" ]
Compute non-linear term, equation (3a) to (3c), page 20.
[ "Compute", "non", "-", "linear", "term", "equation", "(", "3a", ")", "to", "(", "3c", ")", "page", "20", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/akkar_2014.py#L159-L185
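The site term above is piecewise in vs30: nonlinear below Vref = 750 m/s (equation 3a), linear between 750 and 1000 m/s (3b), and constant above Vcon = 1000 m/s (3c). Below is a hedged numpy sketch of the same branching; b1, b2, c and n are placeholder values, not the published coefficients.

import numpy as np

def site_term(vs30, pga_rock, b1=-0.4, b2=-0.1, c=2.5, n=3.2,
              Vref=750.0, Vcon=1000.0):
    lnS = np.zeros_like(vs30)
    soft = vs30 < Vref                     # equation (3a): nonlinear
    ratio = vs30[soft] / Vref
    lnS[soft] = (b1 * np.log(ratio) +
                 b2 * np.log((pga_rock[soft] + c * ratio ** n) /
                             ((pga_rock[soft] + c) * ratio ** n)))
    mid = (vs30 >= Vref) & (vs30 <= Vcon)  # equation (3b): linear
    lnS[mid] = b1 * np.log(vs30[mid] / Vref)
    lnS[vs30 > Vcon] = b1 * np.log(Vcon / Vref)  # equation (3c): capped
    return lnS

print(site_term(np.array([300.0, 800.0, 1200.0]), np.full(3, 0.2)))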
gem/oq-engine
openquake/hazardlib/gsim/akkar_2014.py
AkkarEtAlRjb2014._compute_mean
def _compute_mean(self, C, mag, dists, rake): """ Compute and return mean value without site conditions, that is equations (1a) and (1b), p.2981-2982. """ mean = ( C['a1'] + self._compute_linear_magnitude_term(C, mag) + self._compute_quadratic_magnitude_term(C, mag) + self._compute_logarithmic_distance_term(C, mag, dists) + self._compute_faulting_style_term(C, rake) ) return mean
python
def _compute_mean(self, C, mag, dists, rake): mean = ( C['a1'] + self._compute_linear_magnitude_term(C, mag) + self._compute_quadratic_magnitude_term(C, mag) + self._compute_logarithmic_distance_term(C, mag, dists) + self._compute_faulting_style_term(C, rake) ) return mean
[ "def", "_compute_mean", "(", "self", ",", "C", ",", "mag", ",", "dists", ",", "rake", ")", ":", "mean", "=", "(", "C", "[", "'a1'", "]", "+", "self", ".", "_compute_linear_magnitude_term", "(", "C", ",", "mag", ")", "+", "self", ".", "_compute_quadratic_magnitude_term", "(", "C", ",", "mag", ")", "+", "self", ".", "_compute_logarithmic_distance_term", "(", "C", ",", "mag", ",", "dists", ")", "+", "self", ".", "_compute_faulting_style_term", "(", "C", ",", "rake", ")", ")", "return", "mean" ]
Compute and return mean value without site conditions, that is equations (1a) and (1b), p.2981-2982.
[ "Compute", "and", "return", "mean", "value", "without", "site", "conditions", "that", "is", "equations", "(", "1a", ")", "and", "(", "1b", ")", "p", ".", "2981", "-", "2982", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/akkar_2014.py#L187-L200
gem/oq-engine
openquake/hazardlib/gsim/akkar_2014.py
AkkarEtAlRhyp2014._compute_logarithmic_distance_term
def _compute_logarithmic_distance_term(self, C, mag, dists): """ Compute and return fourth term in equations (2a) and (2b), page 20. """ return ( (C['a4'] + C['a5'] * (mag - self.c1)) * np.log(np.sqrt(dists.rhypo ** 2 + C['a6'] ** 2)) )
python
def _compute_logarithmic_distance_term(self, C, mag, dists): return ( (C['a4'] + C['a5'] * (mag - self.c1)) * np.log(np.sqrt(dists.rhypo ** 2 + C['a6'] ** 2)) )
[ "def", "_compute_logarithmic_distance_term", "(", "self", ",", "C", ",", "mag", ",", "dists", ")", ":", "return", "(", "(", "C", "[", "'a4'", "]", "+", "C", "[", "'a5'", "]", "*", "(", "mag", "-", "self", ".", "c1", ")", ")", "*", "np", ".", "log", "(", "np", ".", "sqrt", "(", "dists", ".", "rhypo", "**", "2", "+", "C", "[", "'a6'", "]", "**", "2", ")", ")", ")" ]
Compute and return fourth term in equations (2a) and (2b), page 20.
[ "Compute", "and", "return", "fourth", "term", "in", "equations", "(", "2a", ")", "and", "(", "2b", ")", "page", "20", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/akkar_2014.py#L385-L393
gem/oq-engine
openquake/hazardlib/gsim/kale_2015.py
KaleEtAl2015Turkey._get_stddevs
def _get_stddevs(self, C, rup, shape, stddev_types): """ Return standard deviations as defined in p. 971. """ weight = self._compute_weight_std(C, rup.mag) std_intra = weight * C["sd1"] * np.ones(shape) std_inter = weight * C["sd2"] * np.ones(shape) stddevs = [] for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const.StdDev.TOTAL: stddevs.append(np.sqrt(std_intra ** 2. + std_inter ** 2.)) elif stddev_type == const.StdDev.INTRA_EVENT: stddevs.append(std_intra) elif stddev_type == const.StdDev.INTER_EVENT: stddevs.append(std_inter) return stddevs
python
def _get_stddevs(self, C, rup, shape, stddev_types): weight = self._compute_weight_std(C, rup.mag) std_intra = weight * C["sd1"] * np.ones(shape) std_inter = weight * C["sd2"] * np.ones(shape) stddevs = [] for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const.StdDev.TOTAL: stddevs.append(np.sqrt(std_intra ** 2. + std_inter ** 2.)) elif stddev_type == const.StdDev.INTRA_EVENT: stddevs.append(std_intra) elif stddev_type == const.StdDev.INTER_EVENT: stddevs.append(std_inter) return stddevs
[ "def", "_get_stddevs", "(", "self", ",", "C", ",", "rup", ",", "shape", ",", "stddev_types", ")", ":", "weight", "=", "self", ".", "_compute_weight_std", "(", "C", ",", "rup", ".", "mag", ")", "std_intra", "=", "weight", "*", "C", "[", "\"sd1\"", "]", "*", "np", ".", "ones", "(", "shape", ")", "std_inter", "=", "weight", "*", "C", "[", "\"sd2\"", "]", "*", "np", ".", "ones", "(", "shape", ")", "stddevs", "=", "[", "]", "for", "stddev_type", "in", "stddev_types", ":", "assert", "stddev_type", "in", "self", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", "if", "stddev_type", "==", "const", ".", "StdDev", ".", "TOTAL", ":", "stddevs", ".", "append", "(", "np", ".", "sqrt", "(", "std_intra", "**", "2.", "+", "std_inter", "**", "2.", ")", ")", "elif", "stddev_type", "==", "const", ".", "StdDev", ".", "INTRA_EVENT", ":", "stddevs", ".", "append", "(", "std_intra", ")", "elif", "stddev_type", "==", "const", ".", "StdDev", ".", "INTER_EVENT", ":", "stddevs", ".", "append", "(", "std_inter", ")", "return", "stddevs" ]
Return standard deviations as defined in p. 971.
[ "Return", "standard", "deviations", "as", "defined", "in", "p", ".", "971", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kale_2015.py#L99-L116
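For context, the total standard deviation returned above is the usual square-root-of-sum-of-squares combination of the within-event (sd1) and between-event (sd2) parts. A tiny numeric check with illustrative values, not the published coefficients:

import numpy as np

weight, sd1, sd2 = 1.0, 0.6, 0.4   # illustrative values
std_intra = weight * sd1           # within-event part
std_inter = weight * sd2           # between-event part
print(np.sqrt(std_intra ** 2 + std_inter ** 2))  # total ~= 0.7211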
gem/oq-engine
openquake/hazardlib/gsim/kale_2015.py
KaleEtAl2015Turkey._compute_weight_std
def _compute_weight_std(self, C, mag): """ Common part of equations 8 and 9, page 971. """ if mag < 6.0: return C['a1'] elif mag >= 6.0 and mag < 6.5: return C['a1'] + (C['a2'] - C['a1']) * ((mag - 6.0) / 0.5) else: return C['a2']
python
def _compute_weight_std(self, C, mag): if mag < 6.0: return C['a1'] elif mag >= 6.0 and mag < 6.5: return C['a1'] + (C['a2'] - C['a1']) * ((mag - 6.0) / 0.5) else: return C['a2']
[ "def", "_compute_weight_std", "(", "self", ",", "C", ",", "mag", ")", ":", "if", "mag", "<", "6.0", ":", "return", "C", "[", "'a1'", "]", "elif", "mag", ">=", "6.0", "and", "mag", "<", "6.5", ":", "return", "C", "[", "'a1'", "]", "+", "(", "C", "[", "'a2'", "]", "-", "C", "[", "'a1'", "]", ")", "*", "(", "(", "mag", "-", "6.0", ")", "/", "0.5", ")", "else", ":", "return", "C", "[", "'a2'", "]" ]
Common part of equations 8 and 9, page 971.
[ "Common", "part", "of", "equations", "8", "and", "9", "page", "971", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kale_2015.py#L118-L127
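The weight above is a ramp in magnitude: constant a1 below M6.0, constant a2 above M6.5, and linear in between. A standalone sketch with placeholder a1/a2 values:

def weight_std(a1, a2, mag):
    # constant below M6.0, constant above M6.5, linear ramp in between
    if mag < 6.0:
        return a1
    elif mag < 6.5:
        return a1 + (a2 - a1) * ((mag - 6.0) / 0.5)
    return a2

for m in (5.5, 6.0, 6.25, 6.5, 7.0):
    print(m, weight_std(1.0, 0.8, m))  # 1.0, 1.0, 0.9, 0.8, 0.8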
gem/oq-engine
openquake/hazardlib/gsim/kale_2015.py
KaleEtAl2015Turkey._compute_magnitude_scaling_term
def _compute_magnitude_scaling_term(self, C, mag): """ Compute and return magnitude scaling term in equation 2, page 970. """ c1 = self.CONSTS['c1'] if mag <= c1: return C['b1'] + C['b2'] * (mag - c1) + C['b3'] * (8.5 - mag) ** 2 else: return C['b1'] + C['b7'] * (mag - c1) + C['b3'] * (8.5 - mag) ** 2
python
def _compute_magnitude_scaling_term(self, C, mag): c1 = self.CONSTS['c1'] if mag <= c1: return C['b1'] + C['b2'] * (mag - c1) + C['b3'] * (8.5 - mag) ** 2 else: return C['b1'] + C['b7'] * (mag - c1) + C['b3'] * (8.5 - mag) ** 2
[ "def", "_compute_magnitude_scaling_term", "(", "self", ",", "C", ",", "mag", ")", ":", "c1", "=", "self", ".", "CONSTS", "[", "'c1'", "]", "if", "mag", "<=", "c1", ":", "return", "C", "[", "'b1'", "]", "+", "C", "[", "'b2'", "]", "*", "(", "mag", "-", "c1", ")", "+", "C", "[", "'b3'", "]", "*", "(", "8.5", "-", "mag", ")", "**", "2", "else", ":", "return", "C", "[", "'b1'", "]", "+", "C", "[", "'b7'", "]", "*", "(", "mag", "-", "c1", ")", "+", "C", "[", "'b3'", "]", "*", "(", "8.5", "-", "mag", ")", "**", "2" ]
Compute and return magnitude scaling term in equation 2, page 970.
[ "Compute", "and", "return", "magnitude", "scaling", "term", "in", "equation", "2", "page", "970", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kale_2015.py#L129-L138
gem/oq-engine
openquake/hazardlib/gsim/kale_2015.py
KaleEtAl2015Turkey._compute_geometric_decay_term
def _compute_geometric_decay_term(self, C, mag, dists): """ Compute and return geometric decay term in equation 3, page 970. """ c1 = self.CONSTS['c1'] return ( (C['b4'] + C['b5'] * (mag - c1)) * np.log(np.sqrt(dists.rjb ** 2.0 + C['b6'] ** 2.0)) )
python
def _compute_geometric_decay_term(self, C, mag, dists): c1 = self.CONSTS['c1'] return ( (C['b4'] + C['b5'] * (mag - c1)) * np.log(np.sqrt(dists.rjb ** 2.0 + C['b6'] ** 2.0)) )
[ "def", "_compute_geometric_decay_term", "(", "self", ",", "C", ",", "mag", ",", "dists", ")", ":", "c1", "=", "self", ".", "CONSTS", "[", "'c1'", "]", "return", "(", "(", "C", "[", "'b4'", "]", "+", "C", "[", "'b5'", "]", "*", "(", "mag", "-", "c1", ")", ")", "*", "np", ".", "log", "(", "np", ".", "sqrt", "(", "dists", ".", "rjb", "**", "2.0", "+", "C", "[", "'b6'", "]", "**", "2.0", ")", ")", ")" ]
Compute and return geometric decay term in equation 3, page 970.
[ "Compute", "and", "return", "geometric", "decay", "term", "in", "equation", "3", "page", "970", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kale_2015.py#L140-L149
gem/oq-engine
openquake/hazardlib/gsim/kale_2015.py
KaleEtAl2015Turkey._compute_anelestic_attenuation_term
def _compute_anelestic_attenuation_term(self, C, dists): """ Compute and return anelastic attenuation term in equation 5, page 970. """ f_aat = np.zeros_like(dists.rjb) idx = dists.rjb > 80.0 f_aat[idx] = C["b10"] * (dists.rjb[idx] - 80.0) return f_aat
python
def _compute_anelestic_attenuation_term(self, C, dists): f_aat = np.zeros_like(dists.rjb) idx = dists.rjb > 80.0 f_aat[idx] = C["b10"] * (dists.rjb[idx] - 80.0) return f_aat
[ "def", "_compute_anelestic_attenuation_term", "(", "self", ",", "C", ",", "dists", ")", ":", "f_aat", "=", "np", ".", "zeros_like", "(", "dists", ".", "rjb", ")", "idx", "=", "dists", ".", "rjb", ">", "80.0", "f_aat", "[", "idx", "]", "=", "C", "[", "\"b10\"", "]", "*", "(", "dists", ".", "rjb", "[", "idx", "]", "-", "80.0", ")", "return", "f_aat" ]
Compute and return anelastic attenuation term in equation 5, page 970.
[ "Compute", "and", "return", "anelastic", "attenuation", "term", "in", "equation", "5", "page", "970", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kale_2015.py#L161-L169
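The anelastic term above is zero out to 80 km and grows linearly in (rjb - 80) beyond it. A minimal numeric sketch with a placeholder slope b10:

import numpy as np

rjb = np.array([20.0, 80.0, 150.0])
b10 = -0.001                       # placeholder slope
f_aat = np.zeros_like(rjb)
far = rjb > 80.0
f_aat[far] = b10 * (rjb[far] - 80.0)
print(f_aat)                       # [ 0.    0.   -0.07]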
gem/oq-engine
openquake/hazardlib/gsim/kale_2015.py
KaleEtAl2015Turkey._compute_non_linear_term
def _compute_non_linear_term(self, C, pga_only, sites): """ Compute non-linear term, equation 6, page 970. """ Vref = self.CONSTS['Vref'] Vcon = self.CONSTS['Vcon'] c = self.CONSTS['c'] n = self.CONSTS['n'] lnS = np.zeros_like(sites.vs30) # equation (6a) idx = sites.vs30 < Vref lnS[idx] = ( C['sb1'] * np.log(sites.vs30[idx] / Vref) + C['sb2'] * np.log( (pga_only[idx] + c * (sites.vs30[idx] / Vref) ** n) / ((pga_only[idx] + c) * (sites.vs30[idx] / Vref) ** n) ) ) # equation (6b) idx = sites.vs30 >= Vref new_sites = sites.vs30[idx] new_sites[new_sites > Vcon] = Vcon lnS[idx] = C['sb1'] * np.log(new_sites / Vref) return lnS
python
def _compute_non_linear_term(self, C, pga_only, sites): Vref = self.CONSTS['Vref'] Vcon = self.CONSTS['Vcon'] c = self.CONSTS['c'] n = self.CONSTS['n'] lnS = np.zeros_like(sites.vs30) idx = sites.vs30 < Vref lnS[idx] = ( C['sb1'] * np.log(sites.vs30[idx] / Vref) + C['sb2'] * np.log( (pga_only[idx] + c * (sites.vs30[idx] / Vref) ** n) / ((pga_only[idx] + c) * (sites.vs30[idx] / Vref) ** n) ) ) idx = sites.vs30 >= Vref new_sites = sites.vs30[idx] new_sites[new_sites > Vcon] = Vcon lnS[idx] = C['sb1'] * np.log(new_sites / Vref) return lnS
[ "def", "_compute_non_linear_term", "(", "self", ",", "C", ",", "pga_only", ",", "sites", ")", ":", "Vref", "=", "self", ".", "CONSTS", "[", "'Vref'", "]", "Vcon", "=", "self", ".", "CONSTS", "[", "'Vcon'", "]", "c", "=", "self", ".", "CONSTS", "[", "'c'", "]", "n", "=", "self", ".", "CONSTS", "[", "'n'", "]", "lnS", "=", "np", ".", "zeros_like", "(", "sites", ".", "vs30", ")", "# equation (6a)\r", "idx", "=", "sites", ".", "vs30", "<", "Vref", "lnS", "[", "idx", "]", "=", "(", "C", "[", "'sb1'", "]", "*", "np", ".", "log", "(", "sites", ".", "vs30", "[", "idx", "]", "/", "Vref", ")", "+", "C", "[", "'sb2'", "]", "*", "np", ".", "log", "(", "(", "pga_only", "[", "idx", "]", "+", "c", "*", "(", "sites", ".", "vs30", "[", "idx", "]", "/", "Vref", ")", "**", "n", ")", "/", "(", "(", "pga_only", "[", "idx", "]", "+", "c", ")", "*", "(", "sites", ".", "vs30", "[", "idx", "]", "/", "Vref", ")", "**", "n", ")", ")", ")", "# equation (6b)\r", "idx", "=", "sites", ".", "vs30", ">=", "Vref", "new_sites", "=", "sites", ".", "vs30", "[", "idx", "]", "new_sites", "[", "new_sites", ">", "Vcon", "]", "=", "Vcon", "lnS", "[", "idx", "]", "=", "C", "[", "'sb1'", "]", "*", "np", ".", "log", "(", "new_sites", "/", "Vref", ")", "return", "lnS" ]
Compute non-linear term, equation 6, page 970.
[ "Compute", "non", "-", "linear", "term", "equation", "6", "page", "970", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kale_2015.py#L171-L197
gem/oq-engine
openquake/hazardlib/gsim/kale_2015.py
KaleEtAl2015Turkey._compute_mean
def _compute_mean(self, C, mag, dists, rake): """ Compute and return mean value without site conditions, that is equations 2-5, page 970. """ mean = ( self._compute_magnitude_scaling_term(C, mag) + self._compute_geometric_decay_term(C, mag, dists) + self._compute_faulting_style_term(C, rake) + self._compute_anelestic_attenuation_term(C, dists) ) return mean
python
def _compute_mean(self, C, mag, dists, rake): mean = ( self._compute_magnitude_scaling_term(C, mag) + self._compute_geometric_decay_term(C, mag, dists) + self._compute_faulting_style_term(C, rake) + self._compute_anelestic_attenuation_term(C, dists) ) return mean
[ "def", "_compute_mean", "(", "self", ",", "C", ",", "mag", ",", "dists", ",", "rake", ")", ":", "mean", "=", "(", "self", ".", "_compute_magnitude_scaling_term", "(", "C", ",", "mag", ")", "+", "self", ".", "_compute_geometric_decay_term", "(", "C", ",", "mag", ",", "dists", ")", "+", "self", ".", "_compute_faulting_style_term", "(", "C", ",", "rake", ")", "+", "self", ".", "_compute_anelestic_attenuation_term", "(", "C", ",", "dists", ")", ")", "return", "mean" ]
Compute and return mean value without site conditions, that is equations 2-5, page 970.
[ "Compute", "and", "return", "mean", "value", "without", "site", "conditions", "that", "is", "equations", "2", "-", "5", "page", "970", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kale_2015.py#L199-L211
gem/oq-engine
openquake/hazardlib/source/multi.py
MultiPointSource.count_ruptures
def count_ruptures(self): """ See :meth:`openquake.hazardlib.source.base.BaseSeismicSource.count_ruptures` for description of parameters and return value. """ return (len(self.get_annual_occurrence_rates()) * len(self.nodal_plane_distribution.data) * len(self.hypocenter_distribution.data))
python
def count_ruptures(self): return (len(self.get_annual_occurrence_rates()) * len(self.nodal_plane_distribution.data) * len(self.hypocenter_distribution.data))
[ "def", "count_ruptures", "(", "self", ")", ":", "return", "(", "len", "(", "self", ".", "get_annual_occurrence_rates", "(", ")", ")", "*", "len", "(", "self", ".", "nodal_plane_distribution", ".", "data", ")", "*", "len", "(", "self", ".", "hypocenter_distribution", ".", "data", ")", ")" ]
See :meth:`openquake.hazardlib.source.base.BaseSeismicSource.count_ruptures` for description of parameters and return value.
[ "See", ":", "meth", ":", "openquake", ".", "hazardlib", ".", "source", ".", "base", ".", "BaseSeismicSource", ".", "count_ruptures", "for", "description", "of", "parameters", "and", "return", "value", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/source/multi.py#L99-L107
gem/oq-engine
openquake/hazardlib/source/multi.py
MultiPointSource.get_bounding_box
def get_bounding_box(self, maxdist): """ Bounding box containing all the point sources, enlarged by the maximum distance. """ return utils.get_bounding_box([ps.location for ps in self], maxdist)
python
def get_bounding_box(self, maxdist): return utils.get_bounding_box([ps.location for ps in self], maxdist)
[ "def", "get_bounding_box", "(", "self", ",", "maxdist", ")", ":", "return", "utils", ".", "get_bounding_box", "(", "[", "ps", ".", "location", "for", "ps", "in", "self", "]", ",", "maxdist", ")" ]
Bounding box containing all the point sources, enlarged by the maximum distance.
[ "Bounding", "box", "containing", "all", "the", "point", "sources", "enlarged", "by", "the", "maximum", "distance", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/source/multi.py#L109-L114
gem/oq-engine
openquake/hazardlib/source/multi.py
MultiPointSource.geom
def geom(self): """ :returns: the geometry as an array of shape (N, 3) """ return numpy.array([(p.x, p.y, p.z) for p in self.mesh], numpy.float32)
python
def geom(self): return numpy.array([(p.x, p.y, p.z) for p in self.mesh], numpy.float32)
[ "def", "geom", "(", "self", ")", ":", "return", "numpy", ".", "array", "(", "[", "(", "p", ".", "x", ",", "p", ".", "y", ",", "p", ".", "z", ")", "for", "p", "in", "self", ".", "mesh", "]", ",", "numpy", ".", "float32", ")" ]
:returns: the geometry as an array of shape (N, 3)
[ ":", "returns", ":", "the", "geometry", "as", "an", "array", "of", "shape", "(", "N", "3", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/source/multi.py#L171-L176
gem/oq-engine
openquake/hazardlib/gsim/pezeshk_2011.py
PezeshkEtAl2011.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ # Extracting dictionary of coefficients specific to required # intensity measure type. C = self.COEFFS[imt] imean = (self._compute_magnitude(rup, C) + self._compute_attenuation(rup, dists, imt, C) + self._compute_distance(rup, dists, imt, C)) mean = np.log(10.0 ** (imean)) istddevs = self._get_stddevs(C, stddev_types, rup, imt, num_sites=len(dists.rrup)) stddevs = np.log(10.0 ** np.array(istddevs)) return mean, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): C = self.COEFFS[imt] imean = (self._compute_magnitude(rup, C) + self._compute_attenuation(rup, dists, imt, C) + self._compute_distance(rup, dists, imt, C)) mean = np.log(10.0 ** (imean)) istddevs = self._get_stddevs(C, stddev_types, rup, imt, num_sites=len(dists.rrup)) stddevs = np.log(10.0 ** np.array(istddevs)) return mean, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "# Extracting dictionary of coefficients specific to required", "# intensity measure type.", "C", "=", "self", ".", "COEFFS", "[", "imt", "]", "imean", "=", "(", "self", ".", "_compute_magnitude", "(", "rup", ",", "C", ")", "+", "self", ".", "_compute_attenuation", "(", "rup", ",", "dists", ",", "imt", ",", "C", ")", "+", "self", ".", "_compute_distance", "(", "rup", ",", "dists", ",", "imt", ",", "C", ")", ")", "mean", "=", "np", ".", "log", "(", "10.0", "**", "(", "imean", ")", ")", "istddevs", "=", "self", ".", "_get_stddevs", "(", "C", ",", "stddev_types", ",", "rup", ",", "imt", ",", "num_sites", "=", "len", "(", "dists", ".", "rrup", ")", ")", "stddevs", "=", "np", ".", "log", "(", "10.0", "**", "np", ".", "array", "(", "istddevs", ")", ")", "return", "mean", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/pezeshk_2011.py#L74-L96
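One detail worth flagging in the conversion above: the model is fit in log10 units, and `np.log(10.0 ** imean)` is simply `imean * ln(10)`. A tiny check with an illustrative value:

import numpy as np

imean = 0.3                    # illustrative log10 value
print(np.log(10.0 ** imean))   # 0.6907755...
print(imean * np.log(10.0))    # same number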
gem/oq-engine
openquake/hazardlib/gsim/pezeshk_2011.py
PezeshkEtAl2011._compute_attenuation
def _compute_attenuation(self, rup, dists, imt, C): """ Compute the second term of the equation described on p. 1866: " [(c4 + c5 * M) * min{ log10(R), log10(70.) }] + [(c6 + c7 * M) * max{ min{ log10(R/70.), log10(140./70.) }, 0.}] + [(c8 + c9 * M) * max{ log10(R/140.), 0}] " """ vec = np.ones(len(dists.rrup)) a1 = (np.log10(np.sqrt(dists.rrup ** 2.0 + C['c11'] ** 2.0)), np.log10(70. * vec)) a = np.column_stack([a1[0], a1[1]]) b3 = (np.log10(np.sqrt(dists.rrup ** 2.0 + C['c11'] ** 2.0) / (70. * vec)), np.log10((140. / 70.) * vec)) b2 = np.column_stack([b3[0], b3[1]]) b1 = ([np.min(b2, axis=1), 0. * vec]) b = np.column_stack([b1[0], b1[1]]) c1 = (np.log10(np.sqrt(dists.rrup ** 2.0 + C['c11'] ** 2.0) / (140.) * vec), 0. * vec) c = np.column_stack([c1[0], c1[1]]) return (((C['c4'] + C['c5'] * rup.mag) * np.min(a, axis=1)) + ((C['c6'] + C['c7'] * rup.mag) * np.max(b, axis=1)) + ((C['c8'] + C['c9'] * rup.mag) * np.max(c, axis=1)))
python
def _compute_attenuation(self, rup, dists, imt, C): vec = np.ones(len(dists.rrup)) a1 = (np.log10(np.sqrt(dists.rrup ** 2.0 + C['c11'] ** 2.0)), np.log10(70. * vec)) a = np.column_stack([a1[0], a1[1]]) b3 = (np.log10(np.sqrt(dists.rrup ** 2.0 + C['c11'] ** 2.0) / (70. * vec)), np.log10((140. / 70.) * vec)) b2 = np.column_stack([b3[0], b3[1]]) b1 = ([np.min(b2, axis=1), 0. * vec]) b = np.column_stack([b1[0], b1[1]]) c1 = (np.log10(np.sqrt(dists.rrup ** 2.0 + C['c11'] ** 2.0) / (140.) * vec), 0. * vec) c = np.column_stack([c1[0], c1[1]]) return (((C['c4'] + C['c5'] * rup.mag) * np.min(a, axis=1)) + ((C['c6'] + C['c7'] * rup.mag) * np.max(b, axis=1)) + ((C['c8'] + C['c9'] * rup.mag) * np.max(c, axis=1)))
[ "def", "_compute_attenuation", "(", "self", ",", "rup", ",", "dists", ",", "imt", ",", "C", ")", ":", "vec", "=", "np", ".", "ones", "(", "len", "(", "dists", ".", "rrup", ")", ")", "a1", "=", "(", "np", ".", "log10", "(", "np", ".", "sqrt", "(", "dists", ".", "rrup", "**", "2.0", "+", "C", "[", "'c11'", "]", "**", "2.0", ")", ")", ",", "np", ".", "log10", "(", "70.", "*", "vec", ")", ")", "a", "=", "np", ".", "column_stack", "(", "[", "a1", "[", "0", "]", ",", "a1", "[", "1", "]", "]", ")", "b3", "=", "(", "np", ".", "log10", "(", "np", ".", "sqrt", "(", "dists", ".", "rrup", "**", "2.0", "+", "C", "[", "'c11'", "]", "**", "2.0", ")", "/", "(", "70.", "*", "vec", ")", ")", ",", "np", ".", "log10", "(", "(", "140.", "/", "70.", ")", "*", "vec", ")", ")", "b2", "=", "np", ".", "column_stack", "(", "[", "b3", "[", "0", "]", ",", "b3", "[", "1", "]", "]", ")", "b1", "=", "(", "[", "np", ".", "min", "(", "b2", ",", "axis", "=", "1", ")", ",", "0.", "*", "vec", "]", ")", "b", "=", "np", ".", "column_stack", "(", "[", "b1", "[", "0", "]", ",", "b1", "[", "1", "]", "]", ")", "c1", "=", "(", "np", ".", "log10", "(", "np", ".", "sqrt", "(", "dists", ".", "rrup", "**", "2.0", "+", "C", "[", "'c11'", "]", "**", "2.0", ")", "/", "(", "140.", ")", "*", "vec", ")", ",", "0.", "*", "vec", ")", "c", "=", "np", ".", "column_stack", "(", "[", "c1", "[", "0", "]", ",", "c1", "[", "1", "]", "]", ")", "return", "(", "(", "(", "C", "[", "'c4'", "]", "+", "C", "[", "'c5'", "]", "*", "rup", ".", "mag", ")", "*", "np", ".", "min", "(", "a", ",", "axis", "=", "1", ")", ")", "+", "(", "(", "C", "[", "'c6'", "]", "+", "C", "[", "'c7'", "]", "*", "rup", ".", "mag", ")", "*", "np", ".", "max", "(", "b", ",", "axis", "=", "1", ")", ")", "+", "(", "(", "C", "[", "'c8'", "]", "+", "C", "[", "'c9'", "]", "*", "rup", ".", "mag", ")", "*", "np", ".", "max", "(", "c", ",", "axis", "=", "1", ")", ")", ")" ]
Compute the second term of the equation described on p. 1866: " [(c4 + c5 * M) * min{ log10(R), log10(70.) }] + [(c6 + c7 * M) * max{ min{ log10(R/70.), log10(140./70.) }, 0.}] + [(c8 + c9 * M) * max{ log10(R/140.), 0}] "
[ "Compute", "the", "second", "term", "of", "the", "equation", "described", "on", "p", ".", "1866", ":" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/pezeshk_2011.py#L118-L148
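The min/max gymnastics in the function above implement a trilinear distance scaling with hinges at 70 km and 140 km on the effective distance R = sqrt(rrup^2 + c11^2). A hedged sketch of the three clipped segments; c11 is a placeholder value:

import numpy as np

def trilinear_terms(rrup, c11=10.0):
    R = np.sqrt(rrup ** 2 + c11 ** 2)
    seg1 = np.minimum(np.log10(R), np.log10(70.0))           # active up to 70 km
    seg2 = np.clip(np.log10(R / 70.0), 0.0, np.log10(2.0))   # active 70-140 km
    seg3 = np.maximum(np.log10(R / 140.0), 0.0)              # active beyond 140 km
    return seg1, seg2, seg3

for seg in trilinear_terms(np.array([10.0, 100.0, 300.0])):
    print(seg)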
gem/oq-engine
openquake/hazardlib/gsim/pezeshk_2011.py
PezeshkEtAl2011._compute_standard_dev
def _compute_standard_dev(self, rup, imt, C): """ Compute the standard deviation in terms of magnitude described on p. 1866, eq. 6 """ sigma_mean = 0. if rup.mag <= 7.0: sigma_mean = (C['c12'] * rup.mag) + C['c13'] elif rup.mag > 7.0: sigma_mean = (-0.00695 * rup.mag) + C['c14'] return sigma_mean
python
def _compute_standard_dev(self, rup, imt, C): sigma_mean = 0. if rup.mag <= 7.0: sigma_mean = (C['c12'] * rup.mag) + C['c13'] elif rup.mag > 7.0: sigma_mean = (-0.00695 * rup.mag) + C['c14'] return sigma_mean
[ "def", "_compute_standard_dev", "(", "self", ",", "rup", ",", "imt", ",", "C", ")", ":", "sigma_mean", "=", "0.", "if", "rup", ".", "mag", "<=", "7.0", ":", "sigma_mean", "=", "(", "C", "[", "'c12'", "]", "*", "rup", ".", "mag", ")", "+", "C", "[", "'c13'", "]", "elif", "rup", ".", "mag", ">", "7.0", ":", "sigma_mean", "=", "(", "-", "0.00695", "*", "rup", ".", "mag", ")", "+", "C", "[", "'c14'", "]", "return", "sigma_mean" ]
Compute the standard deviation in terms of magnitude described on p. 1866, eq. 6
[ "Compute", "the", "the", "standard", "deviation", "in", "terms", "of", "magnitude", "described", "on", "p", ".", "1866", "eq", ".", "6" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/pezeshk_2011.py#L158-L168
gem/oq-engine
openquake/hmtk/strain/shift.py
Shift._get_base_rates
def _get_base_rates(self, base_params): ''' Defines the base moment rate that should be assigned to places of zero strain (i.e. Intraplate regions). In Bird et al (2010) this is taken as basic rate of Intraplate events in GCMT catalogue above the threshold magnitude :param dict base_params: Parameters needed for calculating the base rate. Requires: 'CMT_EVENTS': The number of CMT events 'area': Total area (km ^ 2) of the region class 'CMT_duration': Duration of reference catalogue 'CMT_moment': Moment rate from CMT catalogue 'corner_mag': Corner magnitude of Tapered G-R for region 'beta': Beta value of tapered G-R for distribution ''' base_ipl_rate = base_params['CMT_EVENTS'] / ( base_params['area'] * base_params['CMT_duration']) base_rate = np.zeros(self.number_magnitudes, dtype=float) for iloc in range(0, self.number_magnitudes): base_rate[iloc] = base_ipl_rate * calculate_taper_function( base_params['CMT_moment'], self.threshold_moment[iloc], moment_function(base_params['corner_mag']), base_params['beta']) return base_rate
python
def _get_base_rates(self, base_params): base_ipl_rate = base_params['CMT_EVENTS'] / ( base_params['area'] * base_params['CMT_duration']) base_rate = np.zeros(self.number_magnitudes, dtype=float) for iloc in range(0, self.number_magnitudes): base_rate[iloc] = base_ipl_rate * calculate_taper_function( base_params['CMT_moment'], self.threshold_moment[iloc], moment_function(base_params['corner_mag']), base_params['beta']) return base_rate
[ "def", "_get_base_rates", "(", "self", ",", "base_params", ")", ":", "base_ipl_rate", "=", "base_params", "[", "'CMT_EVENTS'", "]", "/", "(", "base_params", "[", "'area'", "]", "*", "base_params", "[", "'CMT_duration'", "]", ")", "base_rate", "=", "np", ".", "zeros", "(", "self", ".", "number_magnitudes", ",", "dtype", "=", "float", ")", "for", "iloc", "in", "range", "(", "0", ",", "self", ".", "number_magnitudes", ")", ":", "base_rate", "[", "iloc", "]", "=", "base_ipl_rate", "*", "calculate_taper_function", "(", "base_params", "[", "'CMT_moment'", "]", ",", "self", ".", "threshold_moment", "[", "iloc", "]", ",", "moment_function", "(", "base_params", "[", "'corner_mag'", "]", ")", ",", "base_params", "[", "'beta'", "]", ")", "return", "base_rate" ]
Defines the base moment rate that should be assigned to places of zero strain (i.e. Intraplate regions). In Bird et al (2010) this is taken as basic rate of Intraplate events in GCMT catalogue above the threshold magnitude :param dict base_params: Parameters needed for calculating the base rate. Requires: 'CMT_EVENTS': The number of CMT events 'area': Total area (km ^ 2) of the region class 'CMT_duration': Duration of reference catalogue 'CMT_moment': Moment rate from CMT catalogue 'corner_mag': Corner magnitude of Tapered G-R for region 'beta': Beta value of tapered G-R for distribution
[ "Defines", "the", "base", "moment", "rate", "that", "should", "be", "assigned", "to", "places", "of", "zero", "strain", "(", "i", ".", "e", ".", "Intraplate", "regions", ")", ".", "In", "Bird", "et", "al", "(", "2010", ")", "this", "is", "taken", "as", "basic", "rate", "of", "Intraplate", "events", "in", "GCMT", "catalogue", "above", "the", "threshold", "magnitude" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/strain/shift.py#L251-L277
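The taper applied above follows the tapered Gutenberg-Richter form of Bird & Kagan (2004). Below is a hedged re-implementation sketch; this is not the hmtk `calculate_taper_function` itself, and the magnitude-to-moment constant is the common Hanks & Kanamori convention.

import numpy as np

def taper_function(cmt_moment, threshold_moment, corner_moment, beta):
    # fraction of the catalogue rate surviving above threshold_moment
    return ((cmt_moment / threshold_moment) ** beta *
            np.exp((cmt_moment - threshold_moment) / corner_moment))

def moment_function(mw):
    # seismic moment in N-m from moment magnitude (Hanks & Kanamori)
    return 10.0 ** (1.5 * mw + 9.05)

print(taper_function(moment_function(5.8), moment_function(6.5),
                     moment_function(8.0), 0.63))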
gem/oq-engine
openquake/hmtk/strain/shift.py
Shift.calculate_activity_rate
def calculate_activity_rate(self, strain_data, cumulative=False, in_seconds=False): ''' Main function to calculate the activity rate (for each of the magnitudes in target_magnitudes) for all of the cells specified in the input strain model file :param strain_data: Strain model as an instance of :class: openquake.hmtk.strain.geodetic_strain.GeodeticStrain :param bool cumulative: Set to true if the cumulative rate is required, False for incremental :param bool in_seconds: Returns the activity rate in seconds (True) or else as an annual activity rate ''' self.strain = strain_data self.strain.target_magnitudes = self.target_magnitudes # Adjust strain rates from annual to seconds (SI) for key in STRAIN_VARIABLES: self.strain.data[key] = self.strain.data[key] / SECS_PER_YEAR if 'region' not in self.strain.data: raise ValueError('Cannot implment SHIFT methodology without ' 'definition of regionalisation') else: self._reclassify_Bird_regions_with_data() # Initially all seismicity rates assigned to background rate self.strain.seismicity_rate = np.tile( self.base_rate, [self.strain.get_number_observations(), 1]) regionalisation_zones = ( np.unique(self.strain.data['region'])).tolist() for region in regionalisation_zones: id0 = self.strain.data['region'] == region if b'IPL' in region: # For intra-plate seismicity everything is refered to # the background rate continue elif b'OSR_special_1' in region: # Special case 1 - normal and transform faulting calculated_rate = self.get_rate_osr_normal_transform( self.threshold_moment, id0) elif b'OSR_special_2' in region: # Special case 2 - convergent and transform faulting calculated_rate = self.get_rate_osr_convergent_transform( self.threshold_moment, id0) else: region = region.decode('utf-8') calculated_rate = \ self.regionalisation[region]['adjustment_factor'] * \ self.continuum_seismicity(self.threshold_moment, self.strain.data['e1h'][id0], self.strain.data['e2h'][id0], self.strain.data['err'][id0], self.regionalisation[region]) for jloc, iloc in enumerate(np.where(id0)[0]): # Where the calculated rate exceeds the base rate then becomes # calculated rate. In this version the magnitudes are treated # independently (i.e. if Rate(M < 7) > Base Rate (M < 7) but # Rate (M > 7) < Base Rate (M > 7) then returned Rate (M < 7) # = Rate (M < 7) and returned Rate (M > 7) = Base Rate (M > 7) id1 = calculated_rate[jloc] > self.base_rate self.strain.seismicity_rate[iloc, id1] = calculated_rate[jloc, id1] if not cumulative and self.number_magnitudes > 1: # Seismicity rates are currently cumulative - need to turn them # into discrete for iloc in range(0, self.number_magnitudes - 1): self.strain.seismicity_rate[:, iloc] = \ self.strain.seismicity_rate[:, iloc] -\ self.strain.seismicity_rate[:, iloc + 1] if not in_seconds: self.strain.seismicity_rate = self.strain.seismicity_rate * \ SECS_PER_YEAR for key in STRAIN_VARIABLES: self.strain.data[key] = self.strain.data[key] * SECS_PER_YEAR
python
def calculate_activity_rate(self, strain_data, cumulative=False, in_seconds=False): self.strain = strain_data self.strain.target_magnitudes = self.target_magnitudes for key in STRAIN_VARIABLES: self.strain.data[key] = self.strain.data[key] / SECS_PER_YEAR if 'region' not in self.strain.data: raise ValueError('Cannot implment SHIFT methodology without ' 'definition of regionalisation') else: self._reclassify_Bird_regions_with_data() self.strain.seismicity_rate = np.tile( self.base_rate, [self.strain.get_number_observations(), 1]) regionalisation_zones = ( np.unique(self.strain.data['region'])).tolist() for region in regionalisation_zones: id0 = self.strain.data['region'] == region if b'IPL' in region: continue elif b'OSR_special_1' in region: calculated_rate = self.get_rate_osr_normal_transform( self.threshold_moment, id0) elif b'OSR_special_2' in region: calculated_rate = self.get_rate_osr_convergent_transform( self.threshold_moment, id0) else: region = region.decode('utf-8') calculated_rate = \ self.regionalisation[region]['adjustment_factor'] * \ self.continuum_seismicity(self.threshold_moment, self.strain.data['e1h'][id0], self.strain.data['e2h'][id0], self.strain.data['err'][id0], self.regionalisation[region]) for jloc, iloc in enumerate(np.where(id0)[0]): id1 = calculated_rate[jloc] > self.base_rate self.strain.seismicity_rate[iloc, id1] = calculated_rate[jloc, id1] if not cumulative and self.number_magnitudes > 1: for iloc in range(0, self.number_magnitudes - 1): self.strain.seismicity_rate[:, iloc] = \ self.strain.seismicity_rate[:, iloc] -\ self.strain.seismicity_rate[:, iloc + 1] if not in_seconds: self.strain.seismicity_rate = self.strain.seismicity_rate * \ SECS_PER_YEAR for key in STRAIN_VARIABLES: self.strain.data[key] = self.strain.data[key] * SECS_PER_YEAR
[ "def", "calculate_activity_rate", "(", "self", ",", "strain_data", ",", "cumulative", "=", "False", ",", "in_seconds", "=", "False", ")", ":", "self", ".", "strain", "=", "strain_data", "self", ".", "strain", ".", "target_magnitudes", "=", "self", ".", "target_magnitudes", "# Adjust strain rates from annual to seconds (SI)", "for", "key", "in", "STRAIN_VARIABLES", ":", "self", ".", "strain", ".", "data", "[", "key", "]", "=", "self", ".", "strain", ".", "data", "[", "key", "]", "/", "SECS_PER_YEAR", "if", "'region'", "not", "in", "self", ".", "strain", ".", "data", ":", "raise", "ValueError", "(", "'Cannot implment SHIFT methodology without '", "'definition of regionalisation'", ")", "else", ":", "self", ".", "_reclassify_Bird_regions_with_data", "(", ")", "# Initially all seismicity rates assigned to background rate", "self", ".", "strain", ".", "seismicity_rate", "=", "np", ".", "tile", "(", "self", ".", "base_rate", ",", "[", "self", ".", "strain", ".", "get_number_observations", "(", ")", ",", "1", "]", ")", "regionalisation_zones", "=", "(", "np", ".", "unique", "(", "self", ".", "strain", ".", "data", "[", "'region'", "]", ")", ")", ".", "tolist", "(", ")", "for", "region", "in", "regionalisation_zones", ":", "id0", "=", "self", ".", "strain", ".", "data", "[", "'region'", "]", "==", "region", "if", "b'IPL'", "in", "region", ":", "# For intra-plate seismicity everything is refered to", "# the background rate", "continue", "elif", "b'OSR_special_1'", "in", "region", ":", "# Special case 1 - normal and transform faulting", "calculated_rate", "=", "self", ".", "get_rate_osr_normal_transform", "(", "self", ".", "threshold_moment", ",", "id0", ")", "elif", "b'OSR_special_2'", "in", "region", ":", "# Special case 2 - convergent and transform faulting", "calculated_rate", "=", "self", ".", "get_rate_osr_convergent_transform", "(", "self", ".", "threshold_moment", ",", "id0", ")", "else", ":", "region", "=", "region", ".", "decode", "(", "'utf-8'", ")", "calculated_rate", "=", "self", ".", "regionalisation", "[", "region", "]", "[", "'adjustment_factor'", "]", "*", "self", ".", "continuum_seismicity", "(", "self", ".", "threshold_moment", ",", "self", ".", "strain", ".", "data", "[", "'e1h'", "]", "[", "id0", "]", ",", "self", ".", "strain", ".", "data", "[", "'e2h'", "]", "[", "id0", "]", ",", "self", ".", "strain", ".", "data", "[", "'err'", "]", "[", "id0", "]", ",", "self", ".", "regionalisation", "[", "region", "]", ")", "for", "jloc", ",", "iloc", "in", "enumerate", "(", "np", ".", "where", "(", "id0", ")", "[", "0", "]", ")", ":", "# Where the calculated rate exceeds the base rate then becomes", "# calculated rate. In this version the magnitudes are treated", "# independently (i.e. 
if Rate(M < 7) > Base Rate (M < 7) but", "# Rate (M > 7) < Base Rate (M > 7) then returned Rate (M < 7)", "# = Rate (M < 7) and returned Rate (M > 7) = Base Rate (M > 7)", "id1", "=", "calculated_rate", "[", "jloc", "]", ">", "self", ".", "base_rate", "self", ".", "strain", ".", "seismicity_rate", "[", "iloc", ",", "id1", "]", "=", "calculated_rate", "[", "jloc", ",", "id1", "]", "if", "not", "cumulative", "and", "self", ".", "number_magnitudes", ">", "1", ":", "# Seismicity rates are currently cumulative - need to turn them", "# into discrete", "for", "iloc", "in", "range", "(", "0", ",", "self", ".", "number_magnitudes", "-", "1", ")", ":", "self", ".", "strain", ".", "seismicity_rate", "[", ":", ",", "iloc", "]", "=", "self", ".", "strain", ".", "seismicity_rate", "[", ":", ",", "iloc", "]", "-", "self", ".", "strain", ".", "seismicity_rate", "[", ":", ",", "iloc", "+", "1", "]", "if", "not", "in_seconds", ":", "self", ".", "strain", ".", "seismicity_rate", "=", "self", ".", "strain", ".", "seismicity_rate", "*", "SECS_PER_YEAR", "for", "key", "in", "STRAIN_VARIABLES", ":", "self", ".", "strain", ".", "data", "[", "key", "]", "=", "self", ".", "strain", ".", "data", "[", "key", "]", "*", "SECS_PER_YEAR" ]
Main function to calculate the activity rate (for each of the magnitudes in target_magnitudes) for all of the cells specified in the input strain model file :param strain_data: Strain model as an instance of :class: openquake.hmtk.strain.geodetic_strain.GeodeticStrain :param bool cumulative: Set to true if the cumulative rate is required, False for incremental :param bool in_seconds: Returns the activity rate in seconds (True) or else as an annual activity rate
[ "Main", "function", "to", "calculate", "the", "activity", "rate", "(", "for", "each", "of", "the", "magnitudes", "in", "target_magnitudes", ")", "for", "all", "of", "the", "cells", "specified", "in", "the", "input", "strain", "model", "file" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/strain/shift.py#L279-L367
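Two mechanical details in the method above deserve a note: rates are accumulated per magnitude bin as cumulative values and then differenced into incremental rates, with the last bin left cumulative. A minimal sketch of that differencing:

import numpy as np

rates = np.array([[1.0, 0.4, 0.1]])     # cumulative rates, one cell
for iloc in range(rates.shape[1] - 1):
    rates[:, iloc] = rates[:, iloc] - rates[:, iloc + 1]
print(rates)                            # [[0.6 0.3 0.1]]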
gem/oq-engine
openquake/hmtk/strain/shift.py
Shift.get_rate_osr_normal_transform
def get_rate_osr_normal_transform(self, threshold_moment, id0): ''' Gets seismicity rate for special case of the ridge condition with spreading and transform component :param float threshold_moment: Moment required for calculating activity rate :param np.ndarray id0: Logical vector indicating the cells to which this condition applies :returns: Activity rates for cells corresponding to the hybrid ocean spreading ridge and oceanic transform condition ''' # Get normal component e1h_ridge = np.zeros(np.sum(id0), dtype=float) e2h_ridge = self.strain.data['e1h'][id0] + self.strain.data['e2h'][id0] err_ridge = -(e1h_ridge + e2h_ridge) calculated_rate_ridge = self.continuum_seismicity( threshold_moment, e1h_ridge, e2h_ridge, err_ridge, self.regionalisation['OSRnor']) # Get transform e1h_trans = self.strain.data['e1h'][id0] e2h_trans = -e1h_trans err_trans = np.zeros(np.sum(id0), dtype=float) calculated_rate_transform = self.continuum_seismicity( threshold_moment, e1h_trans, e2h_trans, err_trans, self.regionalisation['OTFmed']) return ( self.regionalisation['OSRnor']['adjustment_factor'] * (calculated_rate_ridge + calculated_rate_transform))
python
def get_rate_osr_normal_transform(self, threshold_moment, id0): e1h_ridge = np.zeros(np.sum(id0), dtype=float) e2h_ridge = self.strain.data['e1h'][id0] + self.strain.data['e2h'][id0] err_ridge = -(e1h_ridge + e2h_ridge) calculated_rate_ridge = self.continuum_seismicity( threshold_moment, e1h_ridge, e2h_ridge, err_ridge, self.regionalisation['OSRnor']) e1h_trans = self.strain.data['e1h'][id0] e2h_trans = -e1h_trans err_trans = np.zeros(np.sum(id0), dtype=float) calculated_rate_transform = self.continuum_seismicity( threshold_moment, e1h_trans, e2h_trans, err_trans, self.regionalisation['OTFmed']) return ( self.regionalisation['OSRnor']['adjustment_factor'] * (calculated_rate_ridge + calculated_rate_transform))
[ "def", "get_rate_osr_normal_transform", "(", "self", ",", "threshold_moment", ",", "id0", ")", ":", "# Get normal component", "e1h_ridge", "=", "np", ".", "zeros", "(", "np", ".", "sum", "(", "id0", ")", ",", "dtype", "=", "float", ")", "e2h_ridge", "=", "self", ".", "strain", ".", "data", "[", "'e1h'", "]", "[", "id0", "]", "+", "self", ".", "strain", ".", "data", "[", "'e2h'", "]", "[", "id0", "]", "err_ridge", "=", "-", "(", "e1h_ridge", "+", "e2h_ridge", ")", "calculated_rate_ridge", "=", "self", ".", "continuum_seismicity", "(", "threshold_moment", ",", "e1h_ridge", ",", "e2h_ridge", ",", "err_ridge", ",", "self", ".", "regionalisation", "[", "'OSRnor'", "]", ")", "# Get transform", "e1h_trans", "=", "self", ".", "strain", ".", "data", "[", "'e1h'", "]", "[", "id0", "]", "e2h_trans", "=", "-", "e1h_trans", "err_trans", "=", "np", ".", "zeros", "(", "np", ".", "sum", "(", "id0", ")", ",", "dtype", "=", "float", ")", "calculated_rate_transform", "=", "self", ".", "continuum_seismicity", "(", "threshold_moment", ",", "e1h_trans", ",", "e2h_trans", ",", "err_trans", ",", "self", ".", "regionalisation", "[", "'OTFmed'", "]", ")", "return", "(", "self", ".", "regionalisation", "[", "'OSRnor'", "]", "[", "'adjustment_factor'", "]", "*", "(", "calculated_rate_ridge", "+", "calculated_rate_transform", ")", ")" ]
Gets seismicity rate for special case of the ridge condition with spreading and transform component :param float threshold_moment: Moment required for calculating activity rate :param np.ndarray id0: Logical vector indicating the cells to which this condition applies :returns: Activity rates for cells corresponding to the hybrid ocean spreading ridge and oceanic transform condition
[ "Gets", "seismicity", "rate", "for", "special", "case", "of", "the", "ridge", "condition", "with", "spreading", "and", "transform", "component" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/strain/shift.py#L369-L411
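The decomposition above splits the observed horizontal strain rates into a pure spreading part (first principal rate set to zero) and a pure transform part (equal and opposite horizontal rates), and rates the two independently. A numeric sketch with illustrative strain-rate values:

import numpy as np

e1h = np.array([-2.0e-9])              # illustrative principal rates
e2h = np.array([5.0e-9])
# spreading (normal-faulting) component
e1h_ridge = np.zeros_like(e1h)
e2h_ridge = e1h + e2h
err_ridge = -(e1h_ridge + e2h_ridge)   # volume conservation
# transform component: equal and opposite horizontal rates
e1h_trans, e2h_trans = e1h, -e1h
err_trans = np.zeros_like(e1h)
print(e2h_ridge, err_ridge, e2h_trans)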
gem/oq-engine
openquake/hmtk/strain/shift.py
Shift.get_rate_osr_convergent_transform
def get_rate_osr_convergent_transform(self, threshold_moment, id0): ''' Calculates seismicity rate for special case of the ridge condition with convergence and transform :param float threshold_moment: Moment required for calculating activity rate :param np.ndarray id0: Logical vector indicating the cells to which this condition applies :returns: Activity rates for cells corresponding to the hybrid ocean convergent boundary and oceanic transform condition ''' # Get convergent component e1h_ocb = self.strain.data['e1h'][id0] + self.strain.data['e2h'][id0] e2h_ocb = np.zeros(np.sum(id0), dtype=float) err_ocb = -(e1h_ocb + e2h_ocb) calculated_rate_ocb = self.continuum_seismicity( threshold_moment, e1h_ocb, e2h_ocb, err_ocb, self.regionalisation['OCB']) # Get transform e2h_trans = self.strain.data['e2h'][id0] e1h_trans = -e2h_trans err_trans = np.zeros(np.sum(id0), dtype=float) calculated_rate_transform = self.continuum_seismicity( threshold_moment, e1h_trans, e2h_trans, err_trans, self.regionalisation['OTFmed']) return (self.regionalisation['OSRnor']['adjustment_factor'] * (calculated_rate_ocb + calculated_rate_transform))
python
def get_rate_osr_convergent_transform(self, threshold_moment, id0): e1h_ocb = self.strain.data['e1h'][id0] + self.strain.data['e2h'][id0] e2h_ocb = np.zeros(np.sum(id0), dtype=float) err_ocb = -(e1h_ocb + e2h_ocb) calculated_rate_ocb = self.continuum_seismicity( threshold_moment, e1h_ocb, e2h_ocb, err_ocb, self.regionalisation['OCB']) e2h_trans = self.strain.data['e2h'][id0] e1h_trans = -e2h_trans err_trans = np.zeros(np.sum(id0), dtype=float) calculated_rate_transform = self.continuum_seismicity( threshold_moment, e1h_trans, e2h_trans, err_trans, self.regionalisation['OTFmed']) return (self.regionalisation['OSRnor']['adjustment_factor'] * (calculated_rate_ocb + calculated_rate_transform))
[ "def", "get_rate_osr_convergent_transform", "(", "self", ",", "threshold_moment", ",", "id0", ")", ":", "# Get convergent component", "e1h_ocb", "=", "self", ".", "strain", ".", "data", "[", "'e1h'", "]", "[", "id0", "]", "+", "self", ".", "strain", ".", "data", "[", "'e2h'", "]", "[", "id0", "]", "e2h_ocb", "=", "np", ".", "zeros", "(", "np", ".", "sum", "(", "id0", ")", ",", "dtype", "=", "float", ")", "err_ocb", "=", "-", "(", "e1h_ocb", "+", "e2h_ocb", ")", "calculated_rate_ocb", "=", "self", ".", "continuum_seismicity", "(", "threshold_moment", ",", "e1h_ocb", ",", "e2h_ocb", ",", "err_ocb", ",", "self", ".", "regionalisation", "[", "'OCB'", "]", ")", "# Get transform", "e2h_trans", "=", "self", ".", "strain", ".", "data", "[", "'e2h'", "]", "[", "id0", "]", "e1h_trans", "=", "-", "e2h_trans", "err_trans", "=", "np", ".", "zeros", "(", "np", ".", "sum", "(", "id0", ")", ",", "dtype", "=", "float", ")", "calculated_rate_transform", "=", "self", ".", "continuum_seismicity", "(", "threshold_moment", ",", "e1h_trans", ",", "e2h_trans", ",", "err_trans", ",", "self", ".", "regionalisation", "[", "'OTFmed'", "]", ")", "return", "(", "self", ".", "regionalisation", "[", "'OSRnor'", "]", "[", "'adjustment_factor'", "]", "*", "(", "calculated_rate_ocb", "+", "calculated_rate_transform", ")", ")" ]
Calculates seismicity rate for special case of the ridge condition with convergence and transform :param float threshold_moment: Moment required for calculating activity rate :param np.ndarray id0: Logical vector indicating the cells to which this condition applies :returns: Activity rates for cells corresponding to the hybrid ocean convergent boundary and oceanic transform condition
[ "Calculates", "seismicity", "rate", "for", "special", "case", "of", "the", "ridge", "condition", "with", "convergence", "and", "transform" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/strain/shift.py#L413-L453
gem/oq-engine
openquake/hmtk/strain/shift.py
Shift.continuum_seismicity
def continuum_seismicity(self, threshold_moment, e1h, e2h, err, region_params): ''' Function to implement the continuum seismicity calculation given vectors of input rates e1h, e2h [np.ndarray] and a dictionary of the corresponding regionalisation params returns a vector of the corresponding seismicity rates Python implementation of the CONTINUUM_SEISMICITY subroutine of SHIFT_GSRM.f90 :param float threshold_moment: Target moment for calculation of activity rate :param np.ndarray e1h: First principal strain rate :param np.ndarray e2h: Second principal strain rate :param np.ndarray err: Vertical strain rate :param dict region_params: Activity rate parameters specific to the tectonic region under consideration :returns: Cumulative seismicity rate greater than or equal to the threshold magnitude ''' strain_values = np.column_stack([e1h, e2h, err]) e1_rate = np.amin(strain_values, axis=1) e3_rate = np.amax(strain_values, axis=1) e2_rate = 0. - e1_rate - e3_rate # Pre-allocate seismicity rate with zeros seismicity_rate = np.zeros( [np.shape(strain_values)[0], len(threshold_moment)], dtype=float) # Calculate moment rate per unit area temp_e_rate = 2.0 * (-e1_rate) id0 = np.where(e2_rate < 0.0)[0] temp_e_rate[id0] = 2.0 * e3_rate[id0] M_persec_per_m2 = ( region_params['assumed_mu'] * temp_e_rate * region_params['coupled_thickness']) # Calculate seismicity rate at the threshold moment of the CMT # catalogue - Eq 6 in Bird et al (2010) seismicity_at_cmt_threshold = region_params['CMT_pure_event_rate'] * \ (M_persec_per_m2 / region_params['tGR_moment_rate']) # Adjust forecast rate to desired rate using tapered G-R model # Taken from Eq 7 (Bird et al. 2010) and Eq 9 (Bird & Kagan, 2004) for iloc, moment_thresh in enumerate(threshold_moment): g_function = calculate_taper_function( region_params['CMT_moment'], moment_thresh, region_params['corner_moment'], region_params['beta']) seismicity_rate[:, iloc] = g_function * seismicity_at_cmt_threshold return seismicity_rate
python
def continuum_seismicity(self, threshold_moment, e1h, e2h, err, region_params): strain_values = np.column_stack([e1h, e2h, err]) e1_rate = np.amin(strain_values, axis=1) e3_rate = np.amax(strain_values, axis=1) e2_rate = 0. - e1_rate - e3_rate seismicity_rate = np.zeros( [np.shape(strain_values)[0], len(threshold_moment)], dtype=float) temp_e_rate = 2.0 * (-e1_rate) id0 = np.where(e2_rate < 0.0)[0] temp_e_rate[id0] = 2.0 * e3_rate[id0] M_persec_per_m2 = ( region_params['assumed_mu'] * temp_e_rate * region_params['coupled_thickness']) seismicity_at_cmt_threshold = region_params['CMT_pure_event_rate'] * \ (M_persec_per_m2 / region_params['tGR_moment_rate']) for iloc, moment_thresh in enumerate(threshold_moment): g_function = calculate_taper_function( region_params['CMT_moment'], moment_thresh, region_params['corner_moment'], region_params['beta']) seismicity_rate[:, iloc] = g_function * seismicity_at_cmt_threshold return seismicity_rate
[ "def", "continuum_seismicity", "(", "self", ",", "threshold_moment", ",", "e1h", ",", "e2h", ",", "err", ",", "region_params", ")", ":", "strain_values", "=", "np", ".", "column_stack", "(", "[", "e1h", ",", "e2h", ",", "err", "]", ")", "e1_rate", "=", "np", ".", "amin", "(", "strain_values", ",", "axis", "=", "1", ")", "e3_rate", "=", "np", ".", "amax", "(", "strain_values", ",", "axis", "=", "1", ")", "e2_rate", "=", "0.", "-", "e1_rate", "-", "e3_rate", "# Pre-allocate seismicity rate with zeros", "seismicity_rate", "=", "np", ".", "zeros", "(", "[", "np", ".", "shape", "(", "strain_values", ")", "[", "0", "]", ",", "len", "(", "threshold_moment", ")", "]", ",", "dtype", "=", "float", ")", "# Calculate moment rate per unit area", "temp_e_rate", "=", "2.0", "*", "(", "-", "e1_rate", ")", "id0", "=", "np", ".", "where", "(", "e2_rate", "<", "0.0", ")", "[", "0", "]", "temp_e_rate", "[", "id0", "]", "=", "2.0", "*", "e3_rate", "[", "id0", "]", "M_persec_per_m2", "=", "(", "region_params", "[", "'assumed_mu'", "]", "*", "temp_e_rate", "*", "region_params", "[", "'coupled_thickness'", "]", ")", "# Calculate seismicity rate at the threshold moment of the CMT", "# catalogue - Eq 6 in Bird et al (2010)", "seismicity_at_cmt_threshold", "=", "region_params", "[", "'CMT_pure_event_rate'", "]", "*", "(", "M_persec_per_m2", "/", "region_params", "[", "'tGR_moment_rate'", "]", ")", "# Adjust forecast rate to desired rate using tapered G-R model", "# Taken from Eq 7 (Bird et al. 2010) and Eq 9 (Bird & Kagan, 2004)", "for", "iloc", ",", "moment_thresh", "in", "enumerate", "(", "threshold_moment", ")", ":", "g_function", "=", "calculate_taper_function", "(", "region_params", "[", "'CMT_moment'", "]", ",", "moment_thresh", ",", "region_params", "[", "'corner_moment'", "]", ",", "region_params", "[", "'beta'", "]", ")", "seismicity_rate", "[", ":", ",", "iloc", "]", "=", "g_function", "*", "seismicity_at_cmt_threshold", "return", "seismicity_rate" ]
Function to implement the continuum seismicity calculation given vectors of input rates e1h, e2h [np.ndarray] and a dictionary of the corresponding regionalisation params returns a vector of the corresponding seismicity rates Python implementation of the CONTINUUM_SEISMICITY subroutine of SHIFT_GSRM.f90 :param float threshold_moment: Target moment for calculation of activity rate :param np.ndarray e1h: First principal strain rate :param np.ndarray e2h: Second principal strain rate :param np.ndarray err: Vertical strain rate :param dict region_params: Activity rate parameters specific to the tectonic region under consideration :returns: Cumulative seismicity rate greater than or equal to the threshold magnitude
[ "Function", "to", "implement", "the", "continuum", "seismicity", "calculation", "given", "vectors", "of", "input", "rates", "e1h", "e2h", "[", "np", ".", "ndarray", "]", "and", "a", "dictionary", "of", "the", "corresponding", "regionalisation", "params", "returns", "a", "vector", "of", "the", "corresponding", "seismicity", "rates", "Python", "implementation", "of", "the", "CONTINUUM_SEISMICITY", "subroutine", "of", "SHIFT_GSRM", ".", "f90" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/strain/shift.py#L455-L515
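The final loop rescales the rate calibrated at the CMT catalogue threshold to each target moment via calculate_taper_function, which is imported elsewhere in shift.py and not shown in this record. A hedged sketch of the tapered Gutenberg-Richter factor, assuming the standard form from Bird & Kagan (2004, eq. 9); the actual implementation may differ:

```python
import numpy as np

def taper_function_sketch(cmt_moment, target_moment, corner_moment, beta):
    """Tapered Gutenberg-Richter scaling factor.

    Assumed form, following Bird & Kagan (2004, eq. 9); the real
    calculate_taper_function is defined elsewhere in shift.py.
    """
    return ((cmt_moment / target_moment) ** beta *
            np.exp((cmt_moment - target_moment) / corner_moment))

def moment(mw):
    # Seismic moment in N-m (Hanks & Kanamori convention, approximately)
    return 10.0 ** (1.5 * mw + 9.05)

# Rescale a rate calibrated at the CMT threshold (~Mw 5.66) to Mw 5.0
factor = taper_function_sketch(moment(5.66), moment(5.0),
                               moment(8.0), beta=0.65)
print(factor)  # > 1: more events exceed the lower threshold
```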
gem/oq-engine
openquake/hmtk/strain/shift.py
Shift._reclassify_Bird_regions_with_data
def _reclassify_Bird_regions_with_data(self): ''' The SHIFT regionalisation defines only 'C','R','S','O' - need to use strain data to reclassify to sub-categories according to the definition in Bird & Liu (2007) ''' # Treat trivial cases of subduction zones and oceanic types self.strain.data['region'][ self.strain.data['region'] == b'IPL'] = ['IPL'] self.strain.data['region'][ self.strain.data['region'] == b'S'] = ['SUB'] self.strain.data['region'][ self.strain.data['region'] == b'O'] = ['OCB'] # Continental types id0 = self.strain.data['region'] == b'C' self.strain.data['region'][id0] = ['CTF'] id0_pos_err = np.logical_and( self.strain.data['err'] > 0., self.strain.data['err'] > (0.364 * self.strain.data['e2h'])) id0_neg_err = np.logical_and( self.strain.data['err'] < 0., self.strain.data['err'] <= (0.364 * self.strain.data['e1h'])) self.strain.data['region'][np.logical_and(id0, id0_pos_err)] = 'CCB' self.strain.data['region'][np.logical_and(id0, id0_neg_err)] = 'CRB' # Ridge Types id0 = self.strain.data['region'] == b'R' for iloc in np.where(id0)[0]: cond = (self.strain.data['e1h'][iloc] > 0.0 and self.strain.data['e2h'][iloc] > 0.0) if cond: self.strain.data['region'][iloc] = 'OSRnor' # Effective == 0.0 elif fabs(self.strain.data['e1h'][iloc]) < 1E-99: self.strain.data['region'][iloc] = 'OSRnor' elif ((self.strain.data['e1h'][iloc] * self.strain.data['e2h'][iloc]) < 0.0) and\ ((self.strain.data['e1h'][iloc] + self.strain.data['e2h'][iloc]) >= 0.): self.strain.data['region'][iloc] = 'OSR_special_1' elif ((self.strain.data['e1h'][iloc] * self.strain.data['e2h'][iloc]) < 0.) and\ ((self.strain.data['e1h'][iloc] + self.strain.data['e2h'][iloc]) < 0.): self.strain.data['region'][iloc] = 'OSR_special_2' else: self.strain.data['region'][iloc] = 'OCB'
python
def _reclassify_Bird_regions_with_data(self): self.strain.data['region'][ self.strain.data['region'] == b'IPL'] = ['IPL'] self.strain.data['region'][ self.strain.data['region'] == b'S'] = ['SUB'] self.strain.data['region'][ self.strain.data['region'] == b'O'] = ['OCB'] id0 = self.strain.data['region'] == b'C' self.strain.data['region'][id0] = ['CTF'] id0_pos_err = np.logical_and( self.strain.data['err'] > 0., self.strain.data['err'] > (0.364 * self.strain.data['e2h'])) id0_neg_err = np.logical_and( self.strain.data['err'] < 0., self.strain.data['err'] <= (0.364 * self.strain.data['e1h'])) self.strain.data['region'][np.logical_and(id0, id0_pos_err)] = 'CCB' self.strain.data['region'][np.logical_and(id0, id0_neg_err)] = 'CRB' id0 = self.strain.data['region'] == b'R' for iloc in np.where(id0)[0]: cond = (self.strain.data['e1h'][iloc] > 0.0 and self.strain.data['e2h'][iloc] > 0.0) if cond: self.strain.data['region'][iloc] = 'OSRnor' elif fabs(self.strain.data['e1h'][iloc]) < 1E-99: self.strain.data['region'][iloc] = 'OSRnor' elif ((self.strain.data['e1h'][iloc] * self.strain.data['e2h'][iloc]) < 0.0) and\ ((self.strain.data['e1h'][iloc] + self.strain.data['e2h'][iloc]) >= 0.): self.strain.data['region'][iloc] = 'OSR_special_1' elif ((self.strain.data['e1h'][iloc] * self.strain.data['e2h'][iloc]) < 0.) and\ ((self.strain.data['e1h'][iloc] + self.strain.data['e2h'][iloc]) < 0.): self.strain.data['region'][iloc] = 'OSR_special_2' else: self.strain.data['region'][iloc] = 'OCB'
[ "def", "_reclassify_Bird_regions_with_data", "(", "self", ")", ":", "# Treat trivial cases of subduction zones and oceanic types", "self", ".", "strain", ".", "data", "[", "'region'", "]", "[", "self", ".", "strain", ".", "data", "[", "'region'", "]", "==", "b'IPL'", "]", "=", "[", "'IPL'", "]", "self", ".", "strain", ".", "data", "[", "'region'", "]", "[", "self", ".", "strain", ".", "data", "[", "'region'", "]", "==", "b'S'", "]", "=", "[", "'SUB'", "]", "self", ".", "strain", ".", "data", "[", "'region'", "]", "[", "self", ".", "strain", ".", "data", "[", "'region'", "]", "==", "b'O'", "]", "=", "[", "'OCB'", "]", "# Continental types", "id0", "=", "self", ".", "strain", ".", "data", "[", "'region'", "]", "==", "b'C'", "self", ".", "strain", ".", "data", "[", "'region'", "]", "[", "id0", "]", "=", "[", "'CTF'", "]", "id0_pos_err", "=", "np", ".", "logical_and", "(", "self", ".", "strain", ".", "data", "[", "'err'", "]", ">", "0.", ",", "self", ".", "strain", ".", "data", "[", "'err'", "]", ">", "(", "0.364", "*", "self", ".", "strain", ".", "data", "[", "'e2h'", "]", ")", ")", "id0_neg_err", "=", "np", ".", "logical_and", "(", "self", ".", "strain", ".", "data", "[", "'err'", "]", "<", "0.", ",", "self", ".", "strain", ".", "data", "[", "'err'", "]", "<=", "(", "0.364", "*", "self", ".", "strain", ".", "data", "[", "'e1h'", "]", ")", ")", "self", ".", "strain", ".", "data", "[", "'region'", "]", "[", "np", ".", "logical_and", "(", "id0", ",", "id0_pos_err", ")", "]", "=", "'CCB'", "self", ".", "strain", ".", "data", "[", "'region'", "]", "[", "np", ".", "logical_and", "(", "id0", ",", "id0_neg_err", ")", "]", "=", "'CRB'", "# Ridge Types", "id0", "=", "self", ".", "strain", ".", "data", "[", "'region'", "]", "==", "b'R'", "for", "iloc", "in", "np", ".", "where", "(", "id0", ")", "[", "0", "]", ":", "cond", "=", "(", "self", ".", "strain", ".", "data", "[", "'e1h'", "]", "[", "iloc", "]", ">", "0.0", "and", "self", ".", "strain", ".", "data", "[", "'e2h'", "]", "[", "iloc", "]", ">", "0.0", ")", "if", "cond", ":", "self", ".", "strain", ".", "data", "[", "'region'", "]", "[", "iloc", "]", "=", "'OSRnor'", "# Effective == 0.0", "elif", "fabs", "(", "self", ".", "strain", ".", "data", "[", "'e1h'", "]", "[", "iloc", "]", ")", "<", "1E-99", ":", "self", ".", "strain", ".", "data", "[", "'region'", "]", "[", "iloc", "]", "=", "'OSRnor'", "elif", "(", "(", "self", ".", "strain", ".", "data", "[", "'e1h'", "]", "[", "iloc", "]", "*", "self", ".", "strain", ".", "data", "[", "'e2h'", "]", "[", "iloc", "]", ")", "<", "0.0", ")", "and", "(", "(", "self", ".", "strain", ".", "data", "[", "'e1h'", "]", "[", "iloc", "]", "+", "self", ".", "strain", ".", "data", "[", "'e2h'", "]", "[", "iloc", "]", ")", ">=", "0.", ")", ":", "self", ".", "strain", ".", "data", "[", "'region'", "]", "[", "iloc", "]", "=", "'OSR_special_1'", "elif", "(", "(", "self", ".", "strain", ".", "data", "[", "'e1h'", "]", "[", "iloc", "]", "*", "self", ".", "strain", ".", "data", "[", "'e2h'", "]", "[", "iloc", "]", ")", "<", "0.", ")", "and", "(", "(", "self", ".", "strain", ".", "data", "[", "'e1h'", "]", "[", "iloc", "]", "+", "self", ".", "strain", ".", "data", "[", "'e2h'", "]", "[", "iloc", "]", ")", "<", "0.", ")", ":", "self", ".", "strain", ".", "data", "[", "'region'", "]", "[", "iloc", "]", "=", "'OSR_special_2'", "else", ":", "self", ".", "strain", ".", "data", "[", "'region'", "]", "[", "iloc", "]", "=", "'OCB'" ]
The SHIFT regionalisation defines only 'C','R','S','O' - need to use strain data to reclassify to sub-categories according to the definition in Bird & Liu (2007)
[ "The", "SHIFT", "regionalisation", "defines", "only", "C", "R", "S", "O", "-", "need", "to", "use", "strain", "data", "to", "reclassify", "to", "sub", "-", "categories", "according", "to", "the", "definition", "in", "Bird", "&", "Liu", "(", "2007", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/strain/shift.py#L517-L566
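The continental branch above can be exercised in isolation. A small sketch applying the same 0.364 thresholds (from Bird & Liu, 2007) to made-up strain values, reproducing the CTF/CCB/CRB split:

```python
import numpy as np

# Made-up strain rates: compressional, weakly strained, extensional cell
e1h = np.array([-2.0e-9, -1.0e-9, -0.3e-9])
e2h = np.array([1.0e-9, 0.9e-9, 0.8e-9])
err = -(e1h + e2h)

region = np.array(['CTF'] * 3)                     # default: transform fault
region[(err > 0.) & (err > 0.364 * e2h)] = 'CCB'   # compressional boundary
region[(err < 0.) & (err <= 0.364 * e1h)] = 'CRB'  # rift boundary
print(region)  # ['CCB' 'CTF' 'CRB']
```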
gem/oq-engine
openquake/hazardlib/gsim/chiou_youngs_2008.py
ChiouYoungs2008._get_ln_y_ref
def _get_ln_y_ref(self, rup, dists, C): """ Get an intensity on a reference soil. Implements eq. 13a. """ # reverse faulting flag Frv = 1 if 30 <= rup.rake <= 150 else 0 # normal faulting flag Fnm = 1 if -120 <= rup.rake <= -60 else 0 # hanging wall flag Fhw = (dists.rx >= 0) # aftershock flag. always zero since we only consider main shock AS = 0 ln_y_ref = ( # first line of eq. 13a C['c1'] + (C['c1a'] * Frv + C['c1b'] * Fnm + C['c7'] * (rup.ztor - 4)) * (1 - AS) + (C['c10'] + C['c7a'] * (rup.ztor - 4)) * AS # second line + C['c2'] * (rup.mag - 6) + ((C['c2'] - C['c3']) / C['cn']) * np.log(1 + np.exp(C['cn'] * (C['cm'] - rup.mag))) # third line + C['c4'] * np.log(dists.rrup + C['c5'] * np.cosh(C['c6'] * max(rup.mag - C['chm'], 0))) # fourth line + (C['c4a'] - C['c4']) * np.log(np.sqrt(dists.rrup ** 2 + C['crb'] ** 2)) # fifth line + (C['cg1'] + C['cg2'] / (np.cosh(max(rup.mag - C['cg3'], 0)))) * dists.rrup # sixth line + C['c9'] * Fhw * np.tanh(dists.rx * (np.cos(np.radians(rup.dip)) ** 2) / C['c9a']) * (1 - np.sqrt(dists.rjb ** 2 + rup.ztor ** 2) / (dists.rrup + 0.001)) ) return ln_y_ref
python
def _get_ln_y_ref(self, rup, dists, C): Frv = 1 if 30 <= rup.rake <= 150 else 0 Fnm = 1 if -120 <= rup.rake <= -60 else 0 Fhw = (dists.rx >= 0) AS = 0 ln_y_ref = ( C['c1'] + (C['c1a'] * Frv + C['c1b'] * Fnm + C['c7'] * (rup.ztor - 4)) * (1 - AS) + (C['c10'] + C['c7a'] * (rup.ztor - 4)) * AS + C['c2'] * (rup.mag - 6) + ((C['c2'] - C['c3']) / C['cn']) * np.log(1 + np.exp(C['cn'] * (C['cm'] - rup.mag))) + C['c4'] * np.log(dists.rrup + C['c5'] * np.cosh(C['c6'] * max(rup.mag - C['chm'], 0))) + (C['c4a'] - C['c4']) * np.log(np.sqrt(dists.rrup ** 2 + C['crb'] ** 2)) + (C['cg1'] + C['cg2'] / (np.cosh(max(rup.mag - C['cg3'], 0)))) * dists.rrup + C['c9'] * Fhw * np.tanh(dists.rx * (np.cos(np.radians(rup.dip)) ** 2) / C['c9a']) * (1 - np.sqrt(dists.rjb ** 2 + rup.ztor ** 2) / (dists.rrup + 0.001)) ) return ln_y_ref
[ "def", "_get_ln_y_ref", "(", "self", ",", "rup", ",", "dists", ",", "C", ")", ":", "# reverse faulting flag", "Frv", "=", "1", "if", "30", "<=", "rup", ".", "rake", "<=", "150", "else", "0", "# normal faulting flag", "Fnm", "=", "1", "if", "-", "120", "<=", "rup", ".", "rake", "<=", "-", "60", "else", "0", "# hanging wall flag", "Fhw", "=", "(", "dists", ".", "rx", ">=", "0", ")", "# aftershock flag. always zero since we only consider main shock", "AS", "=", "0", "ln_y_ref", "=", "(", "# first line of eq. 13a", "C", "[", "'c1'", "]", "+", "(", "C", "[", "'c1a'", "]", "*", "Frv", "+", "C", "[", "'c1b'", "]", "*", "Fnm", "+", "C", "[", "'c7'", "]", "*", "(", "rup", ".", "ztor", "-", "4", ")", ")", "*", "(", "1", "-", "AS", ")", "+", "(", "C", "[", "'c10'", "]", "+", "C", "[", "'c7a'", "]", "*", "(", "rup", ".", "ztor", "-", "4", ")", ")", "*", "AS", "# second line", "+", "C", "[", "'c2'", "]", "*", "(", "rup", ".", "mag", "-", "6", ")", "+", "(", "(", "C", "[", "'c2'", "]", "-", "C", "[", "'c3'", "]", ")", "/", "C", "[", "'cn'", "]", ")", "*", "np", ".", "log", "(", "1", "+", "np", ".", "exp", "(", "C", "[", "'cn'", "]", "*", "(", "C", "[", "'cm'", "]", "-", "rup", ".", "mag", ")", ")", ")", "# third line", "+", "C", "[", "'c4'", "]", "*", "np", ".", "log", "(", "dists", ".", "rrup", "+", "C", "[", "'c5'", "]", "*", "np", ".", "cosh", "(", "C", "[", "'c6'", "]", "*", "max", "(", "rup", ".", "mag", "-", "C", "[", "'chm'", "]", ",", "0", ")", ")", ")", "# fourth line", "+", "(", "C", "[", "'c4a'", "]", "-", "C", "[", "'c4'", "]", ")", "*", "np", ".", "log", "(", "np", ".", "sqrt", "(", "dists", ".", "rrup", "**", "2", "+", "C", "[", "'crb'", "]", "**", "2", ")", ")", "# fifth line", "+", "(", "C", "[", "'cg1'", "]", "+", "C", "[", "'cg2'", "]", "/", "(", "np", ".", "cosh", "(", "max", "(", "rup", ".", "mag", "-", "C", "[", "'cg3'", "]", ",", "0", ")", ")", ")", ")", "*", "dists", ".", "rrup", "# sixth line", "+", "C", "[", "'c9'", "]", "*", "Fhw", "*", "np", ".", "tanh", "(", "dists", ".", "rx", "*", "(", "np", ".", "cos", "(", "np", ".", "radians", "(", "rup", ".", "dip", ")", ")", "**", "2", ")", "/", "C", "[", "'c9a'", "]", ")", "*", "(", "1", "-", "np", ".", "sqrt", "(", "dists", ".", "rjb", "**", "2", "+", "rup", ".", "ztor", "**", "2", ")", "/", "(", "dists", ".", "rrup", "+", "0.001", ")", ")", ")", "return", "ln_y_ref" ]
Get an intensity on a reference soil. Implements eq. 13a.
[ "Get", "an", "intensity", "on", "a", "reference", "soil", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/chiou_youngs_2008.py#L169-L215
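The style-of-faulting flags at the top of _get_ln_y_ref are self-contained and easy to check in isolation; this sketch reproduces them exactly as written above:

```python
# Style-of-faulting flags exactly as in _get_ln_y_ref above
def faulting_flags(rake):
    frv = 1 if 30 <= rake <= 150 else 0     # reverse faulting
    fnm = 1 if -120 <= rake <= -60 else 0   # normal faulting
    return frv, fnm

for rake in (90, -90, 0):  # reverse, normal, strike-slip
    print(rake, faulting_flags(rake))  # (1, 0), (0, 1), (0, 0)
```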
gem/oq-engine
openquake/hazardlib/scalerel/leonard2014.py
Leonard2014_SCR.get_median_area
def get_median_area(self, mag, rake): """ Calculates median fault area from magnitude. """ if rake is None: # Return average of strike-slip and dip-slip curves return power(10.0, (mag - 4.185)) elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135): # strike-slip return power(10.0, (mag - 4.18)) else: # Dip-slip (thrust or normal), and undefined rake return power(10.0, (mag - 4.19))
python
def get_median_area(self, mag, rake): if rake is None: return power(10.0, (mag - 4.185)) elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135): return power(10.0, (mag - 4.18)) else: return power(10.0, (mag - 4.19))
[ "def", "get_median_area", "(", "self", ",", "mag", ",", "rake", ")", ":", "if", "rake", "is", "None", ":", "# Return average of strike-slip and dip-slip curves", "return", "power", "(", "10.0", ",", "(", "mag", "-", "4.185", ")", ")", "elif", "(", "-", "45", "<=", "rake", "<=", "45", ")", "or", "(", "rake", ">=", "135", ")", "or", "(", "rake", "<=", "-", "135", ")", ":", "# strike-slip", "return", "power", "(", "10.0", ",", "(", "mag", "-", "4.18", ")", ")", "else", ":", "# Dip-slip (thrust or normal), and undefined rake", "return", "power", "(", "10.0", ",", "(", "mag", "-", "4.19", ")", ")" ]
Calculates median fault area from magnitude.
[ "Calculates", "median", "fault", "area", "from", "magnitude", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/scalerel/leonard2014.py#L36-L48
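Since the Leonard (2014) SCR relation above is a simple power law, a quick numeric check is easy. This sketch mirrors the branch logic and prints the median rupture areas (in km2) at Mw 6.5:

```python
from numpy import power

def median_area_sketch(mag, rake):
    # Mirrors Leonard2014_SCR.get_median_area above
    if rake is None:
        return power(10.0, mag - 4.185)  # average of the two curves
    elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
        return power(10.0, mag - 4.18)   # strike-slip
    else:
        return power(10.0, mag - 4.19)   # dip-slip

# Mw 6.5: ~208.9 km2 strike-slip vs ~204.2 km2 dip-slip
print(median_area_sketch(6.5, 0.0), median_area_sketch(6.5, 90.0))
```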
gem/oq-engine
openquake/hazardlib/gsim/atkinson_boore_1995.py
AtkinsonBoore1995GSCBest.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ C = self.COEFFS[imt] # clip rhypo at 10 (this is the minimum distance used in # deriving the equation), see page 22, this avoids singularity # in mean value equation rhypo = dists.rhypo.copy() rhypo[rhypo < 10] = 10 # convert magnitude from Mblg to Mw mag = rup.mag * 0.98 - 0.39 if rup.mag <= 5.5 else \ 2.715 - 0.277 * rup.mag + 0.127 * rup.mag * rup.mag # functional form as explained in 'Youngs_fit_to_AB95lookup.doc' f1 = np.minimum(np.log(rhypo), np.log(70.)) f2 = np.maximum(np.log(rhypo / 130.), 0) mean = ( C['c1'] + C['c2'] * mag + C['c3'] * mag ** 2 + (C['c4'] + C['c5'] * mag) * f1 + (C['c6'] + C['c7'] * mag) * f2 + C['c8'] * rhypo ) stddevs = self._get_stddevs(stddev_types, dists.rhypo.shape[0]) return mean, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): C = self.COEFFS[imt] rhypo = dists.rhypo.copy() rhypo[rhypo < 10] = 10 mag = rup.mag * 0.98 - 0.39 if rup.mag <= 5.5 else \ 2.715 - 0.277 * rup.mag + 0.127 * rup.mag * rup.mag f1 = np.minimum(np.log(rhypo), np.log(70.)) f2 = np.maximum(np.log(rhypo / 130.), 0) mean = ( C['c1'] + C['c2'] * mag + C['c3'] * mag ** 2 + (C['c4'] + C['c5'] * mag) * f1 + (C['c6'] + C['c7'] * mag) * f2 + C['c8'] * rhypo ) stddevs = self._get_stddevs(stddev_types, dists.rhypo.shape[0]) return mean, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "C", "=", "self", ".", "COEFFS", "[", "imt", "]", "# clip rhypo at 10 (this is the minimum distance used in", "# deriving the equation), see page 22, this avoids singularity", "# in mean value equation", "rhypo", "=", "dists", ".", "rhypo", ".", "copy", "(", ")", "rhypo", "[", "rhypo", "<", "10", "]", "=", "10", "# convert magnitude from Mblg to Mw", "mag", "=", "rup", ".", "mag", "*", "0.98", "-", "0.39", "if", "rup", ".", "mag", "<=", "5.5", "else", "2.715", "-", "0.277", "*", "rup", ".", "mag", "+", "0.127", "*", "rup", ".", "mag", "*", "rup", ".", "mag", "# functional form as explained in 'Youngs_fit_to_AB95lookup.doc'", "f1", "=", "np", ".", "minimum", "(", "np", ".", "log", "(", "rhypo", ")", ",", "np", ".", "log", "(", "70.", ")", ")", "f2", "=", "np", ".", "maximum", "(", "np", ".", "log", "(", "rhypo", "/", "130.", ")", ",", "0", ")", "mean", "=", "(", "C", "[", "'c1'", "]", "+", "C", "[", "'c2'", "]", "*", "mag", "+", "C", "[", "'c3'", "]", "*", "mag", "**", "2", "+", "(", "C", "[", "'c4'", "]", "+", "C", "[", "'c5'", "]", "*", "mag", ")", "*", "f1", "+", "(", "C", "[", "'c6'", "]", "+", "C", "[", "'c7'", "]", "*", "mag", ")", "*", "f2", "+", "C", "[", "'c8'", "]", "*", "rhypo", ")", "stddevs", "=", "self", ".", "_get_stddevs", "(", "stddev_types", ",", "dists", ".", "rhypo", ".", "shape", "[", "0", "]", ")", "return", "mean", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_1995.py#L75-L105
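Two details of the implementation above are worth isolating: the Mblg-to-Mw conversion and the 10 km floor on hypocentral distance. A small sketch replicating both, with illustrative values:

```python
import numpy as np

def mblg_to_mw(mblg):
    # Same piecewise conversion as in get_mean_and_stddevs above
    if mblg <= 5.5:
        return mblg * 0.98 - 0.39
    return 2.715 - 0.277 * mblg + 0.127 * mblg * mblg

for mblg in (4.0, 5.5, 6.5):
    print(mblg, '->', round(mblg_to_mw(mblg), 3))  # 3.53, 5.0, 6.28

# rhypo below 10 km is floored at 10 km to avoid the singularity
rhypo = np.array([2.0, 10.0, 50.0])
rhypo[rhypo < 10] = 10
print(rhypo)  # [10. 10. 50.]
```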
gem/oq-engine
openquake/server/views.py
_get_base_url
def _get_base_url(request): """ Construct a base URL, given a request object. This comprises the protocol prefix (http:// or https://) and the host, which can include the port number. For example: http://www.openquake.org or https://www.openquake.org:8000. """ if request.is_secure(): base_url = 'https://%s' else: base_url = 'http://%s' base_url %= request.META['HTTP_HOST'] return base_url
python
def _get_base_url(request): if request.is_secure(): base_url = 'https://%s' else: base_url = 'http://%s' base_url %= request.META['HTTP_HOST'] return base_url
[ "def", "_get_base_url", "(", "request", ")", ":", "if", "request", ".", "is_secure", "(", ")", ":", "base_url", "=", "'https://%s'", "else", ":", "base_url", "=", "'http://%s'", "base_url", "%=", "request", ".", "META", "[", "'HTTP_HOST'", "]", "return", "base_url" ]
Construct a base URL, given a request object. This comprises the protocol prefix (http:// or https://) and the host, which can include the port number. For example: http://www.openquake.org or https://www.openquake.org:8000.
[ "Construct", "a", "base", "URL", "given", "a", "request", "object", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L108-L121
gem/oq-engine
openquake/server/views.py
_prepare_job
def _prepare_job(request, candidates): """ Creates a temporary directory, move uploaded files there and select the job file by looking at the candidate names. :returns: full path of the job_file """ temp_dir = tempfile.mkdtemp() inifiles = [] arch = request.FILES.get('archive') if arch is None: # move each file to a new temp dir, using the upload file names, # not the temporary ones for each_file in request.FILES.values(): new_path = os.path.join(temp_dir, each_file.name) shutil.move(each_file.temporary_file_path(), new_path) if each_file.name in candidates: inifiles.append(new_path) return inifiles # else extract the files from the archive into temp_dir return readinput.extract_from_zip(arch, candidates)
python
def _prepare_job(request, candidates): temp_dir = tempfile.mkdtemp() inifiles = [] arch = request.FILES.get('archive') if arch is None: for each_file in request.FILES.values(): new_path = os.path.join(temp_dir, each_file.name) shutil.move(each_file.temporary_file_path(), new_path) if each_file.name in candidates: inifiles.append(new_path) return inifiles return readinput.extract_from_zip(arch, candidates)
[ "def", "_prepare_job", "(", "request", ",", "candidates", ")", ":", "temp_dir", "=", "tempfile", ".", "mkdtemp", "(", ")", "inifiles", "=", "[", "]", "arch", "=", "request", ".", "FILES", ".", "get", "(", "'archive'", ")", "if", "arch", "is", "None", ":", "# move each file to a new temp dir, using the upload file names,", "# not the temporary ones", "for", "each_file", "in", "request", ".", "FILES", ".", "values", "(", ")", ":", "new_path", "=", "os", ".", "path", ".", "join", "(", "temp_dir", ",", "each_file", ".", "name", ")", "shutil", ".", "move", "(", "each_file", ".", "temporary_file_path", "(", ")", ",", "new_path", ")", "if", "each_file", ".", "name", "in", "candidates", ":", "inifiles", ".", "append", "(", "new_path", ")", "return", "inifiles", "# else extract the files from the archive into temp_dir", "return", "readinput", ".", "extract_from_zip", "(", "arch", ",", "candidates", ")" ]
Creates a temporary directory, move uploaded files there and select the job file by looking at the candidate names. :returns: full path of the job_file
[ "Creates", "a", "temporary", "directory", "move", "uploaded", "files", "there", "and", "select", "the", "job", "file", "by", "looking", "at", "the", "candidate", "names", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L124-L144
gem/oq-engine
openquake/server/views.py
ajax_login
def ajax_login(request): """ Accept a POST request to login. :param request: `django.http.HttpRequest` object, containing mandatory parameters username and password required. """ username = request.POST['username'] password = request.POST['password'] user = authenticate(username=username, password=password) if user is not None: if user.is_active: login(request, user) return HttpResponse(content='Successful login', content_type='text/plain', status=200) else: return HttpResponse(content='Disabled account', content_type='text/plain', status=403) else: return HttpResponse(content='Invalid login', content_type='text/plain', status=403)
python
def ajax_login(request): username = request.POST['username'] password = request.POST['password'] user = authenticate(username=username, password=password) if user is not None: if user.is_active: login(request, user) return HttpResponse(content='Successful login', content_type='text/plain', status=200) else: return HttpResponse(content='Disabled account', content_type='text/plain', status=403) else: return HttpResponse(content='Invalid login', content_type='text/plain', status=403)
[ "def", "ajax_login", "(", "request", ")", ":", "username", "=", "request", ".", "POST", "[", "'username'", "]", "password", "=", "request", ".", "POST", "[", "'password'", "]", "user", "=", "authenticate", "(", "username", "=", "username", ",", "password", "=", "password", ")", "if", "user", "is", "not", "None", ":", "if", "user", ".", "is_active", ":", "login", "(", "request", ",", "user", ")", "return", "HttpResponse", "(", "content", "=", "'Successful login'", ",", "content_type", "=", "'text/plain'", ",", "status", "=", "200", ")", "else", ":", "return", "HttpResponse", "(", "content", "=", "'Disabled account'", ",", "content_type", "=", "'text/plain'", ",", "status", "=", "403", ")", "else", ":", "return", "HttpResponse", "(", "content", "=", "'Invalid login'", ",", "content_type", "=", "'text/plain'", ",", "status", "=", "403", ")" ]
Accept a POST request to login. :param request: `django.http.HttpRequest` object, containing mandatory parameters username and password required.
[ "Accept", "a", "POST", "request", "to", "login", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L150-L171
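A hypothetical client call against this view might look as follows. The URL path and port are assumptions (routes live in urls.py, which is not part of this record), and a real deployment may additionally require a CSRF token:

```python
import requests

# Hypothetical route and port; adjust to the actual urls.py configuration.
# Authentication failures come back as 403 with a plain-text body.
resp = requests.post('http://localhost:8800/accounts/ajax_login/',
                     data={'username': 'demo', 'password': 'secret'})
print(resp.status_code, resp.text)  # 200 'Successful login' or 403
```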
gem/oq-engine
openquake/server/views.py
get_available_gsims
def get_available_gsims(request): """ Return a list of strings with the available GSIMs """ gsims = list(gsim.get_available_gsims()) return HttpResponse(content=json.dumps(gsims), content_type=JSON)
python
def get_available_gsims(request): gsims = list(gsim.get_available_gsims()) return HttpResponse(content=json.dumps(gsims), content_type=JSON)
[ "def", "get_available_gsims", "(", "request", ")", ":", "gsims", "=", "list", "(", "gsim", ".", "get_available_gsims", "(", ")", ")", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "gsims", ")", ",", "content_type", "=", "JSON", ")" ]
Return a list of strings with the available GSIMs
[ "Return", "a", "list", "of", "strings", "with", "the", "available", "GSIMs" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L207-L212
gem/oq-engine
openquake/server/views.py
validate_nrml
def validate_nrml(request): """ Leverage oq-risklib to check if a given XML text is a valid NRML :param request: a `django.http.HttpRequest` object containing the mandatory parameter 'xml_text': the text of the XML to be validated as NRML :returns: a JSON object, containing: * 'valid': a boolean indicating if the provided text is a valid NRML * 'error_msg': the error message, if any error was found (None otherwise) * 'error_line': line of the given XML where the error was found (None if no error was found or if it was not a validation error) """ xml_text = request.POST.get('xml_text') if not xml_text: return HttpResponseBadRequest( 'Please provide the "xml_text" parameter') xml_file = gettemp(xml_text, suffix='.xml') try: nrml.to_python(xml_file) except ExpatError as exc: return _make_response(error_msg=str(exc), error_line=exc.lineno, valid=False) except Exception as exc: # get the exception message exc_msg = exc.args[0] if isinstance(exc_msg, bytes): exc_msg = exc_msg.decode('utf-8') # make it a unicode object elif isinstance(exc_msg, str): pass else: # if it is another kind of object, it is not obvious a priori how # to extract the error line from it return _make_response( error_msg=str(exc_msg), error_line=None, valid=False) # if the line is not mentioned, the whole message is taken error_msg = exc_msg.split(', line')[0] # check if the exc_msg contains a line number indication search_match = re.search(r'line \d+', exc_msg) if search_match: error_line = int(search_match.group(0).split()[1]) else: error_line = None return _make_response( error_msg=error_msg, error_line=error_line, valid=False) else: return _make_response(error_msg=None, error_line=None, valid=True)
python
def validate_nrml(request): xml_text = request.POST.get('xml_text') if not xml_text: return HttpResponseBadRequest( 'Please provide the "xml_text" parameter') xml_file = gettemp(xml_text, suffix='.xml') try: nrml.to_python(xml_file) except ExpatError as exc: return _make_response(error_msg=str(exc), error_line=exc.lineno, valid=False) except Exception as exc: exc_msg = exc.args[0] if isinstance(exc_msg, bytes): exc_msg = exc_msg.decode('utf-8') elif isinstance(exc_msg, str): pass else: return _make_response( error_msg=str(exc_msg), error_line=None, valid=False) error_msg = exc_msg.split(', line')[0] search_match = re.search(r'line \d+', exc_msg) if search_match: error_line = int(search_match.group(0).split()[1]) else: error_line = None return _make_response( error_msg=error_msg, error_line=error_line, valid=False) else: return _make_response(error_msg=None, error_line=None, valid=True)
[ "def", "validate_nrml", "(", "request", ")", ":", "xml_text", "=", "request", ".", "POST", ".", "get", "(", "'xml_text'", ")", "if", "not", "xml_text", ":", "return", "HttpResponseBadRequest", "(", "'Please provide the \"xml_text\" parameter'", ")", "xml_file", "=", "gettemp", "(", "xml_text", ",", "suffix", "=", "'.xml'", ")", "try", ":", "nrml", ".", "to_python", "(", "xml_file", ")", "except", "ExpatError", "as", "exc", ":", "return", "_make_response", "(", "error_msg", "=", "str", "(", "exc", ")", ",", "error_line", "=", "exc", ".", "lineno", ",", "valid", "=", "False", ")", "except", "Exception", "as", "exc", ":", "# get the exception message", "exc_msg", "=", "exc", ".", "args", "[", "0", "]", "if", "isinstance", "(", "exc_msg", ",", "bytes", ")", ":", "exc_msg", "=", "exc_msg", ".", "decode", "(", "'utf-8'", ")", "# make it a unicode object", "elif", "isinstance", "(", "exc_msg", ",", "str", ")", ":", "pass", "else", ":", "# if it is another kind of object, it is not obvious a priori how", "# to extract the error line from it", "return", "_make_response", "(", "error_msg", "=", "str", "(", "exc_msg", ")", ",", "error_line", "=", "None", ",", "valid", "=", "False", ")", "# if the line is not mentioned, the whole message is taken", "error_msg", "=", "exc_msg", ".", "split", "(", "', line'", ")", "[", "0", "]", "# check if the exc_msg contains a line number indication", "search_match", "=", "re", ".", "search", "(", "r'line \\d+'", ",", "exc_msg", ")", "if", "search_match", ":", "error_line", "=", "int", "(", "search_match", ".", "group", "(", "0", ")", ".", "split", "(", ")", "[", "1", "]", ")", "else", ":", "error_line", "=", "None", "return", "_make_response", "(", "error_msg", "=", "error_msg", ",", "error_line", "=", "error_line", ",", "valid", "=", "False", ")", "else", ":", "return", "_make_response", "(", "error_msg", "=", "None", ",", "error_line", "=", "None", ",", "valid", "=", "True", ")" ]
Leverage oq-risklib to check if a given XML text is a valid NRML :param request: a `django.http.HttpRequest` object containing the mandatory parameter 'xml_text': the text of the XML to be validated as NRML :returns: a JSON object, containing: * 'valid': a boolean indicating if the provided text is a valid NRML * 'error_msg': the error message, if any error was found (None otherwise) * 'error_line': line of the given XML where the error was found (None if no error was found or if it was not a validation error)
[ "Leverage", "oq", "-", "risklib", "to", "check", "if", "a", "given", "XML", "text", "is", "a", "valid", "NRML" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L226-L276
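The response fields 'valid', 'error_msg' and 'error_line' documented above can be consumed from a client like this; the endpoint path is an assumption, since only the view function is shown in this record:

```python
import requests

xml_text = '<nrml xmlns="http://openquake.org/xmlns/nrml/0.5"/>'
# Endpoint path is an assumption; only the view function is shown above
resp = requests.post('http://localhost:8800/v1/valid/',
                     data={'xml_text': xml_text})
result = resp.json()
print(result['valid'], result['error_msg'], result['error_line'])
```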
gem/oq-engine
openquake/server/views.py
calc
def calc(request, calc_id): """ Get a JSON blob containing all of the parameters for the given calculation (specified by ``calc_id``). Also includes the current job status (executing, complete, etc.). """ try: info = logs.dbcmd('calc_info', calc_id) if not utils.user_has_permission(request, info['user_name']): return HttpResponseForbidden() except dbapi.NotFound: return HttpResponseNotFound() return HttpResponse(content=json.dumps(info), content_type=JSON)
python
def calc(request, calc_id): try: info = logs.dbcmd('calc_info', calc_id) if not utils.user_has_permission(request, info['user_name']): return HttpResponseForbidden() except dbapi.NotFound: return HttpResponseNotFound() return HttpResponse(content=json.dumps(info), content_type=JSON)
[ "def", "calc", "(", "request", ",", "calc_id", ")", ":", "try", ":", "info", "=", "logs", ".", "dbcmd", "(", "'calc_info'", ",", "calc_id", ")", "if", "not", "utils", ".", "user_has_permission", "(", "request", ",", "info", "[", "'user_name'", "]", ")", ":", "return", "HttpResponseForbidden", "(", ")", "except", "dbapi", ".", "NotFound", ":", "return", "HttpResponseNotFound", "(", ")", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "info", ")", ",", "content_type", "=", "JSON", ")" ]
Get a JSON blob containing all of the parameters for the given calculation (specified by ``calc_id``). Also includes the current job status (executing, complete, etc.).
[ "Get", "a", "JSON", "blob", "containing", "all", "of", "the", "parameters", "(", "specified", "by", "calc_id", ")", ".", "Also", "includes", "the", "current", "job", "status", "(", "executing", "complete", "etc", ".", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L281-L293
gem/oq-engine
openquake/server/views.py
calc_list
def calc_list(request, id=None): # view associated to the endpoints /v1/calc/list and /v1/calc/:id/status """ Get a list of calculations and report their id, status, calculation_mode, is_running, description, and a url where more detailed information can be accessed. This is called several times by the Javascript. Responses are in JSON. """ base_url = _get_base_url(request) calc_data = logs.dbcmd('get_calcs', request.GET, utils.get_valid_users(request), utils.get_acl_on(request), id) response_data = [] username = psutil.Process(os.getpid()).username() for (hc_id, owner, status, calculation_mode, is_running, desc, pid, parent_id, size_mb) in calc_data: url = urlparse.urljoin(base_url, 'v1/calc/%d' % hc_id) abortable = False if is_running: try: if psutil.Process(pid).username() == username: abortable = True except psutil.NoSuchProcess: pass response_data.append( dict(id=hc_id, owner=owner, calculation_mode=calculation_mode, status=status, is_running=bool(is_running), description=desc, url=url, parent_id=parent_id, abortable=abortable, size_mb=size_mb)) # if id is specified the related dictionary is returned instead the list if id is not None: [response_data] = response_data return HttpResponse(content=json.dumps(response_data), content_type=JSON)
python
def calc_list(request, id=None): base_url = _get_base_url(request) calc_data = logs.dbcmd('get_calcs', request.GET, utils.get_valid_users(request), utils.get_acl_on(request), id) response_data = [] username = psutil.Process(os.getpid()).username() for (hc_id, owner, status, calculation_mode, is_running, desc, pid, parent_id, size_mb) in calc_data: url = urlparse.urljoin(base_url, 'v1/calc/%d' % hc_id) abortable = False if is_running: try: if psutil.Process(pid).username() == username: abortable = True except psutil.NoSuchProcess: pass response_data.append( dict(id=hc_id, owner=owner, calculation_mode=calculation_mode, status=status, is_running=bool(is_running), description=desc, url=url, parent_id=parent_id, abortable=abortable, size_mb=size_mb)) if id is not None: [response_data] = response_data return HttpResponse(content=json.dumps(response_data), content_type=JSON)
[ "def", "calc_list", "(", "request", ",", "id", "=", "None", ")", ":", "# view associated to the endpoints /v1/calc/list and /v1/calc/:id/status", "base_url", "=", "_get_base_url", "(", "request", ")", "calc_data", "=", "logs", ".", "dbcmd", "(", "'get_calcs'", ",", "request", ".", "GET", ",", "utils", ".", "get_valid_users", "(", "request", ")", ",", "utils", ".", "get_acl_on", "(", "request", ")", ",", "id", ")", "response_data", "=", "[", "]", "username", "=", "psutil", ".", "Process", "(", "os", ".", "getpid", "(", ")", ")", ".", "username", "(", ")", "for", "(", "hc_id", ",", "owner", ",", "status", ",", "calculation_mode", ",", "is_running", ",", "desc", ",", "pid", ",", "parent_id", ",", "size_mb", ")", "in", "calc_data", ":", "url", "=", "urlparse", ".", "urljoin", "(", "base_url", ",", "'v1/calc/%d'", "%", "hc_id", ")", "abortable", "=", "False", "if", "is_running", ":", "try", ":", "if", "psutil", ".", "Process", "(", "pid", ")", ".", "username", "(", ")", "==", "username", ":", "abortable", "=", "True", "except", "psutil", ".", "NoSuchProcess", ":", "pass", "response_data", ".", "append", "(", "dict", "(", "id", "=", "hc_id", ",", "owner", "=", "owner", ",", "calculation_mode", "=", "calculation_mode", ",", "status", "=", "status", ",", "is_running", "=", "bool", "(", "is_running", ")", ",", "description", "=", "desc", ",", "url", "=", "url", ",", "parent_id", "=", "parent_id", ",", "abortable", "=", "abortable", ",", "size_mb", "=", "size_mb", ")", ")", "# if id is specified the related dictionary is returned instead the list", "if", "id", "is", "not", "None", ":", "[", "response_data", "]", "=", "response_data", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "response_data", ")", ",", "content_type", "=", "JSON", ")" ]
Get a list of calculations and report their id, status, calculation_mode, is_running, description, and a url where more detailed information can be accessed. This is called several times by the Javascript. Responses are in JSON.
[ "Get", "a", "list", "of", "calculations", "and", "report", "their", "id", "status", "calculation_mode", "is_running", "description", "and", "a", "url", "where", "more", "detailed", "information", "can", "be", "accessed", ".", "This", "is", "called", "several", "times", "by", "the", "Javascript", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L298-L335
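The view's own comment names the endpoint /v1/calc/list, and the dict keys of the response are visible above, so a minimal consumer can be sketched; host and port are assumptions:

```python
import requests

# /v1/calc/list is named in the view's comment; host/port are assumed
resp = requests.get('http://localhost:8800/v1/calc/list')
for calc in resp.json():
    print(calc['id'], calc['status'], calc['is_running'], calc['description'])
```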
gem/oq-engine
openquake/server/views.py
calc_abort
def calc_abort(request, calc_id): """ Abort the given calculation, it is it running """ job = logs.dbcmd('get_job', calc_id) if job is None: message = {'error': 'Unknown job %s' % calc_id} return HttpResponse(content=json.dumps(message), content_type=JSON) if job.status not in ('submitted', 'executing'): message = {'error': 'Job %s is not running' % job.id} return HttpResponse(content=json.dumps(message), content_type=JSON) if not utils.user_has_permission(request, job.user_name): message = {'error': ('User %s has no permission to abort job %s' % (job.user_name, job.id))} return HttpResponse(content=json.dumps(message), content_type=JSON, status=403) if job.pid: # is a spawned job try: os.kill(job.pid, signal.SIGTERM) except Exception as exc: logging.error(exc) else: logging.warning('Aborting job %d, pid=%d', job.id, job.pid) logs.dbcmd('set_status', job.id, 'aborted') message = {'success': 'Killing job %d' % job.id} return HttpResponse(content=json.dumps(message), content_type=JSON) message = {'error': 'PID for job %s not found' % job.id} return HttpResponse(content=json.dumps(message), content_type=JSON)
python
def calc_abort(request, calc_id): job = logs.dbcmd('get_job', calc_id) if job is None: message = {'error': 'Unknown job %s' % calc_id} return HttpResponse(content=json.dumps(message), content_type=JSON) if job.status not in ('submitted', 'executing'): message = {'error': 'Job %s is not running' % job.id} return HttpResponse(content=json.dumps(message), content_type=JSON) if not utils.user_has_permission(request, job.user_name): message = {'error': ('User %s has no permission to abort job %s' % (job.user_name, job.id))} return HttpResponse(content=json.dumps(message), content_type=JSON, status=403) if job.pid: try: os.kill(job.pid, signal.SIGTERM) except Exception as exc: logging.error(exc) else: logging.warning('Aborting job %d, pid=%d', job.id, job.pid) logs.dbcmd('set_status', job.id, 'aborted') message = {'success': 'Killing job %d' % job.id} return HttpResponse(content=json.dumps(message), content_type=JSON) message = {'error': 'PID for job %s not found' % job.id} return HttpResponse(content=json.dumps(message), content_type=JSON)
[ "def", "calc_abort", "(", "request", ",", "calc_id", ")", ":", "job", "=", "logs", ".", "dbcmd", "(", "'get_job'", ",", "calc_id", ")", "if", "job", "is", "None", ":", "message", "=", "{", "'error'", ":", "'Unknown job %s'", "%", "calc_id", "}", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "message", ")", ",", "content_type", "=", "JSON", ")", "if", "job", ".", "status", "not", "in", "(", "'submitted'", ",", "'executing'", ")", ":", "message", "=", "{", "'error'", ":", "'Job %s is not running'", "%", "job", ".", "id", "}", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "message", ")", ",", "content_type", "=", "JSON", ")", "if", "not", "utils", ".", "user_has_permission", "(", "request", ",", "job", ".", "user_name", ")", ":", "message", "=", "{", "'error'", ":", "(", "'User %s has no permission to abort job %s'", "%", "(", "job", ".", "user_name", ",", "job", ".", "id", ")", ")", "}", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "message", ")", ",", "content_type", "=", "JSON", ",", "status", "=", "403", ")", "if", "job", ".", "pid", ":", "# is a spawned job", "try", ":", "os", ".", "kill", "(", "job", ".", "pid", ",", "signal", ".", "SIGTERM", ")", "except", "Exception", "as", "exc", ":", "logging", ".", "error", "(", "exc", ")", "else", ":", "logging", ".", "warning", "(", "'Aborting job %d, pid=%d'", ",", "job", ".", "id", ",", "job", ".", "pid", ")", "logs", ".", "dbcmd", "(", "'set_status'", ",", "job", ".", "id", ",", "'aborted'", ")", "message", "=", "{", "'success'", ":", "'Killing job %d'", "%", "job", ".", "id", "}", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "message", ")", ",", "content_type", "=", "JSON", ")", "message", "=", "{", "'error'", ":", "'PID for job %s not found'", "%", "job", ".", "id", "}", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "message", ")", ",", "content_type", "=", "JSON", ")" ]
Abort the given calculation, if it is running
[ "Abort", "the", "given", "calculation", "if", "it", "is", "running" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L341-L372
gem/oq-engine
openquake/server/views.py
calc_remove
def calc_remove(request, calc_id): """ Remove the calculation with the given id """ # Only the owner can remove a job user = utils.get_user(request) try: message = logs.dbcmd('del_calc', calc_id, user) except dbapi.NotFound: return HttpResponseNotFound() if 'success' in message: return HttpResponse(content=json.dumps(message), content_type=JSON, status=200) elif 'error' in message: logging.error(message['error']) return HttpResponse(content=json.dumps(message), content_type=JSON, status=403) else: # This is an untrapped server error logging.error(message) return HttpResponse(content=message, content_type='text/plain', status=500)
python
def calc_remove(request, calc_id): user = utils.get_user(request) try: message = logs.dbcmd('del_calc', calc_id, user) except dbapi.NotFound: return HttpResponseNotFound() if 'success' in message: return HttpResponse(content=json.dumps(message), content_type=JSON, status=200) elif 'error' in message: logging.error(message['error']) return HttpResponse(content=json.dumps(message), content_type=JSON, status=403) else: logging.error(message) return HttpResponse(content=message, content_type='text/plain', status=500)
[ "def", "calc_remove", "(", "request", ",", "calc_id", ")", ":", "# Only the owner can remove a job", "user", "=", "utils", ".", "get_user", "(", "request", ")", "try", ":", "message", "=", "logs", ".", "dbcmd", "(", "'del_calc'", ",", "calc_id", ",", "user", ")", "except", "dbapi", ".", "NotFound", ":", "return", "HttpResponseNotFound", "(", ")", "if", "'success'", "in", "message", ":", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "message", ")", ",", "content_type", "=", "JSON", ",", "status", "=", "200", ")", "elif", "'error'", "in", "message", ":", "logging", ".", "error", "(", "message", "[", "'error'", "]", ")", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "message", ")", ",", "content_type", "=", "JSON", ",", "status", "=", "403", ")", "else", ":", "# This is an untrapped server error", "logging", ".", "error", "(", "message", ")", "return", "HttpResponse", "(", "content", "=", "message", ",", "content_type", "=", "'text/plain'", ",", "status", "=", "500", ")" ]
Remove the calculation with the given id
[ "Remove", "the", "calculation", "with", "the", "given", "id" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L378-L400
gem/oq-engine
openquake/server/views.py
log_to_json
def log_to_json(log): """Convert a log record into a list of strings""" return [log.timestamp.isoformat()[:22], log.level, log.process, log.message]
python
def log_to_json(log): return [log.timestamp.isoformat()[:22], log.level, log.process, log.message]
[ "def", "log_to_json", "(", "log", ")", ":", "return", "[", "log", ".", "timestamp", ".", "isoformat", "(", ")", "[", ":", "22", "]", ",", "log", ".", "level", ",", "log", ".", "process", ",", "log", ".", "message", "]" ]
Convert a log record into a list of strings
[ "Convert", "a", "log", "record", "into", "a", "list", "of", "strings" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L403-L406
gem/oq-engine
openquake/server/views.py
calc_log
def calc_log(request, calc_id, start, stop): """ Get a slice of the calculation log as a JSON list of rows """ start = start or 0 stop = stop or 0 try: response_data = logs.dbcmd('get_log_slice', calc_id, start, stop) except dbapi.NotFound: return HttpResponseNotFound() return HttpResponse(content=json.dumps(response_data), content_type=JSON)
python
def calc_log(request, calc_id, start, stop): start = start or 0 stop = stop or 0 try: response_data = logs.dbcmd('get_log_slice', calc_id, start, stop) except dbapi.NotFound: return HttpResponseNotFound() return HttpResponse(content=json.dumps(response_data), content_type=JSON)
[ "def", "calc_log", "(", "request", ",", "calc_id", ",", "start", ",", "stop", ")", ":", "start", "=", "start", "or", "0", "stop", "=", "stop", "or", "0", "try", ":", "response_data", "=", "logs", ".", "dbcmd", "(", "'get_log_slice'", ",", "calc_id", ",", "start", ",", "stop", ")", "except", "dbapi", ".", "NotFound", ":", "return", "HttpResponseNotFound", "(", ")", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "response_data", ")", ",", "content_type", "=", "JSON", ")" ]
Get a slice of the calculation log as a JSON list of rows
[ "Get", "a", "slice", "of", "the", "calculation", "log", "as", "a", "JSON", "list", "of", "rows" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L411-L421
gem/oq-engine
openquake/server/views.py
calc_log_size
def calc_log_size(request, calc_id): """ Get the current number of lines in the log """ try: response_data = logs.dbcmd('get_log_size', calc_id) except dbapi.NotFound: return HttpResponseNotFound() return HttpResponse(content=json.dumps(response_data), content_type=JSON)
python
def calc_log_size(request, calc_id): try: response_data = logs.dbcmd('get_log_size', calc_id) except dbapi.NotFound: return HttpResponseNotFound() return HttpResponse(content=json.dumps(response_data), content_type=JSON)
[ "def", "calc_log_size", "(", "request", ",", "calc_id", ")", ":", "try", ":", "response_data", "=", "logs", ".", "dbcmd", "(", "'get_log_size'", ",", "calc_id", ")", "except", "dbapi", ".", "NotFound", ":", "return", "HttpResponseNotFound", "(", ")", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "response_data", ")", ",", "content_type", "=", "JSON", ")" ]
Get the current number of lines in the log
[ "Get", "the", "current", "number", "of", "lines", "in", "the", "log" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L426-L434
gem/oq-engine
openquake/server/views.py
calc_run
def calc_run(request): """ Run a calculation. :param request: a `django.http.HttpRequest` object. If the request has the attribute `hazard_job_id`, the results of the specified hazard calculations will be re-used as input by the risk calculation. The request also needs to contain the files needed to perform the calculation. They can be uploaded as separate files, or zipped together. """ hazard_job_id = request.POST.get('hazard_job_id') if hazard_job_id: hazard_job_id = int(hazard_job_id) candidates = ("job_risk.ini", "job.ini") else: candidates = ("job_hazard.ini", "job_haz.ini", "job.ini") result = safely_call(_prepare_job, (request, candidates)) if result.tb_str: return HttpResponse(json.dumps(result.tb_str.splitlines()), content_type=JSON, status=500) inifiles = result.get() if not inifiles: msg = 'Could not find any file of the form %s' % str(candidates) logging.error(msg) return HttpResponse(content=json.dumps([msg]), content_type=JSON, status=500) user = utils.get_user(request) try: job_id, pid = submit_job(inifiles[0], user, hazard_job_id) except Exception as exc: # no job created, for instance missing .xml file # get the exception message exc_msg = str(exc) logging.error(exc_msg) response_data = exc_msg.splitlines() status = 500 else: response_data = dict(job_id=job_id, status='created', pid=pid) status = 200 return HttpResponse(content=json.dumps(response_data), content_type=JSON, status=status)
python
def calc_run(request): hazard_job_id = request.POST.get('hazard_job_id') if hazard_job_id: hazard_job_id = int(hazard_job_id) candidates = ("job_risk.ini", "job.ini") else: candidates = ("job_hazard.ini", "job_haz.ini", "job.ini") result = safely_call(_prepare_job, (request, candidates)) if result.tb_str: return HttpResponse(json.dumps(result.tb_str.splitlines()), content_type=JSON, status=500) inifiles = result.get() if not inifiles: msg = 'Could not find any file of the form %s' % str(candidates) logging.error(msg) return HttpResponse(content=json.dumps([msg]), content_type=JSON, status=500) user = utils.get_user(request) try: job_id, pid = submit_job(inifiles[0], user, hazard_job_id) except Exception as exc: exc_msg = str(exc) logging.error(exc_msg) response_data = exc_msg.splitlines() status = 500 else: response_data = dict(job_id=job_id, status='created', pid=pid) status = 200 return HttpResponse(content=json.dumps(response_data), content_type=JSON, status=status)
[ "def", "calc_run", "(", "request", ")", ":", "hazard_job_id", "=", "request", ".", "POST", ".", "get", "(", "'hazard_job_id'", ")", "if", "hazard_job_id", ":", "hazard_job_id", "=", "int", "(", "hazard_job_id", ")", "candidates", "=", "(", "\"job_risk.ini\"", ",", "\"job.ini\"", ")", "else", ":", "candidates", "=", "(", "\"job_hazard.ini\"", ",", "\"job_haz.ini\"", ",", "\"job.ini\"", ")", "result", "=", "safely_call", "(", "_prepare_job", ",", "(", "request", ",", "candidates", ")", ")", "if", "result", ".", "tb_str", ":", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "result", ".", "tb_str", ".", "splitlines", "(", ")", ")", ",", "content_type", "=", "JSON", ",", "status", "=", "500", ")", "inifiles", "=", "result", ".", "get", "(", ")", "if", "not", "inifiles", ":", "msg", "=", "'Could not find any file of the form %s'", "%", "str", "(", "candidates", ")", "logging", ".", "error", "(", "msg", ")", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "[", "msg", "]", ")", ",", "content_type", "=", "JSON", ",", "status", "=", "500", ")", "user", "=", "utils", ".", "get_user", "(", "request", ")", "try", ":", "job_id", ",", "pid", "=", "submit_job", "(", "inifiles", "[", "0", "]", ",", "user", ",", "hazard_job_id", ")", "except", "Exception", "as", "exc", ":", "# no job created, for instance missing .xml file", "# get the exception message", "exc_msg", "=", "str", "(", "exc", ")", "logging", ".", "error", "(", "exc_msg", ")", "response_data", "=", "exc_msg", ".", "splitlines", "(", ")", "status", "=", "500", "else", ":", "response_data", "=", "dict", "(", "job_id", "=", "job_id", ",", "status", "=", "'created'", ",", "pid", "=", "pid", ")", "status", "=", "200", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "response_data", ")", ",", "content_type", "=", "JSON", ",", "status", "=", "status", ")" ]
Run a calculation. :param request: a `django.http.HttpRequest` object. If the request has the attribute `hazard_job_id`, the results of the specified hazard calculations will be re-used as input by the risk calculation. The request also needs to contain the files needed to perform the calculation. They can be uploaded as separate files, or zipped together.
[ "Run", "a", "calculation", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L440-L484
gem/oq-engine
openquake/server/views.py
submit_job
def submit_job(job_ini, username, hazard_job_id=None): """ Create a job object from the given job.ini file in the job directory and run it in a new process. Returns the job ID and PID. """ job_id = logs.init('job') oq = engine.job_from_file( job_ini, job_id, username, hazard_calculation_id=hazard_job_id) pik = pickle.dumps(oq, protocol=0) # human readable protocol code = RUNCALC % dict(job_id=job_id, hazard_job_id=hazard_job_id, pik=pik, username=username) tmp_py = gettemp(code, suffix='.py') # print(code, tmp_py) # useful when debugging devnull = subprocess.DEVNULL popen = subprocess.Popen([sys.executable, tmp_py], stdin=devnull, stdout=devnull, stderr=devnull) threading.Thread(target=popen.wait).start() logs.dbcmd('update_job', job_id, {'pid': popen.pid}) return job_id, popen.pid
python
def submit_job(job_ini, username, hazard_job_id=None): job_id = logs.init('job') oq = engine.job_from_file( job_ini, job_id, username, hazard_calculation_id=hazard_job_id) pik = pickle.dumps(oq, protocol=0) code = RUNCALC % dict(job_id=job_id, hazard_job_id=hazard_job_id, pik=pik, username=username) tmp_py = gettemp(code, suffix='.py') devnull = subprocess.DEVNULL popen = subprocess.Popen([sys.executable, tmp_py], stdin=devnull, stdout=devnull, stderr=devnull) threading.Thread(target=popen.wait).start() logs.dbcmd('update_job', job_id, {'pid': popen.pid}) return job_id, popen.pid
[ "def", "submit_job", "(", "job_ini", ",", "username", ",", "hazard_job_id", "=", "None", ")", ":", "job_id", "=", "logs", ".", "init", "(", "'job'", ")", "oq", "=", "engine", ".", "job_from_file", "(", "job_ini", ",", "job_id", ",", "username", ",", "hazard_calculation_id", "=", "hazard_job_id", ")", "pik", "=", "pickle", ".", "dumps", "(", "oq", ",", "protocol", "=", "0", ")", "# human readable protocol", "code", "=", "RUNCALC", "%", "dict", "(", "job_id", "=", "job_id", ",", "hazard_job_id", "=", "hazard_job_id", ",", "pik", "=", "pik", ",", "username", "=", "username", ")", "tmp_py", "=", "gettemp", "(", "code", ",", "suffix", "=", "'.py'", ")", "# print(code, tmp_py) # useful when debugging", "devnull", "=", "subprocess", ".", "DEVNULL", "popen", "=", "subprocess", ".", "Popen", "(", "[", "sys", ".", "executable", ",", "tmp_py", "]", ",", "stdin", "=", "devnull", ",", "stdout", "=", "devnull", ",", "stderr", "=", "devnull", ")", "threading", ".", "Thread", "(", "target", "=", "popen", ".", "wait", ")", ".", "start", "(", ")", "logs", ".", "dbcmd", "(", "'update_job'", ",", "job_id", ",", "{", "'pid'", ":", "popen", ".", "pid", "}", ")", "return", "job_id", ",", "popen", ".", "pid" ]
Create a job object from the given job.ini file in the job directory and run it in a new process. Returns the job ID and PID.
[ "Create", "a", "job", "object", "from", "the", "given", "job", ".", "ini", "file", "in", "the", "job", "directory", "and", "run", "it", "in", "a", "new", "process", ".", "Returns", "the", "job", "ID", "and", "PID", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L502-L520
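A small sketch of why `protocol=0` in `submit_job` is commented as human readable: pickle's protocol 0 emits printable ASCII, which can be embedded safely in generated script text (the payload here is made up):

    import pickle

    payload = pickle.dumps({'calculation_mode': 'classical'}, protocol=0)
    print(payload.decode('ascii'))  # printable opcodes rather than binary
    print(pickle.loads(payload))    # round-trips to the original object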
gem/oq-engine
openquake/server/views.py
calc_results
def calc_results(request, calc_id): """ Get a summarized list of calculation results for a given ``calc_id``. Result is a JSON array of objects containing the following attributes: * id * name * type (hazard_curve, hazard_map, etc.) * url (the exact url where the full result can be accessed) """ # If the specified calculation doesn't exist OR is not yet complete, # throw back a 404. try: info = logs.dbcmd('calc_info', calc_id) if not utils.user_has_permission(request, info['user_name']): return HttpResponseForbidden() except dbapi.NotFound: return HttpResponseNotFound() base_url = _get_base_url(request) # NB: export_output has as keys the list (output_type, extension) # so this returns an ordered map output_type -> extensions such as # {'agg_loss_curve': ['xml', 'csv'], ...} output_types = groupby(export, lambda oe: oe[0], lambda oes: [e for o, e in oes]) results = logs.dbcmd('get_outputs', calc_id) if not results: return HttpResponseNotFound() response_data = [] for result in results: try: # output from the datastore rtype = result.ds_key # Catalina asked to remove the .txt outputs (used for the GMFs) outtypes = [ot for ot in output_types[rtype] if ot != 'txt'] except KeyError: continue # non-exportable outputs should not be shown url = urlparse.urljoin(base_url, 'v1/calc/result/%d' % result.id) datum = dict( id=result.id, name=result.display_name, type=rtype, outtypes=outtypes, url=url, size_mb=result.size_mb) response_data.append(datum) return HttpResponse(content=json.dumps(response_data))
python
def calc_results(request, calc_id): try: info = logs.dbcmd('calc_info', calc_id) if not utils.user_has_permission(request, info['user_name']): return HttpResponseForbidden() except dbapi.NotFound: return HttpResponseNotFound() base_url = _get_base_url(request) output_types = groupby(export, lambda oe: oe[0], lambda oes: [e for o, e in oes]) results = logs.dbcmd('get_outputs', calc_id) if not results: return HttpResponseNotFound() response_data = [] for result in results: try: rtype = result.ds_key outtypes = [ot for ot in output_types[rtype] if ot != 'txt'] except KeyError: continue url = urlparse.urljoin(base_url, 'v1/calc/result/%d' % result.id) datum = dict( id=result.id, name=result.display_name, type=rtype, outtypes=outtypes, url=url, size_mb=result.size_mb) response_data.append(datum) return HttpResponse(content=json.dumps(response_data))
[ "def", "calc_results", "(", "request", ",", "calc_id", ")", ":", "# If the specified calculation doesn't exist OR is not yet complete,", "# throw back a 404.", "try", ":", "info", "=", "logs", ".", "dbcmd", "(", "'calc_info'", ",", "calc_id", ")", "if", "not", "utils", ".", "user_has_permission", "(", "request", ",", "info", "[", "'user_name'", "]", ")", ":", "return", "HttpResponseForbidden", "(", ")", "except", "dbapi", ".", "NotFound", ":", "return", "HttpResponseNotFound", "(", ")", "base_url", "=", "_get_base_url", "(", "request", ")", "# NB: export_output has as keys the list (output_type, extension)", "# so this returns an ordered map output_type -> extensions such as", "# {'agg_loss_curve': ['xml', 'csv'], ...}", "output_types", "=", "groupby", "(", "export", ",", "lambda", "oe", ":", "oe", "[", "0", "]", ",", "lambda", "oes", ":", "[", "e", "for", "o", ",", "e", "in", "oes", "]", ")", "results", "=", "logs", ".", "dbcmd", "(", "'get_outputs'", ",", "calc_id", ")", "if", "not", "results", ":", "return", "HttpResponseNotFound", "(", ")", "response_data", "=", "[", "]", "for", "result", "in", "results", ":", "try", ":", "# output from the datastore", "rtype", "=", "result", ".", "ds_key", "# Catalina asked to remove the .txt outputs (used for the GMFs)", "outtypes", "=", "[", "ot", "for", "ot", "in", "output_types", "[", "rtype", "]", "if", "ot", "!=", "'txt'", "]", "except", "KeyError", ":", "continue", "# non-exportable outputs should not be shown", "url", "=", "urlparse", ".", "urljoin", "(", "base_url", ",", "'v1/calc/result/%d'", "%", "result", ".", "id", ")", "datum", "=", "dict", "(", "id", "=", "result", ".", "id", ",", "name", "=", "result", ".", "display_name", ",", "type", "=", "rtype", ",", "outtypes", "=", "outtypes", ",", "url", "=", "url", ",", "size_mb", "=", "result", ".", "size_mb", ")", "response_data", ".", "append", "(", "datum", ")", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "response_data", ")", ")" ]
Get a summarized list of calculation results for a given ``calc_id``. Result is a JSON array of objects containing the following attributes: * id * name * type (hazard_curve, hazard_map, etc.) * url (the exact url where the full result can be accessed)
[ "Get", "a", "summarized", "list", "of", "calculation", "results", "for", "a", "given", "calc_id", ".", "Result", "is", "a", "JSON", "array", "of", "objects", "containing", "the", "following", "attributes", ":" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L525-L568
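A sketch of the grouping idiom behind `output_types` in `calc_results`, using a plain defaultdict instead of openquake's `groupby` helper (the sample pairs are made up):

    from collections import defaultdict

    export_pairs = [('hcurves', 'csv'), ('hcurves', 'xml'), ('hmaps', 'csv')]
    output_types = defaultdict(list)
    for output_type, extension in export_pairs:
        output_types[output_type].append(extension)
    print(dict(output_types))  # {'hcurves': ['csv', 'xml'], 'hmaps': ['csv']}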
gem/oq-engine
openquake/server/views.py
calc_result
def calc_result(request, result_id): """ Download a specific result, by ``result_id``. The common abstracted functionality for getting hazard or risk results. :param request: `django.http.HttpRequest` object. Can contain a `export_type` GET param (the default is 'xml' if no param is specified). :param result_id: The id of the requested artifact. :returns: If the requested ``result_id`` is not available in the format designated by the `export_type`. Otherwise, return a `django.http.HttpResponse` containing the content of the requested artifact. Parameters for the GET request can include an `export_type`, such as 'xml', 'geojson', 'csv', etc. """ # If the result for the requested ID doesn't exist, OR # the job which it is related too is not complete, # throw back a 404. try: job_id, job_status, job_user, datadir, ds_key = logs.dbcmd( 'get_result', result_id) if not utils.user_has_permission(request, job_user): return HttpResponseForbidden() except dbapi.NotFound: return HttpResponseNotFound() etype = request.GET.get('export_type') export_type = etype or DEFAULT_EXPORT_TYPE tmpdir = tempfile.mkdtemp() try: exported = core.export_from_db( (ds_key, export_type), job_id, datadir, tmpdir) except DataStoreExportError as exc: # TODO: there should be a better error page return HttpResponse(content='%s: %s' % (exc.__class__.__name__, exc), content_type='text/plain', status=500) if not exported: # Throw back a 404 if the exact export parameters are not supported return HttpResponseNotFound( 'Nothing to export for export_type=%s, %s' % (export_type, ds_key)) elif len(exported) > 1: # Building an archive so that there can be a single file download archname = ds_key + '-' + export_type + '.zip' zipfiles(exported, os.path.join(tmpdir, archname)) exported = os.path.join(tmpdir, archname) else: # single file exported = exported[0] content_type = EXPORT_CONTENT_TYPE_MAP.get( export_type, DEFAULT_CONTENT_TYPE) fname = 'output-%s-%s' % (result_id, os.path.basename(exported)) stream = FileWrapper(open(exported, 'rb')) # 'b' is needed on Windows stream.close = lambda: ( FileWrapper.close(stream), shutil.rmtree(tmpdir)) response = FileResponse(stream, content_type=content_type) response['Content-Disposition'] = ( 'attachment; filename=%s' % os.path.basename(fname)) response['Content-Length'] = str(os.path.getsize(exported)) return response
python
def calc_result(request, result_id): try: job_id, job_status, job_user, datadir, ds_key = logs.dbcmd( 'get_result', result_id) if not utils.user_has_permission(request, job_user): return HttpResponseForbidden() except dbapi.NotFound: return HttpResponseNotFound() etype = request.GET.get('export_type') export_type = etype or DEFAULT_EXPORT_TYPE tmpdir = tempfile.mkdtemp() try: exported = core.export_from_db( (ds_key, export_type), job_id, datadir, tmpdir) except DataStoreExportError as exc: return HttpResponse(content='%s: %s' % (exc.__class__.__name__, exc), content_type='text/plain', status=500) if not exported: return HttpResponseNotFound( 'Nothing to export for export_type=%s, %s' % (export_type, ds_key)) elif len(exported) > 1: archname = ds_key + '-' + export_type + '.zip' zipfiles(exported, os.path.join(tmpdir, archname)) exported = os.path.join(tmpdir, archname) else: exported = exported[0] content_type = EXPORT_CONTENT_TYPE_MAP.get( export_type, DEFAULT_CONTENT_TYPE) fname = 'output-%s-%s' % (result_id, os.path.basename(exported)) stream = FileWrapper(open(exported, 'rb')) stream.close = lambda: ( FileWrapper.close(stream), shutil.rmtree(tmpdir)) response = FileResponse(stream, content_type=content_type) response['Content-Disposition'] = ( 'attachment; filename=%s' % os.path.basename(fname)) response['Content-Length'] = str(os.path.getsize(exported)) return response
[ "def", "calc_result", "(", "request", ",", "result_id", ")", ":", "# If the result for the requested ID doesn't exist, OR", "# the job which it is related too is not complete,", "# throw back a 404.", "try", ":", "job_id", ",", "job_status", ",", "job_user", ",", "datadir", ",", "ds_key", "=", "logs", ".", "dbcmd", "(", "'get_result'", ",", "result_id", ")", "if", "not", "utils", ".", "user_has_permission", "(", "request", ",", "job_user", ")", ":", "return", "HttpResponseForbidden", "(", ")", "except", "dbapi", ".", "NotFound", ":", "return", "HttpResponseNotFound", "(", ")", "etype", "=", "request", ".", "GET", ".", "get", "(", "'export_type'", ")", "export_type", "=", "etype", "or", "DEFAULT_EXPORT_TYPE", "tmpdir", "=", "tempfile", ".", "mkdtemp", "(", ")", "try", ":", "exported", "=", "core", ".", "export_from_db", "(", "(", "ds_key", ",", "export_type", ")", ",", "job_id", ",", "datadir", ",", "tmpdir", ")", "except", "DataStoreExportError", "as", "exc", ":", "# TODO: there should be a better error page", "return", "HttpResponse", "(", "content", "=", "'%s: %s'", "%", "(", "exc", ".", "__class__", ".", "__name__", ",", "exc", ")", ",", "content_type", "=", "'text/plain'", ",", "status", "=", "500", ")", "if", "not", "exported", ":", "# Throw back a 404 if the exact export parameters are not supported", "return", "HttpResponseNotFound", "(", "'Nothing to export for export_type=%s, %s'", "%", "(", "export_type", ",", "ds_key", ")", ")", "elif", "len", "(", "exported", ")", ">", "1", ":", "# Building an archive so that there can be a single file download", "archname", "=", "ds_key", "+", "'-'", "+", "export_type", "+", "'.zip'", "zipfiles", "(", "exported", ",", "os", ".", "path", ".", "join", "(", "tmpdir", ",", "archname", ")", ")", "exported", "=", "os", ".", "path", ".", "join", "(", "tmpdir", ",", "archname", ")", "else", ":", "# single file", "exported", "=", "exported", "[", "0", "]", "content_type", "=", "EXPORT_CONTENT_TYPE_MAP", ".", "get", "(", "export_type", ",", "DEFAULT_CONTENT_TYPE", ")", "fname", "=", "'output-%s-%s'", "%", "(", "result_id", ",", "os", ".", "path", ".", "basename", "(", "exported", ")", ")", "stream", "=", "FileWrapper", "(", "open", "(", "exported", ",", "'rb'", ")", ")", "# 'b' is needed on Windows", "stream", ".", "close", "=", "lambda", ":", "(", "FileWrapper", ".", "close", "(", "stream", ")", ",", "shutil", ".", "rmtree", "(", "tmpdir", ")", ")", "response", "=", "FileResponse", "(", "stream", ",", "content_type", "=", "content_type", ")", "response", "[", "'Content-Disposition'", "]", "=", "(", "'attachment; filename=%s'", "%", "os", ".", "path", ".", "basename", "(", "fname", ")", ")", "response", "[", "'Content-Length'", "]", "=", "str", "(", "os", ".", "path", ".", "getsize", "(", "exported", ")", ")", "return", "response" ]
Download a specific result, by ``result_id``. The common abstracted functionality for getting hazard or risk results. :param request: `django.http.HttpRequest` object. Can contain an `export_type` GET param (the default is 'xml' if no param is specified). :param result_id: The id of the requested artifact. :returns: A 404 response if the requested ``result_id`` is not available in the format designated by the `export_type`; otherwise, a `django.http.HttpResponse` containing the content of the requested artifact. Parameters for the GET request can include an `export_type`, such as 'xml', 'geojson', 'csv', etc.
[ "Download", "a", "specific", "result", "by", "result_id", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L587-L653
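A sketch of the multi-file branch of `calc_result`: several exported files are bundled into one archive so a single download can be returned. This stands in for the `zipfiles` helper; the paths are made up:

    import os
    import zipfile

    def zip_exports(paths, archive_path):
        # write each exported file into the archive under its base name
        with zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED) as z:
            for path in paths:
                z.write(path, arcname=os.path.basename(path))

    # zip_exports(['out-1.csv', 'out-2.csv'], '/tmp/hcurves-csv.zip')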
gem/oq-engine
openquake/server/views.py
extract
def extract(request, calc_id, what): """ Wrapper over the `oq extract` command. If `setting.LOCKDOWN` is true only calculations owned by the current user can be retrieved. """ job = logs.dbcmd('get_job', int(calc_id)) if job is None: return HttpResponseNotFound() if not utils.user_has_permission(request, job.user_name): return HttpResponseForbidden() try: # read the data and save them on a temporary .npz file with datastore.read(job.ds_calc_dir + '.hdf5') as ds: fd, fname = tempfile.mkstemp( prefix=what.replace('/', '-'), suffix='.npz') os.close(fd) n = len(request.path_info) query_string = unquote_plus(request.get_full_path()[n:]) aw = _extract(ds, what + query_string) a = {} for key, val in vars(aw).items(): key = str(key) # can be a numpy.bytes_ if isinstance(val, str): # without this oq extract would fail a[key] = numpy.array(val.encode('utf-8')) elif isinstance(val, dict): # this is hack: we are losing the values a[key] = list(val) else: a[key] = val numpy.savez_compressed(fname, **a) except Exception as exc: tb = ''.join(traceback.format_tb(exc.__traceback__)) return HttpResponse( content='%s: %s\n%s' % (exc.__class__.__name__, exc, tb), content_type='text/plain', status=500) # stream the data back stream = FileWrapper(open(fname, 'rb')) stream.close = lambda: (FileWrapper.close(stream), os.remove(fname)) response = FileResponse(stream, content_type='application/octet-stream') response['Content-Disposition'] = ( 'attachment; filename=%s' % os.path.basename(fname)) response['Content-Length'] = str(os.path.getsize(fname)) return response
python
def extract(request, calc_id, what): job = logs.dbcmd('get_job', int(calc_id)) if job is None: return HttpResponseNotFound() if not utils.user_has_permission(request, job.user_name): return HttpResponseForbidden() try: with datastore.read(job.ds_calc_dir + '.hdf5') as ds: fd, fname = tempfile.mkstemp( prefix=what.replace('/', '-'), suffix='.npz') os.close(fd) n = len(request.path_info) query_string = unquote_plus(request.get_full_path()[n:]) aw = _extract(ds, what + query_string) a = {} for key, val in vars(aw).items(): key = str(key) if isinstance(val, str): a[key] = numpy.array(val.encode('utf-8')) elif isinstance(val, dict): a[key] = list(val) else: a[key] = val numpy.savez_compressed(fname, **a) except Exception as exc: tb = ''.join(traceback.format_tb(exc.__traceback__)) return HttpResponse( content='%s: %s\n%s' % (exc.__class__.__name__, exc, tb), content_type='text/plain', status=500) stream = FileWrapper(open(fname, 'rb')) stream.close = lambda: (FileWrapper.close(stream), os.remove(fname)) response = FileResponse(stream, content_type='application/octet-stream') response['Content-Disposition'] = ( 'attachment; filename=%s' % os.path.basename(fname)) response['Content-Length'] = str(os.path.getsize(fname)) return response
[ "def", "extract", "(", "request", ",", "calc_id", ",", "what", ")", ":", "job", "=", "logs", ".", "dbcmd", "(", "'get_job'", ",", "int", "(", "calc_id", ")", ")", "if", "job", "is", "None", ":", "return", "HttpResponseNotFound", "(", ")", "if", "not", "utils", ".", "user_has_permission", "(", "request", ",", "job", ".", "user_name", ")", ":", "return", "HttpResponseForbidden", "(", ")", "try", ":", "# read the data and save them on a temporary .npz file", "with", "datastore", ".", "read", "(", "job", ".", "ds_calc_dir", "+", "'.hdf5'", ")", "as", "ds", ":", "fd", ",", "fname", "=", "tempfile", ".", "mkstemp", "(", "prefix", "=", "what", ".", "replace", "(", "'/'", ",", "'-'", ")", ",", "suffix", "=", "'.npz'", ")", "os", ".", "close", "(", "fd", ")", "n", "=", "len", "(", "request", ".", "path_info", ")", "query_string", "=", "unquote_plus", "(", "request", ".", "get_full_path", "(", ")", "[", "n", ":", "]", ")", "aw", "=", "_extract", "(", "ds", ",", "what", "+", "query_string", ")", "a", "=", "{", "}", "for", "key", ",", "val", "in", "vars", "(", "aw", ")", ".", "items", "(", ")", ":", "key", "=", "str", "(", "key", ")", "# can be a numpy.bytes_", "if", "isinstance", "(", "val", ",", "str", ")", ":", "# without this oq extract would fail", "a", "[", "key", "]", "=", "numpy", ".", "array", "(", "val", ".", "encode", "(", "'utf-8'", ")", ")", "elif", "isinstance", "(", "val", ",", "dict", ")", ":", "# this is hack: we are losing the values", "a", "[", "key", "]", "=", "list", "(", "val", ")", "else", ":", "a", "[", "key", "]", "=", "val", "numpy", ".", "savez_compressed", "(", "fname", ",", "*", "*", "a", ")", "except", "Exception", "as", "exc", ":", "tb", "=", "''", ".", "join", "(", "traceback", ".", "format_tb", "(", "exc", ".", "__traceback__", ")", ")", "return", "HttpResponse", "(", "content", "=", "'%s: %s\\n%s'", "%", "(", "exc", ".", "__class__", ".", "__name__", ",", "exc", ",", "tb", ")", ",", "content_type", "=", "'text/plain'", ",", "status", "=", "500", ")", "# stream the data back", "stream", "=", "FileWrapper", "(", "open", "(", "fname", ",", "'rb'", ")", ")", "stream", ".", "close", "=", "lambda", ":", "(", "FileWrapper", ".", "close", "(", "stream", ")", ",", "os", ".", "remove", "(", "fname", ")", ")", "response", "=", "FileResponse", "(", "stream", ",", "content_type", "=", "'application/octet-stream'", ")", "response", "[", "'Content-Disposition'", "]", "=", "(", "'attachment; filename=%s'", "%", "os", ".", "path", ".", "basename", "(", "fname", ")", ")", "response", "[", "'Content-Length'", "]", "=", "str", "(", "os", ".", "path", ".", "getsize", "(", "fname", ")", ")", "return", "response" ]
Wrapper over the `oq extract` command. If `settings.LOCKDOWN` is true, only calculations owned by the current user can be retrieved.
[ "Wrapper", "over", "the", "oq", "extract", "command", ".", "If", "setting", ".", "LOCKDOWN", "is", "true", "only", "calculations", "owned", "by", "the", "current", "user", "can", "be", "retrieved", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L658-L703
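A sketch of the .npz packing done in `extract`: plain strings are wrapped into 0-d byte arrays so `numpy.savez_compressed` accepts them, then decoded when the archive is read back (the field names are made up):

    import os
    import tempfile
    import numpy

    fd, fname = tempfile.mkstemp(suffix='.npz')
    os.close(fd)
    numpy.savez_compressed(fname, units=numpy.array('g'.encode('utf-8')),
                           values=numpy.arange(3))
    with numpy.load(fname) as npz:
        print(npz['units'].item().decode('utf-8'), npz['values'])  # g [0 1 2]
    os.remove(fname)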
gem/oq-engine
openquake/server/views.py
calc_datastore
def calc_datastore(request, job_id): """ Download a full datastore file. :param request: `django.http.HttpRequest` object. :param job_id: The id of the requested datastore :returns: A `django.http.HttpResponse` containing the content of the requested artifact, if present, else throws a 404 """ job = logs.dbcmd('get_job', int(job_id)) if job is None: return HttpResponseNotFound() if not utils.user_has_permission(request, job.user_name): return HttpResponseForbidden() fname = job.ds_calc_dir + '.hdf5' response = FileResponse( FileWrapper(open(fname, 'rb')), content_type=HDF5) response['Content-Disposition'] = ( 'attachment; filename=%s' % os.path.basename(fname)) response['Content-Length'] = str(os.path.getsize(fname)) return response
python
def calc_datastore(request, job_id): job = logs.dbcmd('get_job', int(job_id)) if job is None: return HttpResponseNotFound() if not utils.user_has_permission(request, job.user_name): return HttpResponseForbidden() fname = job.ds_calc_dir + '.hdf5' response = FileResponse( FileWrapper(open(fname, 'rb')), content_type=HDF5) response['Content-Disposition'] = ( 'attachment; filename=%s' % os.path.basename(fname)) response['Content-Length'] = str(os.path.getsize(fname)) return response
[ "def", "calc_datastore", "(", "request", ",", "job_id", ")", ":", "job", "=", "logs", ".", "dbcmd", "(", "'get_job'", ",", "int", "(", "job_id", ")", ")", "if", "job", "is", "None", ":", "return", "HttpResponseNotFound", "(", ")", "if", "not", "utils", ".", "user_has_permission", "(", "request", ",", "job", ".", "user_name", ")", ":", "return", "HttpResponseForbidden", "(", ")", "fname", "=", "job", ".", "ds_calc_dir", "+", "'.hdf5'", "response", "=", "FileResponse", "(", "FileWrapper", "(", "open", "(", "fname", ",", "'rb'", ")", ")", ",", "content_type", "=", "HDF5", ")", "response", "[", "'Content-Disposition'", "]", "=", "(", "'attachment; filename=%s'", "%", "os", ".", "path", ".", "basename", "(", "fname", ")", ")", "response", "[", "'Content-Length'", "]", "=", "str", "(", "os", ".", "path", ".", "getsize", "(", "fname", ")", ")", "return", "response" ]
Download a full datastore file. :param request: `django.http.HttpRequest` object. :param job_id: The id of the requested datastore :returns: A `django.http.HttpResponse` containing the content of the requested artifact, if present; otherwise a 404 response
[ "Download", "a", "full", "datastore", "file", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L708-L732
gem/oq-engine
openquake/server/views.py
calc_oqparam
def calc_oqparam(request, job_id): """ Return the calculation parameters as a JSON """ job = logs.dbcmd('get_job', int(job_id)) if job is None: return HttpResponseNotFound() if not utils.user_has_permission(request, job.user_name): return HttpResponseForbidden() with datastore.read(job.ds_calc_dir + '.hdf5') as ds: oq = ds['oqparam'] return HttpResponse(content=json.dumps(vars(oq)), content_type=JSON)
python
def calc_oqparam(request, job_id): job = logs.dbcmd('get_job', int(job_id)) if job is None: return HttpResponseNotFound() if not utils.user_has_permission(request, job.user_name): return HttpResponseForbidden() with datastore.read(job.ds_calc_dir + '.hdf5') as ds: oq = ds['oqparam'] return HttpResponse(content=json.dumps(vars(oq)), content_type=JSON)
[ "def", "calc_oqparam", "(", "request", ",", "job_id", ")", ":", "job", "=", "logs", ".", "dbcmd", "(", "'get_job'", ",", "int", "(", "job_id", ")", ")", "if", "job", "is", "None", ":", "return", "HttpResponseNotFound", "(", ")", "if", "not", "utils", ".", "user_has_permission", "(", "request", ",", "job", ".", "user_name", ")", ":", "return", "HttpResponseForbidden", "(", ")", "with", "datastore", ".", "read", "(", "job", ".", "ds_calc_dir", "+", "'.hdf5'", ")", "as", "ds", ":", "oq", "=", "ds", "[", "'oqparam'", "]", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "vars", "(", "oq", ")", ")", ",", "content_type", "=", "JSON", ")" ]
Return the calculation parameters as a JSON
[ "Return", "the", "calculation", "parameters", "as", "a", "JSON" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L737-L749
gem/oq-engine
openquake/server/views.py
on_same_fs
def on_same_fs(request): """ Accept a POST request to check access to a FS available by a client. :param request: `django.http.HttpRequest` object, containing mandatory parameters filename and checksum. """ filename = request.POST['filename'] checksum_in = request.POST['checksum'] checksum = 0 try: data = open(filename, 'rb').read(32) checksum = zlib.adler32(data, checksum) & 0xffffffff if checksum == int(checksum_in): return HttpResponse(content=json.dumps({'success': True}), content_type=JSON, status=200) except (IOError, ValueError): pass return HttpResponse(content=json.dumps({'success': False}), content_type=JSON, status=200)
python
def on_same_fs(request): filename = request.POST['filename'] checksum_in = request.POST['checksum'] checksum = 0 try: data = open(filename, 'rb').read(32) checksum = zlib.adler32(data, checksum) & 0xffffffff if checksum == int(checksum_in): return HttpResponse(content=json.dumps({'success': True}), content_type=JSON, status=200) except (IOError, ValueError): pass return HttpResponse(content=json.dumps({'success': False}), content_type=JSON, status=200)
[ "def", "on_same_fs", "(", "request", ")", ":", "filename", "=", "request", ".", "POST", "[", "'filename'", "]", "checksum_in", "=", "request", ".", "POST", "[", "'checksum'", "]", "checksum", "=", "0", "try", ":", "data", "=", "open", "(", "filename", ",", "'rb'", ")", ".", "read", "(", "32", ")", "checksum", "=", "zlib", ".", "adler32", "(", "data", ",", "checksum", ")", "&", "0xffffffff", "if", "checksum", "==", "int", "(", "checksum_in", ")", ":", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "{", "'success'", ":", "True", "}", ")", ",", "content_type", "=", "JSON", ",", "status", "=", "200", ")", "except", "(", "IOError", ",", "ValueError", ")", ":", "pass", "return", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "{", "'success'", ":", "False", "}", ")", ",", "content_type", "=", "JSON", ",", "status", "=", "200", ")" ]
Accept a POST request to check whether a file system visible to the client is also accessible from the server (i.e. whether the two are on the same FS). :param request: `django.http.HttpRequest` object, containing mandatory parameters filename and checksum.
[ "Accept", "a", "POST", "request", "to", "check", "access", "to", "a", "FS", "available", "by", "a", "client", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/views.py#L769-L791
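A client-side sketch of the shared-filesystem check in `on_same_fs`: compute the same Adler-32 checksum over the first 32 bytes of the file and POST it together with the path (the path is whatever file the client wants checked):

    import zlib

    def checksum32(filename):
        with open(filename, 'rb') as f:
            data = f.read(32)  # same 32-byte window the view reads
        return zlib.adler32(data, 0) & 0xffffffff

    # POST {'filename': path, 'checksum': str(checksum32(path))} to the view;
    # a matching checksum means server and client see the same file.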
gem/oq-engine
openquake/calculators/classical_damage.py
classical_damage
def classical_damage(riskinputs, riskmodel, param, monitor): """ Core function for a classical damage computation. :param riskinputs: :class:`openquake.risklib.riskinput.RiskInput` objects :param riskmodel: a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance :param param: dictionary of extra parameters :param monitor: :class:`openquake.baselib.performance.Monitor` instance :returns: a nested dictionary lt_idx, rlz_idx -> asset_idx -> <damage array> """ result = AccumDict(accum=AccumDict()) for ri in riskinputs: for out in riskmodel.gen_outputs(ri, monitor): for l, loss_type in enumerate(riskmodel.loss_types): ordinals = ri.assets['ordinal'] result[l, out.rlzi] += dict(zip(ordinals, out[loss_type])) return result
python
def classical_damage(riskinputs, riskmodel, param, monitor): result = AccumDict(accum=AccumDict()) for ri in riskinputs: for out in riskmodel.gen_outputs(ri, monitor): for l, loss_type in enumerate(riskmodel.loss_types): ordinals = ri.assets['ordinal'] result[l, out.rlzi] += dict(zip(ordinals, out[loss_type])) return result
[ "def", "classical_damage", "(", "riskinputs", ",", "riskmodel", ",", "param", ",", "monitor", ")", ":", "result", "=", "AccumDict", "(", "accum", "=", "AccumDict", "(", ")", ")", "for", "ri", "in", "riskinputs", ":", "for", "out", "in", "riskmodel", ".", "gen_outputs", "(", "ri", ",", "monitor", ")", ":", "for", "l", ",", "loss_type", "in", "enumerate", "(", "riskmodel", ".", "loss_types", ")", ":", "ordinals", "=", "ri", ".", "assets", "[", "'ordinal'", "]", "result", "[", "l", ",", "out", ".", "rlzi", "]", "+=", "dict", "(", "zip", "(", "ordinals", ",", "out", "[", "loss_type", "]", ")", ")", "return", "result" ]
Core function for a classical damage computation. :param riskinputs: :class:`openquake.risklib.riskinput.RiskInput` objects :param riskmodel: a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance :param param: dictionary of extra parameters :param monitor: :class:`openquake.baselib.performance.Monitor` instance :returns: a nested dictionary lt_idx, rlz_idx -> asset_idx -> <damage array>
[ "Core", "function", "for", "a", "classical", "damage", "computation", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/classical_damage.py#L26-L47
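A simplified sketch of the accumulation pattern in `classical_damage`, using plain dicts in place of `AccumDict(accum=AccumDict())` (keys and values are made up; the real AccumDict also handles array values). The outer key is (loss type index, realization index) and the inner dict maps asset ordinals to damage results:

    result = {}
    partials = [{3: 0.1, 7: 0.2}, {3: 0.4}]  # two tasks contributing to the same key
    for by_asset in partials:
        acc = result.setdefault((0, 0), {})
        for ordinal, dmg in by_asset.items():
            acc[ordinal] = acc.get(ordinal, 0) + dmg  # AccumDict's += sums per asset
    print(result)  # {(0, 0): {3: 0.5, 7: 0.2}}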
gem/oq-engine
openquake/commands/plot_losses.py
make_figure
def make_figure(losses_by_rlzi, loss_types, nbins): """ :param losses_by_event: composite array (eid, rlzi, losses) :param loss_types: list of loss types """ # NB: matplotlib is imported inside since it is a costly import import matplotlib.pyplot as plt R = len(losses_by_rlzi) L = len(loss_types) fig = plt.figure() for rlz in losses_by_rlzi: rlzi = int(rlz[4:]) # strip rlz- losses = losses_by_rlzi[rlz]['loss'].reshape(-1, L) print('%s, num_events=%d' % (rlz, len(losses))) for lti, lt in enumerate(loss_types): ls = losses[:, lti] if numpy.isnan(ls).all(): continue ax = fig.add_subplot(R, L, rlzi * L + lti + 1) ax.set_xlabel('%s, loss_type=%s' % (rlz, lt)) ax.hist(ls, nbins, rwidth=.9) ax.set_title('loss=%.5e$\pm$%.5e' % (ls.mean(), ls.std(ddof=1))) return plt
python
def make_figure(losses_by_rlzi, loss_types, nbins): import matplotlib.pyplot as plt R = len(losses_by_rlzi) L = len(loss_types) fig = plt.figure() for rlz in losses_by_rlzi: rlzi = int(rlz[4:]) losses = losses_by_rlzi[rlz]['loss'].reshape(-1, L) print('%s, num_events=%d' % (rlz, len(losses))) for lti, lt in enumerate(loss_types): ls = losses[:, lti] if numpy.isnan(ls).all(): continue ax = fig.add_subplot(R, L, rlzi * L + lti + 1) ax.set_xlabel('%s, loss_type=%s' % (rlz, lt)) ax.hist(ls, nbins, rwidth=.9) ax.set_title('loss=%.5e$\pm$%.5e' % (ls.mean(), ls.std(ddof=1))) return plt
[ "def", "make_figure", "(", "losses_by_rlzi", ",", "loss_types", ",", "nbins", ")", ":", "# NB: matplotlib is imported inside since it is a costly import", "import", "matplotlib", ".", "pyplot", "as", "plt", "R", "=", "len", "(", "losses_by_rlzi", ")", "L", "=", "len", "(", "loss_types", ")", "fig", "=", "plt", ".", "figure", "(", ")", "for", "rlz", "in", "losses_by_rlzi", ":", "rlzi", "=", "int", "(", "rlz", "[", "4", ":", "]", ")", "# strip rlz-", "losses", "=", "losses_by_rlzi", "[", "rlz", "]", "[", "'loss'", "]", ".", "reshape", "(", "-", "1", ",", "L", ")", "print", "(", "'%s, num_events=%d'", "%", "(", "rlz", ",", "len", "(", "losses", ")", ")", ")", "for", "lti", ",", "lt", "in", "enumerate", "(", "loss_types", ")", ":", "ls", "=", "losses", "[", ":", ",", "lti", "]", "if", "numpy", ".", "isnan", "(", "ls", ")", ".", "all", "(", ")", ":", "continue", "ax", "=", "fig", ".", "add_subplot", "(", "R", ",", "L", ",", "rlzi", "*", "L", "+", "lti", "+", "1", ")", "ax", ".", "set_xlabel", "(", "'%s, loss_type=%s'", "%", "(", "rlz", ",", "lt", ")", ")", "ax", ".", "hist", "(", "ls", ",", "nbins", ",", "rwidth", "=", ".9", ")", "ax", ".", "set_title", "(", "'loss=%.5e$\\pm$%.5e'", "%", "(", "ls", ".", "mean", "(", ")", ",", "ls", ".", "std", "(", "ddof", "=", "1", ")", ")", ")", "return", "plt" ]
:param losses_by_rlzi: dictionary rlz -> composite array with a 'loss' field :param loss_types: list of loss types :param nbins: number of histogram bins
[ ":", "param", "losses_by_event", ":", "composite", "array", "(", "eid", "rlzi", "losses", ")", ":", "param", "loss_types", ":", "list", "of", "loss", "types" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/plot_losses.py#L24-L46
gem/oq-engine
openquake/commands/plot_losses.py
plot_losses
def plot_losses(calc_id, bins=7): """ losses_by_event plotter """ # read the loss data dstore = util.read(calc_id) losses_by_rlzi = dict(extract(dstore, 'losses_by_event')) oq = dstore['oqparam'] plt = make_figure(losses_by_rlzi, oq.loss_dt().names, bins) plt.show()
python
def plot_losses(calc_id, bins=7): dstore = util.read(calc_id) losses_by_rlzi = dict(extract(dstore, 'losses_by_event')) oq = dstore['oqparam'] plt = make_figure(losses_by_rlzi, oq.loss_dt().names, bins) plt.show()
[ "def", "plot_losses", "(", "calc_id", ",", "bins", "=", "7", ")", ":", "# read the hazard data", "dstore", "=", "util", ".", "read", "(", "calc_id", ")", "losses_by_rlzi", "=", "dict", "(", "extract", "(", "dstore", ",", "'losses_by_event'", ")", ")", "oq", "=", "dstore", "[", "'oqparam'", "]", "plt", "=", "make_figure", "(", "losses_by_rlzi", ",", "oq", ".", "loss_dt", "(", ")", ".", "names", ",", "bins", ")", "plt", ".", "show", "(", ")" ]
losses_by_event plotter
[ "losses_by_event", "plotter" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/plot_losses.py#L50-L59
gem/oq-engine
openquake/hmtk/seismicity/gcmt_catalogue.py
cmp_mat
def cmp_mat(a, b): """ Compares two matrices element by element, returning a negative, zero or positive value, so that it can be used as a sort comparator """ c = 0 for x, y in zip(a.flat, b.flat): c = cmp(abs(x), abs(y)) if c != 0: return c return c
python
def cmp_mat(a, b): c = 0 for x, y in zip(a.flat, b.flat): c = cmp(abs(x), abs(y)) if c != 0: return c return c
[ "def", "cmp_mat", "(", "a", ",", "b", ")", ":", "c", "=", "0", "for", "x", ",", "y", "in", "zip", "(", "a", ".", "flat", ",", "b", ".", "flat", ")", ":", "c", "=", "cmp", "(", "abs", "(", "x", ")", ",", "abs", "(", "y", ")", ")", "if", "c", "!=", "0", ":", "return", "c", "return", "c" ]
Compares two matrices element by element, returning a negative, zero or positive value, so that it can be used as a sort comparator
[ "Sorts", "two", "matrices", "returning", "a", "positive", "or", "zero", "value" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/gcmt_catalogue.py#L62-L71
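Note that `cmp_mat` relies on the Python 2 built-in `cmp`, which no longer exists in Python 3 (and `sorted(..., cmp=...)`, used later in `get_nodal_planes`, was removed too). A sketch of a Python 3 equivalent:

    import functools
    import numpy

    def cmp(x, y):
        return (x > y) - (x < y)  # the classic cmp() shim

    def cmp_mat3(a, b):
        for x, y in zip(a.flat, b.flat):
            c = cmp(abs(x), abs(y))
            if c != 0:
                return c
        return 0

    mats = [2 * numpy.eye(2), numpy.eye(2)]
    ordered = sorted(mats, key=functools.cmp_to_key(cmp_mat3))
    print([m[0, 0] for m in ordered])  # [1.0, 2.0]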
gem/oq-engine
openquake/hmtk/seismicity/gcmt_catalogue.py
GCMTCentroid._get_centroid_time
def _get_centroid_time(self, time_diff): """ Applies the centroid time offset (in seconds) to the origin date-time, updating the date and time in place """ source_time = datetime.datetime.combine(self.date, self.time) second_diff = floor(fabs(time_diff)) microsecond_diff = int(1000. * (time_diff - second_diff)) if time_diff < 0.: source_time = source_time - datetime.timedelta( seconds=int(second_diff), microseconds=microsecond_diff) else: source_time = source_time + datetime.timedelta( seconds=int(second_diff), microseconds=microsecond_diff) self.time = source_time.time() self.date = source_time.date()
python
def _get_centroid_time(self, time_diff): source_time = datetime.datetime.combine(self.date, self.time) second_diff = floor(fabs(time_diff)) microsecond_diff = int(1000. * (time_diff - second_diff)) if time_diff < 0.: source_time = source_time - datetime.timedelta( seconds=int(second_diff), microseconds=microsecond_diff) else: source_time = source_time + datetime.timedelta( seconds=int(second_diff), microseconds=microsecond_diff) self.time = source_time.time() self.date = source_time.date()
[ "def", "_get_centroid_time", "(", "self", ",", "time_diff", ")", ":", "source_time", "=", "datetime", ".", "datetime", ".", "combine", "(", "self", ".", "date", ",", "self", ".", "time", ")", "second_diff", "=", "floor", "(", "fabs", "(", "time_diff", ")", ")", "microsecond_diff", "=", "int", "(", "1000.", "*", "(", "time_diff", "-", "second_diff", ")", ")", "if", "time_diff", "<", "0.", ":", "source_time", "=", "source_time", "-", "datetime", ".", "timedelta", "(", "seconds", "=", "int", "(", "second_diff", ")", ",", "microseconds", "=", "microsecond_diff", ")", "else", ":", "source_time", "=", "source_time", "+", "datetime", ".", "timedelta", "(", "seconds", "=", "int", "(", "second_diff", ")", ",", "microseconds", "=", "microsecond_diff", ")", "self", ".", "time", "=", "source_time", ".", "time", "(", ")", "self", ".", "date", "=", "source_time", ".", "date", "(", ")" ]
Applies the centroid time offset (in seconds) to the origin date-time, updating the date and time in place
[ "Calculates", "the", "time", "difference", "between", "the", "date", "-", "time", "classes" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/gcmt_catalogue.py#L120-L134
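A sketch of the same shift using `datetime.timedelta` directly: its `seconds` argument accepts fractions and converts the sub-second part to microseconds internally, which avoids the manual second/microsecond bookkeeping above (the date-time is made up):

    import datetime

    source_time = datetime.datetime(2010, 2, 27, 6, 34, 11)
    time_diff = -1.25  # centroid offset in seconds
    shifted = source_time + datetime.timedelta(seconds=time_diff)
    print(shifted.date(), shifted.time())  # 2010-02-27 06:34:09.750000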
gem/oq-engine
openquake/hmtk/seismicity/gcmt_catalogue.py
GCMTMomentTensor.normalise_tensor
def normalise_tensor(self): """ Normalise the tensor by dividing it by its norm, defined as sqrt(X:X), where ':' is the tensor double-dot product """ self.tensor, tensor_norm = utils.normalise_tensor(self.tensor) return self.tensor / tensor_norm, tensor_norm
python
def normalise_tensor(self): self.tensor, tensor_norm = utils.normalise_tensor(self.tensor) return self.tensor / tensor_norm, tensor_norm
[ "def", "normalise_tensor", "(", "self", ")", ":", "self", ".", "tensor", ",", "tensor_norm", "=", "utils", ".", "normalise_tensor", "(", "self", ".", "tensor", ")", "return", "self", ".", "tensor", "/", "tensor_norm", ",", "tensor_norm" ]
Normalise the tensor by dividing it by its norm, defined as sqrt(X:X), where ':' is the tensor double-dot product
[ "Normalise", "the", "tensor", "by", "dividing", "it", "by", "its", "norm", "defined", "such", "that", "np", ".", "sqrt", "(", "X", ":", "X", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/gcmt_catalogue.py#L185-L191
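A sketch of the norm referred to in `normalise_tensor`: sqrt(X:X), i.e. the square root of the double-dot product of the tensor with itself, computed here with `numpy.tensordot` (the sample tensor is made up):

    import numpy as np

    tensor = np.array([[1., 2., 0.], [2., -1., 0.], [0., 0., 3.]])
    norm = np.sqrt(np.tensordot(tensor, tensor))  # sqrt of the sum of squared components
    unit = tensor / norm
    print(norm, np.isclose(np.tensordot(unit, unit), 1.0))  # normalised tensor has unit norm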
gem/oq-engine
openquake/hmtk/seismicity/gcmt_catalogue.py
GCMTMomentTensor._to_ned
def _to_ned(self): """ Switches the reference frame to NED """ if self.ref_frame is 'USE': # Rotate return utils.use_to_ned(self.tensor), \ utils.use_to_ned(self.tensor_sigma) elif self.ref_frame is 'NED': # Already NED return self.tensor, self.tensor_sigma else: raise ValueError('Reference frame %s not recognised - cannot ' 'transform to NED!' % self.ref_frame)
python
def _to_ned(self): if self.ref_frame is 'USE': return utils.use_to_ned(self.tensor), \ utils.use_to_ned(self.tensor_sigma) elif self.ref_frame is 'NED': return self.tensor, self.tensor_sigma else: raise ValueError('Reference frame %s not recognised - cannot ' 'transform to NED!' % self.ref_frame)
[ "def", "_to_ned", "(", "self", ")", ":", "if", "self", ".", "ref_frame", "is", "'USE'", ":", "# Rotate", "return", "utils", ".", "use_to_ned", "(", "self", ".", "tensor", ")", ",", "utils", ".", "use_to_ned", "(", "self", ".", "tensor_sigma", ")", "elif", "self", ".", "ref_frame", "is", "'NED'", ":", "# Alreadt NED", "return", "self", ".", "tensor", ",", "self", ".", "tensor_sigma", "else", ":", "raise", "ValueError", "(", "'Reference frame %s not recognised - cannot '", "'transform to NED!'", "%", "self", ".", "ref_frame", ")" ]
Switches the reference frame to NED
[ "Switches", "the", "reference", "frame", "to", "NED" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/gcmt_catalogue.py#L193-L206
gem/oq-engine
openquake/hmtk/seismicity/gcmt_catalogue.py
GCMTMomentTensor._to_use
def _to_use(self): """ Returns a tensor in the USE reference frame """ if self.ref_frame is 'NED': # Rotate return utils.ned_to_use(self.tensor), \ utils.ned_to_use(self.tensor_sigma) elif self.ref_frame is 'USE': # Already USE return self.tensor, self.tensor_sigma else: raise ValueError('Reference frame %s not recognised - cannot ' 'transform to USE!' % self.ref_frame)
python
def _to_use(self): if self.ref_frame is 'NED': return utils.ned_to_use(self.tensor), \ utils.ned_to_use(self.tensor_sigma) elif self.ref_frame is 'USE': return self.tensor, self.tensor_sigma else: raise ValueError('Reference frame %s not recognised - cannot ' 'transform to USE!' % self.ref_frame)
[ "def", "_to_use", "(", "self", ")", ":", "if", "self", ".", "ref_frame", "is", "'NED'", ":", "# Rotate", "return", "utils", ".", "ned_to_use", "(", "self", ".", "tensor", ")", ",", "utils", ".", "ned_to_use", "(", "self", ".", "tensor_sigma", ")", "elif", "self", ".", "ref_frame", "is", "'USE'", ":", "# Already USE", "return", "self", ".", "tensor", ",", "self", ".", "tensor_sigma", "else", ":", "raise", "ValueError", "(", "'Reference frame %s not recognised - cannot '", "'transform to USE!'", "%", "self", ".", "ref_frame", ")" ]
Returns a tensor in the USE reference frame
[ "Returns", "a", "tensor", "in", "the", "USE", "reference", "frame" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/gcmt_catalogue.py#L208-L221
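Note that `_to_ned` and `_to_use` test the reference frame with `self.ref_frame is 'USE'`; identity comparison on strings happens to work under CPython interning for literals, but it is not guaranteed for strings built at runtime, so `==` is the safe test. A tiny demonstration:

    frame = ''.join(['U', 'S', 'E'])  # equal value, constructed at runtime
    print(frame == 'USE')  # True
    print(frame is 'USE')  # False on CPython: same value, different object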
gem/oq-engine
openquake/hmtk/seismicity/gcmt_catalogue.py
GCMTMomentTensor.eigendecompose
def eigendecompose(self, normalise=False): """ Performs an eigendecomposition of the tensor, ordering the eigenvalues in descending order """ self.eigenvalues, self.eigenvectors = utils.eigendecompose(self.tensor, normalise) return self.eigenvalues, self.eigenvectors
python
def eigendecompose(self, normalise=False): self.eigenvalues, self.eigenvectors = utils.eigendecompose(self.tensor, normalise) return self.eigenvalues, self.eigenvectors
[ "def", "eigendecompose", "(", "self", ",", "normalise", "=", "False", ")", ":", "self", ".", "eigenvalues", ",", "self", ".", "eigenvectors", "=", "utils", ".", "eigendecompose", "(", "self", ".", "tensor", ",", "normalise", ")", "return", "self", ".", "eigenvalues", ",", "self", ".", "eigenvectors" ]
Performs an eigendecomposition of the tensor, ordering the eigenvalues in descending order
[ "Performs", "and", "eigendecomposition", "of", "the", "tensor", "and", "orders", "into", "descending", "eigenvalues" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/gcmt_catalogue.py#L230-L237
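A minimal sketch of what an eigendecomposition of a symmetric tensor looks like with numpy; note that `numpy.linalg.eigh` returns ascending eigenvalues, whereas `utils.eigendecompose` may order them differently (the sample tensor is made up):

    import numpy as np

    tensor = np.array([[2., 1., 0.], [1., 2., 0.], [0., 0., 5.]])
    eigenvalues, eigenvectors = np.linalg.eigh(tensor)
    print(eigenvalues)  # [1. 3. 5.], ascending
    print(np.allclose(tensor @ eigenvectors, eigenvectors * eigenvalues))  # True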
gem/oq-engine
openquake/hmtk/seismicity/gcmt_catalogue.py
GCMTMomentTensor.get_nodal_planes
def get_nodal_planes(self): """ Returns the nodal planes by eigendecomposition of the moment tensor """ # Convert reference frame to NED self.tensor, self.tensor_sigma = self._to_ned() self.ref_frame = 'NED' # Eigenvalue decomposition # Tensor _, evect = utils.eigendecompose(self.tensor) # Rotation matrix _, rot_vec = utils.eigendecompose(np.matrix([[0., 0., -1], [0., 0., 0.], [-1., 0., 0.]])) rotation_matrix = (np.matrix(evect * rot_vec.T)).T if np.linalg.det(rotation_matrix) < 0.: rotation_matrix *= -1. flip_dc = np.matrix([[0., 0., -1.], [0., -1., 0.], [-1., 0., 0.]]) rotation_matrices = sorted( [rotation_matrix, flip_dc * rotation_matrix], cmp=cmp_mat) nodal_planes = GCMTNodalPlanes() dip, strike, rake = [(180. / pi) * angle for angle in utils.matrix_to_euler(rotation_matrices[0])] # 1st Nodal Plane nodal_planes.nodal_plane_1 = {'strike': strike % 360, 'dip': dip, 'rake': -rake} # 2nd Nodal Plane dip, strike, rake = [(180. / pi) * angle for angle in utils.matrix_to_euler(rotation_matrices[1])] nodal_planes.nodal_plane_2 = {'strike': strike % 360., 'dip': dip, 'rake': -rake} return nodal_planes
python
def get_nodal_planes(self): self.tensor, self.tensor_sigma = self._to_ned() self.ref_frame = 'NED' _, evect = utils.eigendecompose(self.tensor) _, rot_vec = utils.eigendecompose(np.matrix([[0., 0., -1], [0., 0., 0.], [-1., 0., 0.]])) rotation_matrix = (np.matrix(evect * rot_vec.T)).T if np.linalg.det(rotation_matrix) < 0.: rotation_matrix *= -1. flip_dc = np.matrix([[0., 0., -1.], [0., -1., 0.], [-1., 0., 0.]]) rotation_matrices = sorted( [rotation_matrix, flip_dc * rotation_matrix], cmp=cmp_mat) nodal_planes = GCMTNodalPlanes() dip, strike, rake = [(180. / pi) * angle for angle in utils.matrix_to_euler(rotation_matrices[0])] nodal_planes.nodal_plane_1 = {'strike': strike % 360, 'dip': dip, 'rake': -rake} dip, strike, rake = [(180. / pi) * angle for angle in utils.matrix_to_euler(rotation_matrices[1])] nodal_planes.nodal_plane_2 = {'strike': strike % 360., 'dip': dip, 'rake': -rake} return nodal_planes
[ "def", "get_nodal_planes", "(", "self", ")", ":", "# Convert reference frame to NED", "self", ".", "tensor", ",", "self", ".", "tensor_sigma", "=", "self", ".", "_to_ned", "(", ")", "self", ".", "ref_frame", "=", "'NED'", "# Eigenvalue decomposition", "# Tensor", "_", ",", "evect", "=", "utils", ".", "eigendecompose", "(", "self", ".", "tensor", ")", "# Rotation matrix", "_", ",", "rot_vec", "=", "utils", ".", "eigendecompose", "(", "np", ".", "matrix", "(", "[", "[", "0.", ",", "0.", ",", "-", "1", "]", ",", "[", "0.", ",", "0.", ",", "0.", "]", ",", "[", "-", "1.", ",", "0.", ",", "0.", "]", "]", ")", ")", "rotation_matrix", "=", "(", "np", ".", "matrix", "(", "evect", "*", "rot_vec", ".", "T", ")", ")", ".", "T", "if", "np", ".", "linalg", ".", "det", "(", "rotation_matrix", ")", "<", "0.", ":", "rotation_matrix", "*=", "-", "1.", "flip_dc", "=", "np", ".", "matrix", "(", "[", "[", "0.", ",", "0.", ",", "-", "1.", "]", ",", "[", "0.", ",", "-", "1.", ",", "0.", "]", ",", "[", "-", "1.", ",", "0.", ",", "0.", "]", "]", ")", "rotation_matrices", "=", "sorted", "(", "[", "rotation_matrix", ",", "flip_dc", "*", "rotation_matrix", "]", ",", "cmp", "=", "cmp_mat", ")", "nodal_planes", "=", "GCMTNodalPlanes", "(", ")", "dip", ",", "strike", ",", "rake", "=", "[", "(", "180.", "/", "pi", ")", "*", "angle", "for", "angle", "in", "utils", ".", "matrix_to_euler", "(", "rotation_matrices", "[", "0", "]", ")", "]", "# 1st Nodal Plane", "nodal_planes", ".", "nodal_plane_1", "=", "{", "'strike'", ":", "strike", "%", "360", ",", "'dip'", ":", "dip", ",", "'rake'", ":", "-", "rake", "}", "# 2nd Nodal Plane", "dip", ",", "strike", ",", "rake", "=", "[", "(", "180.", "/", "pi", ")", "*", "angle", "for", "angle", "in", "utils", ".", "matrix_to_euler", "(", "rotation_matrices", "[", "1", "]", ")", "]", "nodal_planes", ".", "nodal_plane_2", "=", "{", "'strike'", ":", "strike", "%", "360.", ",", "'dip'", ":", "dip", ",", "'rake'", ":", "-", "rake", "}", "return", "nodal_planes" ]
Returns the nodal planes by eigendecomposition of the moment tensor
[ "Returns", "the", "nodal", "planes", "by", "eigendecomposition", "of", "the", "moment", "tensor" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/gcmt_catalogue.py#L239-L276
gem/oq-engine
openquake/hmtk/seismicity/gcmt_catalogue.py
GCMTMomentTensor.get_principal_axes
def get_principal_axes(self): """ Uses the eigendecomposition to extract the principal axes from the moment tensor - returning an instance of the GCMTPrincipalAxes class """ # Perform eigendecomposition - returns in order P, B, T _ = self.eigendecompose(normalise=True) principal_axes = GCMTPrincipalAxes() # Eigenvalues principal_axes.p_axis = {'eigenvalue': self.eigenvalues[0]} principal_axes.b_axis = {'eigenvalue': self.eigenvalues[1]} principal_axes.t_axis = {'eigenvalue': self.eigenvalues[2]} # Eigen vectors # 1) P axis azim, plun = utils.get_azimuth_plunge(self.eigenvectors[:, 0], True) principal_axes.p_axis['azimuth'] = azim principal_axes.p_axis['plunge'] = plun # 2) B axis azim, plun = utils.get_azimuth_plunge(self.eigenvectors[:, 1], True) principal_axes.b_axis['azimuth'] = azim principal_axes.b_axis['plunge'] = plun # 3) T axis azim, plun = utils.get_azimuth_plunge(self.eigenvectors[:, 2], True) principal_axes.t_axis['azimuth'] = azim principal_axes.t_axis['plunge'] = plun return principal_axes
python
def get_principal_axes(self): _ = self.eigendecompose(normalise=True) principal_axes = GCMTPrincipalAxes() principal_axes.p_axis = {'eigenvalue': self.eigenvalues[0]} principal_axes.b_axis = {'eigenvalue': self.eigenvalues[1]} principal_axes.t_axis = {'eigenvalue': self.eigenvalues[2]} azim, plun = utils.get_azimuth_plunge(self.eigenvectors[:, 0], True) principal_axes.p_axis['azimuth'] = azim principal_axes.p_axis['plunge'] = plun azim, plun = utils.get_azimuth_plunge(self.eigenvectors[:, 1], True) principal_axes.b_axis['azimuth'] = azim principal_axes.b_axis['plunge'] = plun azim, plun = utils.get_azimuth_plunge(self.eigenvectors[:, 2], True) principal_axes.t_axis['azimuth'] = azim principal_axes.t_axis['plunge'] = plun return principal_axes
[ "def", "get_principal_axes", "(", "self", ")", ":", "# Perform eigendecomposition - returns in order P, B, T", "_", "=", "self", ".", "eigendecompose", "(", "normalise", "=", "True", ")", "principal_axes", "=", "GCMTPrincipalAxes", "(", ")", "# Eigenvalues", "principal_axes", ".", "p_axis", "=", "{", "'eigenvalue'", ":", "self", ".", "eigenvalues", "[", "0", "]", "}", "principal_axes", ".", "b_axis", "=", "{", "'eigenvalue'", ":", "self", ".", "eigenvalues", "[", "1", "]", "}", "principal_axes", ".", "t_axis", "=", "{", "'eigenvalue'", ":", "self", ".", "eigenvalues", "[", "2", "]", "}", "# Eigen vectors", "# 1) P axis", "azim", ",", "plun", "=", "utils", ".", "get_azimuth_plunge", "(", "self", ".", "eigenvectors", "[", ":", ",", "0", "]", ",", "True", ")", "principal_axes", ".", "p_axis", "[", "'azimuth'", "]", "=", "azim", "principal_axes", ".", "p_axis", "[", "'plunge'", "]", "=", "plun", "# 2) B axis", "azim", ",", "plun", "=", "utils", ".", "get_azimuth_plunge", "(", "self", ".", "eigenvectors", "[", ":", ",", "1", "]", ",", "True", ")", "principal_axes", ".", "b_axis", "[", "'azimuth'", "]", "=", "azim", "principal_axes", ".", "b_axis", "[", "'plunge'", "]", "=", "plun", "# 3) T axis", "azim", ",", "plun", "=", "utils", ".", "get_azimuth_plunge", "(", "self", ".", "eigenvectors", "[", ":", ",", "2", "]", ",", "True", ")", "principal_axes", ".", "t_axis", "[", "'azimuth'", "]", "=", "azim", "principal_axes", ".", "t_axis", "[", "'plunge'", "]", "=", "plun", "return", "principal_axes" ]
Uses the eigendecomposition to extract the principal axes from the moment tensor - returning an instance of the GCMTPrincipalAxes class
[ "Uses", "the", "eigendecomposition", "to", "extract", "the", "principal", "axes", "from", "the", "moment", "tensor", "-", "returning", "an", "instance", "of", "the", "GCMTPrincipalAxes", "class" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/gcmt_catalogue.py#L278-L303
gem/oq-engine
openquake/hmtk/seismicity/gcmt_catalogue.py
GCMTEvent.get_f_clvd
def get_f_clvd(self): """ Returns the statistic f_clvd: the signed ratio of the sizes of the intermediate and largest principal moments:: f_clvd = -b_axis_eigenvalue / max(|t_axis_eigenvalue|,|p_axis_eigenvalue|) """ if not self.principal_axes: # Principal axes not yet defined for moment tensor - raises error raise ValueError('Principal Axes not defined!') denominator = np.max(np.array([ fabs(self.principal_axes.t_axis['eigenvalue']), fabs(self.principal_axes.p_axis['eigenvalue']) ])) self.f_clvd = -self.principal_axes.b_axis['eigenvalue'] / denominator return self.f_clvd
python
def get_f_clvd(self): if not self.principal_axes: raise ValueError('Principal Axes not defined!') denominator = np.max(np.array([ fabs(self.principal_axes.t_axis['eigenvalue']), fabs(self.principal_axes.p_axis['eigenvalue']) ])) self.f_clvd = -self.principal_axes.b_axis['eigenvalue'] / denominator return self.f_clvd
[ "def", "get_f_clvd", "(", "self", ")", ":", "if", "not", "self", ".", "principal_axes", ":", "# Principal axes not yet defined for moment tensor - raises error", "raise", "ValueError", "(", "'Principal Axes not defined!'", ")", "denominator", "=", "np", ".", "max", "(", "np", ".", "array", "(", "[", "fabs", "(", "self", ".", "principal_axes", ".", "t_axis", "[", "'eigenvalue'", "]", ")", ",", "fabs", "(", "self", ".", "principal_axes", ".", "p_axis", "[", "'eigenvalue'", "]", ")", "]", ")", ")", "self", ".", "f_clvd", "=", "-", "self", ".", "principal_axes", ".", "b_axis", "[", "'eigenvalue'", "]", "/", "denominator", "return", "self", ".", "f_clvd" ]
Returns the statistic f_clvd: the signed ratio of the sizes of the intermediate and largest principal moments:: f_clvd = -b_axis_eigenvalue / max(|t_axis_eigenvalue|,|p_axis_eigenvalue|)
[ "Returns", "the", "statistic", "f_clvd", ":", "the", "signed", "ratio", "of", "the", "sizes", "of", "the", "intermediate", "and", "largest", "principal", "moments", "::" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/gcmt_catalogue.py#L326-L342
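A sketch of the f_clvd statistic exactly as the docstring defines it, on made-up principal moments:

    def f_clvd(p_eig, b_eig, t_eig):
        # signed ratio of the intermediate moment to the largest principal moment
        return -b_eig / max(abs(t_eig), abs(p_eig))

    print(f_clvd(-1.0, 0.0, 1.0))   # 0.0: pure double couple
    print(f_clvd(-0.5, -0.5, 1.0))  # 0.5: strong CLVD component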
gem/oq-engine
openquake/hmtk/seismicity/gcmt_catalogue.py
GCMTEvent.get_relative_error
def get_relative_error(self): """ Returns the relative error statistic (e_rel), defined by Frohlich & Davis (1999): `e_rel = sqrt((U:U) / (M:M))` where M is the moment tensor, U is the uncertainty tensor and : is the tensor dot product """ if not self.moment_tensor: raise ValueError('Moment tensor not defined!') numer = np.tensordot(self.moment_tensor.tensor_sigma, self.moment_tensor.tensor_sigma) denom = np.tensordot(self.moment_tensor.tensor, self.moment_tensor.tensor) self.e_rel = sqrt(numer / denom) return self.e_rel
python
def get_relative_error(self): if not self.moment_tensor: raise ValueError('Moment tensor not defined!') numer = np.tensordot(self.moment_tensor.tensor_sigma, self.moment_tensor.tensor_sigma) denom = np.tensordot(self.moment_tensor.tensor, self.moment_tensor.tensor) self.e_rel = sqrt(numer / denom) return self.e_rel
[ "def", "get_relative_error", "(", "self", ")", ":", "if", "not", "self", ".", "moment_tensor", ":", "raise", "ValueError", "(", "'Moment tensor not defined!'", ")", "numer", "=", "np", ".", "tensordot", "(", "self", ".", "moment_tensor", ".", "tensor_sigma", ",", "self", ".", "moment_tensor", ".", "tensor_sigma", ")", "denom", "=", "np", ".", "tensordot", "(", "self", ".", "moment_tensor", ".", "tensor", ",", "self", ".", "moment_tensor", ".", "tensor", ")", "self", ".", "e_rel", "=", "sqrt", "(", "numer", "/", "denom", ")", "return", "self", ".", "e_rel" ]
Returns the relative error statistic (e_rel), defined by Frohlich & Davis (1999): `e_rel = sqrt((U:U) / (M:M))` where M is the moment tensor, U is the uncertainty tensor and : is the tensor dot product
[ "Returns", "the", "relative", "error", "statistic", "(", "e_rel", ")", "defined", "by", "Frohlich", "&", "Davis", "(", "1999", ")", ":", "e_rel", "=", "sqrt", "((", "U", ":", "U", ")", "/", "(", "M", ":", "M", "))", "where", "M", "is", "the", "moment", "tensor", "U", "is", "the", "uncertainty", "tensor", "and", ":", "is", "the", "tensor", "dot", "product" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/gcmt_catalogue.py#L344-L359
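A sketch of e_rel = sqrt((U:U)/(M:M)) from the docstring, with the tensor double-dot products computed by `numpy.tensordot` (the tensors are made up):

    import numpy as np

    M = np.diag([1.0, -0.5, -0.5])  # moment tensor
    U = 0.1 * np.ones((3, 3))       # uncertainty tensor
    e_rel = np.sqrt(np.tensordot(U, U) / np.tensordot(M, M))
    print(round(e_rel, 4))  # 0.2449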