repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
gem/oq-engine | openquake/baselib/workerpool.py | WorkerMaster.start | def start(self, streamer=False):
"""
Start multiple workerpools, possibly on remote servers via ssh,
and possibly a streamer, depending on the `streamercls`.
:param streamer:
if True, starts a streamer with multiprocessing.Process
"""
if streamer and not general.socket_ready(self.task_in_url): # started
self.streamer = multiprocessing.Process(
target=_streamer,
args=(self.master_host, self.task_in_port, self.task_out_port))
self.streamer.start()
starting = []
for host, cores in self.host_cores:
if self.status(host)[0][1] == 'running':
print('%s:%s already running' % (host, self.ctrl_port))
continue
ctrl_url = 'tcp://%s:%s' % (host, self.ctrl_port)
if host == '127.0.0.1': # localhost
args = [sys.executable]
else:
args = ['ssh', host, self.remote_python]
args += ['-m', 'openquake.baselib.workerpool',
ctrl_url, self.task_out_url, cores]
starting.append(' '.join(args))
po = subprocess.Popen(args)
self.pids.append(po.pid)
return 'starting %s' % starting | python | def start(self, streamer=False):
"""
Start multiple workerpools, possibly on remote servers via ssh,
and possibly a streamer, depending on the `streamercls`.
:param streamer:
if True, starts a streamer with multiprocessing.Process
"""
if streamer and not general.socket_ready(self.task_in_url): # started
self.streamer = multiprocessing.Process(
target=_streamer,
args=(self.master_host, self.task_in_port, self.task_out_port))
self.streamer.start()
starting = []
for host, cores in self.host_cores:
if self.status(host)[0][1] == 'running':
print('%s:%s already running' % (host, self.ctrl_port))
continue
ctrl_url = 'tcp://%s:%s' % (host, self.ctrl_port)
if host == '127.0.0.1': # localhost
args = [sys.executable]
else:
args = ['ssh', host, self.remote_python]
args += ['-m', 'openquake.baselib.workerpool',
ctrl_url, self.task_out_url, cores]
starting.append(' '.join(args))
po = subprocess.Popen(args)
self.pids.append(po.pid)
return 'starting %s' % starting | [
"def",
"start",
"(",
"self",
",",
"streamer",
"=",
"False",
")",
":",
"if",
"streamer",
"and",
"not",
"general",
".",
"socket_ready",
"(",
"self",
".",
"task_in_url",
")",
":",
"# started",
"self",
".",
"streamer",
"=",
"multiprocessing",
".",
"Process",
... | Start multiple workerpools, possibly on remote servers via ssh,
and possibly a streamer, depending on the `streamercls`.
:param streamer:
if True, starts a streamer with multiprocessing.Process | [
"Start",
"multiple",
"workerpools",
"possibly",
"on",
"remote",
"servers",
"via",
"ssh",
"and",
"possibly",
"a",
"streamer",
"depending",
"on",
"the",
"streamercls",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/workerpool.py#L59-L87 | train | 213,900 |
gem/oq-engine | openquake/baselib/workerpool.py | WorkerMaster.stop | def stop(self):
"""
Send a "stop" command to all worker pools
"""
stopped = []
for host, _ in self.host_cores:
if self.status(host)[0][1] == 'not-running':
print('%s not running' % host)
continue
ctrl_url = 'tcp://%s:%s' % (host, self.ctrl_port)
with z.Socket(ctrl_url, z.zmq.REQ, 'connect') as sock:
sock.send('stop')
stopped.append(host)
if hasattr(self, 'streamer'):
self.streamer.terminate()
return 'stopped %s' % stopped | python | def stop(self):
"""
Send a "stop" command to all worker pools
"""
stopped = []
for host, _ in self.host_cores:
if self.status(host)[0][1] == 'not-running':
print('%s not running' % host)
continue
ctrl_url = 'tcp://%s:%s' % (host, self.ctrl_port)
with z.Socket(ctrl_url, z.zmq.REQ, 'connect') as sock:
sock.send('stop')
stopped.append(host)
if hasattr(self, 'streamer'):
self.streamer.terminate()
return 'stopped %s' % stopped | [
"def",
"stop",
"(",
"self",
")",
":",
"stopped",
"=",
"[",
"]",
"for",
"host",
",",
"_",
"in",
"self",
".",
"host_cores",
":",
"if",
"self",
".",
"status",
"(",
"host",
")",
"[",
"0",
"]",
"[",
"1",
"]",
"==",
"'not-running'",
":",
"print",
"("... | Send a "stop" command to all worker pools | [
"Send",
"a",
"stop",
"command",
"to",
"all",
"worker",
"pools"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/workerpool.py#L89-L104 | train | 213,901 |
gem/oq-engine | openquake/baselib/workerpool.py | WorkerPool.start | def start(self):
"""
Start worker processes and a control loop
"""
setproctitle('oq-zworkerpool %s' % self.ctrl_url[6:]) # strip tcp://
# start workers
self.workers = []
for _ in range(self.num_workers):
sock = z.Socket(self.task_out_port, z.zmq.PULL, 'connect')
proc = multiprocessing.Process(target=self.worker, args=(sock,))
proc.start()
sock.pid = proc.pid
self.workers.append(sock)
# start control loop accepting the commands stop and kill
with z.Socket(self.ctrl_url, z.zmq.REP, 'bind') as ctrlsock:
for cmd in ctrlsock:
if cmd in ('stop', 'kill'):
msg = getattr(self, cmd)()
ctrlsock.send(msg)
break
elif cmd == 'getpid':
ctrlsock.send(self.pid)
elif cmd == 'get_num_workers':
ctrlsock.send(self.num_workers) | python | def start(self):
"""
Start worker processes and a control loop
"""
setproctitle('oq-zworkerpool %s' % self.ctrl_url[6:]) # strip tcp://
# start workers
self.workers = []
for _ in range(self.num_workers):
sock = z.Socket(self.task_out_port, z.zmq.PULL, 'connect')
proc = multiprocessing.Process(target=self.worker, args=(sock,))
proc.start()
sock.pid = proc.pid
self.workers.append(sock)
# start control loop accepting the commands stop and kill
with z.Socket(self.ctrl_url, z.zmq.REP, 'bind') as ctrlsock:
for cmd in ctrlsock:
if cmd in ('stop', 'kill'):
msg = getattr(self, cmd)()
ctrlsock.send(msg)
break
elif cmd == 'getpid':
ctrlsock.send(self.pid)
elif cmd == 'get_num_workers':
ctrlsock.send(self.num_workers) | [
"def",
"start",
"(",
"self",
")",
":",
"setproctitle",
"(",
"'oq-zworkerpool %s'",
"%",
"self",
".",
"ctrl_url",
"[",
"6",
":",
"]",
")",
"# strip tcp://",
"# start workers",
"self",
".",
"workers",
"=",
"[",
"]",
"for",
"_",
"in",
"range",
"(",
"self",
... | Start worker processes and a control loop | [
"Start",
"worker",
"processes",
"and",
"a",
"control",
"loop"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/workerpool.py#L157-L181 | train | 213,902 |
gem/oq-engine | openquake/baselib/workerpool.py | WorkerPool.stop | def stop(self):
"""
Send a SIGTERM to all worker processes
"""
for sock in self.workers:
os.kill(sock.pid, signal.SIGTERM)
return 'WorkerPool %s stopped' % self.ctrl_url | python | def stop(self):
"""
Send a SIGTERM to all worker processes
"""
for sock in self.workers:
os.kill(sock.pid, signal.SIGTERM)
return 'WorkerPool %s stopped' % self.ctrl_url | [
"def",
"stop",
"(",
"self",
")",
":",
"for",
"sock",
"in",
"self",
".",
"workers",
":",
"os",
".",
"kill",
"(",
"sock",
".",
"pid",
",",
"signal",
".",
"SIGTERM",
")",
"return",
"'WorkerPool %s stopped'",
"%",
"self",
".",
"ctrl_url"
] | Send a SIGTERM to all worker processes | [
"Send",
"a",
"SIGTERM",
"to",
"all",
"worker",
"processes"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/workerpool.py#L183-L189 | train | 213,903 |
gem/oq-engine | openquake/baselib/workerpool.py | WorkerPool.kill | def kill(self):
"""
Send a SIGKILL to all worker processes
"""
for sock in self.workers:
os.kill(sock.pid, signal.SIGKILL)
return 'WorkerPool %s killed' % self.ctrl_url | python | def kill(self):
"""
Send a SIGKILL to all worker processes
"""
for sock in self.workers:
os.kill(sock.pid, signal.SIGKILL)
return 'WorkerPool %s killed' % self.ctrl_url | [
"def",
"kill",
"(",
"self",
")",
":",
"for",
"sock",
"in",
"self",
".",
"workers",
":",
"os",
".",
"kill",
"(",
"sock",
".",
"pid",
",",
"signal",
".",
"SIGKILL",
")",
"return",
"'WorkerPool %s killed'",
"%",
"self",
".",
"ctrl_url"
] | Send a SIGKILL to all worker processes | [
"Send",
"a",
"SIGKILL",
"to",
"all",
"worker",
"processes"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/workerpool.py#L191-L197 | train | 213,904 |
gem/oq-engine | openquake/server/dbserver.py | get_status | def get_status(address=None):
"""
Check if the DbServer is up.
:param address: pair (hostname, port)
:returns: 'running' or 'not-running'
"""
address = address or (config.dbserver.host, DBSERVER_PORT)
return 'running' if socket_ready(address) else 'not-running' | python | def get_status(address=None):
"""
Check if the DbServer is up.
:param address: pair (hostname, port)
:returns: 'running' or 'not-running'
"""
address = address or (config.dbserver.host, DBSERVER_PORT)
return 'running' if socket_ready(address) else 'not-running' | [
"def",
"get_status",
"(",
"address",
"=",
"None",
")",
":",
"address",
"=",
"address",
"or",
"(",
"config",
".",
"dbserver",
".",
"host",
",",
"DBSERVER_PORT",
")",
"return",
"'running'",
"if",
"socket_ready",
"(",
"address",
")",
"else",
"'not-running'"
] | Check if the DbServer is up.
:param address: pair (hostname, port)
:returns: 'running' or 'not-running' | [
"Check",
"if",
"the",
"DbServer",
"is",
"up",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/dbserver.py#L134-L142 | train | 213,905 |
gem/oq-engine | openquake/server/dbserver.py | check_foreign | def check_foreign():
"""
Check if we the DbServer is the right one
"""
if not config.dbserver.multi_user:
remote_server_path = logs.dbcmd('get_path')
if different_paths(server_path, remote_server_path):
return('You are trying to contact a DbServer from another'
' instance (got %s, expected %s)\n'
'Check the configuration or stop the foreign'
' DbServer instance') % (remote_server_path, server_path) | python | def check_foreign():
"""
Check if we the DbServer is the right one
"""
if not config.dbserver.multi_user:
remote_server_path = logs.dbcmd('get_path')
if different_paths(server_path, remote_server_path):
return('You are trying to contact a DbServer from another'
' instance (got %s, expected %s)\n'
'Check the configuration or stop the foreign'
' DbServer instance') % (remote_server_path, server_path) | [
"def",
"check_foreign",
"(",
")",
":",
"if",
"not",
"config",
".",
"dbserver",
".",
"multi_user",
":",
"remote_server_path",
"=",
"logs",
".",
"dbcmd",
"(",
"'get_path'",
")",
"if",
"different_paths",
"(",
"server_path",
",",
"remote_server_path",
")",
":",
... | Check if we the DbServer is the right one | [
"Check",
"if",
"we",
"the",
"DbServer",
"is",
"the",
"right",
"one"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/dbserver.py#L145-L155 | train | 213,906 |
gem/oq-engine | openquake/server/dbserver.py | ensure_on | def ensure_on():
"""
Start the DbServer if it is off
"""
if get_status() == 'not-running':
if config.dbserver.multi_user:
sys.exit('Please start the DbServer: '
'see the documentation for details')
# otherwise start the DbServer automatically; NB: I tried to use
# multiprocessing.Process(target=run_server).start() and apparently
# it works, but then run-demos.sh hangs after the end of the first
# calculation, but only if the DbServer is started by oq engine (!?)
subprocess.Popen([sys.executable, '-m', 'openquake.server.dbserver',
'-l', 'INFO'])
# wait for the dbserver to start
waiting_seconds = 30
while get_status() == 'not-running':
if waiting_seconds == 0:
sys.exit('The DbServer cannot be started after 30 seconds. '
'Please check the configuration')
time.sleep(1)
waiting_seconds -= 1 | python | def ensure_on():
"""
Start the DbServer if it is off
"""
if get_status() == 'not-running':
if config.dbserver.multi_user:
sys.exit('Please start the DbServer: '
'see the documentation for details')
# otherwise start the DbServer automatically; NB: I tried to use
# multiprocessing.Process(target=run_server).start() and apparently
# it works, but then run-demos.sh hangs after the end of the first
# calculation, but only if the DbServer is started by oq engine (!?)
subprocess.Popen([sys.executable, '-m', 'openquake.server.dbserver',
'-l', 'INFO'])
# wait for the dbserver to start
waiting_seconds = 30
while get_status() == 'not-running':
if waiting_seconds == 0:
sys.exit('The DbServer cannot be started after 30 seconds. '
'Please check the configuration')
time.sleep(1)
waiting_seconds -= 1 | [
"def",
"ensure_on",
"(",
")",
":",
"if",
"get_status",
"(",
")",
"==",
"'not-running'",
":",
"if",
"config",
".",
"dbserver",
".",
"multi_user",
":",
"sys",
".",
"exit",
"(",
"'Please start the DbServer: '",
"'see the documentation for details'",
")",
"# otherwise... | Start the DbServer if it is off | [
"Start",
"the",
"DbServer",
"if",
"it",
"is",
"off"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/dbserver.py#L158-L180 | train | 213,907 |
gem/oq-engine | openquake/server/dbserver.py | run_server | def run_server(dbpath=os.path.expanduser(config.dbserver.file),
dbhostport=None, loglevel='WARN'):
"""
Run the DbServer on the given database file and port. If not given,
use the settings in openquake.cfg.
"""
if dbhostport: # assume a string of the form "dbhost:port"
dbhost, port = dbhostport.split(':')
addr = (dbhost, int(port))
else:
addr = (config.dbserver.listen, DBSERVER_PORT)
# create the db directory if needed
dirname = os.path.dirname(dbpath)
if not os.path.exists(dirname):
os.makedirs(dirname)
# create and upgrade the db if needed
db('PRAGMA foreign_keys = ON') # honor ON DELETE CASCADE
actions.upgrade_db(db)
# the line below is needed to work around a very subtle bug of sqlite;
# we need new connections, see https://github.com/gem/oq-engine/pull/3002
db.close()
# reset any computation left in the 'executing' state
actions.reset_is_running(db)
# configure logging and start the server
logging.basicConfig(level=getattr(logging, loglevel))
DbServer(db, addr).start() | python | def run_server(dbpath=os.path.expanduser(config.dbserver.file),
dbhostport=None, loglevel='WARN'):
"""
Run the DbServer on the given database file and port. If not given,
use the settings in openquake.cfg.
"""
if dbhostport: # assume a string of the form "dbhost:port"
dbhost, port = dbhostport.split(':')
addr = (dbhost, int(port))
else:
addr = (config.dbserver.listen, DBSERVER_PORT)
# create the db directory if needed
dirname = os.path.dirname(dbpath)
if not os.path.exists(dirname):
os.makedirs(dirname)
# create and upgrade the db if needed
db('PRAGMA foreign_keys = ON') # honor ON DELETE CASCADE
actions.upgrade_db(db)
# the line below is needed to work around a very subtle bug of sqlite;
# we need new connections, see https://github.com/gem/oq-engine/pull/3002
db.close()
# reset any computation left in the 'executing' state
actions.reset_is_running(db)
# configure logging and start the server
logging.basicConfig(level=getattr(logging, loglevel))
DbServer(db, addr).start() | [
"def",
"run_server",
"(",
"dbpath",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"config",
".",
"dbserver",
".",
"file",
")",
",",
"dbhostport",
"=",
"None",
",",
"loglevel",
"=",
"'WARN'",
")",
":",
"if",
"dbhostport",
":",
"# assume a string of the fo... | Run the DbServer on the given database file and port. If not given,
use the settings in openquake.cfg. | [
"Run",
"the",
"DbServer",
"on",
"the",
"given",
"database",
"file",
"and",
"port",
".",
"If",
"not",
"given",
"use",
"the",
"settings",
"in",
"openquake",
".",
"cfg",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/dbserver.py#L184-L213 | train | 213,908 |
gem/oq-engine | openquake/server/dbserver.py | DbServer.start | def start(self):
"""
Start database worker threads
"""
# give a nice name to the process
w.setproctitle('oq-dbserver')
dworkers = []
for _ in range(self.num_workers):
sock = z.Socket(self.backend, z.zmq.REP, 'connect')
threading.Thread(target=self.dworker, args=(sock,)).start()
dworkers.append(sock)
logging.warning('DB server started with %s on %s, pid %d',
sys.executable, self.frontend, self.pid)
if ZMQ:
# start task_in->task_out streamer thread
c = config.zworkers
threading.Thread(
target=w._streamer,
args=(self.master_host, c.task_in_port, c.task_out_port)
).start()
logging.warning('Task streamer started from %s -> %s',
c.task_in_port, c.task_out_port)
# start zworkers and wait a bit for them
msg = self.master.start()
logging.warning(msg)
time.sleep(1)
# start frontend->backend proxy for the database workers
try:
z.zmq.proxy(z.bind(self.frontend, z.zmq.ROUTER),
z.bind(self.backend, z.zmq.DEALER))
except (KeyboardInterrupt, z.zmq.ZMQError):
for sock in dworkers:
sock.running = False
sock.zsocket.close()
logging.warning('DB server stopped')
finally:
self.stop() | python | def start(self):
"""
Start database worker threads
"""
# give a nice name to the process
w.setproctitle('oq-dbserver')
dworkers = []
for _ in range(self.num_workers):
sock = z.Socket(self.backend, z.zmq.REP, 'connect')
threading.Thread(target=self.dworker, args=(sock,)).start()
dworkers.append(sock)
logging.warning('DB server started with %s on %s, pid %d',
sys.executable, self.frontend, self.pid)
if ZMQ:
# start task_in->task_out streamer thread
c = config.zworkers
threading.Thread(
target=w._streamer,
args=(self.master_host, c.task_in_port, c.task_out_port)
).start()
logging.warning('Task streamer started from %s -> %s',
c.task_in_port, c.task_out_port)
# start zworkers and wait a bit for them
msg = self.master.start()
logging.warning(msg)
time.sleep(1)
# start frontend->backend proxy for the database workers
try:
z.zmq.proxy(z.bind(self.frontend, z.zmq.ROUTER),
z.bind(self.backend, z.zmq.DEALER))
except (KeyboardInterrupt, z.zmq.ZMQError):
for sock in dworkers:
sock.running = False
sock.zsocket.close()
logging.warning('DB server stopped')
finally:
self.stop() | [
"def",
"start",
"(",
"self",
")",
":",
"# give a nice name to the process",
"w",
".",
"setproctitle",
"(",
"'oq-dbserver'",
")",
"dworkers",
"=",
"[",
"]",
"for",
"_",
"in",
"range",
"(",
"self",
".",
"num_workers",
")",
":",
"sock",
"=",
"z",
".",
"Sock... | Start database worker threads | [
"Start",
"database",
"worker",
"threads"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/dbserver.py#L78-L117 | train | 213,909 |
gem/oq-engine | openquake/server/dbserver.py | DbServer.stop | def stop(self):
"""Stop the DbServer and the zworkers if any"""
if ZMQ:
logging.warning(self.master.stop())
z.context.term()
self.db.close() | python | def stop(self):
"""Stop the DbServer and the zworkers if any"""
if ZMQ:
logging.warning(self.master.stop())
z.context.term()
self.db.close() | [
"def",
"stop",
"(",
"self",
")",
":",
"if",
"ZMQ",
":",
"logging",
".",
"warning",
"(",
"self",
".",
"master",
".",
"stop",
"(",
")",
")",
"z",
".",
"context",
".",
"term",
"(",
")",
"self",
".",
"db",
".",
"close",
"(",
")"
] | Stop the DbServer and the zworkers if any | [
"Stop",
"the",
"DbServer",
"and",
"the",
"zworkers",
"if",
"any"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/dbserver.py#L119-L124 | train | 213,910 |
gem/oq-engine | openquake/hazardlib/gsim/climent_1994.py | ClimentEtAl1994._compute_term_3_4 | def _compute_term_3_4(self, dists, C):
"""
Compute term 3 and 4 in equation 1 page 1.
"""
cutoff = 6.056877878
rhypo = dists.rhypo.copy()
rhypo[rhypo <= cutoff] = cutoff
return C['c3'] * np.log(rhypo) + C['c4'] * rhypo | python | def _compute_term_3_4(self, dists, C):
"""
Compute term 3 and 4 in equation 1 page 1.
"""
cutoff = 6.056877878
rhypo = dists.rhypo.copy()
rhypo[rhypo <= cutoff] = cutoff
return C['c3'] * np.log(rhypo) + C['c4'] * rhypo | [
"def",
"_compute_term_3_4",
"(",
"self",
",",
"dists",
",",
"C",
")",
":",
"cutoff",
"=",
"6.056877878",
"rhypo",
"=",
"dists",
".",
"rhypo",
".",
"copy",
"(",
")",
"rhypo",
"[",
"rhypo",
"<=",
"cutoff",
"]",
"=",
"cutoff",
"return",
"C",
"[",
"'c3'"... | Compute term 3 and 4 in equation 1 page 1. | [
"Compute",
"term",
"3",
"and",
"4",
"in",
"equation",
"1",
"page",
"1",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/climent_1994.py#L111-L118 | train | 213,911 |
gem/oq-engine | openquake/hazardlib/gsim/convertito_2012.py | ConvertitoEtAl2012Geysers._compute_distance_scaling | def _compute_distance_scaling(self, C, rhypo):
"""
Returns the distance scaling term accounting for geometric and
anelastic attenuation
"""
return C["c"] * np.log10(np.sqrt((rhypo ** 2.) + (C["h"] ** 2.))) +\
(C["d"] * rhypo) | python | def _compute_distance_scaling(self, C, rhypo):
"""
Returns the distance scaling term accounting for geometric and
anelastic attenuation
"""
return C["c"] * np.log10(np.sqrt((rhypo ** 2.) + (C["h"] ** 2.))) +\
(C["d"] * rhypo) | [
"def",
"_compute_distance_scaling",
"(",
"self",
",",
"C",
",",
"rhypo",
")",
":",
"return",
"C",
"[",
"\"c\"",
"]",
"*",
"np",
".",
"log10",
"(",
"np",
".",
"sqrt",
"(",
"(",
"rhypo",
"**",
"2.",
")",
"+",
"(",
"C",
"[",
"\"h\"",
"]",
"**",
"2... | Returns the distance scaling term accounting for geometric and
anelastic attenuation | [
"Returns",
"the",
"distance",
"scaling",
"term",
"accounting",
"for",
"geometric",
"and",
"anelastic",
"attenuation"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/convertito_2012.py#L101-L107 | train | 213,912 |
gem/oq-engine | openquake/hazardlib/gsim/convertito_2012.py | ConvertitoEtAl2012Geysers._compute_site_scaling | def _compute_site_scaling(self, C, vs30):
"""
Returns the site scaling term as a simple coefficient
"""
site_term = np.zeros(len(vs30), dtype=float)
# For soil sites add on the site coefficient
site_term[vs30 < 760.0] = C["e"]
return site_term | python | def _compute_site_scaling(self, C, vs30):
"""
Returns the site scaling term as a simple coefficient
"""
site_term = np.zeros(len(vs30), dtype=float)
# For soil sites add on the site coefficient
site_term[vs30 < 760.0] = C["e"]
return site_term | [
"def",
"_compute_site_scaling",
"(",
"self",
",",
"C",
",",
"vs30",
")",
":",
"site_term",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"vs30",
")",
",",
"dtype",
"=",
"float",
")",
"# For soil sites add on the site coefficient",
"site_term",
"[",
"vs30",
"<",
... | Returns the site scaling term as a simple coefficient | [
"Returns",
"the",
"site",
"scaling",
"term",
"as",
"a",
"simple",
"coefficient"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/convertito_2012.py#L109-L116 | train | 213,913 |
gem/oq-engine | openquake/hazardlib/contexts.py | ContextMaker.filter | def filter(self, sites, rupture):
"""
Filter the site collection with respect to the rupture.
:param sites:
Instance of :class:`openquake.hazardlib.site.SiteCollection`.
:param rupture:
Instance of
:class:`openquake.hazardlib.source.rupture.BaseRupture`
:returns:
(filtered sites, distance context)
"""
distances = get_distances(rupture, sites, self.filter_distance)
if self.maximum_distance:
mask = distances <= self.maximum_distance(
rupture.tectonic_region_type, rupture.mag)
if mask.any():
sites, distances = sites.filter(mask), distances[mask]
else:
raise FarAwayRupture(
'%d: %d km' % (rupture.serial, distances.min()))
return sites, DistancesContext([(self.filter_distance, distances)]) | python | def filter(self, sites, rupture):
"""
Filter the site collection with respect to the rupture.
:param sites:
Instance of :class:`openquake.hazardlib.site.SiteCollection`.
:param rupture:
Instance of
:class:`openquake.hazardlib.source.rupture.BaseRupture`
:returns:
(filtered sites, distance context)
"""
distances = get_distances(rupture, sites, self.filter_distance)
if self.maximum_distance:
mask = distances <= self.maximum_distance(
rupture.tectonic_region_type, rupture.mag)
if mask.any():
sites, distances = sites.filter(mask), distances[mask]
else:
raise FarAwayRupture(
'%d: %d km' % (rupture.serial, distances.min()))
return sites, DistancesContext([(self.filter_distance, distances)]) | [
"def",
"filter",
"(",
"self",
",",
"sites",
",",
"rupture",
")",
":",
"distances",
"=",
"get_distances",
"(",
"rupture",
",",
"sites",
",",
"self",
".",
"filter_distance",
")",
"if",
"self",
".",
"maximum_distance",
":",
"mask",
"=",
"distances",
"<=",
"... | Filter the site collection with respect to the rupture.
:param sites:
Instance of :class:`openquake.hazardlib.site.SiteCollection`.
:param rupture:
Instance of
:class:`openquake.hazardlib.source.rupture.BaseRupture`
:returns:
(filtered sites, distance context) | [
"Filter",
"the",
"site",
"collection",
"with",
"respect",
"to",
"the",
"rupture",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/contexts.py#L123-L144 | train | 213,914 |
gem/oq-engine | openquake/hazardlib/contexts.py | ContextMaker.add_rup_params | def add_rup_params(self, rupture):
"""
Add .REQUIRES_RUPTURE_PARAMETERS to the rupture
"""
for param in self.REQUIRES_RUPTURE_PARAMETERS:
if param == 'mag':
value = rupture.mag
elif param == 'strike':
value = rupture.surface.get_strike()
elif param == 'dip':
value = rupture.surface.get_dip()
elif param == 'rake':
value = rupture.rake
elif param == 'ztor':
value = rupture.surface.get_top_edge_depth()
elif param == 'hypo_lon':
value = rupture.hypocenter.longitude
elif param == 'hypo_lat':
value = rupture.hypocenter.latitude
elif param == 'hypo_depth':
value = rupture.hypocenter.depth
elif param == 'width':
value = rupture.surface.get_width()
else:
raise ValueError('%s requires unknown rupture parameter %r' %
(type(self).__name__, param))
setattr(rupture, param, value) | python | def add_rup_params(self, rupture):
"""
Add .REQUIRES_RUPTURE_PARAMETERS to the rupture
"""
for param in self.REQUIRES_RUPTURE_PARAMETERS:
if param == 'mag':
value = rupture.mag
elif param == 'strike':
value = rupture.surface.get_strike()
elif param == 'dip':
value = rupture.surface.get_dip()
elif param == 'rake':
value = rupture.rake
elif param == 'ztor':
value = rupture.surface.get_top_edge_depth()
elif param == 'hypo_lon':
value = rupture.hypocenter.longitude
elif param == 'hypo_lat':
value = rupture.hypocenter.latitude
elif param == 'hypo_depth':
value = rupture.hypocenter.depth
elif param == 'width':
value = rupture.surface.get_width()
else:
raise ValueError('%s requires unknown rupture parameter %r' %
(type(self).__name__, param))
setattr(rupture, param, value) | [
"def",
"add_rup_params",
"(",
"self",
",",
"rupture",
")",
":",
"for",
"param",
"in",
"self",
".",
"REQUIRES_RUPTURE_PARAMETERS",
":",
"if",
"param",
"==",
"'mag'",
":",
"value",
"=",
"rupture",
".",
"mag",
"elif",
"param",
"==",
"'strike'",
":",
"value",
... | Add .REQUIRES_RUPTURE_PARAMETERS to the rupture | [
"Add",
".",
"REQUIRES_RUPTURE_PARAMETERS",
"to",
"the",
"rupture"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/contexts.py#L146-L172 | train | 213,915 |
gem/oq-engine | openquake/hazardlib/contexts.py | ContextMaker.make_contexts | def make_contexts(self, sites, rupture):
"""
Filter the site collection with respect to the rupture and
create context objects.
:param sites:
Instance of :class:`openquake.hazardlib.site.SiteCollection`.
:param rupture:
Instance of
:class:`openquake.hazardlib.source.rupture.BaseRupture`
:returns:
Tuple of two items: sites and distances context.
:raises ValueError:
If any of declared required parameters (site, rupture and
distance parameters) is unknown.
"""
sites, dctx = self.filter(sites, rupture)
for param in self.REQUIRES_DISTANCES - set([self.filter_distance]):
distances = get_distances(rupture, sites, param)
setattr(dctx, param, distances)
reqv_obj = (self.reqv.get(rupture.tectonic_region_type)
if self.reqv else None)
if reqv_obj and isinstance(rupture.surface, PlanarSurface):
reqv = reqv_obj.get(dctx.repi, rupture.mag)
if 'rjb' in self.REQUIRES_DISTANCES:
dctx.rjb = reqv
if 'rrup' in self.REQUIRES_DISTANCES:
reqv_rup = numpy.sqrt(reqv**2 + rupture.hypocenter.depth**2)
dctx.rrup = reqv_rup
self.add_rup_params(rupture)
# NB: returning a SitesContext make sures that the GSIM cannot
# access site parameters different from the ones declared
sctx = SitesContext(self.REQUIRES_SITES_PARAMETERS, sites)
return sctx, dctx | python | def make_contexts(self, sites, rupture):
"""
Filter the site collection with respect to the rupture and
create context objects.
:param sites:
Instance of :class:`openquake.hazardlib.site.SiteCollection`.
:param rupture:
Instance of
:class:`openquake.hazardlib.source.rupture.BaseRupture`
:returns:
Tuple of two items: sites and distances context.
:raises ValueError:
If any of declared required parameters (site, rupture and
distance parameters) is unknown.
"""
sites, dctx = self.filter(sites, rupture)
for param in self.REQUIRES_DISTANCES - set([self.filter_distance]):
distances = get_distances(rupture, sites, param)
setattr(dctx, param, distances)
reqv_obj = (self.reqv.get(rupture.tectonic_region_type)
if self.reqv else None)
if reqv_obj and isinstance(rupture.surface, PlanarSurface):
reqv = reqv_obj.get(dctx.repi, rupture.mag)
if 'rjb' in self.REQUIRES_DISTANCES:
dctx.rjb = reqv
if 'rrup' in self.REQUIRES_DISTANCES:
reqv_rup = numpy.sqrt(reqv**2 + rupture.hypocenter.depth**2)
dctx.rrup = reqv_rup
self.add_rup_params(rupture)
# NB: returning a SitesContext make sures that the GSIM cannot
# access site parameters different from the ones declared
sctx = SitesContext(self.REQUIRES_SITES_PARAMETERS, sites)
return sctx, dctx | [
"def",
"make_contexts",
"(",
"self",
",",
"sites",
",",
"rupture",
")",
":",
"sites",
",",
"dctx",
"=",
"self",
".",
"filter",
"(",
"sites",
",",
"rupture",
")",
"for",
"param",
"in",
"self",
".",
"REQUIRES_DISTANCES",
"-",
"set",
"(",
"[",
"self",
"... | Filter the site collection with respect to the rupture and
create context objects.
:param sites:
Instance of :class:`openquake.hazardlib.site.SiteCollection`.
:param rupture:
Instance of
:class:`openquake.hazardlib.source.rupture.BaseRupture`
:returns:
Tuple of two items: sites and distances context.
:raises ValueError:
If any of declared required parameters (site, rupture and
distance parameters) is unknown. | [
"Filter",
"the",
"site",
"collection",
"with",
"respect",
"to",
"the",
"rupture",
"and",
"create",
"context",
"objects",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/contexts.py#L174-L210 | train | 213,916 |
def roundup(self, minimum_distance):
    """
    If the minimum_distance is nonzero, returns a copy of the
    DistancesContext with updated distances, i.e. the ones below
    minimum_distance are rounded up to the minimum_distance. Otherwise,
    returns the original DistancesContext unchanged.
    """
    if not minimum_distance:
        return self
    rounded = DistancesContext()
    for name, values in vars(self).items():
        below = values < minimum_distance
        if below.any():
            values = values[:]  # copy before clipping, so self stays intact
            values[below] = minimum_distance
        setattr(rounded, name, values)
    return rounded
"""
If the minimum_distance is nonzero, returns a copy of the
DistancesContext with updated distances, i.e. the ones below
minimum_distance are rounded up to the minimum_distance. Otherwise,
returns the original DistancesContext unchanged.
"""
if not minimum_distance:
return self
ctx = DistancesContext()
for dist, array in vars(self).items():
small_distances = array < minimum_distance
if small_distances.any():
array = array[:] # make a copy first
array[small_distances] = minimum_distance
setattr(ctx, dist, array)
return ctx | [
"def",
"roundup",
"(",
"self",
",",
"minimum_distance",
")",
":",
"if",
"not",
"minimum_distance",
":",
"return",
"self",
"ctx",
"=",
"DistancesContext",
"(",
")",
"for",
"dist",
",",
"array",
"in",
"vars",
"(",
"self",
")",
".",
"items",
"(",
")",
":"... | If the minimum_distance is nonzero, returns a copy of the
DistancesContext with updated distances, i.e. the ones below
minimum_distance are rounded up to the minimum_distance. Otherwise,
returns the original DistancesContext unchanged. | [
"If",
"the",
"minimum_distance",
"is",
"nonzero",
"returns",
"a",
"copy",
"of",
"the",
"DistancesContext",
"with",
"updated",
"distances",
"i",
".",
"e",
".",
"the",
"ones",
"below",
"minimum_distance",
"are",
"rounded",
"up",
"to",
"the",
"minimum_distance",
... | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/contexts.py#L432-L448 | train | 213,917 |
def get_probability_no_exceedance(self, poes):
    """
    Compute and return the probability that in the time span for which the
    rupture is defined, the rupture itself never generates a ground motion
    value higher than a given level at a given site.

    Such calculation is performed starting from the conditional probability
    that an occurrence of the current rupture is producing a ground motion
    value higher than the level of interest at the site of interest.

    The actual formula used for such calculation depends on the temporal
    occurrence model the rupture is associated with.

    The calculation can be performed for multiple intensity measure levels
    and multiple sites in a vectorized fashion.

    :param poes:
        2D numpy array containing conditional probabilities that a
        rupture occurrence causes a ground shaking value exceeding a
        ground motion level at a site. First dimension represent sites,
        second dimension intensity measure levels. ``poes`` can be obtained
        calling the :meth:`method
        <openquake.hazardlib.gsim.base.GroundShakingIntensityModel.get_poes>`
    :returns:
        numpy array of probabilities of no exceedance (same site/IML
        layout as ``poes``)
    """
    if numpy.isnan(self.occurrence_rate):  # nonparametric rupture
        # Uses the formula
        #
        #     ∑ p(k|T) * p(X<x|rup)^k
        #
        # where `p(k|T)` is the probability that the rupture occurs k times
        # in the time span `T`, `p(X<x|rup)` is the probability that a
        # rupture occurrence does not cause a ground motion exceedance, and
        # the summation `∑` is done over the number of occurrences `k`.
        #
        # `p(k|T)` is given by the attribute probs_occur and
        # `p(X<x|rup)` is computed as ``1 - poes``.
        # Converting from 1d to 2d
        if len(poes.shape) == 1:
            poes = numpy.reshape(poes, (-1, len(poes)))
        p_kT = self.probs_occur
        prob_no_exceed = numpy.array(
            [v * ((1 - poes) ** i) for i, v in enumerate(p_kT)])
        prob_no_exceed = numpy.sum(prob_no_exceed, axis=0)
        prob_no_exceed[prob_no_exceed > 1.] = 1.  # sanity check
        prob_no_exceed[poes == 0.] = 1.  # avoid numeric issues
        return prob_no_exceed
    # parametric rupture: delegate to the temporal occurrence model
    tom = self.temporal_occurrence_model
    return tom.get_probability_no_exceedance(self.occurrence_rate, poes)
"""
Compute and return the probability that in the time span for which the
rupture is defined, the rupture itself never generates a ground motion
value higher than a given level at a given site.
Such calculation is performed starting from the conditional probability
that an occurrence of the current rupture is producing a ground motion
value higher than the level of interest at the site of interest.
The actual formula used for such calculation depends on the temporal
occurrence model the rupture is associated with.
The calculation can be performed for multiple intensity measure levels
and multiple sites in a vectorized fashion.
:param poes:
2D numpy array containing conditional probabilities that a
rupture occurrence causes a ground shaking value exceeding a
ground motion level at a site. First dimension represent sites,
second dimension intensity measure levels. ``poes`` can be obtained
calling the :meth:`method
<openquake.hazardlib.gsim.base.GroundShakingIntensityModel.get_poes>
"""
if numpy.isnan(self.occurrence_rate): # nonparametric rupture
# Uses the formula
#
# ∑ p(k|T) * p(X<x|rup)^k
#
# where `p(k|T)` is the probability that the rupture occurs k times
# in the time span `T`, `p(X<x|rup)` is the probability that a
# rupture occurrence does not cause a ground motion exceedance, and
# the summation `∑` is done over the number of occurrences `k`.
#
# `p(k|T)` is given by the attribute probs_occur and
# `p(X<x|rup)` is computed as ``1 - poes``.
# Converting from 1d to 2d
if len(poes.shape) == 1:
poes = numpy.reshape(poes, (-1, len(poes)))
p_kT = self.probs_occur
prob_no_exceed = numpy.array(
[v * ((1 - poes) ** i) for i, v in enumerate(p_kT)])
prob_no_exceed = numpy.sum(prob_no_exceed, axis=0)
prob_no_exceed[prob_no_exceed > 1.] = 1. # sanity check
prob_no_exceed[poes == 0.] = 1. # avoid numeric issues
return prob_no_exceed
# parametric rupture
tom = self.temporal_occurrence_model
return tom.get_probability_no_exceedance(self.occurrence_rate, poes) | [
"def",
"get_probability_no_exceedance",
"(",
"self",
",",
"poes",
")",
":",
"if",
"numpy",
".",
"isnan",
"(",
"self",
".",
"occurrence_rate",
")",
":",
"# nonparametric rupture",
"# Uses the formula",
"#",
"# ∑ p(k|T) * p(X<x|rup)^k",
"#",
"# where `p(k|T)` is the pr... | Compute and return the probability that in the time span for which the
rupture is defined, the rupture itself never generates a ground motion
value higher than a given level at a given site.
Such calculation is performed starting from the conditional probability
that an occurrence of the current rupture is producing a ground motion
value higher than the level of interest at the site of interest.
The actual formula used for such calculation depends on the temporal
occurrence model the rupture is associated with.
The calculation can be performed for multiple intensity measure levels
and multiple sites in a vectorized fashion.
:param poes:
2D numpy array containing conditional probabilities that a
rupture occurrence causes a ground shaking value exceeding a
ground motion level at a site. First dimension represent sites,
second dimension intensity measure levels. ``poes`` can be obtained
calling the :meth:`method
<openquake.hazardlib.gsim.base.GroundShakingIntensityModel.get_poes> | [
"Compute",
"and",
"return",
"the",
"probability",
"that",
"in",
"the",
"time",
"span",
"for",
"which",
"the",
"rupture",
"is",
"defined",
"the",
"rupture",
"itself",
"never",
"generates",
"a",
"ground",
"motion",
"value",
"higher",
"than",
"a",
"given",
"lev... | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/contexts.py#L473-L519 | train | 213,918 |
gem/oq-engine | openquake/hmtk/parsers/catalogue/csv_catalogue_parser.py | CsvCatalogueParser._float_check | def _float_check(self, attribute_array, value, irow, key):
'''Checks if value is valid float, appends to array if valid, appends
nan if not'''
value = value.strip(' ')
try:
if value:
attribute_array = np.hstack([attribute_array, float(value)])
else:
attribute_array = np.hstack([attribute_array, np.nan])
except:
print(irow, key)
msg = 'Input file format error at line: %d' % (irow + 2)
msg += ' key: %s' % (key)
raise ValueError(msg)
return attribute_array | python | def _float_check(self, attribute_array, value, irow, key):
'''Checks if value is valid float, appends to array if valid, appends
nan if not'''
value = value.strip(' ')
try:
if value:
attribute_array = np.hstack([attribute_array, float(value)])
else:
attribute_array = np.hstack([attribute_array, np.nan])
except:
print(irow, key)
msg = 'Input file format error at line: %d' % (irow + 2)
msg += ' key: %s' % (key)
raise ValueError(msg)
return attribute_array | [
"def",
"_float_check",
"(",
"self",
",",
"attribute_array",
",",
"value",
",",
"irow",
",",
"key",
")",
":",
"value",
"=",
"value",
".",
"strip",
"(",
"' '",
")",
"try",
":",
"if",
"value",
":",
"attribute_array",
"=",
"np",
".",
"hstack",
"(",
"[",
... | Checks if value is valid float, appends to array if valid, appends
nan if not | [
"Checks",
"if",
"value",
"is",
"valid",
"float",
"appends",
"to",
"array",
"if",
"valid",
"appends",
"nan",
"if",
"not"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/catalogue/csv_catalogue_parser.py#L114-L128 | train | 213,919 |
gem/oq-engine | openquake/hmtk/parsers/catalogue/csv_catalogue_parser.py | CsvCatalogueParser._int_check | def _int_check(self, attribute_array, value, irow, key):
'''Checks if value is valid integer, appends to array if valid, appends
nan if not'''
value = value.strip(' ')
try:
if value:
attribute_array = np.hstack([attribute_array, int(value)])
else:
attribute_array = np.hstack([attribute_array, np.nan])
except:
msg = 'Input file format error at line: %d' % (irow + 2)
msg += ' key: %s' % (key)
raise ValueError(msg)
return attribute_array | python | def _int_check(self, attribute_array, value, irow, key):
'''Checks if value is valid integer, appends to array if valid, appends
nan if not'''
value = value.strip(' ')
try:
if value:
attribute_array = np.hstack([attribute_array, int(value)])
else:
attribute_array = np.hstack([attribute_array, np.nan])
except:
msg = 'Input file format error at line: %d' % (irow + 2)
msg += ' key: %s' % (key)
raise ValueError(msg)
return attribute_array | [
"def",
"_int_check",
"(",
"self",
",",
"attribute_array",
",",
"value",
",",
"irow",
",",
"key",
")",
":",
"value",
"=",
"value",
".",
"strip",
"(",
"' '",
")",
"try",
":",
"if",
"value",
":",
"attribute_array",
"=",
"np",
".",
"hstack",
"(",
"[",
... | Checks if value is valid integer, appends to array if valid, appends
nan if not | [
"Checks",
"if",
"value",
"is",
"valid",
"integer",
"appends",
"to",
"array",
"if",
"valid",
"appends",
"nan",
"if",
"not"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/catalogue/csv_catalogue_parser.py#L130-L143 | train | 213,920 |
def write_file(self, catalogue, flag_vector=None, magnitude_table=None):
    """
    Writes the catalogue to file, purging events if necessary.

    :param catalogue:
        Earthquake catalogue as instance of :class:
        `openquake.hmtk.seismicity.catalogue.Catalogue`
    :param numpy.array flag_vector:
        Boolean vector specifying whether each event is valid (therefore
        written) or otherwise
    :param numpy.ndarray magnitude_table:
        Magnitude-time table specifying the year and magnitudes of
        completeness
    """
    # First apply purging conditions
    output_catalogue = self.apply_purging(catalogue,
                                          flag_vector,
                                          magnitude_table)
    # Replace all-NaN columns with empty lists so they are written as
    # blank fields instead of 'nan'
    for key in self.OUTPUT_LIST:
        cond = (isinstance(output_catalogue.data[key], np.ndarray)
                and np.all(np.isnan(output_catalogue.data[key])))
        if cond:
            output_catalogue.data[key] = []
    # `with` guarantees the file is closed even if a write fails
    # (the original leaked the handle on exceptions)
    with open(self.output_file, 'wt') as outfile:
        writer = csv.DictWriter(outfile, fieldnames=self.OUTPUT_LIST)
        writer.writeheader()
        # Write the catalogue row by row; missing columns become ''
        for iloc in range(0, output_catalogue.get_number_events()):
            row_dict = {}
            for key in self.OUTPUT_LIST:
                if len(output_catalogue.data[key]) > 0:
                    row_dict[key] = output_catalogue.data[key][iloc]
                else:
                    row_dict[key] = ''
            writer.writerow(row_dict)
'''
Writes the catalogue to file, purging events if necessary.
:param catalogue:
Earthquake catalogue as instance of :class:
openquake.hmtk.seismicity.catalogue.Catalogue
:param numpy.array flag_vector:
Boolean vector specifying whether each event is valid (therefore
written) or otherwise
:param numpy.ndarray magnitude_table:
Magnitude-time table specifying the year and magnitudes of
completeness
'''
# First apply purging conditions
output_catalogue = self.apply_purging(catalogue,
flag_vector,
magnitude_table)
outfile = open(self.output_file, 'wt')
writer = csv.DictWriter(outfile, fieldnames=self.OUTPUT_LIST)
writer.writeheader()
# Quick check to remove nan arrays
for key in self.OUTPUT_LIST:
cond = (isinstance(output_catalogue.data[key], np.ndarray)
and np.all(np.isnan(output_catalogue.data[key])))
if cond:
output_catalogue.data[key] = []
# Write the catalogue
for iloc in range(0, output_catalogue.get_number_events()):
row_dict = {}
for key in self.OUTPUT_LIST:
if len(output_catalogue.data[key]) > 0:
row_dict[key] = output_catalogue.data[key][iloc]
else:
row_dict[key] = ''
writer.writerow(row_dict)
outfile.close() | [
"def",
"write_file",
"(",
"self",
",",
"catalogue",
",",
"flag_vector",
"=",
"None",
",",
"magnitude_table",
"=",
"None",
")",
":",
"# First apply purging conditions",
"output_catalogue",
"=",
"self",
".",
"apply_purging",
"(",
"catalogue",
",",
"flag_vector",
","... | Writes the catalogue to file, purging events if necessary.
:param catalogue:
Earthquake catalogue as instance of :class:
openquake.hmtk.seismicity.catalogue.Catalogue
:param numpy.array flag_vector:
Boolean vector specifying whether each event is valid (therefore
written) or otherwise
:param numpy.ndarray magnitude_table:
Magnitude-time table specifying the year and magnitudes of
completeness | [
"Writes",
"the",
"catalogue",
"to",
"file",
"purging",
"events",
"if",
"necessary",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/catalogue/csv_catalogue_parser.py#L169-L206 | train | 213,921 |
def apply_purging(self, catalogue, flag_vector, magnitude_table):
    """
    Apply all the various purging conditions, if specified.

    :param catalogue:
        Earthquake catalogue as instance of
        :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
    :param numpy.array flag_vector:
        Boolean vector specifying whether each event is valid (therefore
        written) or otherwise
    :param numpy.ndarray magnitude_table:
        Magnitude-time table specifying the year and magnitudes of
        completeness
    :returns:
        a purged deep copy of the catalogue (the input is not modified)
    """
    purged = deepcopy(catalogue)
    if magnitude_table is None:
        # No completeness filtering; optionally purge flagged events
        if flag_vector is not None:
            purged.purge_catalogue(flag_vector)
        return purged
    if flag_vector is not None:
        purged.catalogue_mt_filter(magnitude_table, flag_vector)
    else:
        purged.catalogue_mt_filter(magnitude_table)
    return purged
'''
Apply all the various purging conditions, if specified.
:param catalogue:
Earthquake catalogue as instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
:param numpy.array flag_vector:
Boolean vector specifying whether each event is valid (therefore
written) or otherwise
:param numpy.ndarray magnitude_table:
Magnitude-time table specifying the year and magnitudes of
completeness
'''
output_catalogue = deepcopy(catalogue)
if magnitude_table is not None:
if flag_vector is not None:
output_catalogue.catalogue_mt_filter(
magnitude_table, flag_vector)
return output_catalogue
else:
output_catalogue.catalogue_mt_filter(
magnitude_table)
return output_catalogue
if flag_vector is not None:
output_catalogue.purge_catalogue(flag_vector)
return output_catalogue | [
"def",
"apply_purging",
"(",
"self",
",",
"catalogue",
",",
"flag_vector",
",",
"magnitude_table",
")",
":",
"output_catalogue",
"=",
"deepcopy",
"(",
"catalogue",
")",
"if",
"magnitude_table",
"is",
"not",
"None",
":",
"if",
"flag_vector",
"is",
"not",
"None"... | Apply all the various purging conditions, if specified.
:param catalogue:
Earthquake catalogue as instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
:param numpy.array flag_vector:
Boolean vector specifying whether each event is valid (therefore
written) or otherwise
:param numpy.ndarray magnitude_table:
Magnitude-time table specifying the year and magnitudes of
completeness | [
"Apply",
"all",
"the",
"various",
"purging",
"conditions",
"if",
"specified",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/catalogue/csv_catalogue_parser.py#L208-L234 | train | 213,922 |
def get_xyz_from_ll(projected, reference):
    """
    Compute the Cartesian coordinates of a point relative to a reference
    point.

    :param projected:
        :class:`~openquake.hazardlib.geo.point.Point` object
        representing the coordinates of target point to be projected
    :param reference:
        :class:`~openquake.hazardlib.geo.point.Point` object
        representing the coordinates of the reference point.
    :returns:
        tuple ``(x, y, z)`` where x = distance * sin(azimuth),
        y = distance * cos(azimuth) (azimuth from reference to projected)
        and z = reference depth minus projected depth
    """
    lon0, lat0 = reference.longitude, reference.latitude
    lon1, lat1 = projected.longitude, projected.latitude
    azim = math.radians(geod.azimuth(lon0, lat0, lon1, lat1))
    dist = geod.geodetic_distance(lon0, lat0, lon1, lat1)
    delta_z = np.subtract(reference.depth, projected.depth)
    return dist * math.sin(azim), dist * math.cos(azim), delta_z
"""
This method computes the x, y and z coordinates of a set of points
provided a reference point
:param projected:
:class:`~openquake.hazardlib.geo.point.Point` object
representing the coordinates of target point to be projected
:param reference:
:class:`~openquake.hazardlib.geo.point.Point` object
representing the coordinates of the reference point.
:returns:
x
y
z
"""
azims = geod.azimuth(reference.longitude, reference.latitude,
projected.longitude, projected.latitude)
depths = np.subtract(reference.depth, projected.depth)
dists = geod.geodetic_distance(reference.longitude,
reference.latitude,
projected.longitude,
projected.latitude)
return (dists * math.sin(math.radians(azims)),
dists * math.cos(math.radians(azims)),
depths) | [
"def",
"get_xyz_from_ll",
"(",
"projected",
",",
"reference",
")",
":",
"azims",
"=",
"geod",
".",
"azimuth",
"(",
"reference",
".",
"longitude",
",",
"reference",
".",
"latitude",
",",
"projected",
".",
"longitude",
",",
"projected",
".",
"latitude",
")",
... | This method computes the x, y and z coordinates of a set of points
provided a reference point
:param projected:
:class:`~openquake.hazardlib.geo.point.Point` object
representing the coordinates of target point to be projected
:param reference:
:class:`~openquake.hazardlib.geo.point.Point` object
representing the coordinates of the reference point.
:returns:
x
y
z | [
"This",
"method",
"computes",
"the",
"x",
"y",
"and",
"z",
"coordinates",
"of",
"a",
"set",
"of",
"points",
"provided",
"a",
"reference",
"point"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/near_fault.py#L30-L57 | train | 213,923 |
def get_magnitude_scaling_term(self, C, rup):
    """
    Returns the magnitude scaling term in equations 1 and 2: linear in
    magnitude up to the corner magnitude ``m_c``, then continuing with
    slope ``dcr`` above it.
    """
    m_c = self.CONSTANTS["m_c"]
    if rup.mag > m_c:
        return C["ccr"] * m_c + C["dcr"] * (rup.mag - m_c)
    return C["ccr"] * rup.mag
"""
Returns the magnitude scaling term in equations 1 and 2
"""
if rup.mag <= self.CONSTANTS["m_c"]:
return C["ccr"] * rup.mag
else:
return (C["ccr"] * self.CONSTANTS["m_c"]) +\
(C["dcr"] * (rup.mag - self.CONSTANTS["m_c"])) | [
"def",
"get_magnitude_scaling_term",
"(",
"self",
",",
"C",
",",
"rup",
")",
":",
"if",
"rup",
".",
"mag",
"<=",
"self",
".",
"CONSTANTS",
"[",
"\"m_c\"",
"]",
":",
"return",
"C",
"[",
"\"ccr\"",
"]",
"*",
"rup",
".",
"mag",
"else",
":",
"return",
... | Returns the magnitude scaling term in equations 1 and 2 | [
"Returns",
"the",
"magnitude",
"scaling",
"term",
"in",
"equations",
"1",
"and",
"2"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2016.py#L104-L112 | train | 213,924 |
def add_site_amplification(self, C, C_SITE, sites, sa_rock, idx, rup):
    """
    Applies the site amplification scaling defined in equations from 10
    to 15.

    :param C: period-dependent coefficient dictionary
    :param C_SITE: period-dependent site coefficient dictionary
    :param sites: site collection (only ``vs30``'s shape is read here)
    :param sa_rock: spectral acceleration on reference rock (natural log,
        judging by the log-space arithmetic below — confirm with caller)
    :param idx: dict of boolean arrays selecting the sites of each site
        class 1-4 (as built by ``_get_site_classification``)
    :param rup: rupture object, passed through to the helper terms
    :returns: array of soil spectral acceleration, one value per site
    """
    n_sites = sites.vs30.shape
    # Convert from reference rock to hard rock
    hard_rock_sa = sa_rock - C["lnSC1AM"]
    # Gets the elastic site amplification ratio
    ln_a_n_max = self._get_ln_a_n_max(C, n_sites, idx, rup)
    # Retrieves coefficients needed to determine smr
    sreff, sreffc, f_sr = self._get_smr_coeffs(C, C_SITE, idx, n_sites,
                                               hard_rock_sa)
    snc = np.zeros(n_sites)
    alpha = self.CONSTANTS["alpha"]
    beta = self.CONSTANTS["beta"]
    smr = np.zeros(n_sites)
    # Start from the elastic amplification; nonlinear corrections are
    # subtracted per site class in the loop below
    sa_soil = hard_rock_sa + ln_a_n_max
    # Get lnSF
    ln_sf = self._get_ln_sf(C, C_SITE, idx, n_sites, rup)
    # Split sites by whether the elastic amplification is below 1.25
    lnamax_idx = np.exp(ln_a_n_max) < 1.25
    not_lnamax_idx = np.logical_not(lnamax_idx)
    for i in range(1, 5):
        idx_i = idx[i]
        if not np.any(idx_i):
            # No sites of the given site class
            continue
        idx2 = np.logical_and(lnamax_idx, idx_i)
        if np.any(idx2):
            # Use the approximate method for SRC and SNC
            # (small-amplification branch)
            c_a = C_SITE["LnAmax1D{:g}".format(i)] /\
                (np.log(beta) - np.log(sreffc[idx2] ** alpha + beta))
            c_b = -c_a * np.log(sreffc[idx2] ** alpha + beta)
            snc[idx2] = np.exp((c_a * (alpha - 1.) *
                                np.log(beta) * np.log(10.0 * beta) -
                                np.log(10.0) * (c_b + ln_sf[idx2])) /
                               (c_a * (alpha * np.log(10.0 * beta) -
                                np.log(beta))))
        # For the cases when ln_a_n_max >= 1.25 (direct inversion)
        idx2 = np.logical_and(not_lnamax_idx, idx_i)
        if np.any(idx2):
            snc[idx2] = (np.exp((ln_a_n_max[idx2] *
                         np.log(sreffc[idx2] ** alpha + beta) -
                         ln_sf[idx2] * np.log(beta)) /
                         C_SITE["LnAmax1D{:g}".format(i)]) - beta) **\
                (1.0 / alpha)
        # Modified rock-site strength for this site class
        smr[idx_i] = sreff[idx_i] * (snc[idx_i] / sreffc[idx_i]) *\
            f_sr[idx_i]
        # For the cases when site class = i and SMR != 0: apply the
        # nonlinear amplification correction
        idx2 = np.logical_and(idx_i, np.fabs(smr) > 0.0)
        if np.any(idx2):
            sa_soil[idx2] += (-C_SITE["LnAmax1D{:g}".format(i)] *
                              (np.log(smr[idx2] ** alpha + beta) -
                               np.log(beta)) /
                              (np.log(sreffc[idx2] ** alpha + beta) -
                               np.log(beta)))
    return sa_soil
"""
Applies the site amplification scaling defined in equations from 10
to 15
"""
n_sites = sites.vs30.shape
# Convert from reference rock to hard rock
hard_rock_sa = sa_rock - C["lnSC1AM"]
# Gets the elastic site amplification ratio
ln_a_n_max = self._get_ln_a_n_max(C, n_sites, idx, rup)
# Retrieves coefficients needed to determine smr
sreff, sreffc, f_sr = self._get_smr_coeffs(C, C_SITE, idx, n_sites,
hard_rock_sa)
snc = np.zeros(n_sites)
alpha = self.CONSTANTS["alpha"]
beta = self.CONSTANTS["beta"]
smr = np.zeros(n_sites)
sa_soil = hard_rock_sa + ln_a_n_max
# Get lnSF
ln_sf = self._get_ln_sf(C, C_SITE, idx, n_sites, rup)
lnamax_idx = np.exp(ln_a_n_max) < 1.25
not_lnamax_idx = np.logical_not(lnamax_idx)
for i in range(1, 5):
idx_i = idx[i]
if not np.any(idx_i):
# No sites of the given site class
continue
idx2 = np.logical_and(lnamax_idx, idx_i)
if np.any(idx2):
# Use the approximate method for SRC and SNC
c_a = C_SITE["LnAmax1D{:g}".format(i)] /\
(np.log(beta) - np.log(sreffc[idx2] ** alpha + beta))
c_b = -c_a * np.log(sreffc[idx2] ** alpha + beta)
snc[idx2] = np.exp((c_a * (alpha - 1.) *
np.log(beta) * np.log(10.0 * beta) -
np.log(10.0) * (c_b + ln_sf[idx2])) /
(c_a * (alpha * np.log(10.0 * beta) -
np.log(beta))))
# For the cases when ln_a_n_max >= 1.25
idx2 = np.logical_and(not_lnamax_idx, idx_i)
if np.any(idx2):
snc[idx2] = (np.exp((ln_a_n_max[idx2] *
np.log(sreffc[idx2] ** alpha + beta) -
ln_sf[idx2] * np.log(beta)) /
C_SITE["LnAmax1D{:g}".format(i)]) - beta) **\
(1.0 / alpha)
smr[idx_i] = sreff[idx_i] * (snc[idx_i] / sreffc[idx_i]) *\
f_sr[idx_i]
# For the cases when site class = i and SMR != 0
idx2 = np.logical_and(idx_i, np.fabs(smr) > 0.0)
if np.any(idx2):
sa_soil[idx2] += (-C_SITE["LnAmax1D{:g}".format(i)] *
(np.log(smr[idx2] ** alpha + beta) -
np.log(beta)) /
(np.log(sreffc[idx2] ** alpha + beta) -
np.log(beta)))
return sa_soil | [
"def",
"add_site_amplification",
"(",
"self",
",",
"C",
",",
"C_SITE",
",",
"sites",
",",
"sa_rock",
",",
"idx",
",",
"rup",
")",
":",
"n_sites",
"=",
"sites",
".",
"vs30",
".",
"shape",
"# Convert from reference rock to hard rock",
"hard_rock_sa",
"=",
"sa_ro... | Applies the site amplification scaling defined in equations from 10
to 15 | [
"Applies",
"the",
"site",
"amplification",
"scaling",
"defined",
"in",
"equations",
"from",
"10",
"to",
"15"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2016.py#L154-L213 | train | 213,925 |
gem/oq-engine | openquake/hazardlib/gsim/zhao_2016.py | ZhaoEtAl2016Asc._get_smr_coeffs | def _get_smr_coeffs(self, C, C_SITE, idx, n_sites, sa_rock):
"""
Returns the SReff and SReffC terms needed for equation 14 and 15
"""
# Get SR
sreff = np.zeros(n_sites)
sreffc = np.zeros(n_sites)
f_sr = np.zeros(n_sites)
for i in range(1, 5):
sreff[idx[i]] += (np.exp(sa_rock[idx[i]]) * self.IMF[i])
sreffc[idx[i]] += (C_SITE["Src1D{:g}".format(i)] * self.IMF[i])
# Get f_SR
f_sr[idx[i]] += C_SITE["fsr{:g}".format(i)]
return sreff, sreffc, f_sr | python | def _get_smr_coeffs(self, C, C_SITE, idx, n_sites, sa_rock):
"""
Returns the SReff and SReffC terms needed for equation 14 and 15
"""
# Get SR
sreff = np.zeros(n_sites)
sreffc = np.zeros(n_sites)
f_sr = np.zeros(n_sites)
for i in range(1, 5):
sreff[idx[i]] += (np.exp(sa_rock[idx[i]]) * self.IMF[i])
sreffc[idx[i]] += (C_SITE["Src1D{:g}".format(i)] * self.IMF[i])
# Get f_SR
f_sr[idx[i]] += C_SITE["fsr{:g}".format(i)]
return sreff, sreffc, f_sr | [
"def",
"_get_smr_coeffs",
"(",
"self",
",",
"C",
",",
"C_SITE",
",",
"idx",
",",
"n_sites",
",",
"sa_rock",
")",
":",
"# Get SR",
"sreff",
"=",
"np",
".",
"zeros",
"(",
"n_sites",
")",
"sreffc",
"=",
"np",
".",
"zeros",
"(",
"n_sites",
")",
"f_sr",
... | Returns the SReff and SReffC terms needed for equation 14 and 15 | [
"Returns",
"the",
"SReff",
"and",
"SReffC",
"terms",
"needed",
"for",
"equation",
"14",
"and",
"15"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2016.py#L215-L228 | train | 213,926 |
gem/oq-engine | openquake/hazardlib/gsim/zhao_2016.py | ZhaoEtAl2016Asc._get_ln_a_n_max | def _get_ln_a_n_max(self, C, n_sites, idx, rup):
"""
Defines the rock site amplification defined in equations 10a and 10b
"""
ln_a_n_max = C["lnSC1AM"] * np.ones(n_sites)
for i in [2, 3, 4]:
if np.any(idx[i]):
ln_a_n_max[idx[i]] += C["S{:g}".format(i)]
return ln_a_n_max | python | def _get_ln_a_n_max(self, C, n_sites, idx, rup):
"""
Defines the rock site amplification defined in equations 10a and 10b
"""
ln_a_n_max = C["lnSC1AM"] * np.ones(n_sites)
for i in [2, 3, 4]:
if np.any(idx[i]):
ln_a_n_max[idx[i]] += C["S{:g}".format(i)]
return ln_a_n_max | [
"def",
"_get_ln_a_n_max",
"(",
"self",
",",
"C",
",",
"n_sites",
",",
"idx",
",",
"rup",
")",
":",
"ln_a_n_max",
"=",
"C",
"[",
"\"lnSC1AM\"",
"]",
"*",
"np",
".",
"ones",
"(",
"n_sites",
")",
"for",
"i",
"in",
"[",
"2",
",",
"3",
",",
"4",
"]"... | Defines the rock site amplification defined in equations 10a and 10b | [
"Defines",
"the",
"rock",
"site",
"amplification",
"defined",
"in",
"equations",
"10a",
"and",
"10b"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2016.py#L230-L238 | train | 213,927 |
gem/oq-engine | openquake/hazardlib/gsim/zhao_2016.py | ZhaoEtAl2016Asc._get_ln_sf | def _get_ln_sf(self, C, C_SITE, idx, n_sites, rup):
"""
Returns the log SF term required for equation 12
"""
ln_sf = np.zeros(n_sites)
for i in range(1, 5):
ln_sf_i = (C["lnSC1AM"] - C_SITE["LnAmax1D{:g}".format(i)])
if i > 1:
ln_sf_i += C["S{:g}".format(i)]
ln_sf[idx[i]] += ln_sf_i
return ln_sf | python | def _get_ln_sf(self, C, C_SITE, idx, n_sites, rup):
"""
Returns the log SF term required for equation 12
"""
ln_sf = np.zeros(n_sites)
for i in range(1, 5):
ln_sf_i = (C["lnSC1AM"] - C_SITE["LnAmax1D{:g}".format(i)])
if i > 1:
ln_sf_i += C["S{:g}".format(i)]
ln_sf[idx[i]] += ln_sf_i
return ln_sf | [
"def",
"_get_ln_sf",
"(",
"self",
",",
"C",
",",
"C_SITE",
",",
"idx",
",",
"n_sites",
",",
"rup",
")",
":",
"ln_sf",
"=",
"np",
".",
"zeros",
"(",
"n_sites",
")",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"5",
")",
":",
"ln_sf_i",
"=",
"(",
... | Returns the log SF term required for equation 12 | [
"Returns",
"the",
"log",
"SF",
"term",
"required",
"for",
"equation",
"12"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2016.py#L240-L250 | train | 213,928 |
gem/oq-engine | openquake/hazardlib/gsim/zhao_2016.py | ZhaoEtAl2016Asc._get_site_classification | def _get_site_classification(self, vs30):
"""
Define the site class categories based on Vs30. Returns a
vector of site class values and a dictionary containing logical
vectors for each of the site classes
"""
site_class = np.ones(vs30.shape, dtype=int)
idx = {}
idx[1] = vs30 > 600.
idx[2] = np.logical_and(vs30 > 300., vs30 <= 600.)
idx[3] = np.logical_and(vs30 > 200., vs30 <= 300.)
idx[4] = vs30 <= 200.
for i in [2, 3, 4]:
site_class[idx[i]] = i
return site_class, idx | python | def _get_site_classification(self, vs30):
"""
Define the site class categories based on Vs30. Returns a
vector of site class values and a dictionary containing logical
vectors for each of the site classes
"""
site_class = np.ones(vs30.shape, dtype=int)
idx = {}
idx[1] = vs30 > 600.
idx[2] = np.logical_and(vs30 > 300., vs30 <= 600.)
idx[3] = np.logical_and(vs30 > 200., vs30 <= 300.)
idx[4] = vs30 <= 200.
for i in [2, 3, 4]:
site_class[idx[i]] = i
return site_class, idx | [
"def",
"_get_site_classification",
"(",
"self",
",",
"vs30",
")",
":",
"site_class",
"=",
"np",
".",
"ones",
"(",
"vs30",
".",
"shape",
",",
"dtype",
"=",
"int",
")",
"idx",
"=",
"{",
"}",
"idx",
"[",
"1",
"]",
"=",
"vs30",
">",
"600.",
"idx",
"[... | Define the site class categories based on Vs30. Returns a
vector of site class values and a dictionary containing logical
vectors for each of the site classes | [
"Define",
"the",
"site",
"class",
"categories",
"based",
"on",
"Vs30",
".",
"Returns",
"a",
"vector",
"of",
"site",
"class",
"values",
"and",
"a",
"dictionary",
"containing",
"logical",
"vectors",
"for",
"each",
"of",
"the",
"site",
"classes"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2016.py#L252-L266 | train | 213,929 |
def get_sof_term(self, C, rup):
    """
    In the case of the upper mantle events, separate coefficients
    are considered for normal and reverse events; strike-slip gets
    no adjustment.
    """
    rake = rup.rake
    if -135.0 <= rake <= -45.0:
        # Normal faulting
        return C["FN_UM"]
    if 45.0 < rake < 135.0:
        # Reverse faulting
        return C["FRV_UM"]
    # Strike-slip (or unclassified rake): no adjustment
    return 0.0
"""
In the case of the upper mantle events separate coefficients
are considered for normal, reverse and strike-slip
"""
if rup.rake <= -45.0 and rup.rake >= -135.0:
# Normal faulting
return C["FN_UM"]
elif rup.rake > 45.0 and rup.rake < 135.0:
# Reverse faulting
return C["FRV_UM"]
else:
# No adjustment for strike-slip faulting
return 0.0 | [
"def",
"get_sof_term",
"(",
"self",
",",
"C",
",",
"rup",
")",
":",
"if",
"rup",
".",
"rake",
"<=",
"-",
"45.0",
"and",
"rup",
".",
"rake",
">=",
"-",
"135.0",
":",
"# Normal faulting",
"return",
"C",
"[",
"\"FN_UM\"",
"]",
"elif",
"rup",
".",
"rak... | In the case of the upper mantle events separate coefficients
are considered for normal, reverse and strike-slip | [
"In",
"the",
"case",
"of",
"the",
"upper",
"mantle",
"events",
"separate",
"coefficients",
"are",
"considered",
"for",
"normal",
"reverse",
"and",
"strike",
"-",
"slip"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2016.py#L421-L434 | train | 213,930 |
gem/oq-engine | openquake/hazardlib/gsim/zhao_2016.py | ZhaoEtAl2016UpperMantle.get_distance_term | def get_distance_term(self, C, dists, rup):
"""
Returns the distance attenuation term
"""
x_ij = dists.rrup
gn_exp = np.exp(C["c1"] + 6.5 * C["c2"])
g_n = C["gcrN"] * np.log(self.CONSTANTS["xcro"] + 30. + gn_exp) *\
np.ones_like(x_ij)
idx = x_ij <= 30.0
if np.any(idx):
g_n[idx] = C["gcrN"] * np.log(self.CONSTANTS["xcro"] +
x_ij[idx] + gn_exp)
c_m = min(rup.mag, self.CONSTANTS["m_c"])
r_ij = self.CONSTANTS["xcro"] + x_ij + np.exp(C["c1"] + C["c2"] * c_m)
return C["gUM"] * np.log(r_ij) +\
C["gcrL"] * np.log(x_ij + 200.0) +\
g_n + C["eum"] * x_ij + C["ecrV"] * dists.rvolc + C["gamma_S"] | python | def get_distance_term(self, C, dists, rup):
"""
Returns the distance attenuation term
"""
x_ij = dists.rrup
gn_exp = np.exp(C["c1"] + 6.5 * C["c2"])
g_n = C["gcrN"] * np.log(self.CONSTANTS["xcro"] + 30. + gn_exp) *\
np.ones_like(x_ij)
idx = x_ij <= 30.0
if np.any(idx):
g_n[idx] = C["gcrN"] * np.log(self.CONSTANTS["xcro"] +
x_ij[idx] + gn_exp)
c_m = min(rup.mag, self.CONSTANTS["m_c"])
r_ij = self.CONSTANTS["xcro"] + x_ij + np.exp(C["c1"] + C["c2"] * c_m)
return C["gUM"] * np.log(r_ij) +\
C["gcrL"] * np.log(x_ij + 200.0) +\
g_n + C["eum"] * x_ij + C["ecrV"] * dists.rvolc + C["gamma_S"] | [
"def",
"get_distance_term",
"(",
"self",
",",
"C",
",",
"dists",
",",
"rup",
")",
":",
"x_ij",
"=",
"dists",
".",
"rrup",
"gn_exp",
"=",
"np",
".",
"exp",
"(",
"C",
"[",
"\"c1\"",
"]",
"+",
"6.5",
"*",
"C",
"[",
"\"c2\"",
"]",
")",
"g_n",
"=",
... | Returns the distance attenuation term | [
"Returns",
"the",
"distance",
"attenuation",
"term"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2016.py#L442-L458 | train | 213,931 |
gem/oq-engine | openquake/hazardlib/gsim/zhao_2016.py | ZhaoEtAl2016SInter.get_magnitude_scaling_term | def get_magnitude_scaling_term(self, C, rup):
"""
Returns magnitude scaling term, which is dependent on top of rupture
depth - as described in equations 1 and 2
"""
if rup.ztor > 25.0:
# Deep interface events
c_int = C["cint"]
else:
c_int = C["cintS"]
if rup.mag <= self.CONSTANTS["m_c"]:
return c_int * rup.mag
else:
return (c_int * self.CONSTANTS["m_c"]) +\
(C["dint"] * (rup.mag - self.CONSTANTS["m_c"])) | python | def get_magnitude_scaling_term(self, C, rup):
"""
Returns magnitude scaling term, which is dependent on top of rupture
depth - as described in equations 1 and 2
"""
if rup.ztor > 25.0:
# Deep interface events
c_int = C["cint"]
else:
c_int = C["cintS"]
if rup.mag <= self.CONSTANTS["m_c"]:
return c_int * rup.mag
else:
return (c_int * self.CONSTANTS["m_c"]) +\
(C["dint"] * (rup.mag - self.CONSTANTS["m_c"])) | [
"def",
"get_magnitude_scaling_term",
"(",
"self",
",",
"C",
",",
"rup",
")",
":",
"if",
"rup",
".",
"ztor",
">",
"25.0",
":",
"# Deep interface events",
"c_int",
"=",
"C",
"[",
"\"cint\"",
"]",
"else",
":",
"c_int",
"=",
"C",
"[",
"\"cintS\"",
"]",
"if... | Returns magnitude scaling term, which is dependent on top of rupture
depth - as described in equations 1 and 2 | [
"Returns",
"magnitude",
"scaling",
"term",
"which",
"is",
"dependent",
"on",
"top",
"of",
"rupture",
"depth",
"-",
"as",
"described",
"in",
"equations",
"1",
"and",
"2"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2016.py#L549-L564 | train | 213,932 |
gem/oq-engine | openquake/hazardlib/gsim/zhao_2016.py | ZhaoEtAl2016SSlab.get_magnitude_scaling_term | def get_magnitude_scaling_term(self, C, rup):
"""
Returns the magnitude scaling defined in equation 1
"""
m_c = self.CONSTANTS["m_c"]
if rup.mag <= m_c:
return C["cSL"] * rup.mag +\
C["cSL2"] * ((rup.mag - self.CONSTANTS["m_sc"]) ** 2.)
else:
return C["cSL"] * m_c +\
C["cSL2"] * ((m_c - self.CONSTANTS["m_sc"]) ** 2.) +\
C["dSL"] * (rup.mag - m_c) | python | def get_magnitude_scaling_term(self, C, rup):
"""
Returns the magnitude scaling defined in equation 1
"""
m_c = self.CONSTANTS["m_c"]
if rup.mag <= m_c:
return C["cSL"] * rup.mag +\
C["cSL2"] * ((rup.mag - self.CONSTANTS["m_sc"]) ** 2.)
else:
return C["cSL"] * m_c +\
C["cSL2"] * ((m_c - self.CONSTANTS["m_sc"]) ** 2.) +\
C["dSL"] * (rup.mag - m_c) | [
"def",
"get_magnitude_scaling_term",
"(",
"self",
",",
"C",
",",
"rup",
")",
":",
"m_c",
"=",
"self",
".",
"CONSTANTS",
"[",
"\"m_c\"",
"]",
"if",
"rup",
".",
"mag",
"<=",
"m_c",
":",
"return",
"C",
"[",
"\"cSL\"",
"]",
"*",
"rup",
".",
"mag",
"+",... | Returns the magnitude scaling defined in equation 1 | [
"Returns",
"the",
"magnitude",
"scaling",
"defined",
"in",
"equation",
"1"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2016.py#L774-L785 | train | 213,933 |
gem/oq-engine | openquake/hazardlib/gsim/zhao_2016.py | ZhaoEtAl2016SSlab.get_distance_term | def get_distance_term(self, C, dists, rup):
"""
Returns the distance scaling term in equation 2a
Note that the paper describes a lower and upper cap on Rvolc that
is not found in the Fortran code, and is thus neglected here.
"""
x_ij = dists.rrup
# Get anelastic scaling term in quation 5
if rup.ztor >= 50.:
qslh = C["eSLH"] * (0.02 * rup.ztor - 1.0)
else:
qslh = 0.0
# r_volc = np.copy(dists.rvolc)
# r_volc[np.logical_and(r_volc > 0.0, r_volc <= 12.0)] = 12.0
# r_volc[r_volc >= 80.0] = 80.0
# Get r_ij - distance for geometric spreading (equations 3 and 4)
c_m = min(rup.mag, self.CONSTANTS["m_c"])
r_ij = x_ij + np.exp(C["alpha"] + C["beta"] * c_m)
return C["gSL"] * np.log(r_ij) + \
C["gLL"] * np.log(x_ij + 200.) +\
C["eSL"] * x_ij + qslh * x_ij +\
C["eSLV"] * dists.rvolc + C["gamma"] | python | def get_distance_term(self, C, dists, rup):
"""
Returns the distance scaling term in equation 2a
Note that the paper describes a lower and upper cap on Rvolc that
is not found in the Fortran code, and is thus neglected here.
"""
x_ij = dists.rrup
# Get anelastic scaling term in quation 5
if rup.ztor >= 50.:
qslh = C["eSLH"] * (0.02 * rup.ztor - 1.0)
else:
qslh = 0.0
# r_volc = np.copy(dists.rvolc)
# r_volc[np.logical_and(r_volc > 0.0, r_volc <= 12.0)] = 12.0
# r_volc[r_volc >= 80.0] = 80.0
# Get r_ij - distance for geometric spreading (equations 3 and 4)
c_m = min(rup.mag, self.CONSTANTS["m_c"])
r_ij = x_ij + np.exp(C["alpha"] + C["beta"] * c_m)
return C["gSL"] * np.log(r_ij) + \
C["gLL"] * np.log(x_ij + 200.) +\
C["eSL"] * x_ij + qslh * x_ij +\
C["eSLV"] * dists.rvolc + C["gamma"] | [
"def",
"get_distance_term",
"(",
"self",
",",
"C",
",",
"dists",
",",
"rup",
")",
":",
"x_ij",
"=",
"dists",
".",
"rrup",
"# Get anelastic scaling term in quation 5",
"if",
"rup",
".",
"ztor",
">=",
"50.",
":",
"qslh",
"=",
"C",
"[",
"\"eSLH\"",
"]",
"*"... | Returns the distance scaling term in equation 2a
Note that the paper describes a lower and upper cap on Rvolc that
is not found in the Fortran code, and is thus neglected here. | [
"Returns",
"the",
"distance",
"scaling",
"term",
"in",
"equation",
"2a"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2016.py#L806-L828 | train | 213,934 |
gem/oq-engine | openquake/hazardlib/gsim/edwards_fah_2013a.py | EdwardsFah2013Alpine10Bars._get_stddevs | def _get_stddevs(self, C, stddev_types, num_sites, mag, c1_rrup,
log_phi_ss, mean_phi_ss):
"""
Return standard deviations
"""
phi_ss = _compute_phi_ss(C, mag, c1_rrup, log_phi_ss, mean_phi_ss)
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(np.sqrt(
C['tau'] * C['tau'] +
phi_ss * phi_ss) +
np.zeros(num_sites))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(phi_ss + np.zeros(num_sites))
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(C['tau'] + np.zeros(num_sites))
return stddevs | python | def _get_stddevs(self, C, stddev_types, num_sites, mag, c1_rrup,
log_phi_ss, mean_phi_ss):
"""
Return standard deviations
"""
phi_ss = _compute_phi_ss(C, mag, c1_rrup, log_phi_ss, mean_phi_ss)
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(np.sqrt(
C['tau'] * C['tau'] +
phi_ss * phi_ss) +
np.zeros(num_sites))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(phi_ss + np.zeros(num_sites))
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(C['tau'] + np.zeros(num_sites))
return stddevs | [
"def",
"_get_stddevs",
"(",
"self",
",",
"C",
",",
"stddev_types",
",",
"num_sites",
",",
"mag",
",",
"c1_rrup",
",",
"log_phi_ss",
",",
"mean_phi_ss",
")",
":",
"phi_ss",
"=",
"_compute_phi_ss",
"(",
"C",
",",
"mag",
",",
"c1_rrup",
",",
"log_phi_ss",
"... | Return standard deviations | [
"Return",
"standard",
"deviations"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/edwards_fah_2013a.py#L129-L151 | train | 213,935 |
gem/oq-engine | openquake/commonlib/util.py | max_rel_diff | def max_rel_diff(curve_ref, curve, min_value=0.01):
"""
Compute the maximum relative difference between two curves. Only values
greather or equal than the min_value are considered.
>>> curve_ref = [0.01, 0.02, 0.03, 0.05, 1.0]
>>> curve = [0.011, 0.021, 0.031, 0.051, 1.0]
>>> round(max_rel_diff(curve_ref, curve), 2)
0.1
"""
assert len(curve_ref) == len(curve), (len(curve_ref), len(curve))
assert len(curve), 'The curves are empty!'
max_diff = 0
for c1, c2 in zip(curve_ref, curve):
if c1 >= min_value:
max_diff = max(max_diff, abs(c1 - c2) / c1)
return max_diff | python | def max_rel_diff(curve_ref, curve, min_value=0.01):
"""
Compute the maximum relative difference between two curves. Only values
greather or equal than the min_value are considered.
>>> curve_ref = [0.01, 0.02, 0.03, 0.05, 1.0]
>>> curve = [0.011, 0.021, 0.031, 0.051, 1.0]
>>> round(max_rel_diff(curve_ref, curve), 2)
0.1
"""
assert len(curve_ref) == len(curve), (len(curve_ref), len(curve))
assert len(curve), 'The curves are empty!'
max_diff = 0
for c1, c2 in zip(curve_ref, curve):
if c1 >= min_value:
max_diff = max(max_diff, abs(c1 - c2) / c1)
return max_diff | [
"def",
"max_rel_diff",
"(",
"curve_ref",
",",
"curve",
",",
"min_value",
"=",
"0.01",
")",
":",
"assert",
"len",
"(",
"curve_ref",
")",
"==",
"len",
"(",
"curve",
")",
",",
"(",
"len",
"(",
"curve_ref",
")",
",",
"len",
"(",
"curve",
")",
")",
"ass... | Compute the maximum relative difference between two curves. Only values
greather or equal than the min_value are considered.
>>> curve_ref = [0.01, 0.02, 0.03, 0.05, 1.0]
>>> curve = [0.011, 0.021, 0.031, 0.051, 1.0]
>>> round(max_rel_diff(curve_ref, curve), 2)
0.1 | [
"Compute",
"the",
"maximum",
"relative",
"difference",
"between",
"two",
"curves",
".",
"Only",
"values",
"greather",
"or",
"equal",
"than",
"the",
"min_value",
"are",
"considered",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/util.py#L43-L59 | train | 213,936 |
gem/oq-engine | openquake/commonlib/util.py | rmsep | def rmsep(array_ref, array, min_value=0):
"""
Root Mean Square Error Percentage for two arrays.
:param array_ref: reference array
:param array: another array
:param min_value: compare only the elements larger than min_value
:returns: the relative distance between the arrays
>>> curve_ref = numpy.array([[0.01, 0.02, 0.03, 0.05],
... [0.01, 0.02, 0.04, 0.06]])
>>> curve = numpy.array([[0.011, 0.021, 0.031, 0.051],
... [0.012, 0.022, 0.032, 0.051]])
>>> str(round(rmsep(curve_ref, curve, .01), 5))
'0.11292'
"""
bigvalues = array_ref > min_value
reldiffsquare = (1. - array[bigvalues] / array_ref[bigvalues]) ** 2
return numpy.sqrt(reldiffsquare.mean()) | python | def rmsep(array_ref, array, min_value=0):
"""
Root Mean Square Error Percentage for two arrays.
:param array_ref: reference array
:param array: another array
:param min_value: compare only the elements larger than min_value
:returns: the relative distance between the arrays
>>> curve_ref = numpy.array([[0.01, 0.02, 0.03, 0.05],
... [0.01, 0.02, 0.04, 0.06]])
>>> curve = numpy.array([[0.011, 0.021, 0.031, 0.051],
... [0.012, 0.022, 0.032, 0.051]])
>>> str(round(rmsep(curve_ref, curve, .01), 5))
'0.11292'
"""
bigvalues = array_ref > min_value
reldiffsquare = (1. - array[bigvalues] / array_ref[bigvalues]) ** 2
return numpy.sqrt(reldiffsquare.mean()) | [
"def",
"rmsep",
"(",
"array_ref",
",",
"array",
",",
"min_value",
"=",
"0",
")",
":",
"bigvalues",
"=",
"array_ref",
">",
"min_value",
"reldiffsquare",
"=",
"(",
"1.",
"-",
"array",
"[",
"bigvalues",
"]",
"/",
"array_ref",
"[",
"bigvalues",
"]",
")",
"... | Root Mean Square Error Percentage for two arrays.
:param array_ref: reference array
:param array: another array
:param min_value: compare only the elements larger than min_value
:returns: the relative distance between the arrays
>>> curve_ref = numpy.array([[0.01, 0.02, 0.03, 0.05],
... [0.01, 0.02, 0.04, 0.06]])
>>> curve = numpy.array([[0.011, 0.021, 0.031, 0.051],
... [0.012, 0.022, 0.032, 0.051]])
>>> str(round(rmsep(curve_ref, curve, .01), 5))
'0.11292' | [
"Root",
"Mean",
"Square",
"Error",
"Percentage",
"for",
"two",
"arrays",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/util.py#L82-L100 | train | 213,937 |
gem/oq-engine | openquake/commonlib/util.py | log | def log(array, cutoff):
"""
Compute the logarithm of an array with a cutoff on the small values
"""
arr = numpy.copy(array)
arr[arr < cutoff] = cutoff
return numpy.log(arr) | python | def log(array, cutoff):
"""
Compute the logarithm of an array with a cutoff on the small values
"""
arr = numpy.copy(array)
arr[arr < cutoff] = cutoff
return numpy.log(arr) | [
"def",
"log",
"(",
"array",
",",
"cutoff",
")",
":",
"arr",
"=",
"numpy",
".",
"copy",
"(",
"array",
")",
"arr",
"[",
"arr",
"<",
"cutoff",
"]",
"=",
"cutoff",
"return",
"numpy",
".",
"log",
"(",
"arr",
")"
] | Compute the logarithm of an array with a cutoff on the small values | [
"Compute",
"the",
"logarithm",
"of",
"an",
"array",
"with",
"a",
"cutoff",
"on",
"the",
"small",
"values"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/util.py#L103-L109 | train | 213,938 |
gem/oq-engine | openquake/commonlib/util.py | compose_arrays | def compose_arrays(a1, a2, firstfield='etag'):
"""
Compose composite arrays by generating an extended datatype containing
all the fields. The two arrays must have the same length.
"""
assert len(a1) == len(a2), (len(a1), len(a2))
if a1.dtype.names is None and len(a1.shape) == 1:
# the first array is not composite, but it is one-dimensional
a1 = numpy.array(a1, numpy.dtype([(firstfield, a1.dtype)]))
fields1 = [(f, a1.dtype.fields[f][0]) for f in a1.dtype.names]
if a2.dtype.names is None: # the second array is not composite
assert len(a2.shape) == 2, a2.shape
width = a2.shape[1]
fields2 = [('value%d' % i, a2.dtype) for i in range(width)]
composite = numpy.zeros(a1.shape, numpy.dtype(fields1 + fields2))
for f1 in dict(fields1):
composite[f1] = a1[f1]
for i in range(width):
composite['value%d' % i] = a2[:, i]
return composite
fields2 = [(f, a2.dtype.fields[f][0]) for f in a2.dtype.names]
composite = numpy.zeros(a1.shape, numpy.dtype(fields1 + fields2))
for f1 in dict(fields1):
composite[f1] = a1[f1]
for f2 in dict(fields2):
composite[f2] = a2[f2]
return composite | python | def compose_arrays(a1, a2, firstfield='etag'):
"""
Compose composite arrays by generating an extended datatype containing
all the fields. The two arrays must have the same length.
"""
assert len(a1) == len(a2), (len(a1), len(a2))
if a1.dtype.names is None and len(a1.shape) == 1:
# the first array is not composite, but it is one-dimensional
a1 = numpy.array(a1, numpy.dtype([(firstfield, a1.dtype)]))
fields1 = [(f, a1.dtype.fields[f][0]) for f in a1.dtype.names]
if a2.dtype.names is None: # the second array is not composite
assert len(a2.shape) == 2, a2.shape
width = a2.shape[1]
fields2 = [('value%d' % i, a2.dtype) for i in range(width)]
composite = numpy.zeros(a1.shape, numpy.dtype(fields1 + fields2))
for f1 in dict(fields1):
composite[f1] = a1[f1]
for i in range(width):
composite['value%d' % i] = a2[:, i]
return composite
fields2 = [(f, a2.dtype.fields[f][0]) for f in a2.dtype.names]
composite = numpy.zeros(a1.shape, numpy.dtype(fields1 + fields2))
for f1 in dict(fields1):
composite[f1] = a1[f1]
for f2 in dict(fields2):
composite[f2] = a2[f2]
return composite | [
"def",
"compose_arrays",
"(",
"a1",
",",
"a2",
",",
"firstfield",
"=",
"'etag'",
")",
":",
"assert",
"len",
"(",
"a1",
")",
"==",
"len",
"(",
"a2",
")",
",",
"(",
"len",
"(",
"a1",
")",
",",
"len",
"(",
"a2",
")",
")",
"if",
"a1",
".",
"dtype... | Compose composite arrays by generating an extended datatype containing
all the fields. The two arrays must have the same length. | [
"Compose",
"composite",
"arrays",
"by",
"generating",
"an",
"extended",
"datatype",
"containing",
"all",
"the",
"fields",
".",
"The",
"two",
"arrays",
"must",
"have",
"the",
"same",
"length",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/util.py#L128-L156 | train | 213,939 |
gem/oq-engine | openquake/server/db/upgrade_manager.py | check_script | def check_script(upgrade, conn, dry_run=True, debug=True):
"""
An utility to debug upgrade scripts written in Python
:param upgrade: upgrade procedure
:param conn: a DB API 2 connection
:param dry_run: if True, do not change the database
:param debug: if True, print the queries which are executed
"""
conn = WrappedConnection(conn, debug=debug)
try:
upgrade(conn)
except Exception:
conn.rollback()
raise
else:
if dry_run:
conn.rollback()
else:
conn.commit() | python | def check_script(upgrade, conn, dry_run=True, debug=True):
"""
An utility to debug upgrade scripts written in Python
:param upgrade: upgrade procedure
:param conn: a DB API 2 connection
:param dry_run: if True, do not change the database
:param debug: if True, print the queries which are executed
"""
conn = WrappedConnection(conn, debug=debug)
try:
upgrade(conn)
except Exception:
conn.rollback()
raise
else:
if dry_run:
conn.rollback()
else:
conn.commit() | [
"def",
"check_script",
"(",
"upgrade",
",",
"conn",
",",
"dry_run",
"=",
"True",
",",
"debug",
"=",
"True",
")",
":",
"conn",
"=",
"WrappedConnection",
"(",
"conn",
",",
"debug",
"=",
"debug",
")",
"try",
":",
"upgrade",
"(",
"conn",
")",
"except",
"... | An utility to debug upgrade scripts written in Python
:param upgrade: upgrade procedure
:param conn: a DB API 2 connection
:param dry_run: if True, do not change the database
:param debug: if True, print the queries which are executed | [
"An",
"utility",
"to",
"debug",
"upgrade",
"scripts",
"written",
"in",
"Python"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/upgrade_manager.py#L87-L106 | train | 213,940 |
gem/oq-engine | openquake/server/db/upgrade_manager.py | apply_sql_script | def apply_sql_script(conn, fname):
"""
Apply the given SQL script to the database
:param conn: a DB API 2 connection
:param fname: full path to the creation script
"""
sql = open(fname).read()
try:
# we cannot use conn.executescript which is non transactional
for query in sql.split('\n\n'):
conn.execute(query)
except Exception:
logging.error('Error executing %s' % fname)
raise | python | def apply_sql_script(conn, fname):
"""
Apply the given SQL script to the database
:param conn: a DB API 2 connection
:param fname: full path to the creation script
"""
sql = open(fname).read()
try:
# we cannot use conn.executescript which is non transactional
for query in sql.split('\n\n'):
conn.execute(query)
except Exception:
logging.error('Error executing %s' % fname)
raise | [
"def",
"apply_sql_script",
"(",
"conn",
",",
"fname",
")",
":",
"sql",
"=",
"open",
"(",
"fname",
")",
".",
"read",
"(",
")",
"try",
":",
"# we cannot use conn.executescript which is non transactional",
"for",
"query",
"in",
"sql",
".",
"split",
"(",
"'\\n\\n'... | Apply the given SQL script to the database
:param conn: a DB API 2 connection
:param fname: full path to the creation script | [
"Apply",
"the",
"given",
"SQL",
"script",
"to",
"the",
"database"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/upgrade_manager.py#L109-L123 | train | 213,941 |
gem/oq-engine | openquake/server/db/upgrade_manager.py | upgrade_db | def upgrade_db(conn, pkg_name='openquake.server.db.schema.upgrades',
skip_versions=()):
"""
Upgrade a database by running several scripts in a single transaction.
:param conn: a DB API 2 connection
:param str pkg_name: the name of the package with the upgrade scripts
:param list skip_versions: the versions to skip
:returns: the version numbers of the new scripts applied the database
"""
upgrader = UpgradeManager.instance(conn, pkg_name)
t0 = time.time()
# run the upgrade scripts
try:
versions_applied = upgrader.upgrade(conn, skip_versions)
except:
conn.rollback()
raise
else:
conn.commit()
dt = time.time() - t0
logging.info('Upgrade completed in %s seconds', dt)
return versions_applied | python | def upgrade_db(conn, pkg_name='openquake.server.db.schema.upgrades',
skip_versions=()):
"""
Upgrade a database by running several scripts in a single transaction.
:param conn: a DB API 2 connection
:param str pkg_name: the name of the package with the upgrade scripts
:param list skip_versions: the versions to skip
:returns: the version numbers of the new scripts applied the database
"""
upgrader = UpgradeManager.instance(conn, pkg_name)
t0 = time.time()
# run the upgrade scripts
try:
versions_applied = upgrader.upgrade(conn, skip_versions)
except:
conn.rollback()
raise
else:
conn.commit()
dt = time.time() - t0
logging.info('Upgrade completed in %s seconds', dt)
return versions_applied | [
"def",
"upgrade_db",
"(",
"conn",
",",
"pkg_name",
"=",
"'openquake.server.db.schema.upgrades'",
",",
"skip_versions",
"=",
"(",
")",
")",
":",
"upgrader",
"=",
"UpgradeManager",
".",
"instance",
"(",
"conn",
",",
"pkg_name",
")",
"t0",
"=",
"time",
".",
"ti... | Upgrade a database by running several scripts in a single transaction.
:param conn: a DB API 2 connection
:param str pkg_name: the name of the package with the upgrade scripts
:param list skip_versions: the versions to skip
:returns: the version numbers of the new scripts applied the database | [
"Upgrade",
"a",
"database",
"by",
"running",
"several",
"scripts",
"in",
"a",
"single",
"transaction",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/upgrade_manager.py#L331-L353 | train | 213,942 |
gem/oq-engine | openquake/server/db/upgrade_manager.py | WrappedConnection.run | def run(self, templ, *args):
"""
A simple utility to run SQL queries.
:param templ: a query or query template
:param args: the arguments (or the empty tuple)
:returns: the DB API 2 cursor used to run the query
"""
curs = self._conn.cursor()
query = curs.mogrify(templ, args)
if self.debug:
print(query)
curs.execute(query)
return curs | python | def run(self, templ, *args):
"""
A simple utility to run SQL queries.
:param templ: a query or query template
:param args: the arguments (or the empty tuple)
:returns: the DB API 2 cursor used to run the query
"""
curs = self._conn.cursor()
query = curs.mogrify(templ, args)
if self.debug:
print(query)
curs.execute(query)
return curs | [
"def",
"run",
"(",
"self",
",",
"templ",
",",
"*",
"args",
")",
":",
"curs",
"=",
"self",
".",
"_conn",
".",
"cursor",
"(",
")",
"query",
"=",
"curs",
".",
"mogrify",
"(",
"templ",
",",
"args",
")",
"if",
"self",
".",
"debug",
":",
"print",
"("... | A simple utility to run SQL queries.
:param templ: a query or query template
:param args: the arguments (or the empty tuple)
:returns: the DB API 2 cursor used to run the query | [
"A",
"simple",
"utility",
"to",
"run",
"SQL",
"queries",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/upgrade_manager.py#L70-L83 | train | 213,943 |
gem/oq-engine | openquake/server/db/upgrade_manager.py | UpgradeManager.install_versioning | def install_versioning(self, conn):
"""
Create the version table into an already populated database
and insert the base script.
:param conn: a DB API 2 connection
"""
logging.info('Creating the versioning table %s', self.version_table)
conn.executescript(CREATE_VERSIONING % self.version_table)
self._insert_script(self.read_scripts()[0], conn) | python | def install_versioning(self, conn):
"""
Create the version table into an already populated database
and insert the base script.
:param conn: a DB API 2 connection
"""
logging.info('Creating the versioning table %s', self.version_table)
conn.executescript(CREATE_VERSIONING % self.version_table)
self._insert_script(self.read_scripts()[0], conn) | [
"def",
"install_versioning",
"(",
"self",
",",
"conn",
")",
":",
"logging",
".",
"info",
"(",
"'Creating the versioning table %s'",
",",
"self",
".",
"version_table",
")",
"conn",
".",
"executescript",
"(",
"CREATE_VERSIONING",
"%",
"self",
".",
"version_table",
... | Create the version table into an already populated database
and insert the base script.
:param conn: a DB API 2 connection | [
"Create",
"the",
"version",
"table",
"into",
"an",
"already",
"populated",
"database",
"and",
"insert",
"the",
"base",
"script",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/upgrade_manager.py#L163-L172 | train | 213,944 |
gem/oq-engine | openquake/server/db/upgrade_manager.py | UpgradeManager.init | def init(self, conn):
"""
Create the version table and run the base script on an empty database.
:param conn: a DB API 2 connection
"""
base = self.read_scripts()[0]['fname']
logging.info('Creating the initial schema from %s', base)
apply_sql_script(conn, os.path.join(self.upgrade_dir, base))
self.install_versioning(conn) | python | def init(self, conn):
"""
Create the version table and run the base script on an empty database.
:param conn: a DB API 2 connection
"""
base = self.read_scripts()[0]['fname']
logging.info('Creating the initial schema from %s', base)
apply_sql_script(conn, os.path.join(self.upgrade_dir, base))
self.install_versioning(conn) | [
"def",
"init",
"(",
"self",
",",
"conn",
")",
":",
"base",
"=",
"self",
".",
"read_scripts",
"(",
")",
"[",
"0",
"]",
"[",
"'fname'",
"]",
"logging",
".",
"info",
"(",
"'Creating the initial schema from %s'",
",",
"base",
")",
"apply_sql_script",
"(",
"c... | Create the version table and run the base script on an empty database.
:param conn: a DB API 2 connection | [
"Create",
"the",
"version",
"table",
"and",
"run",
"the",
"base",
"script",
"on",
"an",
"empty",
"database",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/upgrade_manager.py#L174-L183 | train | 213,945 |
gem/oq-engine | openquake/server/db/upgrade_manager.py | UpgradeManager.upgrade | def upgrade(self, conn, skip_versions=()):
'''
Upgrade the database from the current version to the maximum
version in the upgrade scripts.
:param conn: a DBAPI 2 connection
:param skip_versions: the versions to skip
'''
db_versions = self.get_db_versions(conn)
self.starting_version = max(db_versions)
to_skip = sorted(db_versions | set(skip_versions))
scripts = self.read_scripts(None, None, to_skip)
if not scripts: # no new scripts to apply
return []
self.ending_version = max(s['version'] for s in scripts)
return self._upgrade(conn, scripts) | python | def upgrade(self, conn, skip_versions=()):
'''
Upgrade the database from the current version to the maximum
version in the upgrade scripts.
:param conn: a DBAPI 2 connection
:param skip_versions: the versions to skip
'''
db_versions = self.get_db_versions(conn)
self.starting_version = max(db_versions)
to_skip = sorted(db_versions | set(skip_versions))
scripts = self.read_scripts(None, None, to_skip)
if not scripts: # no new scripts to apply
return []
self.ending_version = max(s['version'] for s in scripts)
return self._upgrade(conn, scripts) | [
"def",
"upgrade",
"(",
"self",
",",
"conn",
",",
"skip_versions",
"=",
"(",
")",
")",
":",
"db_versions",
"=",
"self",
".",
"get_db_versions",
"(",
"conn",
")",
"self",
".",
"starting_version",
"=",
"max",
"(",
"db_versions",
")",
"to_skip",
"=",
"sorted... | Upgrade the database from the current version to the maximum
version in the upgrade scripts.
:param conn: a DBAPI 2 connection
:param skip_versions: the versions to skip | [
"Upgrade",
"the",
"database",
"from",
"the",
"current",
"version",
"to",
"the",
"maximum",
"version",
"in",
"the",
"upgrade",
"scripts",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/upgrade_manager.py#L185-L200 | train | 213,946 |
gem/oq-engine | openquake/server/db/upgrade_manager.py | UpgradeManager.get_db_versions | def get_db_versions(self, conn):
"""
Get all the versions stored in the database as a set.
:param conn: a DB API 2 connection
"""
curs = conn.cursor()
query = 'select version from {}'.format(self.version_table)
try:
curs.execute(query)
return set(version for version, in curs.fetchall())
except:
raise VersioningNotInstalled('Run oq engine --upgrade-db') | python | def get_db_versions(self, conn):
"""
Get all the versions stored in the database as a set.
:param conn: a DB API 2 connection
"""
curs = conn.cursor()
query = 'select version from {}'.format(self.version_table)
try:
curs.execute(query)
return set(version for version, in curs.fetchall())
except:
raise VersioningNotInstalled('Run oq engine --upgrade-db') | [
"def",
"get_db_versions",
"(",
"self",
",",
"conn",
")",
":",
"curs",
"=",
"conn",
".",
"cursor",
"(",
")",
"query",
"=",
"'select version from {}'",
".",
"format",
"(",
"self",
".",
"version_table",
")",
"try",
":",
"curs",
".",
"execute",
"(",
"query",... | Get all the versions stored in the database as a set.
:param conn: a DB API 2 connection | [
"Get",
"all",
"the",
"versions",
"stored",
"in",
"the",
"database",
"as",
"a",
"set",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/upgrade_manager.py#L231-L243 | train | 213,947 |
gem/oq-engine | openquake/server/db/upgrade_manager.py | UpgradeManager.read_scripts | def read_scripts(self, minversion=None, maxversion=None, skip_versions=()):
"""
Extract the upgrade scripts from a directory as a list of
dictionaries, ordered by version.
:param minversion: the minimum version to consider
:param maxversion: the maximum version to consider
:param skipversions: the versions to skip
"""
scripts = []
versions = {} # a script is unique per version
for scriptname in sorted(os.listdir(self.upgrade_dir)):
match = self.parse_script_name(scriptname)
if match:
version = match['version']
if version in skip_versions:
continue # do not collect scripts already applied
elif minversion and version <= minversion:
continue # do not collect versions too old
elif maxversion and version > maxversion:
continue # do not collect versions too new
try:
previousname = versions[version]
except KeyError: # no previous script with the same version
scripts.append(match)
versions[version] = scriptname
else:
raise DuplicatedVersion(
'Duplicated versions {%s,%s}' %
(scriptname, previousname))
return scripts | python | def read_scripts(self, minversion=None, maxversion=None, skip_versions=()):
"""
Extract the upgrade scripts from a directory as a list of
dictionaries, ordered by version.
:param minversion: the minimum version to consider
:param maxversion: the maximum version to consider
:param skipversions: the versions to skip
"""
scripts = []
versions = {} # a script is unique per version
for scriptname in sorted(os.listdir(self.upgrade_dir)):
match = self.parse_script_name(scriptname)
if match:
version = match['version']
if version in skip_versions:
continue # do not collect scripts already applied
elif minversion and version <= minversion:
continue # do not collect versions too old
elif maxversion and version > maxversion:
continue # do not collect versions too new
try:
previousname = versions[version]
except KeyError: # no previous script with the same version
scripts.append(match)
versions[version] = scriptname
else:
raise DuplicatedVersion(
'Duplicated versions {%s,%s}' %
(scriptname, previousname))
return scripts | [
"def",
"read_scripts",
"(",
"self",
",",
"minversion",
"=",
"None",
",",
"maxversion",
"=",
"None",
",",
"skip_versions",
"=",
"(",
")",
")",
":",
"scripts",
"=",
"[",
"]",
"versions",
"=",
"{",
"}",
"# a script is unique per version",
"for",
"scriptname",
... | Extract the upgrade scripts from a directory as a list of
dictionaries, ordered by version.
:param minversion: the minimum version to consider
:param maxversion: the maximum version to consider
:param skipversions: the versions to skip | [
"Extract",
"the",
"upgrade",
"scripts",
"from",
"a",
"directory",
"as",
"a",
"list",
"of",
"dictionaries",
"ordered",
"by",
"version",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/upgrade_manager.py#L259-L289 | train | 213,948 |
gem/oq-engine | openquake/server/db/upgrade_manager.py | UpgradeManager.extract_upgrade_scripts | def extract_upgrade_scripts(self):
"""
Extract the OpenQuake upgrade scripts from the links in the GitHub page
"""
link_pattern = '>\s*{0}\s*<'.format(self.pattern[1:-1])
page = urllib.request.urlopen(self.upgrades_url).read()
for mo in re.finditer(link_pattern, page):
scriptname = mo.group(0)[1:-1].strip()
yield self.parse_script_name(scriptname) | python | def extract_upgrade_scripts(self):
"""
Extract the OpenQuake upgrade scripts from the links in the GitHub page
"""
link_pattern = '>\s*{0}\s*<'.format(self.pattern[1:-1])
page = urllib.request.urlopen(self.upgrades_url).read()
for mo in re.finditer(link_pattern, page):
scriptname = mo.group(0)[1:-1].strip()
yield self.parse_script_name(scriptname) | [
"def",
"extract_upgrade_scripts",
"(",
"self",
")",
":",
"link_pattern",
"=",
"'>\\s*{0}\\s*<'",
".",
"format",
"(",
"self",
".",
"pattern",
"[",
"1",
":",
"-",
"1",
"]",
")",
"page",
"=",
"urllib",
".",
"request",
".",
"urlopen",
"(",
"self",
".",
"up... | Extract the OpenQuake upgrade scripts from the links in the GitHub page | [
"Extract",
"the",
"OpenQuake",
"upgrade",
"scripts",
"from",
"the",
"links",
"in",
"the",
"GitHub",
"page"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/upgrade_manager.py#L291-L299 | train | 213,949 |
gem/oq-engine | openquake/hazardlib/geo/line.py | Line.average_azimuth | def average_azimuth(self):
"""
Calculate and return weighted average azimuth of all line's segments
in decimal degrees.
Uses formula from
http://en.wikipedia.org/wiki/Mean_of_circular_quantities
>>> from openquake.hazardlib.geo.point import Point as P
>>> '%.1f' % Line([P(0, 0), P(1e-5, 1e-5)]).average_azimuth()
'45.0'
>>> '%.1f' % Line([P(0, 0), P(0, 1e-5), P(1e-5, 1e-5)]).average_azimuth()
'45.0'
>>> line = Line([P(0, 0), P(-2e-5, 0), P(-2e-5, 1.154e-5)])
>>> '%.1f' % line.average_azimuth()
'300.0'
"""
if len(self.points) == 2:
return self.points[0].azimuth(self.points[1])
lons = numpy.array([point.longitude for point in self.points])
lats = numpy.array([point.latitude for point in self.points])
azimuths = geodetic.azimuth(lons[:-1], lats[:-1], lons[1:], lats[1:])
distances = geodetic.geodetic_distance(lons[:-1], lats[:-1],
lons[1:], lats[1:])
azimuths = numpy.radians(azimuths)
# convert polar coordinates to Cartesian ones and calculate
# the average coordinate of each component
avg_x = numpy.mean(distances * numpy.sin(azimuths))
avg_y = numpy.mean(distances * numpy.cos(azimuths))
# find the mean azimuth from that mean vector
azimuth = numpy.degrees(numpy.arctan2(avg_x, avg_y))
if azimuth < 0:
azimuth += 360
return azimuth | python | def average_azimuth(self):
"""
Calculate and return weighted average azimuth of all line's segments
in decimal degrees.
Uses formula from
http://en.wikipedia.org/wiki/Mean_of_circular_quantities
>>> from openquake.hazardlib.geo.point import Point as P
>>> '%.1f' % Line([P(0, 0), P(1e-5, 1e-5)]).average_azimuth()
'45.0'
>>> '%.1f' % Line([P(0, 0), P(0, 1e-5), P(1e-5, 1e-5)]).average_azimuth()
'45.0'
>>> line = Line([P(0, 0), P(-2e-5, 0), P(-2e-5, 1.154e-5)])
>>> '%.1f' % line.average_azimuth()
'300.0'
"""
if len(self.points) == 2:
return self.points[0].azimuth(self.points[1])
lons = numpy.array([point.longitude for point in self.points])
lats = numpy.array([point.latitude for point in self.points])
azimuths = geodetic.azimuth(lons[:-1], lats[:-1], lons[1:], lats[1:])
distances = geodetic.geodetic_distance(lons[:-1], lats[:-1],
lons[1:], lats[1:])
azimuths = numpy.radians(azimuths)
# convert polar coordinates to Cartesian ones and calculate
# the average coordinate of each component
avg_x = numpy.mean(distances * numpy.sin(azimuths))
avg_y = numpy.mean(distances * numpy.cos(azimuths))
# find the mean azimuth from that mean vector
azimuth = numpy.degrees(numpy.arctan2(avg_x, avg_y))
if azimuth < 0:
azimuth += 360
return azimuth | [
"def",
"average_azimuth",
"(",
"self",
")",
":",
"if",
"len",
"(",
"self",
".",
"points",
")",
"==",
"2",
":",
"return",
"self",
".",
"points",
"[",
"0",
"]",
".",
"azimuth",
"(",
"self",
".",
"points",
"[",
"1",
"]",
")",
"lons",
"=",
"numpy",
... | Calculate and return weighted average azimuth of all line's segments
in decimal degrees.
Uses formula from
http://en.wikipedia.org/wiki/Mean_of_circular_quantities
>>> from openquake.hazardlib.geo.point import Point as P
>>> '%.1f' % Line([P(0, 0), P(1e-5, 1e-5)]).average_azimuth()
'45.0'
>>> '%.1f' % Line([P(0, 0), P(0, 1e-5), P(1e-5, 1e-5)]).average_azimuth()
'45.0'
>>> line = Line([P(0, 0), P(-2e-5, 0), P(-2e-5, 1.154e-5)])
>>> '%.1f' % line.average_azimuth()
'300.0' | [
"Calculate",
"and",
"return",
"weighted",
"average",
"azimuth",
"of",
"all",
"line",
"s",
"segments",
"in",
"decimal",
"degrees",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/line.py#L93-L126 | train | 213,950 |
gem/oq-engine | openquake/hazardlib/geo/line.py | Line.resample | def resample(self, section_length):
"""
Resample this line into sections.
The first point in the resampled line corresponds
to the first point in the original line.
Starting from the first point in the original line, a line
segment is defined as the line connecting the last point in the
resampled line and the next point in the original line.
The line segment is then split into sections of length equal to
``section_length``. The resampled line is obtained
by concatenating all sections.
The number of sections in a line segment is calculated as follows:
``round(segment_length / section_length)``.
Note that the resulting line has a length that is an exact multiple of
``section_length``, therefore its length is in general smaller
or greater (depending on the rounding) than the length
of the original line.
For a straight line, the difference between the resulting length
and the original length is at maximum half of the ``section_length``.
For a curved line, the difference my be larger,
because of corners getting cut.
:param section_length:
The length of the section, in km.
:type section_length:
float
:returns:
A new line resampled into sections based on the given length.
:rtype:
An instance of :class:`Line`
"""
if len(self.points) < 2:
return Line(self.points)
resampled_points = []
# 1. Resample the first section. 2. Loop over the remaining points
# in the line and resample the remaining sections.
# 3. Extend the list with the resampled points, except the first one
# (because it's already contained in the previous set of
# resampled points).
resampled_points.extend(
self.points[0].equally_spaced_points(self.points[1],
section_length)
)
# Skip the first point, it's already resampled
for i in range(2, len(self.points)):
points = resampled_points[-1].equally_spaced_points(
self.points[i], section_length
)
resampled_points.extend(points[1:])
return Line(resampled_points) | python | def resample(self, section_length):
"""
Resample this line into sections.
The first point in the resampled line corresponds
to the first point in the original line.
Starting from the first point in the original line, a line
segment is defined as the line connecting the last point in the
resampled line and the next point in the original line.
The line segment is then split into sections of length equal to
``section_length``. The resampled line is obtained
by concatenating all sections.
The number of sections in a line segment is calculated as follows:
``round(segment_length / section_length)``.
Note that the resulting line has a length that is an exact multiple of
``section_length``, therefore its length is in general smaller
or greater (depending on the rounding) than the length
of the original line.
For a straight line, the difference between the resulting length
and the original length is at maximum half of the ``section_length``.
For a curved line, the difference my be larger,
because of corners getting cut.
:param section_length:
The length of the section, in km.
:type section_length:
float
:returns:
A new line resampled into sections based on the given length.
:rtype:
An instance of :class:`Line`
"""
if len(self.points) < 2:
return Line(self.points)
resampled_points = []
# 1. Resample the first section. 2. Loop over the remaining points
# in the line and resample the remaining sections.
# 3. Extend the list with the resampled points, except the first one
# (because it's already contained in the previous set of
# resampled points).
resampled_points.extend(
self.points[0].equally_spaced_points(self.points[1],
section_length)
)
# Skip the first point, it's already resampled
for i in range(2, len(self.points)):
points = resampled_points[-1].equally_spaced_points(
self.points[i], section_length
)
resampled_points.extend(points[1:])
return Line(resampled_points) | [
"def",
"resample",
"(",
"self",
",",
"section_length",
")",
":",
"if",
"len",
"(",
"self",
".",
"points",
")",
"<",
"2",
":",
"return",
"Line",
"(",
"self",
".",
"points",
")",
"resampled_points",
"=",
"[",
"]",
"# 1. Resample the first section. 2. Loop over... | Resample this line into sections.
The first point in the resampled line corresponds
to the first point in the original line.
Starting from the first point in the original line, a line
segment is defined as the line connecting the last point in the
resampled line and the next point in the original line.
The line segment is then split into sections of length equal to
``section_length``. The resampled line is obtained
by concatenating all sections.
The number of sections in a line segment is calculated as follows:
``round(segment_length / section_length)``.
Note that the resulting line has a length that is an exact multiple of
``section_length``, therefore its length is in general smaller
or greater (depending on the rounding) than the length
of the original line.
For a straight line, the difference between the resulting length
and the original length is at maximum half of the ``section_length``.
For a curved line, the difference my be larger,
because of corners getting cut.
:param section_length:
The length of the section, in km.
:type section_length:
float
:returns:
A new line resampled into sections based on the given length.
:rtype:
An instance of :class:`Line` | [
"Resample",
"this",
"line",
"into",
"sections",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/line.py#L128-L189 | train | 213,951 |
gem/oq-engine | openquake/hazardlib/geo/line.py | Line.get_length | def get_length(self):
"""
Calculate and return the length of the line as a sum of lengths
of all its segments.
:returns:
Total length in km.
"""
length = 0
for i, point in enumerate(self.points):
if i != 0:
length += point.distance(self.points[i - 1])
return length | python | def get_length(self):
"""
Calculate and return the length of the line as a sum of lengths
of all its segments.
:returns:
Total length in km.
"""
length = 0
for i, point in enumerate(self.points):
if i != 0:
length += point.distance(self.points[i - 1])
return length | [
"def",
"get_length",
"(",
"self",
")",
":",
"length",
"=",
"0",
"for",
"i",
",",
"point",
"in",
"enumerate",
"(",
"self",
".",
"points",
")",
":",
"if",
"i",
"!=",
"0",
":",
"length",
"+=",
"point",
".",
"distance",
"(",
"self",
".",
"points",
"[... | Calculate and return the length of the line as a sum of lengths
of all its segments.
:returns:
Total length in km. | [
"Calculate",
"and",
"return",
"the",
"length",
"of",
"the",
"line",
"as",
"a",
"sum",
"of",
"lengths",
"of",
"all",
"its",
"segments",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/line.py#L191-L203 | train | 213,952 |
gem/oq-engine | openquake/hazardlib/geo/line.py | Line.resample_to_num_points | def resample_to_num_points(self, num_points):
"""
Resample the line to a specified number of points.
:param num_points:
Integer number of points the resulting line should have.
:returns:
A new line with that many points as requested.
"""
assert len(self.points) > 1, "can not resample the line of one point"
section_length = self.get_length() / (num_points - 1)
resampled_points = [self.points[0]]
segment = 0
acc_length = 0
last_segment_length = 0
for i in range(num_points - 1):
tot_length = (i + 1) * section_length
while tot_length > acc_length and segment < len(self.points) - 1:
last_segment_length = self.points[segment].distance(
self.points[segment + 1]
)
acc_length += last_segment_length
segment += 1
p1, p2 = self.points[segment - 1:segment + 1]
offset = tot_length - (acc_length - last_segment_length)
if offset < 1e-5:
# forward geodetic transformations for very small distances
# are very inefficient (and also unneeded). if target point
# is just 1 cm away from original (non-resampled) line vertex,
# don't even bother doing geodetic calculations.
resampled = p1
else:
resampled = p1.equally_spaced_points(p2, offset)[1]
resampled_points.append(resampled)
return Line(resampled_points) | python | def resample_to_num_points(self, num_points):
"""
Resample the line to a specified number of points.
:param num_points:
Integer number of points the resulting line should have.
:returns:
A new line with that many points as requested.
"""
assert len(self.points) > 1, "can not resample the line of one point"
section_length = self.get_length() / (num_points - 1)
resampled_points = [self.points[0]]
segment = 0
acc_length = 0
last_segment_length = 0
for i in range(num_points - 1):
tot_length = (i + 1) * section_length
while tot_length > acc_length and segment < len(self.points) - 1:
last_segment_length = self.points[segment].distance(
self.points[segment + 1]
)
acc_length += last_segment_length
segment += 1
p1, p2 = self.points[segment - 1:segment + 1]
offset = tot_length - (acc_length - last_segment_length)
if offset < 1e-5:
# forward geodetic transformations for very small distances
# are very inefficient (and also unneeded). if target point
# is just 1 cm away from original (non-resampled) line vertex,
# don't even bother doing geodetic calculations.
resampled = p1
else:
resampled = p1.equally_spaced_points(p2, offset)[1]
resampled_points.append(resampled)
return Line(resampled_points) | [
"def",
"resample_to_num_points",
"(",
"self",
",",
"num_points",
")",
":",
"assert",
"len",
"(",
"self",
".",
"points",
")",
">",
"1",
",",
"\"can not resample the line of one point\"",
"section_length",
"=",
"self",
".",
"get_length",
"(",
")",
"/",
"(",
"num... | Resample the line to a specified number of points.
:param num_points:
Integer number of points the resulting line should have.
:returns:
A new line with that many points as requested. | [
"Resample",
"the",
"line",
"to",
"a",
"specified",
"number",
"of",
"points",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/line.py#L205-L243 | train | 213,953 |
gem/oq-engine | openquake/hazardlib/gsim/nga_east.py | cena_tau | def cena_tau(imt, mag, params):
"""
Returns the inter-event standard deviation, tau, for the CENA case
"""
if imt.name == "PGV":
C = params["PGV"]
else:
C = params["SA"]
if mag > 6.5:
return C["tau3"]
elif (mag > 5.5) and (mag <= 6.5):
return ITPL(mag, C["tau3"], C["tau2"], 5.5, 1.0)
elif (mag > 5.0) and (mag <= 5.5):
return ITPL(mag, C["tau2"], C["tau1"], 5.0, 0.5)
else:
return C["tau1"] | python | def cena_tau(imt, mag, params):
"""
Returns the inter-event standard deviation, tau, for the CENA case
"""
if imt.name == "PGV":
C = params["PGV"]
else:
C = params["SA"]
if mag > 6.5:
return C["tau3"]
elif (mag > 5.5) and (mag <= 6.5):
return ITPL(mag, C["tau3"], C["tau2"], 5.5, 1.0)
elif (mag > 5.0) and (mag <= 5.5):
return ITPL(mag, C["tau2"], C["tau1"], 5.0, 0.5)
else:
return C["tau1"] | [
"def",
"cena_tau",
"(",
"imt",
",",
"mag",
",",
"params",
")",
":",
"if",
"imt",
".",
"name",
"==",
"\"PGV\"",
":",
"C",
"=",
"params",
"[",
"\"PGV\"",
"]",
"else",
":",
"C",
"=",
"params",
"[",
"\"SA\"",
"]",
"if",
"mag",
">",
"6.5",
":",
"ret... | Returns the inter-event standard deviation, tau, for the CENA case | [
"Returns",
"the",
"inter",
"-",
"event",
"standard",
"deviation",
"tau",
"for",
"the",
"CENA",
"case"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nga_east.py#L174-L189 | train | 213,954 |
gem/oq-engine | openquake/hazardlib/gsim/nga_east.py | get_tau_at_quantile | def get_tau_at_quantile(mean, stddev, quantile):
"""
Returns the value of tau at a given quantile in the form of a dictionary
organised by intensity measure
"""
tau_model = {}
for imt in mean:
tau_model[imt] = {}
for key in mean[imt]:
if quantile is None:
tau_model[imt][key] = mean[imt][key]
else:
tau_model[imt][key] = _at_percentile(mean[imt][key],
stddev[imt][key],
quantile)
return tau_model | python | def get_tau_at_quantile(mean, stddev, quantile):
"""
Returns the value of tau at a given quantile in the form of a dictionary
organised by intensity measure
"""
tau_model = {}
for imt in mean:
tau_model[imt] = {}
for key in mean[imt]:
if quantile is None:
tau_model[imt][key] = mean[imt][key]
else:
tau_model[imt][key] = _at_percentile(mean[imt][key],
stddev[imt][key],
quantile)
return tau_model | [
"def",
"get_tau_at_quantile",
"(",
"mean",
",",
"stddev",
",",
"quantile",
")",
":",
"tau_model",
"=",
"{",
"}",
"for",
"imt",
"in",
"mean",
":",
"tau_model",
"[",
"imt",
"]",
"=",
"{",
"}",
"for",
"key",
"in",
"mean",
"[",
"imt",
"]",
":",
"if",
... | Returns the value of tau at a given quantile in the form of a dictionary
organised by intensity measure | [
"Returns",
"the",
"value",
"of",
"tau",
"at",
"a",
"given",
"quantile",
"in",
"the",
"form",
"of",
"a",
"dictionary",
"organised",
"by",
"intensity",
"measure"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nga_east.py#L192-L207 | train | 213,955 |
gem/oq-engine | openquake/hazardlib/gsim/nga_east.py | NGAEastBaseGMPE.get_stddevs | def get_stddevs(self, mag, imt, stddev_types, num_sites):
"""
Returns the standard deviations for either the ergodic or
non-ergodic models
"""
tau = self._get_tau(imt, mag)
phi = self._get_phi(imt, mag)
sigma = np.sqrt(tau ** 2. + phi ** 2.)
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(sigma + np.zeros(num_sites))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(phi + np.zeros(num_sites))
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(tau + np.zeros(num_sites))
return stddevs | python | def get_stddevs(self, mag, imt, stddev_types, num_sites):
"""
Returns the standard deviations for either the ergodic or
non-ergodic models
"""
tau = self._get_tau(imt, mag)
phi = self._get_phi(imt, mag)
sigma = np.sqrt(tau ** 2. + phi ** 2.)
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(sigma + np.zeros(num_sites))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(phi + np.zeros(num_sites))
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(tau + np.zeros(num_sites))
return stddevs | [
"def",
"get_stddevs",
"(",
"self",
",",
"mag",
",",
"imt",
",",
"stddev_types",
",",
"num_sites",
")",
":",
"tau",
"=",
"self",
".",
"_get_tau",
"(",
"imt",
",",
"mag",
")",
"phi",
"=",
"self",
".",
"_get_phi",
"(",
"imt",
",",
"mag",
")",
"sigma",... | Returns the standard deviations for either the ergodic or
non-ergodic models | [
"Returns",
"the",
"standard",
"deviations",
"for",
"either",
"the",
"ergodic",
"or",
"non",
"-",
"ergodic",
"models"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nga_east.py#L511-L528 | train | 213,956 |
gem/oq-engine | openquake/hazardlib/gsim/nga_east.py | NGAEastBaseGMPETotalSigma.get_stddevs | def get_stddevs(self, mag, imt, stddev_types, num_sites):
"""
Returns the total standard deviation
"""
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
sigma = self._get_total_sigma(imt, mag)
stddevs.append(sigma + np.zeros(num_sites))
return stddevs | python | def get_stddevs(self, mag, imt, stddev_types, num_sites):
"""
Returns the total standard deviation
"""
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
sigma = self._get_total_sigma(imt, mag)
stddevs.append(sigma + np.zeros(num_sites))
return stddevs | [
"def",
"get_stddevs",
"(",
"self",
",",
"mag",
",",
"imt",
",",
"stddev_types",
",",
"num_sites",
")",
":",
"stddevs",
"=",
"[",
"]",
"for",
"stddev_type",
"in",
"stddev_types",
":",
"assert",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_T... | Returns the total standard deviation | [
"Returns",
"the",
"total",
"standard",
"deviation"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nga_east.py#L627-L637 | train | 213,957 |
gem/oq-engine | openquake/hazardlib/gsim/nga_east.py | NGAEastBaseGMPETotalSigma._get_sigma_at_quantile | def _get_sigma_at_quantile(self, sigma_quantile):
"""
Calculates the total standard deviation at the specified quantile
"""
# Mean mean is found in self.TAU. Get the variance in tau
tau_std = TAU_SETUP[self.tau_model]["STD"]
# Mean phiss is found in self.PHI_SS. Get the variance in phi
phi_std = deepcopy(self.PHI_SS.sa_coeffs)
phi_std.update(self.PHI_SS.non_sa_coeffs)
for key in phi_std:
phi_std[key] = {"a": PHI_SETUP[self.phi_model][key]["var_a"],
"b": PHI_SETUP[self.phi_model][key]["var_b"]}
if self.ergodic:
# IMT list should be taken from the PHI_S2SS_MODEL
imt_list = list(
PHI_S2SS_MODEL[self.phi_s2ss_model].non_sa_coeffs.keys())
imt_list += \
list(PHI_S2SS_MODEL[self.phi_s2ss_model].sa_coeffs.keys())
else:
imt_list = phi_std.keys()
phi_std = CoeffsTable(sa_damping=5, table=phi_std)
tau_bar, tau_std = self._get_tau_vector(self.TAU, tau_std, imt_list)
phi_bar, phi_std = self._get_phi_vector(self.PHI_SS, phi_std, imt_list)
sigma = {}
# Calculate the total standard deviation
for imt in imt_list:
sigma[imt] = {}
for i, key in enumerate(self.tau_keys):
# Calculates the expected standard deviation
sigma_bar = np.sqrt(tau_bar[imt][i] ** 2. +
phi_bar[imt][i] ** 2.)
# Calculated the variance in the standard deviation
sigma_std = np.sqrt(tau_std[imt][i] ** 2. +
phi_std[imt][i] ** 2.)
# The keys swap from tau to sigma
new_key = key.replace("tau", "sigma")
if sigma_quantile is not None:
sigma[imt][new_key] =\
_at_percentile(sigma_bar, sigma_std, sigma_quantile)
else:
sigma[imt][new_key] = sigma_bar
self.tau_keys[i] = new_key
self.SIGMA = CoeffsTable(sa_damping=5, table=sigma) | python | def _get_sigma_at_quantile(self, sigma_quantile):
"""
Calculates the total standard deviation at the specified quantile
"""
# Mean mean is found in self.TAU. Get the variance in tau
tau_std = TAU_SETUP[self.tau_model]["STD"]
# Mean phiss is found in self.PHI_SS. Get the variance in phi
phi_std = deepcopy(self.PHI_SS.sa_coeffs)
phi_std.update(self.PHI_SS.non_sa_coeffs)
for key in phi_std:
phi_std[key] = {"a": PHI_SETUP[self.phi_model][key]["var_a"],
"b": PHI_SETUP[self.phi_model][key]["var_b"]}
if self.ergodic:
# IMT list should be taken from the PHI_S2SS_MODEL
imt_list = list(
PHI_S2SS_MODEL[self.phi_s2ss_model].non_sa_coeffs.keys())
imt_list += \
list(PHI_S2SS_MODEL[self.phi_s2ss_model].sa_coeffs.keys())
else:
imt_list = phi_std.keys()
phi_std = CoeffsTable(sa_damping=5, table=phi_std)
tau_bar, tau_std = self._get_tau_vector(self.TAU, tau_std, imt_list)
phi_bar, phi_std = self._get_phi_vector(self.PHI_SS, phi_std, imt_list)
sigma = {}
# Calculate the total standard deviation
for imt in imt_list:
sigma[imt] = {}
for i, key in enumerate(self.tau_keys):
# Calculates the expected standard deviation
sigma_bar = np.sqrt(tau_bar[imt][i] ** 2. +
phi_bar[imt][i] ** 2.)
# Calculated the variance in the standard deviation
sigma_std = np.sqrt(tau_std[imt][i] ** 2. +
phi_std[imt][i] ** 2.)
# The keys swap from tau to sigma
new_key = key.replace("tau", "sigma")
if sigma_quantile is not None:
sigma[imt][new_key] =\
_at_percentile(sigma_bar, sigma_std, sigma_quantile)
else:
sigma[imt][new_key] = sigma_bar
self.tau_keys[i] = new_key
self.SIGMA = CoeffsTable(sa_damping=5, table=sigma) | [
"def",
"_get_sigma_at_quantile",
"(",
"self",
",",
"sigma_quantile",
")",
":",
"# Mean mean is found in self.TAU. Get the variance in tau",
"tau_std",
"=",
"TAU_SETUP",
"[",
"self",
".",
"tau_model",
"]",
"[",
"\"STD\"",
"]",
"# Mean phiss is found in self.PHI_SS. Get the var... | Calculates the total standard deviation at the specified quantile | [
"Calculates",
"the",
"total",
"standard",
"deviation",
"at",
"the",
"specified",
"quantile"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nga_east.py#L639-L681 | train | 213,958 |
gem/oq-engine | openquake/hazardlib/gsim/nga_east.py | NGAEastBaseGMPETotalSigma._get_tau_vector | def _get_tau_vector(self, tau_mean, tau_std, imt_list):
"""
Gets the vector of mean and variance of tau values corresponding to
the specific model and returns them as dictionaries
"""
self.magnitude_limits = MAG_LIMS_KEYS[self.tau_model]["mag"]
self.tau_keys = MAG_LIMS_KEYS[self.tau_model]["keys"]
t_bar = {}
t_std = {}
for imt in imt_list:
t_bar[imt] = []
t_std[imt] = []
for mag, key in zip(self.magnitude_limits, self.tau_keys):
t_bar[imt].append(
TAU_EXECUTION[self.tau_model](imt, mag, tau_mean))
t_std[imt].append(
TAU_EXECUTION[self.tau_model](imt, mag, tau_std))
return t_bar, t_std | python | def _get_tau_vector(self, tau_mean, tau_std, imt_list):
"""
Gets the vector of mean and variance of tau values corresponding to
the specific model and returns them as dictionaries
"""
self.magnitude_limits = MAG_LIMS_KEYS[self.tau_model]["mag"]
self.tau_keys = MAG_LIMS_KEYS[self.tau_model]["keys"]
t_bar = {}
t_std = {}
for imt in imt_list:
t_bar[imt] = []
t_std[imt] = []
for mag, key in zip(self.magnitude_limits, self.tau_keys):
t_bar[imt].append(
TAU_EXECUTION[self.tau_model](imt, mag, tau_mean))
t_std[imt].append(
TAU_EXECUTION[self.tau_model](imt, mag, tau_std))
return t_bar, t_std | [
"def",
"_get_tau_vector",
"(",
"self",
",",
"tau_mean",
",",
"tau_std",
",",
"imt_list",
")",
":",
"self",
".",
"magnitude_limits",
"=",
"MAG_LIMS_KEYS",
"[",
"self",
".",
"tau_model",
"]",
"[",
"\"mag\"",
"]",
"self",
".",
"tau_keys",
"=",
"MAG_LIMS_KEYS",
... | Gets the vector of mean and variance of tau values corresponding to
the specific model and returns them as dictionaries | [
"Gets",
"the",
"vector",
"of",
"mean",
"and",
"variance",
"of",
"tau",
"values",
"corresponding",
"to",
"the",
"specific",
"model",
"and",
"returns",
"them",
"as",
"dictionaries"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nga_east.py#L683-L700 | train | 213,959 |
gem/oq-engine | openquake/hazardlib/gsim/nga_east.py | NGAEastBaseGMPETotalSigma._get_phi_vector | def _get_phi_vector(self, phi_mean, phi_std, imt_list):
"""
Gets the vector of mean and variance of phi values corresponding to
the specific model and returns them as dictionaries
"""
p_bar = {}
p_std = {}
for imt in imt_list:
p_bar[imt] = []
p_std[imt] = []
for mag in self.magnitude_limits:
phi_ss_mean = get_phi_ss(imt, mag, phi_mean)
phi_ss_std = get_phi_ss(imt, mag, phi_std)
if self.ergodic:
# Add on the phi_s2ss term according to Eqs. 5.15 and 5.16
# of Al Atik (2015)
phi_ss_mean = np.sqrt(
phi_ss_mean ** 2. +
PHI_S2SS_MODEL[self.phi_s2ss_model][imt]["mean"] ** 2.
)
phi_ss_std = np.sqrt(
phi_ss_std ** 2. +
PHI_S2SS_MODEL[self.phi_s2ss_model][imt]["var"] ** 2.
)
p_bar[imt].append(phi_ss_mean)
p_std[imt].append(phi_ss_std)
return p_bar, p_std | python | def _get_phi_vector(self, phi_mean, phi_std, imt_list):
"""
Gets the vector of mean and variance of phi values corresponding to
the specific model and returns them as dictionaries
"""
p_bar = {}
p_std = {}
for imt in imt_list:
p_bar[imt] = []
p_std[imt] = []
for mag in self.magnitude_limits:
phi_ss_mean = get_phi_ss(imt, mag, phi_mean)
phi_ss_std = get_phi_ss(imt, mag, phi_std)
if self.ergodic:
# Add on the phi_s2ss term according to Eqs. 5.15 and 5.16
# of Al Atik (2015)
phi_ss_mean = np.sqrt(
phi_ss_mean ** 2. +
PHI_S2SS_MODEL[self.phi_s2ss_model][imt]["mean"] ** 2.
)
phi_ss_std = np.sqrt(
phi_ss_std ** 2. +
PHI_S2SS_MODEL[self.phi_s2ss_model][imt]["var"] ** 2.
)
p_bar[imt].append(phi_ss_mean)
p_std[imt].append(phi_ss_std)
return p_bar, p_std | [
"def",
"_get_phi_vector",
"(",
"self",
",",
"phi_mean",
",",
"phi_std",
",",
"imt_list",
")",
":",
"p_bar",
"=",
"{",
"}",
"p_std",
"=",
"{",
"}",
"for",
"imt",
"in",
"imt_list",
":",
"p_bar",
"[",
"imt",
"]",
"=",
"[",
"]",
"p_std",
"[",
"imt",
... | Gets the vector of mean and variance of phi values corresponding to
the specific model and returns them as dictionaries | [
"Gets",
"the",
"vector",
"of",
"mean",
"and",
"variance",
"of",
"phi",
"values",
"corresponding",
"to",
"the",
"specific",
"model",
"and",
"returns",
"them",
"as",
"dictionaries"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nga_east.py#L702-L729 | train | 213,960 |
gem/oq-engine | openquake/hazardlib/gsim/nga_east.py | NGAEastBaseGMPETotalSigma._get_total_sigma | def _get_total_sigma(self, imt, mag):
"""
Returns the estimated total standard deviation for a given intensity
measure type and magnitude
"""
C = self.SIGMA[imt]
if mag <= self.magnitude_limits[0]:
# The CENA constant model is always returned here
return C[self.tau_keys[0]]
elif mag > self.magnitude_limits[-1]:
return C[self.tau_keys[-1]]
else:
# Needs interpolation
for i in range(len(self.tau_keys) - 1):
l_m = self.magnitude_limits[i]
u_m = self.magnitude_limits[i + 1]
if mag > l_m and mag <= u_m:
return ITPL(mag,
C[self.tau_keys[i + 1]],
C[self.tau_keys[i]],
l_m,
u_m - l_m) | python | def _get_total_sigma(self, imt, mag):
"""
Returns the estimated total standard deviation for a given intensity
measure type and magnitude
"""
C = self.SIGMA[imt]
if mag <= self.magnitude_limits[0]:
# The CENA constant model is always returned here
return C[self.tau_keys[0]]
elif mag > self.magnitude_limits[-1]:
return C[self.tau_keys[-1]]
else:
# Needs interpolation
for i in range(len(self.tau_keys) - 1):
l_m = self.magnitude_limits[i]
u_m = self.magnitude_limits[i + 1]
if mag > l_m and mag <= u_m:
return ITPL(mag,
C[self.tau_keys[i + 1]],
C[self.tau_keys[i]],
l_m,
u_m - l_m) | [
"def",
"_get_total_sigma",
"(",
"self",
",",
"imt",
",",
"mag",
")",
":",
"C",
"=",
"self",
".",
"SIGMA",
"[",
"imt",
"]",
"if",
"mag",
"<=",
"self",
".",
"magnitude_limits",
"[",
"0",
"]",
":",
"# The CENA constant model is always returned here",
"return",
... | Returns the estimated total standard deviation for a given intensity
measure type and magnitude | [
"Returns",
"the",
"estimated",
"total",
"standard",
"deviation",
"for",
"a",
"given",
"intensity",
"measure",
"type",
"and",
"magnitude"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nga_east.py#L731-L752 | train | 213,961 |
gem/oq-engine | openquake/commands/show.py | get_hcurves_and_means | def get_hcurves_and_means(dstore):
"""
Extract hcurves from the datastore and compute their means.
:returns: curves_by_rlz, mean_curves
"""
rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
getter = getters.PmapGetter(dstore, rlzs_assoc)
pmaps = getter.get_pmaps()
return dict(zip(getter.rlzs, pmaps)), dstore['hcurves/mean'] | python | def get_hcurves_and_means(dstore):
"""
Extract hcurves from the datastore and compute their means.
:returns: curves_by_rlz, mean_curves
"""
rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
getter = getters.PmapGetter(dstore, rlzs_assoc)
pmaps = getter.get_pmaps()
return dict(zip(getter.rlzs, pmaps)), dstore['hcurves/mean'] | [
"def",
"get_hcurves_and_means",
"(",
"dstore",
")",
":",
"rlzs_assoc",
"=",
"dstore",
"[",
"'csm_info'",
"]",
".",
"get_rlzs_assoc",
"(",
")",
"getter",
"=",
"getters",
".",
"PmapGetter",
"(",
"dstore",
",",
"rlzs_assoc",
")",
"pmaps",
"=",
"getter",
".",
... | Extract hcurves from the datastore and compute their means.
:returns: curves_by_rlz, mean_curves | [
"Extract",
"hcurves",
"from",
"the",
"datastore",
"and",
"compute",
"their",
"means",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/show.py#L33-L42 | train | 213,962 |
gem/oq-engine | openquake/hazardlib/gsim/lin_lee_2008.py | LinLee2008SInter._compute_mean | def _compute_mean(self, C, mag, rhypo, hypo_depth, mean, idx):
"""
Compute mean value according to equations 10 and 11 page 226.
"""
mean[idx] = (C['C1'] + C['C2'] * mag + C['C3'] * np.log(rhypo[idx] +
C['C4'] * np.exp(C['C5'] * mag)) + C['C6'] * hypo_depth) | python | def _compute_mean(self, C, mag, rhypo, hypo_depth, mean, idx):
"""
Compute mean value according to equations 10 and 11 page 226.
"""
mean[idx] = (C['C1'] + C['C2'] * mag + C['C3'] * np.log(rhypo[idx] +
C['C4'] * np.exp(C['C5'] * mag)) + C['C6'] * hypo_depth) | [
"def",
"_compute_mean",
"(",
"self",
",",
"C",
",",
"mag",
",",
"rhypo",
",",
"hypo_depth",
",",
"mean",
",",
"idx",
")",
":",
"mean",
"[",
"idx",
"]",
"=",
"(",
"C",
"[",
"'C1'",
"]",
"+",
"C",
"[",
"'C2'",
"]",
"*",
"mag",
"+",
"C",
"[",
... | Compute mean value according to equations 10 and 11 page 226. | [
"Compute",
"mean",
"value",
"according",
"to",
"equations",
"10",
"and",
"11",
"page",
"226",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/lin_lee_2008.py#L102-L107 | train | 213,963 |
gem/oq-engine | openquake/hazardlib/gsim/lin_lee_2008.py | LinLee2008SInter._compute_std | def _compute_std(self, C, stddevs, idx):
"""
Compute total standard deviation, see tables 3 and 4, pages 227 and
228.
"""
for stddev in stddevs:
stddev[idx] += C['sigma'] | python | def _compute_std(self, C, stddevs, idx):
"""
Compute total standard deviation, see tables 3 and 4, pages 227 and
228.
"""
for stddev in stddevs:
stddev[idx] += C['sigma'] | [
"def",
"_compute_std",
"(",
"self",
",",
"C",
",",
"stddevs",
",",
"idx",
")",
":",
"for",
"stddev",
"in",
"stddevs",
":",
"stddev",
"[",
"idx",
"]",
"+=",
"C",
"[",
"'sigma'",
"]"
] | Compute total standard deviation, see tables 3 and 4, pages 227 and
228. | [
"Compute",
"total",
"standard",
"deviation",
"see",
"tables",
"3",
"and",
"4",
"pages",
"227",
"and",
"228",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/lin_lee_2008.py#L109-L115 | train | 213,964 |
gem/oq-engine | openquake/hmtk/seismicity/max_magnitude/kijko_nonparametric_gaussian.py | check_config | def check_config(config):
'''Check config file inputs and overwrite bad values with the defaults'''
essential_keys = ['number_earthquakes']
for key in essential_keys:
if key not in config:
raise ValueError('For Kijko Nonparametric Gaussian the key %s '
'needs to be set in the configuation' % key)
if config.get('tolerance', 0.0) <= 0.0:
config['tolerance'] = 0.05
if config.get('maximum_iterations', 0) < 1:
config['maximum_iterations'] = 100
if config.get('number_samples', 0) < 2:
config['number_samples'] = 51
return config | python | def check_config(config):
'''Check config file inputs and overwrite bad values with the defaults'''
essential_keys = ['number_earthquakes']
for key in essential_keys:
if key not in config:
raise ValueError('For Kijko Nonparametric Gaussian the key %s '
'needs to be set in the configuation' % key)
if config.get('tolerance', 0.0) <= 0.0:
config['tolerance'] = 0.05
if config.get('maximum_iterations', 0) < 1:
config['maximum_iterations'] = 100
if config.get('number_samples', 0) < 2:
config['number_samples'] = 51
return config | [
"def",
"check_config",
"(",
"config",
")",
":",
"essential_keys",
"=",
"[",
"'number_earthquakes'",
"]",
"for",
"key",
"in",
"essential_keys",
":",
"if",
"key",
"not",
"in",
"config",
":",
"raise",
"ValueError",
"(",
"'For Kijko Nonparametric Gaussian the key %s '",... | Check config file inputs and overwrite bad values with the defaults | [
"Check",
"config",
"file",
"inputs",
"and",
"overwrite",
"bad",
"values",
"with",
"the",
"defaults"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/max_magnitude/kijko_nonparametric_gaussian.py#L59-L77 | train | 213,965 |
gem/oq-engine | openquake/hmtk/seismicity/max_magnitude/kijko_nonparametric_gaussian.py | _get_exponential_spaced_values | def _get_exponential_spaced_values(mmin, mmax, number_samples):
'''
Function to return a set of exponentially spaced values between mmin and
mmax
:param float mmin:
Minimum value
:param float mmax:
Maximum value
:param float number_samples:
Number of exponentially spaced samples
:return np.ndarray:
Set of 'number_samples' exponentially spaced values
'''
lhs = np.exp(mmin) + np.arange(0., number_samples - 1., 1.) *\
((np.exp(mmax) - np.exp(mmin)) / (number_samples - 1.))
magval = np.hstack([lhs, np.exp(mmax)])
return np.log(magval) | python | def _get_exponential_spaced_values(mmin, mmax, number_samples):
'''
Function to return a set of exponentially spaced values between mmin and
mmax
:param float mmin:
Minimum value
:param float mmax:
Maximum value
:param float number_samples:
Number of exponentially spaced samples
:return np.ndarray:
Set of 'number_samples' exponentially spaced values
'''
lhs = np.exp(mmin) + np.arange(0., number_samples - 1., 1.) *\
((np.exp(mmax) - np.exp(mmin)) / (number_samples - 1.))
magval = np.hstack([lhs, np.exp(mmax)])
return np.log(magval) | [
"def",
"_get_exponential_spaced_values",
"(",
"mmin",
",",
"mmax",
",",
"number_samples",
")",
":",
"lhs",
"=",
"np",
".",
"exp",
"(",
"mmin",
")",
"+",
"np",
".",
"arange",
"(",
"0.",
",",
"number_samples",
"-",
"1.",
",",
"1.",
")",
"*",
"(",
"(",
... | Function to return a set of exponentially spaced values between mmin and
mmax
:param float mmin:
Minimum value
:param float mmax:
Maximum value
:param float number_samples:
Number of exponentially spaced samples
:return np.ndarray:
Set of 'number_samples' exponentially spaced values | [
"Function",
"to",
"return",
"a",
"set",
"of",
"exponentially",
"spaced",
"values",
"between",
"mmin",
"and",
"mmax"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/max_magnitude/kijko_nonparametric_gaussian.py#L80-L98 | train | 213,966 |
gem/oq-engine | openquake/commonlib/logs.py | dbcmd | def dbcmd(action, *args):
"""
A dispatcher to the database server.
:param action: database action to perform
:param args: arguments
"""
global sock
if sock is None:
sock = zeromq.Socket(
'tcp://%s:%s' % (config.dbserver.host, DBSERVER_PORT),
zeromq.zmq.REQ, 'connect').__enter__()
# the socket will be closed when the calculation ends
res = sock.send((action,) + args)
if isinstance(res, parallel.Result):
return res.get()
return res | python | def dbcmd(action, *args):
"""
A dispatcher to the database server.
:param action: database action to perform
:param args: arguments
"""
global sock
if sock is None:
sock = zeromq.Socket(
'tcp://%s:%s' % (config.dbserver.host, DBSERVER_PORT),
zeromq.zmq.REQ, 'connect').__enter__()
# the socket will be closed when the calculation ends
res = sock.send((action,) + args)
if isinstance(res, parallel.Result):
return res.get()
return res | [
"def",
"dbcmd",
"(",
"action",
",",
"*",
"args",
")",
":",
"global",
"sock",
"if",
"sock",
"is",
"None",
":",
"sock",
"=",
"zeromq",
".",
"Socket",
"(",
"'tcp://%s:%s'",
"%",
"(",
"config",
".",
"dbserver",
".",
"host",
",",
"DBSERVER_PORT",
")",
","... | A dispatcher to the database server.
:param action: database action to perform
:param args: arguments | [
"A",
"dispatcher",
"to",
"the",
"database",
"server",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/logs.py#L40-L56 | train | 213,967 |
gem/oq-engine | openquake/commonlib/logs.py | _update_log_record | def _update_log_record(self, record):
"""
Massage a log record before emitting it. Intended to be used by the
custom log handlers defined in this module.
"""
if not hasattr(record, 'hostname'):
record.hostname = '-'
if not hasattr(record, 'job_id'):
record.job_id = self.job_id | python | def _update_log_record(self, record):
"""
Massage a log record before emitting it. Intended to be used by the
custom log handlers defined in this module.
"""
if not hasattr(record, 'hostname'):
record.hostname = '-'
if not hasattr(record, 'job_id'):
record.job_id = self.job_id | [
"def",
"_update_log_record",
"(",
"self",
",",
"record",
")",
":",
"if",
"not",
"hasattr",
"(",
"record",
",",
"'hostname'",
")",
":",
"record",
".",
"hostname",
"=",
"'-'",
"if",
"not",
"hasattr",
"(",
"record",
",",
"'job_id'",
")",
":",
"record",
".... | Massage a log record before emitting it. Intended to be used by the
custom log handlers defined in this module. | [
"Massage",
"a",
"log",
"record",
"before",
"emitting",
"it",
".",
"Intended",
"to",
"be",
"used",
"by",
"the",
"custom",
"log",
"handlers",
"defined",
"in",
"this",
"module",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/logs.py#L68-L76 | train | 213,968 |
gem/oq-engine | openquake/commonlib/logs.py | handle | def handle(job_id, log_level='info', log_file=None):
"""
Context manager adding and removing log handlers.
:param job_id:
ID of the current job
:param log_level:
one of debug, info, warn, error, critical
:param log_file:
log file path (if None, logs on stdout only)
"""
handlers = [LogDatabaseHandler(job_id)] # log on db always
if log_file is None:
# add a StreamHandler if not already there
if not any(h for h in logging.root.handlers
if isinstance(h, logging.StreamHandler)):
handlers.append(LogStreamHandler(job_id))
else:
handlers.append(LogFileHandler(job_id, log_file))
for handler in handlers:
logging.root.addHandler(handler)
init(job_id, LEVELS.get(log_level, logging.WARNING))
try:
yield
finally:
# sanity check to make sure that the logging on file is working
if (log_file and log_file != os.devnull and
os.path.getsize(log_file) == 0):
logging.root.warn('The log file %s is empty!?' % log_file)
for handler in handlers:
logging.root.removeHandler(handler) | python | def handle(job_id, log_level='info', log_file=None):
"""
Context manager adding and removing log handlers.
:param job_id:
ID of the current job
:param log_level:
one of debug, info, warn, error, critical
:param log_file:
log file path (if None, logs on stdout only)
"""
handlers = [LogDatabaseHandler(job_id)] # log on db always
if log_file is None:
# add a StreamHandler if not already there
if not any(h for h in logging.root.handlers
if isinstance(h, logging.StreamHandler)):
handlers.append(LogStreamHandler(job_id))
else:
handlers.append(LogFileHandler(job_id, log_file))
for handler in handlers:
logging.root.addHandler(handler)
init(job_id, LEVELS.get(log_level, logging.WARNING))
try:
yield
finally:
# sanity check to make sure that the logging on file is working
if (log_file and log_file != os.devnull and
os.path.getsize(log_file) == 0):
logging.root.warn('The log file %s is empty!?' % log_file)
for handler in handlers:
logging.root.removeHandler(handler) | [
"def",
"handle",
"(",
"job_id",
",",
"log_level",
"=",
"'info'",
",",
"log_file",
"=",
"None",
")",
":",
"handlers",
"=",
"[",
"LogDatabaseHandler",
"(",
"job_id",
")",
"]",
"# log on db always",
"if",
"log_file",
"is",
"None",
":",
"# add a StreamHandler if n... | Context manager adding and removing log handlers.
:param job_id:
ID of the current job
:param log_level:
one of debug, info, warn, error, critical
:param log_file:
log file path (if None, logs on stdout only) | [
"Context",
"manager",
"adding",
"and",
"removing",
"log",
"handlers",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/logs.py#L122-L152 | train | 213,969 |
gem/oq-engine | openquake/hazardlib/scalerel/wc1994_qcss.py | WC1994_QCSS.get_median_area | def get_median_area(self, mag, rake):
"""
The values are a function of magnitude.
"""
# strike slip
length = 10.0 ** (-2.57 + 0.62 * mag)
seis_wid = 20.0
# estimate area based on length
if length < seis_wid:
return length ** 2.
else:
return length * seis_wid | python | def get_median_area(self, mag, rake):
"""
The values are a function of magnitude.
"""
# strike slip
length = 10.0 ** (-2.57 + 0.62 * mag)
seis_wid = 20.0
# estimate area based on length
if length < seis_wid:
return length ** 2.
else:
return length * seis_wid | [
"def",
"get_median_area",
"(",
"self",
",",
"mag",
",",
"rake",
")",
":",
"# strike slip",
"length",
"=",
"10.0",
"**",
"(",
"-",
"2.57",
"+",
"0.62",
"*",
"mag",
")",
"seis_wid",
"=",
"20.0",
"# estimate area based on length",
"if",
"length",
"<",
"seis_w... | The values are a function of magnitude. | [
"The",
"values",
"are",
"a",
"function",
"of",
"magnitude",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/scalerel/wc1994_qcss.py#L43-L55 | train | 213,970 |
gem/oq-engine | openquake/hazardlib/gsim/si_midorikawa_1999.py | _construct_surface | def _construct_surface(lons, lats, upper_depth, lower_depth):
"""
Utility method that constructs and return a simple fault surface with top
edge specified by `lons` and `lats` and extending vertically from
`upper_depth` to `lower_depth`.
The underlying mesh is built by repeating the same coordinates
(`lons` and `lats`) at the two specified depth levels.
"""
depths = np.array([
np.zeros_like(lons) + upper_depth,
np.zeros_like(lats) + lower_depth
])
mesh = RectangularMesh(
np.tile(lons, (2, 1)), np.tile(lats, (2, 1)), depths
)
return SimpleFaultSurface(mesh) | python | def _construct_surface(lons, lats, upper_depth, lower_depth):
"""
Utility method that constructs and return a simple fault surface with top
edge specified by `lons` and `lats` and extending vertically from
`upper_depth` to `lower_depth`.
The underlying mesh is built by repeating the same coordinates
(`lons` and `lats`) at the two specified depth levels.
"""
depths = np.array([
np.zeros_like(lons) + upper_depth,
np.zeros_like(lats) + lower_depth
])
mesh = RectangularMesh(
np.tile(lons, (2, 1)), np.tile(lats, (2, 1)), depths
)
return SimpleFaultSurface(mesh) | [
"def",
"_construct_surface",
"(",
"lons",
",",
"lats",
",",
"upper_depth",
",",
"lower_depth",
")",
":",
"depths",
"=",
"np",
".",
"array",
"(",
"[",
"np",
".",
"zeros_like",
"(",
"lons",
")",
"+",
"upper_depth",
",",
"np",
".",
"zeros_like",
"(",
"lat... | Utility method that constructs and return a simple fault surface with top
edge specified by `lons` and `lats` and extending vertically from
`upper_depth` to `lower_depth`.
The underlying mesh is built by repeating the same coordinates
(`lons` and `lats`) at the two specified depth levels. | [
"Utility",
"method",
"that",
"constructs",
"and",
"return",
"a",
"simple",
"fault",
"surface",
"with",
"top",
"edge",
"specified",
"by",
"lons",
"and",
"lats",
"and",
"extending",
"vertically",
"from",
"upper_depth",
"to",
"lower_depth",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/si_midorikawa_1999.py#L57-L74 | train | 213,971 |
gem/oq-engine | openquake/hazardlib/gsim/si_midorikawa_1999.py | _get_min_distance_to_sub_trench | def _get_min_distance_to_sub_trench(lons, lats):
"""
Compute and return minimum distance between subduction trench
and points specified by 'lon' and 'lat'
The method creates an instance of
:class:`openquake.hazardlib.geo.SimpleFaultSurface` to model the subduction
trench. The surface is assumed vertical and extending from 0 to 10 km
depth.
The 10 km depth value is arbitrary given that distance calculation depend
only on top edge depth. The method calls then
:meth:`openquake.hazardlib.geo.base.BaseSurface.get_rx_distance`
and return its absolute value.
"""
trench = _construct_surface(SUB_TRENCH_LONS, SUB_TRENCH_LATS, 0., 10.)
sites = Mesh(lons, lats, None)
return np.abs(trench.get_rx_distance(sites)) | python | def _get_min_distance_to_sub_trench(lons, lats):
"""
Compute and return minimum distance between subduction trench
and points specified by 'lon' and 'lat'
The method creates an instance of
:class:`openquake.hazardlib.geo.SimpleFaultSurface` to model the subduction
trench. The surface is assumed vertical and extending from 0 to 10 km
depth.
The 10 km depth value is arbitrary given that distance calculation depend
only on top edge depth. The method calls then
:meth:`openquake.hazardlib.geo.base.BaseSurface.get_rx_distance`
and return its absolute value.
"""
trench = _construct_surface(SUB_TRENCH_LONS, SUB_TRENCH_LATS, 0., 10.)
sites = Mesh(lons, lats, None)
return np.abs(trench.get_rx_distance(sites)) | [
"def",
"_get_min_distance_to_sub_trench",
"(",
"lons",
",",
"lats",
")",
":",
"trench",
"=",
"_construct_surface",
"(",
"SUB_TRENCH_LONS",
",",
"SUB_TRENCH_LATS",
",",
"0.",
",",
"10.",
")",
"sites",
"=",
"Mesh",
"(",
"lons",
",",
"lats",
",",
"None",
")",
... | Compute and return minimum distance between subduction trench
and points specified by 'lon' and 'lat'
The method creates an instance of
:class:`openquake.hazardlib.geo.SimpleFaultSurface` to model the subduction
trench. The surface is assumed vertical and extending from 0 to 10 km
depth.
The 10 km depth value is arbitrary given that distance calculation depend
only on top edge depth. The method calls then
:meth:`openquake.hazardlib.geo.base.BaseSurface.get_rx_distance`
and return its absolute value. | [
"Compute",
"and",
"return",
"minimum",
"distance",
"between",
"subduction",
"trench",
"and",
"points",
"specified",
"by",
"lon",
"and",
"lat"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/si_midorikawa_1999.py#L77-L93 | train | 213,972 |
gem/oq-engine | openquake/hazardlib/gsim/si_midorikawa_1999.py | _get_min_distance_to_volcanic_front | def _get_min_distance_to_volcanic_front(lons, lats):
"""
Compute and return minimum distance between volcanic front and points
specified by 'lon' and 'lat'.
Distance is negative if point is located east of the volcanic front,
positive otherwise.
The method uses the same approach as :meth:`_get_min_distance_to_sub_trench`
but final distance is returned without taking the absolute value.
"""
vf = _construct_surface(VOLCANIC_FRONT_LONS, VOLCANIC_FRONT_LATS, 0., 10.)
sites = Mesh(lons, lats, None)
return vf.get_rx_distance(sites) | python | def _get_min_distance_to_volcanic_front(lons, lats):
"""
Compute and return minimum distance between volcanic front and points
specified by 'lon' and 'lat'.
Distance is negative if point is located east of the volcanic front,
positive otherwise.
The method uses the same approach as :meth:`_get_min_distance_to_sub_trench`
but final distance is returned without taking the absolute value.
"""
vf = _construct_surface(VOLCANIC_FRONT_LONS, VOLCANIC_FRONT_LATS, 0., 10.)
sites = Mesh(lons, lats, None)
return vf.get_rx_distance(sites) | [
"def",
"_get_min_distance_to_volcanic_front",
"(",
"lons",
",",
"lats",
")",
":",
"vf",
"=",
"_construct_surface",
"(",
"VOLCANIC_FRONT_LONS",
",",
"VOLCANIC_FRONT_LATS",
",",
"0.",
",",
"10.",
")",
"sites",
"=",
"Mesh",
"(",
"lons",
",",
"lats",
",",
"None",
... | Compute and return minimum distance between volcanic front and points
specified by 'lon' and 'lat'.
Distance is negative if point is located east of the volcanic front,
positive otherwise.
The method uses the same approach as :meth:`_get_min_distance_to_sub_trench`
but final distance is returned without taking the absolute value. | [
"Compute",
"and",
"return",
"minimum",
"distance",
"between",
"volcanic",
"front",
"and",
"points",
"specified",
"by",
"lon",
"and",
"lat",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/si_midorikawa_1999.py#L96-L109 | train | 213,973 |
gem/oq-engine | openquake/hazardlib/gsim/si_midorikawa_1999.py | _apply_subduction_trench_correction | def _apply_subduction_trench_correction(mean, x_tr, H, rrup, imt):
"""
Implement equation for subduction trench correction as described in
equation 3.5.2-1, page 3-148 of "Technical Reports on National Seismic
Hazard Maps for Japan"
"""
if imt.name == 'PGV':
V1 = 10 ** ((-4.021e-5 * x_tr + 9.905e-3) * (H - 30))
V2 = np.maximum(1., (10 ** (-0.012)) * ((rrup / 300.) ** 2.064))
corr = V2
if H > 30:
corr *= V1
else:
V2 = np.maximum(1., (10 ** (+0.13)) * ((rrup / 300.) ** 3.2))
corr = V2
if H > 30:
V1 = 10 ** ((-8.1e-5 * x_tr + 2.0e-2) * (H - 30))
corr *= V1
return np.log(np.exp(mean) * corr) | python | def _apply_subduction_trench_correction(mean, x_tr, H, rrup, imt):
"""
Implement equation for subduction trench correction as described in
equation 3.5.2-1, page 3-148 of "Technical Reports on National Seismic
Hazard Maps for Japan"
"""
if imt.name == 'PGV':
V1 = 10 ** ((-4.021e-5 * x_tr + 9.905e-3) * (H - 30))
V2 = np.maximum(1., (10 ** (-0.012)) * ((rrup / 300.) ** 2.064))
corr = V2
if H > 30:
corr *= V1
else:
V2 = np.maximum(1., (10 ** (+0.13)) * ((rrup / 300.) ** 3.2))
corr = V2
if H > 30:
V1 = 10 ** ((-8.1e-5 * x_tr + 2.0e-2) * (H - 30))
corr *= V1
return np.log(np.exp(mean) * corr) | [
"def",
"_apply_subduction_trench_correction",
"(",
"mean",
",",
"x_tr",
",",
"H",
",",
"rrup",
",",
"imt",
")",
":",
"if",
"imt",
".",
"name",
"==",
"'PGV'",
":",
"V1",
"=",
"10",
"**",
"(",
"(",
"-",
"4.021e-5",
"*",
"x_tr",
"+",
"9.905e-3",
")",
... | Implement equation for subduction trench correction as described in
equation 3.5.2-1, page 3-148 of "Technical Reports on National Seismic
Hazard Maps for Japan" | [
"Implement",
"equation",
"for",
"subduction",
"trench",
"correction",
"as",
"described",
"in",
"equation",
"3",
".",
"5",
".",
"2",
"-",
"1",
"page",
"3",
"-",
"148",
"of",
"Technical",
"Reports",
"on",
"National",
"Seismic",
"Hazard",
"Maps",
"for",
"Japa... | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/si_midorikawa_1999.py#L112-L130 | train | 213,974 |
gem/oq-engine | openquake/hazardlib/gsim/si_midorikawa_1999.py | _apply_volcanic_front_correction | def _apply_volcanic_front_correction(mean, x_vf, H, imt):
"""
Implement equation for volcanic front correction as described in equation
3.5.2.-2, page 3-149 of "Technical Reports on National Seismic
Hazard Maps for Japan"
"""
V1 = np.zeros_like(x_vf)
if imt.name == 'PGV':
idx = x_vf <= 75
V1[idx] = 4.28e-5 * x_vf[idx] * (H - 30)
idx = x_vf > 75
V1[idx] = 3.21e-3 * (H - 30)
V1 = 10 ** V1
else:
idx = x_vf <= 75
V1[idx] = 7.06e-5 * x_vf[idx] * (H - 30)
idx = x_vf > 75
V1[idx] = 5.30e-3 * (H - 30)
V1 = 10 ** V1
return np.log(np.exp(mean) * V1) | python | def _apply_volcanic_front_correction(mean, x_vf, H, imt):
"""
Implement equation for volcanic front correction as described in equation
3.5.2.-2, page 3-149 of "Technical Reports on National Seismic
Hazard Maps for Japan"
"""
V1 = np.zeros_like(x_vf)
if imt.name == 'PGV':
idx = x_vf <= 75
V1[idx] = 4.28e-5 * x_vf[idx] * (H - 30)
idx = x_vf > 75
V1[idx] = 3.21e-3 * (H - 30)
V1 = 10 ** V1
else:
idx = x_vf <= 75
V1[idx] = 7.06e-5 * x_vf[idx] * (H - 30)
idx = x_vf > 75
V1[idx] = 5.30e-3 * (H - 30)
V1 = 10 ** V1
return np.log(np.exp(mean) * V1) | [
"def",
"_apply_volcanic_front_correction",
"(",
"mean",
",",
"x_vf",
",",
"H",
",",
"imt",
")",
":",
"V1",
"=",
"np",
".",
"zeros_like",
"(",
"x_vf",
")",
"if",
"imt",
".",
"name",
"==",
"'PGV'",
":",
"idx",
"=",
"x_vf",
"<=",
"75",
"V1",
"[",
"idx... | Implement equation for volcanic front correction as described in equation
3.5.2.-2, page 3-149 of "Technical Reports on National Seismic
Hazard Maps for Japan" | [
"Implement",
"equation",
"for",
"volcanic",
"front",
"correction",
"as",
"described",
"in",
"equation",
"3",
".",
"5",
".",
"2",
".",
"-",
"2",
"page",
"3",
"-",
"149",
"of",
"Technical",
"Reports",
"on",
"National",
"Seismic",
"Hazard",
"Maps",
"for",
"... | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/si_midorikawa_1999.py#L133-L152 | train | 213,975 |
gem/oq-engine | openquake/hazardlib/gsim/si_midorikawa_1999.py | SiMidorikawa1999Asc.get_mean_and_stddevs | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
Implements equation 3.5.1-1 page 148 for mean value and equation
3.5.5-2 page 151 for total standard deviation.
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
mean = self._get_mean(imt, rup.mag, rup.hypo_depth, dists.rrup, d=0)
stddevs = self._get_stddevs(stddev_types, dists.rrup)
mean = self._apply_amplification_factor(mean, sites.vs30)
return mean, stddevs | python | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
Implements equation 3.5.1-1 page 148 for mean value and equation
3.5.5-2 page 151 for total standard deviation.
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
mean = self._get_mean(imt, rup.mag, rup.hypo_depth, dists.rrup, d=0)
stddevs = self._get_stddevs(stddev_types, dists.rrup)
mean = self._apply_amplification_factor(mean, sites.vs30)
return mean, stddevs | [
"def",
"get_mean_and_stddevs",
"(",
"self",
",",
"sites",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stddev_types",
")",
":",
"mean",
"=",
"self",
".",
"_get_mean",
"(",
"imt",
",",
"rup",
".",
"mag",
",",
"rup",
".",
"hypo_depth",
",",
"dists",
"."... | Implements equation 3.5.1-1 page 148 for mean value and equation
3.5.5-2 page 151 for total standard deviation.
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. | [
"Implements",
"equation",
"3",
".",
"5",
".",
"1",
"-",
"1",
"page",
"148",
"for",
"mean",
"value",
"and",
"equation",
"3",
".",
"5",
".",
"5",
"-",
"2",
"page",
"151",
"for",
"total",
"standard",
"deviation",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/si_midorikawa_1999.py#L195-L207 | train | 213,976 |
gem/oq-engine | openquake/hazardlib/gsim/si_midorikawa_1999.py | SiMidorikawa1999Asc._get_mean | def _get_mean(self, imt, mag, hypo_depth, rrup, d):
"""
Return mean value as defined in equation 3.5.1-1 page 148
"""
# clip magnitude at 8.3 as per note at page 3-36 in table Table 3.3.2-6
# in "Technical Reports on National Seismic Hazard Maps for Japan"
mag = min(mag, 8.3)
if imt.name == 'PGV':
mean = (
0.58 * mag +
0.0038 * hypo_depth +
d -
1.29 -
np.log10(rrup + 0.0028 * 10 ** (0.5 * mag)) -
0.002 * rrup
)
else:
mean = (
0.50 * mag +
0.0043 * hypo_depth +
d +
0.61 -
np.log10(rrup + 0.0055 * 10 ** (0.5 * mag)) -
0.003 * rrup
)
mean = np.log10(10**(mean)/(g*100))
return mean | python | def _get_mean(self, imt, mag, hypo_depth, rrup, d):
"""
Return mean value as defined in equation 3.5.1-1 page 148
"""
# clip magnitude at 8.3 as per note at page 3-36 in table Table 3.3.2-6
# in "Technical Reports on National Seismic Hazard Maps for Japan"
mag = min(mag, 8.3)
if imt.name == 'PGV':
mean = (
0.58 * mag +
0.0038 * hypo_depth +
d -
1.29 -
np.log10(rrup + 0.0028 * 10 ** (0.5 * mag)) -
0.002 * rrup
)
else:
mean = (
0.50 * mag +
0.0043 * hypo_depth +
d +
0.61 -
np.log10(rrup + 0.0055 * 10 ** (0.5 * mag)) -
0.003 * rrup
)
mean = np.log10(10**(mean)/(g*100))
return mean | [
"def",
"_get_mean",
"(",
"self",
",",
"imt",
",",
"mag",
",",
"hypo_depth",
",",
"rrup",
",",
"d",
")",
":",
"# clip magnitude at 8.3 as per note at page 3-36 in table Table 3.3.2-6",
"# in \"Technical Reports on National Seismic Hazard Maps for Japan\"",
"mag",
"=",
"min",
... | Return mean value as defined in equation 3.5.1-1 page 148 | [
"Return",
"mean",
"value",
"as",
"defined",
"in",
"equation",
"3",
".",
"5",
".",
"1",
"-",
"1",
"page",
"148"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/si_midorikawa_1999.py#L209-L236 | train | 213,977 |
gem/oq-engine | openquake/hazardlib/gsim/si_midorikawa_1999.py | SiMidorikawa1999Asc._get_stddevs | def _get_stddevs(self, stddev_types, rrup):
"""
Return standard deviations as defined in equation 3.5.5-2 page 151
"""
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
std = np.zeros_like(rrup)
std[rrup <= 20] = 0.23
idx = (rrup > 20) & (rrup <= 30)
std[idx] = 0.23 - 0.03 * np.log10(rrup[idx] / 20) / np.log10(30. / 20.)
std[rrup > 30] = 0.20
# convert from log10 to ln
std = np.log(10 ** std)
return [std for stddev_type in stddev_types] | python | def _get_stddevs(self, stddev_types, rrup):
"""
Return standard deviations as defined in equation 3.5.5-2 page 151
"""
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
std = np.zeros_like(rrup)
std[rrup <= 20] = 0.23
idx = (rrup > 20) & (rrup <= 30)
std[idx] = 0.23 - 0.03 * np.log10(rrup[idx] / 20) / np.log10(30. / 20.)
std[rrup > 30] = 0.20
# convert from log10 to ln
std = np.log(10 ** std)
return [std for stddev_type in stddev_types] | [
"def",
"_get_stddevs",
"(",
"self",
",",
"stddev_types",
",",
"rrup",
")",
":",
"assert",
"all",
"(",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
"for",
"stddev_type",
"in",
"stddev_types",
")",
"std",
"=",
"np",
".",
"zeros_like",
... | Return standard deviations as defined in equation 3.5.5-2 page 151 | [
"Return",
"standard",
"deviations",
"as",
"defined",
"in",
"equation",
"3",
".",
"5",
".",
"5",
"-",
"2",
"page",
"151"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/si_midorikawa_1999.py#L238-L251 | train | 213,978 |
gem/oq-engine | openquake/hazardlib/gsim/si_midorikawa_1999.py | SiMidorikawa1999SInter._get_stddevs | def _get_stddevs(self, stddev_types, pgv):
"""
Return standard deviations as defined in equation 3.5.5-1 page 151
"""
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
std = np.zeros_like(pgv)
std[pgv <= 25] = 0.20
idx = (pgv > 25) & (pgv <= 50)
std[idx] = 0.20 - 0.05 * (pgv[idx] - 25) / 25
std[pgv > 50] = 0.15
# convert from log10 to ln
std = np.log(10 ** std)
return [std for stddev_type in stddev_types] | python | def _get_stddevs(self, stddev_types, pgv):
"""
Return standard deviations as defined in equation 3.5.5-1 page 151
"""
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
std = np.zeros_like(pgv)
std[pgv <= 25] = 0.20
idx = (pgv > 25) & (pgv <= 50)
std[idx] = 0.20 - 0.05 * (pgv[idx] - 25) / 25
std[pgv > 50] = 0.15
# convert from log10 to ln
std = np.log(10 ** std)
return [std for stddev_type in stddev_types] | [
"def",
"_get_stddevs",
"(",
"self",
",",
"stddev_types",
",",
"pgv",
")",
":",
"assert",
"all",
"(",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
"for",
"stddev_type",
"in",
"stddev_types",
")",
"std",
"=",
"np",
".",
"zeros_like",
... | Return standard deviations as defined in equation 3.5.5-1 page 151 | [
"Return",
"standard",
"deviations",
"as",
"defined",
"in",
"equation",
"3",
".",
"5",
".",
"5",
"-",
"1",
"page",
"151"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/si_midorikawa_1999.py#L305-L318 | train | 213,979 |
gem/oq-engine | openquake/commands/plot_memory.py | plot_memory | def plot_memory(calc_id=-1):
"""
Plot the memory occupation
"""
dstore = util.read(calc_id)
plots = []
for task_name in dstore['task_info']:
mem = dstore['task_info/' + task_name]['mem_gb']
plots.append((task_name, mem))
plt = make_figure(plots)
plt.show() | python | def plot_memory(calc_id=-1):
"""
Plot the memory occupation
"""
dstore = util.read(calc_id)
plots = []
for task_name in dstore['task_info']:
mem = dstore['task_info/' + task_name]['mem_gb']
plots.append((task_name, mem))
plt = make_figure(plots)
plt.show() | [
"def",
"plot_memory",
"(",
"calc_id",
"=",
"-",
"1",
")",
":",
"dstore",
"=",
"util",
".",
"read",
"(",
"calc_id",
")",
"plots",
"=",
"[",
"]",
"for",
"task_name",
"in",
"dstore",
"[",
"'task_info'",
"]",
":",
"mem",
"=",
"dstore",
"[",
"'task_info/'... | Plot the memory occupation | [
"Plot",
"the",
"memory",
"occupation"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/plot_memory.py#L42-L52 | train | 213,980 |
gem/oq-engine | openquake/commonlib/calc.py | convert_to_array | def convert_to_array(pmap, nsites, imtls, inner_idx=0):
"""
Convert the probability map into a composite array with header
of the form PGA-0.1, PGA-0.2 ...
:param pmap: probability map
:param nsites: total number of sites
:param imtls: a DictArray with IMT and levels
:returns: a composite array of lenght nsites
"""
lst = []
# build the export dtype, of the form PGA-0.1, PGA-0.2 ...
for imt, imls in imtls.items():
for iml in imls:
lst.append(('%s-%s' % (imt, iml), F32))
curves = numpy.zeros(nsites, numpy.dtype(lst))
for sid, pcurve in pmap.items():
curve = curves[sid]
idx = 0
for imt, imls in imtls.items():
for iml in imls:
curve['%s-%s' % (imt, iml)] = pcurve.array[idx, inner_idx]
idx += 1
return curves | python | def convert_to_array(pmap, nsites, imtls, inner_idx=0):
"""
Convert the probability map into a composite array with header
of the form PGA-0.1, PGA-0.2 ...
:param pmap: probability map
:param nsites: total number of sites
:param imtls: a DictArray with IMT and levels
:returns: a composite array of lenght nsites
"""
lst = []
# build the export dtype, of the form PGA-0.1, PGA-0.2 ...
for imt, imls in imtls.items():
for iml in imls:
lst.append(('%s-%s' % (imt, iml), F32))
curves = numpy.zeros(nsites, numpy.dtype(lst))
for sid, pcurve in pmap.items():
curve = curves[sid]
idx = 0
for imt, imls in imtls.items():
for iml in imls:
curve['%s-%s' % (imt, iml)] = pcurve.array[idx, inner_idx]
idx += 1
return curves | [
"def",
"convert_to_array",
"(",
"pmap",
",",
"nsites",
",",
"imtls",
",",
"inner_idx",
"=",
"0",
")",
":",
"lst",
"=",
"[",
"]",
"# build the export dtype, of the form PGA-0.1, PGA-0.2 ...",
"for",
"imt",
",",
"imls",
"in",
"imtls",
".",
"items",
"(",
")",
"... | Convert the probability map into a composite array with header
of the form PGA-0.1, PGA-0.2 ...
:param pmap: probability map
:param nsites: total number of sites
:param imtls: a DictArray with IMT and levels
:returns: a composite array of lenght nsites | [
"Convert",
"the",
"probability",
"map",
"into",
"a",
"composite",
"array",
"with",
"header",
"of",
"the",
"form",
"PGA",
"-",
"0",
".",
"1",
"PGA",
"-",
"0",
".",
"2",
"..."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/calc.py#L45-L68 | train | 213,981 |
gem/oq-engine | openquake/commonlib/calc.py | compute_hazard_maps | def compute_hazard_maps(curves, imls, poes):
"""
Given a set of hazard curve poes, interpolate a hazard map at the specified
``poe``.
:param curves:
2D array of floats. Each row represents a curve, where the values
in the row are the PoEs (Probabilities of Exceedance) corresponding to
``imls``. Each curve corresponds to a geographical location.
:param imls:
Intensity Measure Levels associated with these hazard ``curves``. Type
should be an array-like of floats.
:param poes:
Value(s) on which to interpolate a hazard map from the input
``curves``. Can be an array-like or scalar value (for a single PoE).
:returns:
An array of shape N x P, where N is the number of curves and P the
number of poes.
"""
poes = numpy.array(poes)
if len(poes.shape) == 0:
# `poes` was passed in as a scalar;
# convert it to 1D array of 1 element
poes = poes.reshape(1)
if len(curves.shape) == 1:
# `curves` was passed as 1 dimensional array, there is a single site
curves = curves.reshape((1,) + curves.shape) # 1 x L
L = curves.shape[1] # number of levels
if L != len(imls):
raise ValueError('The curves have %d levels, %d were passed' %
(L, len(imls)))
result = []
with warnings.catch_warnings():
warnings.simplefilter("ignore")
# avoid RuntimeWarning: divide by zero encountered in log
# happening in the classical_tiling tests
imls = numpy.log(numpy.array(imls[::-1]))
for curve in curves:
# the hazard curve, having replaced the too small poes with EPSILON
curve_cutoff = [max(poe, EPSILON) for poe in curve[::-1]]
hmap_val = []
for poe in poes:
# special case when the interpolation poe is bigger than the
# maximum, i.e the iml must be smaller than the minumum
if poe > curve_cutoff[-1]: # the greatest poes in the curve
# extrapolate the iml to zero as per
# https://bugs.launchpad.net/oq-engine/+bug/1292093
# a consequence is that if all poes are zero any poe > 0
# is big and the hmap goes automatically to zero
hmap_val.append(0)
else:
# exp-log interpolation, to reduce numerical errors
# see https://bugs.launchpad.net/oq-engine/+bug/1252770
val = numpy.exp(
numpy.interp(
numpy.log(poe), numpy.log(curve_cutoff), imls))
hmap_val.append(val)
result.append(hmap_val)
return numpy.array(result) | python | def compute_hazard_maps(curves, imls, poes):
"""
Given a set of hazard curve poes, interpolate a hazard map at the specified
``poe``.
:param curves:
2D array of floats. Each row represents a curve, where the values
in the row are the PoEs (Probabilities of Exceedance) corresponding to
``imls``. Each curve corresponds to a geographical location.
:param imls:
Intensity Measure Levels associated with these hazard ``curves``. Type
should be an array-like of floats.
:param poes:
Value(s) on which to interpolate a hazard map from the input
``curves``. Can be an array-like or scalar value (for a single PoE).
:returns:
An array of shape N x P, where N is the number of curves and P the
number of poes.
"""
poes = numpy.array(poes)
if len(poes.shape) == 0:
# `poes` was passed in as a scalar;
# convert it to 1D array of 1 element
poes = poes.reshape(1)
if len(curves.shape) == 1:
# `curves` was passed as 1 dimensional array, there is a single site
curves = curves.reshape((1,) + curves.shape) # 1 x L
L = curves.shape[1] # number of levels
if L != len(imls):
raise ValueError('The curves have %d levels, %d were passed' %
(L, len(imls)))
result = []
with warnings.catch_warnings():
warnings.simplefilter("ignore")
# avoid RuntimeWarning: divide by zero encountered in log
# happening in the classical_tiling tests
imls = numpy.log(numpy.array(imls[::-1]))
for curve in curves:
# the hazard curve, having replaced the too small poes with EPSILON
curve_cutoff = [max(poe, EPSILON) for poe in curve[::-1]]
hmap_val = []
for poe in poes:
# special case when the interpolation poe is bigger than the
# maximum, i.e the iml must be smaller than the minumum
if poe > curve_cutoff[-1]: # the greatest poes in the curve
# extrapolate the iml to zero as per
# https://bugs.launchpad.net/oq-engine/+bug/1292093
# a consequence is that if all poes are zero any poe > 0
# is big and the hmap goes automatically to zero
hmap_val.append(0)
else:
# exp-log interpolation, to reduce numerical errors
# see https://bugs.launchpad.net/oq-engine/+bug/1252770
val = numpy.exp(
numpy.interp(
numpy.log(poe), numpy.log(curve_cutoff), imls))
hmap_val.append(val)
result.append(hmap_val)
return numpy.array(result) | [
"def",
"compute_hazard_maps",
"(",
"curves",
",",
"imls",
",",
"poes",
")",
":",
"poes",
"=",
"numpy",
".",
"array",
"(",
"poes",
")",
"if",
"len",
"(",
"poes",
".",
"shape",
")",
"==",
"0",
":",
"# `poes` was passed in as a scalar;",
"# convert it to 1D arr... | Given a set of hazard curve poes, interpolate a hazard map at the specified
``poe``.
:param curves:
2D array of floats. Each row represents a curve, where the values
in the row are the PoEs (Probabilities of Exceedance) corresponding to
``imls``. Each curve corresponds to a geographical location.
:param imls:
Intensity Measure Levels associated with these hazard ``curves``. Type
should be an array-like of floats.
:param poes:
Value(s) on which to interpolate a hazard map from the input
``curves``. Can be an array-like or scalar value (for a single PoE).
:returns:
An array of shape N x P, where N is the number of curves and P the
number of poes. | [
"Given",
"a",
"set",
"of",
"hazard",
"curve",
"poes",
"interpolate",
"a",
"hazard",
"map",
"at",
"the",
"specified",
"poe",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/calc.py#L77-L139 | train | 213,982 |
gem/oq-engine | openquake/commonlib/calc.py | make_hmap | def make_hmap(pmap, imtls, poes):
"""
Compute the hazard maps associated to the passed probability map.
:param pmap: hazard curves in the form of a ProbabilityMap
:param imtls: DictArray with M intensity measure types
:param poes: P PoEs where to compute the maps
:returns: a ProbabilityMap with size (N, M, P)
"""
M, P = len(imtls), len(poes)
hmap = probability_map.ProbabilityMap.build(M, P, pmap, dtype=F32)
if len(pmap) == 0:
return hmap # empty hazard map
for i, imt in enumerate(imtls):
curves = numpy.array([pmap[sid].array[imtls(imt), 0]
for sid in pmap.sids])
data = compute_hazard_maps(curves, imtls[imt], poes) # array (N, P)
for sid, value in zip(pmap.sids, data):
array = hmap[sid].array
for j, val in enumerate(value):
array[i, j] = val
return hmap | python | def make_hmap(pmap, imtls, poes):
"""
Compute the hazard maps associated to the passed probability map.
:param pmap: hazard curves in the form of a ProbabilityMap
:param imtls: DictArray with M intensity measure types
:param poes: P PoEs where to compute the maps
:returns: a ProbabilityMap with size (N, M, P)
"""
M, P = len(imtls), len(poes)
hmap = probability_map.ProbabilityMap.build(M, P, pmap, dtype=F32)
if len(pmap) == 0:
return hmap # empty hazard map
for i, imt in enumerate(imtls):
curves = numpy.array([pmap[sid].array[imtls(imt), 0]
for sid in pmap.sids])
data = compute_hazard_maps(curves, imtls[imt], poes) # array (N, P)
for sid, value in zip(pmap.sids, data):
array = hmap[sid].array
for j, val in enumerate(value):
array[i, j] = val
return hmap | [
"def",
"make_hmap",
"(",
"pmap",
",",
"imtls",
",",
"poes",
")",
":",
"M",
",",
"P",
"=",
"len",
"(",
"imtls",
")",
",",
"len",
"(",
"poes",
")",
"hmap",
"=",
"probability_map",
".",
"ProbabilityMap",
".",
"build",
"(",
"M",
",",
"P",
",",
"pmap"... | Compute the hazard maps associated to the passed probability map.
:param pmap: hazard curves in the form of a ProbabilityMap
:param imtls: DictArray with M intensity measure types
:param poes: P PoEs where to compute the maps
:returns: a ProbabilityMap with size (N, M, P) | [
"Compute",
"the",
"hazard",
"maps",
"associated",
"to",
"the",
"passed",
"probability",
"map",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/calc.py#L184-L205 | train | 213,983 |
gem/oq-engine | openquake/commonlib/calc.py | make_uhs | def make_uhs(hmap, info):
"""
Make Uniform Hazard Spectra curves for each location.
:param hmap:
array of shape (N, M, P)
:param info:
a dictionary with keys poes, imtls, uhs_dt
:returns:
a composite array containing uniform hazard spectra
"""
uhs = numpy.zeros(len(hmap), info['uhs_dt'])
for p, poe in enumerate(info['poes']):
for m, imt in enumerate(info['imtls']):
if imt.startswith(('PGA', 'SA')):
uhs[str(poe)][imt] = hmap[:, m, p]
return uhs | python | def make_uhs(hmap, info):
"""
Make Uniform Hazard Spectra curves for each location.
:param hmap:
array of shape (N, M, P)
:param info:
a dictionary with keys poes, imtls, uhs_dt
:returns:
a composite array containing uniform hazard spectra
"""
uhs = numpy.zeros(len(hmap), info['uhs_dt'])
for p, poe in enumerate(info['poes']):
for m, imt in enumerate(info['imtls']):
if imt.startswith(('PGA', 'SA')):
uhs[str(poe)][imt] = hmap[:, m, p]
return uhs | [
"def",
"make_uhs",
"(",
"hmap",
",",
"info",
")",
":",
"uhs",
"=",
"numpy",
".",
"zeros",
"(",
"len",
"(",
"hmap",
")",
",",
"info",
"[",
"'uhs_dt'",
"]",
")",
"for",
"p",
",",
"poe",
"in",
"enumerate",
"(",
"info",
"[",
"'poes'",
"]",
")",
":"... | Make Uniform Hazard Spectra curves for each location.
:param hmap:
array of shape (N, M, P)
:param info:
a dictionary with keys poes, imtls, uhs_dt
:returns:
a composite array containing uniform hazard spectra | [
"Make",
"Uniform",
"Hazard",
"Spectra",
"curves",
"for",
"each",
"location",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/calc.py#L232-L248 | train | 213,984 |
gem/oq-engine | openquake/commonlib/calc.py | RuptureData.to_array | def to_array(self, ebruptures):
"""
Convert a list of ebruptures into an array of dtype RuptureRata.dt
"""
data = []
for ebr in ebruptures:
rup = ebr.rupture
self.cmaker.add_rup_params(rup)
ruptparams = tuple(getattr(rup, param) for param in self.params)
point = rup.surface.get_middle_point()
multi_lons, multi_lats = rup.surface.get_surface_boundaries()
bounds = ','.join('((%s))' % ','.join(
'%.5f %.5f' % (lon, lat) for lon, lat in zip(lons, lats))
for lons, lats in zip(multi_lons, multi_lats))
try:
rate = ebr.rupture.occurrence_rate
except AttributeError: # for nonparametric sources
rate = numpy.nan
data.append(
(ebr.serial, ebr.srcidx, ebr.n_occ, rate,
rup.mag, point.x, point.y, point.z, rup.surface.get_strike(),
rup.surface.get_dip(), rup.rake,
'MULTIPOLYGON(%s)' % decode(bounds)) + ruptparams)
return numpy.array(data, self.dt) | python | def to_array(self, ebruptures):
"""
Convert a list of ebruptures into an array of dtype RuptureRata.dt
"""
data = []
for ebr in ebruptures:
rup = ebr.rupture
self.cmaker.add_rup_params(rup)
ruptparams = tuple(getattr(rup, param) for param in self.params)
point = rup.surface.get_middle_point()
multi_lons, multi_lats = rup.surface.get_surface_boundaries()
bounds = ','.join('((%s))' % ','.join(
'%.5f %.5f' % (lon, lat) for lon, lat in zip(lons, lats))
for lons, lats in zip(multi_lons, multi_lats))
try:
rate = ebr.rupture.occurrence_rate
except AttributeError: # for nonparametric sources
rate = numpy.nan
data.append(
(ebr.serial, ebr.srcidx, ebr.n_occ, rate,
rup.mag, point.x, point.y, point.z, rup.surface.get_strike(),
rup.surface.get_dip(), rup.rake,
'MULTIPOLYGON(%s)' % decode(bounds)) + ruptparams)
return numpy.array(data, self.dt) | [
"def",
"to_array",
"(",
"self",
",",
"ebruptures",
")",
":",
"data",
"=",
"[",
"]",
"for",
"ebr",
"in",
"ebruptures",
":",
"rup",
"=",
"ebr",
".",
"rupture",
"self",
".",
"cmaker",
".",
"add_rup_params",
"(",
"rup",
")",
"ruptparams",
"=",
"tuple",
"... | Convert a list of ebruptures into an array of dtype RuptureRata.dt | [
"Convert",
"a",
"list",
"of",
"ebruptures",
"into",
"an",
"array",
"of",
"dtype",
"RuptureRata",
".",
"dt"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/calc.py#L268-L291 | train | 213,985 |
gem/oq-engine | openquake/commonlib/calc.py | RuptureSerializer.save | def save(self, rup_array):
"""
Store the ruptures in array format.
"""
self.nruptures += len(rup_array)
offset = len(self.datastore['rupgeoms'])
rup_array.array['gidx1'] += offset
rup_array.array['gidx2'] += offset
previous = self.datastore.get_attr('ruptures', 'nbytes', 0)
self.datastore.extend(
'ruptures', rup_array, nbytes=previous + rup_array.nbytes)
self.datastore.extend('rupgeoms', rup_array.geom)
# TODO: PMFs for nonparametric ruptures are not stored
self.datastore.flush() | python | def save(self, rup_array):
"""
Store the ruptures in array format.
"""
self.nruptures += len(rup_array)
offset = len(self.datastore['rupgeoms'])
rup_array.array['gidx1'] += offset
rup_array.array['gidx2'] += offset
previous = self.datastore.get_attr('ruptures', 'nbytes', 0)
self.datastore.extend(
'ruptures', rup_array, nbytes=previous + rup_array.nbytes)
self.datastore.extend('rupgeoms', rup_array.geom)
# TODO: PMFs for nonparametric ruptures are not stored
self.datastore.flush() | [
"def",
"save",
"(",
"self",
",",
"rup_array",
")",
":",
"self",
".",
"nruptures",
"+=",
"len",
"(",
"rup_array",
")",
"offset",
"=",
"len",
"(",
"self",
".",
"datastore",
"[",
"'rupgeoms'",
"]",
")",
"rup_array",
".",
"array",
"[",
"'gidx1'",
"]",
"+... | Store the ruptures in array format. | [
"Store",
"the",
"ruptures",
"in",
"array",
"format",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/calc.py#L307-L320 | train | 213,986 |
gem/oq-engine | openquake/commonlib/calc.py | RuptureSerializer.close | def close(self):
"""
Save information about the rupture codes as attributes of the
'ruptures' dataset.
"""
if 'ruptures' not in self.datastore: # for UCERF
return
codes = numpy.unique(self.datastore['ruptures']['code'])
attr = {'code_%d' % code: ' '.join(
cls.__name__ for cls in BaseRupture.types[code])
for code in codes}
self.datastore.set_attrs('ruptures', **attr) | python | def close(self):
"""
Save information about the rupture codes as attributes of the
'ruptures' dataset.
"""
if 'ruptures' not in self.datastore: # for UCERF
return
codes = numpy.unique(self.datastore['ruptures']['code'])
attr = {'code_%d' % code: ' '.join(
cls.__name__ for cls in BaseRupture.types[code])
for code in codes}
self.datastore.set_attrs('ruptures', **attr) | [
"def",
"close",
"(",
"self",
")",
":",
"if",
"'ruptures'",
"not",
"in",
"self",
".",
"datastore",
":",
"# for UCERF",
"return",
"codes",
"=",
"numpy",
".",
"unique",
"(",
"self",
".",
"datastore",
"[",
"'ruptures'",
"]",
"[",
"'code'",
"]",
")",
"attr"... | Save information about the rupture codes as attributes of the
'ruptures' dataset. | [
"Save",
"information",
"about",
"the",
"rupture",
"codes",
"as",
"attributes",
"of",
"the",
"ruptures",
"dataset",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/calc.py#L322-L333 | train | 213,987 |
gem/oq-engine | openquake/hazardlib/gsim/hong_goda_2007.py | HongGoda2007._compute_nonlinear_magnitude_term | def _compute_nonlinear_magnitude_term(self, C, mag):
"""
Computes the non-linear magnitude term
"""
return self._compute_linear_magnitude_term(C, mag) +\
C["b3"] * ((mag - 7.0) ** 2.) | python | def _compute_nonlinear_magnitude_term(self, C, mag):
"""
Computes the non-linear magnitude term
"""
return self._compute_linear_magnitude_term(C, mag) +\
C["b3"] * ((mag - 7.0) ** 2.) | [
"def",
"_compute_nonlinear_magnitude_term",
"(",
"self",
",",
"C",
",",
"mag",
")",
":",
"return",
"self",
".",
"_compute_linear_magnitude_term",
"(",
"C",
",",
"mag",
")",
"+",
"C",
"[",
"\"b3\"",
"]",
"*",
"(",
"(",
"mag",
"-",
"7.0",
")",
"**",
"2."... | Computes the non-linear magnitude term | [
"Computes",
"the",
"non",
"-",
"linear",
"magnitude",
"term"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/hong_goda_2007.py#L110-L115 | train | 213,988 |
gem/oq-engine | openquake/hazardlib/gsim/hong_goda_2007.py | HongGoda2007._compute_magnitude_distance_term | def _compute_magnitude_distance_term(self, C, rjb, mag):
"""
Returns the magntude dependent distance term
"""
rval = np.sqrt(rjb ** 2. + C["h"] ** 2.)
return (C["b4"] + C["b5"] * (mag - 4.5)) * np.log(rval) | python | def _compute_magnitude_distance_term(self, C, rjb, mag):
"""
Returns the magntude dependent distance term
"""
rval = np.sqrt(rjb ** 2. + C["h"] ** 2.)
return (C["b4"] + C["b5"] * (mag - 4.5)) * np.log(rval) | [
"def",
"_compute_magnitude_distance_term",
"(",
"self",
",",
"C",
",",
"rjb",
",",
"mag",
")",
":",
"rval",
"=",
"np",
".",
"sqrt",
"(",
"rjb",
"**",
"2.",
"+",
"C",
"[",
"\"h\"",
"]",
"**",
"2.",
")",
"return",
"(",
"C",
"[",
"\"b4\"",
"]",
"+",... | Returns the magntude dependent distance term | [
"Returns",
"the",
"magntude",
"dependent",
"distance",
"term"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/hong_goda_2007.py#L123-L128 | train | 213,989 |
gem/oq-engine | openquake/hazardlib/gsim/hong_goda_2007.py | HongGoda2007._get_bnl | def _get_bnl(self, C_AMP, vs30):
"""
Gets the nonlinear term, given by equation 8 of Atkinson & Boore 2006
"""
# Default case 8d
bnl = np.zeros_like(vs30)
if np.all(vs30 >= self.CONSTS["Vref"]):
return bnl
# Case 8a
bnl[vs30 < self.CONSTS["v1"]] = C_AMP["b1sa"]
# Cade 8b
idx = np.logical_and(vs30 > self.CONSTS["v1"],
vs30 <= self.CONSTS["v2"])
if np.any(idx):
bnl[idx] = (C_AMP["b1sa"] - C_AMP["b2sa"]) *\
(np.log(vs30[idx] / self.CONSTS["v2"]) /
np.log(self.CONSTS["v1"] / self.CONSTS["v2"])) + C_AMP["b2sa"]
# Case 8c
idx = np.logical_and(vs30 > self.CONSTS["v2"],
vs30 < self.CONSTS["Vref"])
if np.any(idx):
bnl[idx] = C_AMP["b2sa"] *\
np.log(vs30[idx] / self.CONSTS["Vref"]) /\
np.log(self.CONSTS["v2"] / self.CONSTS["Vref"])
return bnl | python | def _get_bnl(self, C_AMP, vs30):
"""
Gets the nonlinear term, given by equation 8 of Atkinson & Boore 2006
"""
# Default case 8d
bnl = np.zeros_like(vs30)
if np.all(vs30 >= self.CONSTS["Vref"]):
return bnl
# Case 8a
bnl[vs30 < self.CONSTS["v1"]] = C_AMP["b1sa"]
# Cade 8b
idx = np.logical_and(vs30 > self.CONSTS["v1"],
vs30 <= self.CONSTS["v2"])
if np.any(idx):
bnl[idx] = (C_AMP["b1sa"] - C_AMP["b2sa"]) *\
(np.log(vs30[idx] / self.CONSTS["v2"]) /
np.log(self.CONSTS["v1"] / self.CONSTS["v2"])) + C_AMP["b2sa"]
# Case 8c
idx = np.logical_and(vs30 > self.CONSTS["v2"],
vs30 < self.CONSTS["Vref"])
if np.any(idx):
bnl[idx] = C_AMP["b2sa"] *\
np.log(vs30[idx] / self.CONSTS["Vref"]) /\
np.log(self.CONSTS["v2"] / self.CONSTS["Vref"])
return bnl | [
"def",
"_get_bnl",
"(",
"self",
",",
"C_AMP",
",",
"vs30",
")",
":",
"# Default case 8d",
"bnl",
"=",
"np",
".",
"zeros_like",
"(",
"vs30",
")",
"if",
"np",
".",
"all",
"(",
"vs30",
">=",
"self",
".",
"CONSTS",
"[",
"\"Vref\"",
"]",
")",
":",
"retu... | Gets the nonlinear term, given by equation 8 of Atkinson & Boore 2006 | [
"Gets",
"the",
"nonlinear",
"term",
"given",
"by",
"equation",
"8",
"of",
"Atkinson",
"&",
"Boore",
"2006"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/hong_goda_2007.py#L145-L170 | train | 213,990 |
gem/oq-engine | openquake/hazardlib/gsim/hong_goda_2007.py | HongGoda2007._get_stddevs | def _get_stddevs(self, C, stddev_types, stddev_shape):
"""
Returns the standard deviations given in Table 2
"""
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(C["sigtot"] + np.zeros(stddev_shape))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(C['sig2'] + np.zeros(stddev_shape))
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(C['sig1'] + np.zeros(stddev_shape))
return stddevs | python | def _get_stddevs(self, C, stddev_types, stddev_shape):
"""
Returns the standard deviations given in Table 2
"""
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(C["sigtot"] + np.zeros(stddev_shape))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(C['sig2'] + np.zeros(stddev_shape))
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(C['sig1'] + np.zeros(stddev_shape))
return stddevs | [
"def",
"_get_stddevs",
"(",
"self",
",",
"C",
",",
"stddev_types",
",",
"stddev_shape",
")",
":",
"stddevs",
"=",
"[",
"]",
"for",
"stddev_type",
"in",
"stddev_types",
":",
"assert",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
"if",... | Returns the standard deviations given in Table 2 | [
"Returns",
"the",
"standard",
"deviations",
"given",
"in",
"Table",
"2"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/hong_goda_2007.py#L172-L185 | train | 213,991 |
gem/oq-engine | openquake/commands/tidy.py | tidy | def tidy(fnames):
"""
Reformat a NRML file in a canonical form. That also means reducing the
precision of the floats to a standard value. If the file is invalid,
a clear error message is shown.
"""
for fname in fnames:
try:
node = nrml.read(fname)
except ValueError as err:
print(err)
return
with open(fname + '.bak', 'wb') as f:
f.write(open(fname, 'rb').read())
with open(fname, 'wb') as f:
# make sure the xmlns i.e. the NRML version is unchanged
nrml.write(node.nodes, f, writers.FIVEDIGITS, xmlns=node['xmlns'])
print('Reformatted %s, original left in %s.bak' % (fname, fname)) | python | def tidy(fnames):
"""
Reformat a NRML file in a canonical form. That also means reducing the
precision of the floats to a standard value. If the file is invalid,
a clear error message is shown.
"""
for fname in fnames:
try:
node = nrml.read(fname)
except ValueError as err:
print(err)
return
with open(fname + '.bak', 'wb') as f:
f.write(open(fname, 'rb').read())
with open(fname, 'wb') as f:
# make sure the xmlns i.e. the NRML version is unchanged
nrml.write(node.nodes, f, writers.FIVEDIGITS, xmlns=node['xmlns'])
print('Reformatted %s, original left in %s.bak' % (fname, fname)) | [
"def",
"tidy",
"(",
"fnames",
")",
":",
"for",
"fname",
"in",
"fnames",
":",
"try",
":",
"node",
"=",
"nrml",
".",
"read",
"(",
"fname",
")",
"except",
"ValueError",
"as",
"err",
":",
"print",
"(",
"err",
")",
"return",
"with",
"open",
"(",
"fname"... | Reformat a NRML file in a canonical form. That also means reducing the
precision of the floats to a standard value. If the file is invalid,
a clear error message is shown. | [
"Reformat",
"a",
"NRML",
"file",
"in",
"a",
"canonical",
"form",
".",
"That",
"also",
"means",
"reducing",
"the",
"precision",
"of",
"the",
"floats",
"to",
"a",
"standard",
"value",
".",
"If",
"the",
"file",
"is",
"invalid",
"a",
"clear",
"error",
"messa... | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/tidy.py#L24-L41 | train | 213,992 |
gem/oq-engine | openquake/commands/to_hdf5.py | to_hdf5 | def to_hdf5(input):
"""
Convert .xml and .npz files to .hdf5 files.
"""
with performance.Monitor('to_hdf5') as mon:
for input_file in input:
if input_file.endswith('.npz'):
output = convert_npz_hdf5(input_file, input_file[:-3] + 'hdf5')
elif input_file.endswith('.xml'): # for source model files
output = convert_xml_hdf5(input_file, input_file[:-3] + 'hdf5')
else:
continue
print('Generated %s' % output)
print(mon) | python | def to_hdf5(input):
"""
Convert .xml and .npz files to .hdf5 files.
"""
with performance.Monitor('to_hdf5') as mon:
for input_file in input:
if input_file.endswith('.npz'):
output = convert_npz_hdf5(input_file, input_file[:-3] + 'hdf5')
elif input_file.endswith('.xml'): # for source model files
output = convert_xml_hdf5(input_file, input_file[:-3] + 'hdf5')
else:
continue
print('Generated %s' % output)
print(mon) | [
"def",
"to_hdf5",
"(",
"input",
")",
":",
"with",
"performance",
".",
"Monitor",
"(",
"'to_hdf5'",
")",
"as",
"mon",
":",
"for",
"input_file",
"in",
"input",
":",
"if",
"input_file",
".",
"endswith",
"(",
"'.npz'",
")",
":",
"output",
"=",
"convert_npz_h... | Convert .xml and .npz files to .hdf5 files. | [
"Convert",
".",
"xml",
"and",
".",
"npz",
"files",
"to",
".",
"hdf5",
"files",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/to_hdf5.py#L47-L60 | train | 213,993 |
gem/oq-engine | openquake/hazardlib/calc/stochastic.py | get_rup_array | def get_rup_array(ebruptures, srcfilter=nofilter):
"""
Convert a list of EBRuptures into a numpy composite array, by filtering
out the ruptures far away from every site
"""
if not BaseRupture._code:
BaseRupture.init() # initialize rupture codes
rups = []
geoms = []
nbytes = 0
offset = 0
for ebrupture in ebruptures:
rup = ebrupture.rupture
mesh = surface_to_array(rup.surface)
sy, sz = mesh.shape[1:] # sanity checks
assert sy < TWO16, 'Too many multisurfaces: %d' % sy
assert sz < TWO16, 'The rupture mesh spacing is too small'
points = mesh.reshape(3, -1).T # shape (n, 3)
minlon = points[:, 0].min()
minlat = points[:, 1].min()
maxlon = points[:, 0].max()
maxlat = points[:, 1].max()
if srcfilter.integration_distance and len(srcfilter.close_sids(
(minlon, minlat, maxlon, maxlat),
rup.tectonic_region_type, rup.mag)) == 0:
continue
hypo = rup.hypocenter.x, rup.hypocenter.y, rup.hypocenter.z
rate = getattr(rup, 'occurrence_rate', numpy.nan)
tup = (ebrupture.serial, ebrupture.srcidx, ebrupture.grp_id,
rup.code, ebrupture.n_occ, rup.mag, rup.rake, rate,
minlon, minlat, maxlon, maxlat,
hypo, offset, offset + len(points), sy, sz)
offset += len(points)
rups.append(tup)
geoms.append(numpy.array([tuple(p) for p in points], point3d))
nbytes += rupture_dt.itemsize + mesh.nbytes
if not rups:
return ()
dic = dict(geom=numpy.concatenate(geoms), nbytes=nbytes)
# TODO: PMFs for nonparametric ruptures are not converted
return hdf5.ArrayWrapper(numpy.array(rups, rupture_dt), dic) | python | def get_rup_array(ebruptures, srcfilter=nofilter):
"""
Convert a list of EBRuptures into a numpy composite array, by filtering
out the ruptures far away from every site
"""
if not BaseRupture._code:
BaseRupture.init() # initialize rupture codes
rups = []
geoms = []
nbytes = 0
offset = 0
for ebrupture in ebruptures:
rup = ebrupture.rupture
mesh = surface_to_array(rup.surface)
sy, sz = mesh.shape[1:] # sanity checks
assert sy < TWO16, 'Too many multisurfaces: %d' % sy
assert sz < TWO16, 'The rupture mesh spacing is too small'
points = mesh.reshape(3, -1).T # shape (n, 3)
minlon = points[:, 0].min()
minlat = points[:, 1].min()
maxlon = points[:, 0].max()
maxlat = points[:, 1].max()
if srcfilter.integration_distance and len(srcfilter.close_sids(
(minlon, minlat, maxlon, maxlat),
rup.tectonic_region_type, rup.mag)) == 0:
continue
hypo = rup.hypocenter.x, rup.hypocenter.y, rup.hypocenter.z
rate = getattr(rup, 'occurrence_rate', numpy.nan)
tup = (ebrupture.serial, ebrupture.srcidx, ebrupture.grp_id,
rup.code, ebrupture.n_occ, rup.mag, rup.rake, rate,
minlon, minlat, maxlon, maxlat,
hypo, offset, offset + len(points), sy, sz)
offset += len(points)
rups.append(tup)
geoms.append(numpy.array([tuple(p) for p in points], point3d))
nbytes += rupture_dt.itemsize + mesh.nbytes
if not rups:
return ()
dic = dict(geom=numpy.concatenate(geoms), nbytes=nbytes)
# TODO: PMFs for nonparametric ruptures are not converted
return hdf5.ArrayWrapper(numpy.array(rups, rupture_dt), dic) | [
"def",
"get_rup_array",
"(",
"ebruptures",
",",
"srcfilter",
"=",
"nofilter",
")",
":",
"if",
"not",
"BaseRupture",
".",
"_code",
":",
"BaseRupture",
".",
"init",
"(",
")",
"# initialize rupture codes",
"rups",
"=",
"[",
"]",
"geoms",
"=",
"[",
"]",
"nbyte... | Convert a list of EBRuptures into a numpy composite array, by filtering
out the ruptures far away from every site | [
"Convert",
"a",
"list",
"of",
"EBRuptures",
"into",
"a",
"numpy",
"composite",
"array",
"by",
"filtering",
"out",
"the",
"ruptures",
"far",
"away",
"from",
"every",
"site"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/stochastic.py#L97-L138 | train | 213,994 |
gem/oq-engine | openquake/hazardlib/calc/stochastic.py | sample_cluster | def sample_cluster(sources, srcfilter, num_ses, param):
"""
Yields ruptures generated by a cluster of sources.
:param sources:
A sequence of sources of the same group
:param num_ses:
Number of stochastic event sets
:param param:
a dictionary of additional parameters including
ses_per_logic_tree_path
:yields:
dictionaries with keys rup_array, calc_times, eff_ruptures
"""
eb_ruptures = []
numpy.random.seed(sources[0].serial)
[grp_id] = set(src.src_group_id for src in sources)
# AccumDict of arrays with 3 elements weight, nsites, calc_time
calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32))
# Set the parameters required to compute the number of occurrences
# of the group of sources
# assert param['oqparam'].number_of_logic_tree_samples > 0
samples = getattr(sources[0], 'samples', 1)
tom = getattr(sources, 'temporal_occurrence_model')
rate = tom.occurrence_rate
time_span = tom.time_span
# Note that using a single time interval corresponding to the product
# of the investigation time and the number of realisations as we do
# here is admitted only in the case of a time-independent model
grp_num_occ = numpy.random.poisson(rate * time_span * samples *
num_ses)
# Now we process the sources included in the group. Possible cases:
# * The group is a cluster. In this case we choose one rupture per each
# source; uncertainty in the ruptures can be handled in this case
# using mutually exclusive ruptures (note that this is admitted
# only for nons-parametric sources).
# * The group contains mutually exclusive sources. In this case we
# choose one source and then one rupture from this source.
rup_counter = {}
rup_data = {}
eff_ruptures = 0
for rlz_num in range(grp_num_occ):
if sources.cluster:
for src, _sites in srcfilter(sources):
# Sum Ruptures
if rlz_num == 0:
eff_ruptures += src.num_ruptures
# Track calculation time
t0 = time.time()
rup = src.get_one_rupture()
# The problem here is that we do not know a-priori the
# number of occurrences of a given rupture.
if src.id not in rup_counter:
rup_counter[src.id] = {}
rup_data[src.id] = {}
if rup.idx not in rup_counter[src.id]:
rup_counter[src.id][rup.idx] = 1
rup_data[src.id][rup.idx] = [rup, src.id, grp_id]
else:
rup_counter[src.id][rup.idx] += 1
# Store info
dt = time.time() - t0
calc_times[src.id] += numpy.array([len(rup_data[src.id]),
src.nsites, dt])
elif param['src_interdep'] == 'mutex':
print('Not yet implemented')
exit(0)
# Create event based ruptures
for src_key in rup_data:
for rup_key in rup_data[src_key]:
dat = rup_data[src_key][rup_key]
cnt = rup_counter[src_key][rup_key]
ebr = EBRupture(dat[0], dat[1], dat[2], cnt, samples)
eb_ruptures.append(ebr)
return eb_ruptures, calc_times, eff_ruptures, grp_id | python | def sample_cluster(sources, srcfilter, num_ses, param):
"""
Yields ruptures generated by a cluster of sources.
:param sources:
A sequence of sources of the same group
:param num_ses:
Number of stochastic event sets
:param param:
a dictionary of additional parameters including
ses_per_logic_tree_path
:yields:
dictionaries with keys rup_array, calc_times, eff_ruptures
"""
eb_ruptures = []
numpy.random.seed(sources[0].serial)
[grp_id] = set(src.src_group_id for src in sources)
# AccumDict of arrays with 3 elements weight, nsites, calc_time
calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32))
# Set the parameters required to compute the number of occurrences
# of the group of sources
# assert param['oqparam'].number_of_logic_tree_samples > 0
samples = getattr(sources[0], 'samples', 1)
tom = getattr(sources, 'temporal_occurrence_model')
rate = tom.occurrence_rate
time_span = tom.time_span
# Note that using a single time interval corresponding to the product
# of the investigation time and the number of realisations as we do
# here is admitted only in the case of a time-independent model
grp_num_occ = numpy.random.poisson(rate * time_span * samples *
num_ses)
# Now we process the sources included in the group. Possible cases:
# * The group is a cluster. In this case we choose one rupture per each
# source; uncertainty in the ruptures can be handled in this case
# using mutually exclusive ruptures (note that this is admitted
# only for nons-parametric sources).
# * The group contains mutually exclusive sources. In this case we
# choose one source and then one rupture from this source.
rup_counter = {}
rup_data = {}
eff_ruptures = 0
for rlz_num in range(grp_num_occ):
if sources.cluster:
for src, _sites in srcfilter(sources):
# Sum Ruptures
if rlz_num == 0:
eff_ruptures += src.num_ruptures
# Track calculation time
t0 = time.time()
rup = src.get_one_rupture()
# The problem here is that we do not know a-priori the
# number of occurrences of a given rupture.
if src.id not in rup_counter:
rup_counter[src.id] = {}
rup_data[src.id] = {}
if rup.idx not in rup_counter[src.id]:
rup_counter[src.id][rup.idx] = 1
rup_data[src.id][rup.idx] = [rup, src.id, grp_id]
else:
rup_counter[src.id][rup.idx] += 1
# Store info
dt = time.time() - t0
calc_times[src.id] += numpy.array([len(rup_data[src.id]),
src.nsites, dt])
elif param['src_interdep'] == 'mutex':
print('Not yet implemented')
exit(0)
# Create event based ruptures
for src_key in rup_data:
for rup_key in rup_data[src_key]:
dat = rup_data[src_key][rup_key]
cnt = rup_counter[src_key][rup_key]
ebr = EBRupture(dat[0], dat[1], dat[2], cnt, samples)
eb_ruptures.append(ebr)
return eb_ruptures, calc_times, eff_ruptures, grp_id | [
"def",
"sample_cluster",
"(",
"sources",
",",
"srcfilter",
",",
"num_ses",
",",
"param",
")",
":",
"eb_ruptures",
"=",
"[",
"]",
"numpy",
".",
"random",
".",
"seed",
"(",
"sources",
"[",
"0",
"]",
".",
"serial",
")",
"[",
"grp_id",
"]",
"=",
"set",
... | Yields ruptures generated by a cluster of sources.
:param sources:
A sequence of sources of the same group
:param num_ses:
Number of stochastic event sets
:param param:
a dictionary of additional parameters including
ses_per_logic_tree_path
:yields:
dictionaries with keys rup_array, calc_times, eff_ruptures | [
"Yields",
"ruptures",
"generated",
"by",
"a",
"cluster",
"of",
"sources",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/stochastic.py#L141-L216 | train | 213,995 |
gem/oq-engine | openquake/commands/plot_sites.py | plot_sites | def plot_sites(calc_id=-1):
"""
Plot the sites
"""
# NB: matplotlib is imported inside since it is a costly import
import matplotlib.pyplot as p
dstore = util.read(calc_id)
sitecol = dstore['sitecol']
lons, lats = sitecol.lons, sitecol.lats
if len(lons) > 1 and cross_idl(*lons):
lons %= 360
fig, ax = p.subplots()
ax.grid(True)
if 'site_model' in dstore:
sm = dstore['site_model']
sm_lons, sm_lats = sm['lon'], sm['lat']
if len(sm_lons) > 1 and cross_idl(*sm_lons):
sm_lons %= 360
p.scatter(sm_lons, sm_lats, marker='.', color='orange')
p.scatter(lons, lats, marker='+')
p.show() | python | def plot_sites(calc_id=-1):
"""
Plot the sites
"""
# NB: matplotlib is imported inside since it is a costly import
import matplotlib.pyplot as p
dstore = util.read(calc_id)
sitecol = dstore['sitecol']
lons, lats = sitecol.lons, sitecol.lats
if len(lons) > 1 and cross_idl(*lons):
lons %= 360
fig, ax = p.subplots()
ax.grid(True)
if 'site_model' in dstore:
sm = dstore['site_model']
sm_lons, sm_lats = sm['lon'], sm['lat']
if len(sm_lons) > 1 and cross_idl(*sm_lons):
sm_lons %= 360
p.scatter(sm_lons, sm_lats, marker='.', color='orange')
p.scatter(lons, lats, marker='+')
p.show() | [
"def",
"plot_sites",
"(",
"calc_id",
"=",
"-",
"1",
")",
":",
"# NB: matplotlib is imported inside since it is a costly import",
"import",
"matplotlib",
".",
"pyplot",
"as",
"p",
"dstore",
"=",
"util",
".",
"read",
"(",
"calc_id",
")",
"sitecol",
"=",
"dstore",
... | Plot the sites | [
"Plot",
"the",
"sites"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/plot_sites.py#L24-L45 | train | 213,996 |
gem/oq-engine | openquake/hazardlib/gsim/ghofrani_atkinson_2014.py | GhofraniAtkinson2014._get_distance_term | def _get_distance_term(self, C, rrup, backarc):
"""
Returns the distance scaling term, which varies depending on whether
the site is in the forearc or the backarc
"""
# Geometric attenuation function
distance_scale = -np.log10(np.sqrt(rrup ** 2 + 3600.0))
# Anelastic attenuation in the backarc
distance_scale[backarc] += (C["c2"] * rrup[backarc])
# Anelastic Attenuation in the forearc
idx = np.logical_not(backarc)
distance_scale[idx] += (C["c1"] * rrup[idx])
return distance_scale | python | def _get_distance_term(self, C, rrup, backarc):
"""
Returns the distance scaling term, which varies depending on whether
the site is in the forearc or the backarc
"""
# Geometric attenuation function
distance_scale = -np.log10(np.sqrt(rrup ** 2 + 3600.0))
# Anelastic attenuation in the backarc
distance_scale[backarc] += (C["c2"] * rrup[backarc])
# Anelastic Attenuation in the forearc
idx = np.logical_not(backarc)
distance_scale[idx] += (C["c1"] * rrup[idx])
return distance_scale | [
"def",
"_get_distance_term",
"(",
"self",
",",
"C",
",",
"rrup",
",",
"backarc",
")",
":",
"# Geometric attenuation function",
"distance_scale",
"=",
"-",
"np",
".",
"log10",
"(",
"np",
".",
"sqrt",
"(",
"rrup",
"**",
"2",
"+",
"3600.0",
")",
")",
"# Ane... | Returns the distance scaling term, which varies depending on whether
the site is in the forearc or the backarc | [
"Returns",
"the",
"distance",
"scaling",
"term",
"which",
"varies",
"depending",
"on",
"whether",
"the",
"site",
"is",
"in",
"the",
"forearc",
"or",
"the",
"backarc"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/ghofrani_atkinson_2014.py#L103-L115 | train | 213,997 |
gem/oq-engine | openquake/hazardlib/gsim/ghofrani_atkinson_2014.py | GhofraniAtkinson2014CascadiaUpper._get_scaling_term | def _get_scaling_term(self, C, rrup):
"""
Applies the Cascadia correction factor from Table 2 and the positive
correction factor given on Page 567
"""
a_f = 0.15 + 0.0007 * rrup
a_f[a_f > 0.35] = 0.35
return C["af"] + a_f | python | def _get_scaling_term(self, C, rrup):
"""
Applies the Cascadia correction factor from Table 2 and the positive
correction factor given on Page 567
"""
a_f = 0.15 + 0.0007 * rrup
a_f[a_f > 0.35] = 0.35
return C["af"] + a_f | [
"def",
"_get_scaling_term",
"(",
"self",
",",
"C",
",",
"rrup",
")",
":",
"a_f",
"=",
"0.15",
"+",
"0.0007",
"*",
"rrup",
"a_f",
"[",
"a_f",
">",
"0.35",
"]",
"=",
"0.35",
"return",
"C",
"[",
"\"af\"",
"]",
"+",
"a_f"
] | Applies the Cascadia correction factor from Table 2 and the positive
correction factor given on Page 567 | [
"Applies",
"the",
"Cascadia",
"correction",
"factor",
"from",
"Table",
"2",
"and",
"the",
"positive",
"correction",
"factor",
"given",
"on",
"Page",
"567"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/ghofrani_atkinson_2014.py#L225-L232 | train | 213,998 |
gem/oq-engine | openquake/hazardlib/gsim/somerville_2009.py | SomervilleEtAl2009NonCratonic._compute_mean | def _compute_mean(self, C, mag, rjb):
"""
Compute mean value, see table 2.
"""
m1 = 6.4
r1 = 50.
h = 6.
R = np.sqrt(rjb ** 2 + h ** 2)
R1 = np.sqrt(r1 ** 2 + h ** 2)
less_r1 = rjb < r1
ge_r1 = rjb >= r1
mean = (C['c1'] + C['c4'] * (mag - m1) * np.log(R) + C['c5'] * rjb +
C['c8'] * (8.5 - mag) ** 2)
mean[less_r1] += C['c3'] * np.log(R[less_r1])
mean[ge_r1] += (C['c3'] * np.log(R1) +
C['c6'] * (np.log(R[ge_r1]) - np.log(R1)))
if mag < m1:
mean += C['c2'] * (mag - m1)
else:
mean += C['c7'] * (mag - m1)
return mean | python | def _compute_mean(self, C, mag, rjb):
"""
Compute mean value, see table 2.
"""
m1 = 6.4
r1 = 50.
h = 6.
R = np.sqrt(rjb ** 2 + h ** 2)
R1 = np.sqrt(r1 ** 2 + h ** 2)
less_r1 = rjb < r1
ge_r1 = rjb >= r1
mean = (C['c1'] + C['c4'] * (mag - m1) * np.log(R) + C['c5'] * rjb +
C['c8'] * (8.5 - mag) ** 2)
mean[less_r1] += C['c3'] * np.log(R[less_r1])
mean[ge_r1] += (C['c3'] * np.log(R1) +
C['c6'] * (np.log(R[ge_r1]) - np.log(R1)))
if mag < m1:
mean += C['c2'] * (mag - m1)
else:
mean += C['c7'] * (mag - m1)
return mean | [
"def",
"_compute_mean",
"(",
"self",
",",
"C",
",",
"mag",
",",
"rjb",
")",
":",
"m1",
"=",
"6.4",
"r1",
"=",
"50.",
"h",
"=",
"6.",
"R",
"=",
"np",
".",
"sqrt",
"(",
"rjb",
"**",
"2",
"+",
"h",
"**",
"2",
")",
"R1",
"=",
"np",
".",
"sqrt... | Compute mean value, see table 2. | [
"Compute",
"mean",
"value",
"see",
"table",
"2",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/somerville_2009.py#L86-L110 | train | 213,999 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.