Focal Mechanism
In [1]:
import os
import pandas as pd
import json
from glob import glob
from tqdm.auto import tqdm
import obspy
import obspy.taup
import warnings
import numpy as np
import matplotlib.pyplot as plt
import multiprocessing as mp
from sklearn.neighbors import NearestNeighbors
import random
warnings.filterwarnings("ignore")
Run QuakeFlow_demo.ipynb first to generate the input files
In [2]:
root_path = "./QuakeFlow/slurm/local"
region = "demo"
result_path = f"{root_path}/{region}/hash"
if not os.path.exists(result_path):
    os.makedirs(result_path)

with open(f"{root_path}/{region}/config.json", "r") as f:
    config = json.load(f)

with open(f"{root_path}/{region}/obspy/stations.json", "r") as f:
    stations = json.load(f)
stations = pd.DataFrame.from_dict(stations, orient='index')
stations.index.name = "station_id"

events = pd.read_csv(f"{root_path}/{region}/gamma/gamma_events.csv", parse_dates=['time'])
events['time'] = events['time'].dt.tz_localize(None)
if "event_id" in events.columns:
    events.rename(columns={"event_id": "event_index"}, inplace=True)
events.set_index("event_index", inplace=True, drop=True)

picks = pd.read_csv(f"{root_path}/{region}/gamma/gamma_picks.csv", parse_dates=['phase_time'])
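As a quick sanity check on the loaded inputs (optional; this uses only the variables defined above):

print(f"{len(stations)} stations, {len(events)} events, {len(picks)} picks")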
In [3]:
stations[:3]
Out[3]:
| station_id | network | station | location | instrument | component | sensitivity | latitude | longitude | elevation_m | depth_km | x_km | y_km | z_km | provider |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| CI.CCC..BH | CI | CCC |  | BH | ENZ | [627368000.0, 627368000.0, 627368000.0] | 35.52495 | -117.36453 | 670.0 | -0.67 | 12.65 | -19.968 | -0.67 | SCEDC |
| CI.CCC..HH | CI | CCC |  | HH | ENZ | [627368000.0, 627368000.0, 627368000.0] | 35.52495 | -117.36453 | 670.0 | -0.67 | 12.65 | -19.968 | -0.67 | SCEDC |
| CI.CCC..HN | CI | CCC |  | HN | ENZ | [213979.0, 214322.0, 213808.0] | 35.52495 | -117.36453 | 670.0 | -0.67 | 12.65 | -19.968 | -0.67 | SCEDC |
In [4]:
events[:3]
Out[4]:
| event_index | time | magnitude | sigma_time | sigma_amp | cov_time_amp | gamma_score | num_picks | num_p_picks | num_s_picks | x(km) | y(km) | z(km) | longitude | latitude | depth_km |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 142 | 2019-07-04 17:02:55.043 | 3.959 | 0.324 | 0.488 | 0.016 | 128.000 | 78 | 35 | 43 | 0.401 | 0.854 | 16.255 | -117.500 | 35.713 | 16.255 |
| 223 | 2019-07-04 17:04:02.558 | 1.992 | 0.210 | 0.328 | 0.037 | 52.807 | 40 | 20 | 20 | -1.312 | 0.010 | 13.710 | -117.518 | 35.705 | 13.710 |
| 224 | 2019-07-04 17:04:11.454 | 1.207 | 0.042 | 0.301 | 0.001 | 9.159 | 6 | 3 | 3 | -16.960 | 29.341 | 6.733 | -117.692 | 35.969 | 6.733 |
In [5]:
picks[:3]
Out[5]:
| | station_id | phase_index | phase_time | phase_score | phase_type | phase_polarity | phase_amplitude | id | timestamp | amp | type | prob | event_index | gamma_score |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | CI.WCS2..HN | 519 | 2019-07-04 17:00:05.188 | 0.467 | P | -0.066 | 1.670000e-06 | CI.WCS2..HN | 2019-07-04T17:00:05.188000 | 1.670000e-06 | P | 0.467 | -1 | -1.0 |
| 1 | CI.WRV2..EH | 789 | 2019-07-04 17:00:07.890 | 0.695 | P | -0.160 | 2.042000e-07 | CI.WRV2..EH | 2019-07-04T17:00:07.890000 | 2.042000e-07 | P | 0.695 | -1 | -1.0 |
| 2 | CI.WRV2..EH | 846 | 2019-07-04 17:00:08.460 | 0.473 | S | 0.000 | 1.342000e-07 | CI.WRV2..EH | 2019-07-04T17:00:08.460000 | 1.342000e-07 | S | 0.473 | -1 | -1.0 |
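The phase_polarity column is a signed score from the phase picker; the conversion step below treats |phase_polarity| > 0.3 as a confident up/down first motion. A minimal sketch, using only the columns shown above, to preview how many P picks pass that threshold:

p_picks = picks[picks["phase_type"] == "P"]
print(f"{(p_picks['phase_polarity'].abs() > 0.3).mean():.1%} of P picks have |polarity| > 0.3")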
In [7]:
# %%
stations = stations[stations.index.isin(picks["station_id"].unique())]

# %%
picks = picks.merge(events, on="event_index", suffixes=("_pick", "_event"))
picks = picks.merge(stations, on="station_id", suffixes=("_pick", "_station"))

neigh = NearestNeighbors(n_neighbors=min(len(stations), 10))
neigh.fit(stations[["longitude", "latitude"]].values)

# %%
# keep only events that are recorded by enough of their nearest stations
neigh_ratios = []
selected_index = []
for event_index, event in tqdm(events.iterrows(), total=len(events)):
    sid = neigh.kneighbors([event[["longitude", "latitude"]].values])[1][0]
    neigh_stations = stations.iloc[sid].index
    neigh_picks = picks[(picks["station_id"].isin(neigh_stations)) & (picks["event_index"] == event_index)]
    neigh_stations_with_picks = neigh_picks["station_id"].unique()
    neigh_ratios.append(len(neigh_stations_with_picks) / len(neigh_stations))
    if len(neigh_stations_with_picks) / len(neigh_stations) > 0.3:
        selected_index.append(event_index)
events = events.loc[selected_index]

# %%
picks = picks[picks["event_index"].isin(events.index)]
100%|██████████| 2324/2324 [00:06<00:00, 366.54it/s]
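The 0.3 cutoff above is a heuristic. A quick histogram of the neigh_ratios collected in the loop (no new assumptions) shows whether the threshold sits at a sensible break in the distribution:

plt.figure(figsize=(6, 4))
plt.hist(neigh_ratios, bins=20)
plt.axvline(0.3, color="r", linestyle="--", label="selection threshold")
plt.xlabel("Fraction of 10 nearest stations with picks")
plt.ylabel("Number of events")
plt.legend()
plt.show()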
Stations and events
In [8]:
plt.figure(figsize=(10, 10))
plt.scatter(stations['longitude'], stations['latitude'], color='k', marker='^')
plt.scatter(events['longitude'], events['latitude'], s=1, color='r', marker='o')
plt.gca().set_aspect(1.0/np.cos(np.pi/180*stations['latitude'].mean()))
plt.xlabel('Longitude')
plt.ylabel('Latitude')
plt.show()
Convert the station list format
In [9]:
# "LOCSRCE station LATLON latitude longitdue depth elevation"
station_fmt = "LOCSRCE {} LATLON {:.4f} {:.4f} 0 {}\n"
# the file containing station information
stafile = f'./{result_path}/stations.nll'
with open(stafile,'w') as fp:
for station_id, station in stations.iterrows():
fp.writelines(station_fmt.format(station_id, station['latitude'], station['longitude'], station['elevation_m']/1000))
# "LOCSRCE station LATLON latitude longitdue depth elevation"
station_fmt = "LOCSRCE {} LATLON {:.4f} {:.4f} 0 {}\n"
# the file containing station information
stafile = f'./{result_path}/stations.nll'
with open(stafile,'w') as fp:
for station_id, station in stations.iterrows():
fp.writelines(station_fmt.format(station_id, station['latitude'], station['longitude'], station['elevation_m']/1000))
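To confirm the LOCSRCE lines came out as intended, read back the first few lines of the station file (a minimal check using stafile from above):

with open(stafile) as fp:
    print("".join(fp.readlines()[:3]))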
Convert polarity and S/P picks
In [10]:
# year month day hour minute seconds evla evlo evdp errh errz evid
eventlinefmt = "{:04d} {:02d} {:02d} {:02d} {:02d} {:05.2f} {:010.6f} {:010.6f} {:09.6f} {:05.2f} {:05.2f} {:}\n"
# station polarity "qp" S/P_ratio
stationlinefmt = "{} {:1s} 0 {:08.3f}\n"

picks_by_event = picks.groupby("event_index")
for event_index, picks_ in tqdm(picks_by_event):
    # the file being created, which should contain the event information, polarities, and S/P data
    polfile = f'{result_path}/{event_index}.pol.hash'
    event_info = events.loc[event_index]
    with open(polfile, 'w') as pf:
        # write the event information into the file
        pf.writelines(eventlinefmt.format(event_info['time'].year, event_info['time'].month, event_info['time'].day, event_info['time'].hour, event_info['time'].minute, event_info['time'].second + 1e-6 * event_info['time'].microsecond, event_info['latitude'], event_info['longitude'], event_info['depth_km'], 0.0, 0.0, event_index))
        # loop over all stations that have picks:
        # - if there is no P pick, skip the station
        # - if a P pick exists and its polarity score exceeds a threshold,
        #   assign '+' (up) or '-' (down); otherwise assign 'x'
        # - if both P and S picks exist, use the S/P amplitude ratio
        # - if there is no S pick, set the ratio to zero
        picks_by_station = picks_.groupby("station_id")
        for station_id, picks__ in picks_by_station:
            if 'P' not in picks__['phase_type'].values:
                continue
            P_polarity = picks__[picks__['phase_type'] == 'P']['phase_polarity'].values[0]
            if P_polarity < -0.3:
                polarity = '-'
            elif P_polarity > 0.3:
                polarity = '+'
            else:
                polarity = 'x'
            if 'S' in picks__['phase_type'].values:
                S_amplitude = picks__[picks__['phase_type'] == 'S']['phase_amplitude'].values[0]
                P_amplitude = picks__[picks__['phase_type'] == 'P']['phase_amplitude'].values[0]
                SP_ratio = S_amplitude / P_amplitude
            else:
                SP_ratio = 0
            # write the polarity and S/P ratio for each station into the file
            pf.writelines(stationlinefmt.format(station_id, polarity, SP_ratio))
100%|██████████| 1651/1651 [00:24<00:00, 66.05it/s]
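To spot-check one of the generated polarity files, print its first few lines (this reuses event_index from the last loop iteration; any index in events.index works):

with open(f"{result_path}/{event_index}.pol.hash") as pf:
    print("".join(pf.readlines()[:5]))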
Prepare input files for HASH
In [11]:
velocity_model = """0.00 4.74000
1.00 5.01000
2.00 5.35000
3.00 5.71000
4.00 6.07000
5.00 6.17000
6.00 6.27000
7.00 6.34000
8.00 6.39000
30.00 7.80000
"""
with open(f'{result_path}/ca.vel', 'w') as fp:
    fp.writelines(velocity_model)

# loop over all polarity data files
polfiles = glob(f'{result_path}/*.pol.hash')

# for polfile in tqdm(list(polfiles)):
def run(polfile):
    event_id = os.path.basename(polfile).split('.')[0]
    # the input file, which lists the input/output files and parameters
    iptfile = f'{result_path}/{event_id}.inp.hash'
    # the file containing the best solutions
    optfile1 = f'{result_path}/{event_id}.best.fps'
    # the file containing all solutions
    optfile2 = f'{result_path}/{event_id}.all.fps'
    # the file containing ray parameters
    optfile3 = f'{result_path}/{event_id}.rays'
    # the file containing the velocity profile
    velfile = f'{result_path}/ca.vel'
    # angle increment for the grid search
    dang = 1
    # number of perturbations of take-off angles for different source depths and velocity models
    nmc = 50
    # maximum number of focal mechanisms that match the misfit criteria to return
    maxout = 20
    # maximum distance to consider (km)
    maxdist = 100
    # number of polarities assumed bad
    nbadpol = 4
    # log10 of the uncertainty factor for S/P ratios
    qbadfac = 0.2
    # angular distance between different families of focal mechanisms
    cangle = 45
    # fraction of focal mechanisms that need to be within cangle to make up a new family of focal mechanisms
    prob_max = 0.2
    # number of velocity models
    nvelmod = 1
    # write this information sequentially into the input file
    with open(iptfile, 'w') as ipt:
        ipt.writelines('{}\n{}\n{}\n{}\n{}\n'.format(polfile, stafile, optfile1, optfile2, optfile3))
        ipt.writelines('{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n'.format(dang, nmc, maxout, maxdist, nbadpol, qbadfac, cangle, prob_max))
        ipt.writelines('{}\n{}\n'.format(nvelmod, velfile))
    # run the HASH command
    os.system(f'QuakeFlow/hashpy2/hash/hash_hashpy1D < {iptfile} > hash.log 2>&1')
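Before launching HASH, it can help to look at the 1-D model just written to ca.vel; a minimal sketch that re-reads the two-column file (depth, Vp):

depth, vp = np.loadtxt(f"{result_path}/ca.vel", unpack=True)
plt.figure(figsize=(4, 6))
plt.step(vp, depth, where="post")
plt.gca().invert_yaxis()
plt.xlabel("Vp (km/s)")
plt.ylabel("Depth (km)")
plt.show()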
In [12]:
ncpu = mp.cpu_count()
bpar = tqdm(total=len(polfiles))
ctx = mp.get_context('fork')
with ctx.Pool(ncpu) as p:
    for file in polfiles:
        p.apply_async(run, args=(file,), callback=lambda _: bpar.update())
    p.close()
    p.join()
100%|█████████▉| 1648/1651 [00:22<00:00, 42.02it/s]
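The bar stops a few short of the total, which can happen when HASH exits without writing a solution for an event. A minimal check of which events are missing (assuming only the file-naming scheme used above):

done = {os.path.basename(f).split(".")[0] for f in glob(f"{result_path}/*.best.fps")}
todo = {os.path.basename(f).split(".")[0] for f in polfiles}
print(f"{len(done)}/{len(todo)} events solved; missing: {sorted(todo - done)[:10]}")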
Check the result of a single event
In [13]:
filename = list(glob(f'{result_path}/*.best.fps'))
filename = random.choice(filename)
event_id = os.path.basename(filename).split('.')[0]
In [14]:
allmecafile = f'{result_path}/{event_id}.all.fps'
df_allmeca = pd.read_csv(allmecafile, header=None, delim_whitespace=True)
In [15]:
bestmecafile = f'{result_path}/{event_id}.best.fps'
df_bestmeca = pd.read_csv(bestmecafile, header=None, delim_whitespace=True)
df_bestmeca
Out[15]:
| | 0 | 1 | 2 | 3 | 4 |
|---|---|---|---|---|---|
| 0 | 119 | 86 | 156 | 17 | A |
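The .fps columns are unnamed; judging from how they are unpacked later in this notebook (strike, dip, rake, RMS, quality), labeling them makes the table easier to read. An optional sketch (the names are inferred, not part of the HASH output):

df_bestmeca.columns = ["strike", "dip", "rake", "rms", "quality"]
df_bestmeca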
In [16]:
from obspy.imaging.beachball import beach

fig, ax = plt.subplots()
# plot the best solution first
strike, dip, rake = df_bestmeca.loc[0].to_numpy()[:3]
bball = beach(fm=[strike, dip, rake], facecolor='k', xy=(0, 0), width=100)
ax.add_collection(bball)
# plot all solutions that meet the criterion
for event_index in df_allmeca.index:
    strike, dip, rake = df_allmeca.loc[event_index].to_numpy()[:3]
    bball = beach(fm=[strike, dip, rake], linewidth=1, nofill=True, xy=(0, 0), width=100, edgecolor='gray', alpha=0.5)
    ax.add_collection(bball)
plt.xlim(-50, 50)
plt.ylim(-50, 50)
ax.set_aspect('equal')
fig.tight_layout()
Plot focal mechanisms of all earthquakes
In [22]:
bestmecafiles = list(glob(f'{result_path}/*.best.fps'))

fig, ax = plt.subplots(figsize=(10, 10))
min_mag = events['magnitude'].min()
bballs = []
for bestmecafile in bestmecafiles:
    # get the event information
    event_id = os.path.basename(bestmecafile).split('.')[0]
    evot, mag, evla, evlo, evdp = events.loc[int(event_id)][['time', 'magnitude', 'latitude', 'longitude', 'depth_km']]
    # get the strike, dip, and rake angles; keep only A-quality solutions
    df_bestmeca = pd.read_csv(bestmecafile, header=None, delim_whitespace=True)
    strike, dip, rake, RMS, Q = df_bestmeca.loc[0].to_numpy()
    if Q != "A":
        continue
    # # plot and scale the width according to the earthquake magnitude
    # bball = beach(fm=[strike, dip, rake], linewidth=0.5, facecolor='k', xy=(evlo, evla), width=(mag - min_mag) * 2e-3, alpha=alpha)
    # ax.add_collection(bball)
    bballs.append({
        "fm": [strike, dip, rake],
        "linewidth": 0.5,
        "facecolor": 'k',
        "xy": (evlo, evla),
        "width": (mag - min_mag) * 2e-3,
        "alpha": 1.0,
        "mag": mag,
    })

# sort bballs by magnitude so larger events are drawn on top
bballs = sorted(bballs, key=lambda x: x["mag"])
for bball in bballs:
    bball = beach(fm=bball["fm"], linewidth=bball["linewidth"], facecolor=bball["facecolor"], xy=bball["xy"], width=bball["width"], alpha=bball["alpha"])
    ax.add_collection(bball)

x0 = events['longitude'].mean()
dx = events['longitude'].std()
y0 = events['latitude'].mean()
dy = events['latitude'].std()
ax.set_xlim(x0 - dx * 2, x0 + dx * 2)
ax.set_ylim(y0 - dy * 2, y0 + dy * 2)
ax.set_aspect(1.0 / np.cos(np.pi / 180 * stations['latitude'].mean()))
fig.tight_layout()
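Finally, it is convenient to keep the plotted mechanisms as a table for later analysis. A minimal sketch built from the bballs list assembled above (the output filename and column names are my own choices):

df_meca = pd.DataFrame(
    [
        {
            "longitude": b["xy"][0],
            "latitude": b["xy"][1],
            "strike": b["fm"][0],
            "dip": b["fm"][1],
            "rake": b["fm"][2],
            "magnitude": b["mag"],
        }
        for b in bballs
    ]
)
df_meca.to_csv(f"{result_path}/focal_mechanisms.csv", index=False)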
Moment tensor inversion
TODO: Week2a_Exercise and Week2b_Exercise
In [18]:
!wget https://github.com/AI4EPS/INVerse/releases/download/inverse/MT-Exercises.tar && tar -xf MT-Exercises.tar
--2023-11-30 17:49:35-- https://github.com/AI4EPS/INVerse/releases/download/inverse/MT-Exercises.tar
Resolving github.com (github.com)... 140.82.114.3
Connecting to github.com (github.com)|140.82.114.3|:443... connected.
HTTP request sent, awaiting response... 302 Found (redirected to objects.githubusercontent.com)
HTTP request sent, awaiting response... 200 OK
Length: 81459200 (78M) [application/octet-stream]
2023-11-30 17:49:36 (85.2 MB/s) - ‘MT-Exercises.tar.4’ saved [81459200/81459200]