diff --git a/.gitignore b/.gitignore
index 3e8ce2f1e..377e6cc8b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -42,3 +42,5 @@ doc/USER_MANUAL/Schedule
 .*swn
 *~
 *.pyc
+utils/.DS_Store
+.DS_Store
diff --git a/utils/dynamic_rupture/JUPYTER_LIB/Class_SPECFEM3D.py b/utils/dynamic_rupture/JUPYTER_LIB/Class_SPECFEM3D.py
new file mode 100755
index 000000000..21ab7f5e3
--- /dev/null
+++ b/utils/dynamic_rupture/JUPYTER_LIB/Class_SPECFEM3D.py
@@ -0,0 +1,1713 @@
+import numpy as np
+import os
+import array
+import struct
+import matplotlib.pyplot as plt
+import seaborn as sns
+from scipy.interpolate import griddata as gd
+#import pandas as pd
+#from matplotlib.colors import LogNorm
+
+
+
+def get_M0(Mw=7.0):
+    '''Converts moment magnitude Mw to seismic moment M0 (N.m).'''
+    return 10** (Mw* 1.5+ 9.1)
+##
+
+def get_Mw(M0=None):
+    '''Converts seismic moment M0 (N.m) to moment magnitude Mw.'''
+    import numpy as np
+    return (np.log10(M0)- 9.1)/ 1.5
+##
+
+def interpolate_velocity_profile_for_specfem3d(points, Vs_values, Vp_values, my_points,\
+                    rep='./', nproc=1, is_1D_1layer=False, is_1D_layered=False,\
+                    x_chosen=None, y_chosen=None, z_chosen=None, \
+                    is_damage=False):
+    '''Interpolates the reference Vs and Vp models (points, Vs_values, Vp_values)
+    onto the SPECFEM3D database coordinates of every processor.'''
+
+    from scipy.interpolate import griddata
+
+    my_Vs, my_Vp, all_points = [], [], []
+    for iproc in range(nproc):
+        print ('iproc', iproc, nproc)
+        print ('Reading read_specfem3d_database_coords ...')
+        xproc, yproc, zproc = read_specfem3d_database_coords(iproc=iproc, rep=rep)
+        # interpolate my points based on reference
+        my_points_orig = np.array( [[_x,_y,_z] for _x,_y,_z in zip(xproc,yproc,zproc)] ) # in meters
+        if not is_1D_1layer and not is_1D_layered:
+            print ('White et al. velocity model ...')
+            my_points = my_points_orig
+        elif is_1D_layered:
+            print ('1D layered velocity model ...')
+            my_points = np.array( [[x_chosen*1e3,y_chosen*1e3,_z] for _x,_y,_z in zip(xproc,yproc,zproc)] ) # in meters
+        elif is_1D_1layer:
+            print ('1D single-layer velocity model ...')
+            my_points = np.array( [[x_chosen*1e3,y_chosen*1e3,z_chosen*1e3] for _x,_y,_z in zip(xproc,yproc,zproc)] ) # in meters
+        #
+        print (' interpolation ...')
+        _my_Vs = griddata(points, Vs_values, my_points, method='nearest')
+        # correction for LVFZs, only for Vs.
+        # The LVFZ bounds (xbeg, xend, ybeg, yend, zbot, ztop) and the velocity
+        # reduction factor (reduction) must be defined as globals of this module
+        # before calling with is_damage=True; otherwise the correction is skipped.
+        if is_damage:
+            print (' LVFZ correction ...')
+            try:
+                cdt1 = (yproc <= yend) & (yproc >= ybeg) & (xproc >= xbeg) & (xproc <= xend) & \
+                       (zproc >= zbot) & (zproc <= ztop)
+                print (' _my_Vs[cdt1].shape: ', _my_Vs[cdt1].shape)
+                print (' min, max before: ', min(_my_Vs[cdt1]), max(_my_Vs[cdt1]))
+                _my_Vs[cdt1] = _my_Vs[cdt1]* (1.0- reduction)
+                print (' min, max after: ', min(_my_Vs[cdt1]), max(_my_Vs[cdt1]))
+            except:
+                print (' LVFZ bounds or reduction not defined, correction SKIPPED!')
+        #
+        my_Vs.append ( _my_Vs )
+        my_Vp.append ( griddata(points, Vp_values, my_points, method='nearest') )
+        all_points.append(my_points_orig)
+        print (' done!'); print ()
+    return all_points, my_Vs, my_Vp
+###
+
+
+
+def interpolate_from_reference_data(input_points, ref_dataframe=None,\
+                                    data_type='mu', method='nearest'):
+    '''Interpolates a reference model stored in a pickled dataframe
+    (columns x_rot, y_rot, depth, in km) onto input_points given in meters.'''
+
+    import pandas as pd
+    from scipy.interpolate import griddata
+    from scipy.interpolate import LinearNDInterpolator
+
+    try:
+        df_velocprofile = pd.read_pickle(ref_dataframe)
+    except:
+        import pickle5 as pickle
+        with open(ref_dataframe, "rb") as fh:
+            df_velocprofile = pickle.load(fh)
+    #
+    # make a function for the reference model
+    points = df_velocprofile[['x_rot', 'y_rot', 'depth']].to_numpy()
+    points[:,2] *= -1 # depth to along-dip negative
+    points *= 1e3 # all coords in m
+    reference_data = df_velocprofile[data_type]
+    # interpolate
+    data2return = griddata(points, reference_data, input_points, method=method)
+    return data2return
+##
+
+
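+# Usage sketch (illustrative only): converting magnitude to moment and sampling a
+# reference rigidity model. The pickle path below is a placeholder; the dataframe is
+# assumed to carry the columns 'x_rot', 'y_rot', 'depth' (km) and the requested field.
+#
+#   M0 = get_M0(Mw=7.0)                        # ~4.0e19 N.m
+#   Mw = get_Mw(M0=M0)                         # back to 7.0
+#   pts = np.array([[0.0, 0.0, -5.0e3]])       # x, y, z in meters (z negative downward)
+#   mu  = interpolate_from_reference_data(pts, ref_dataframe='reference_model.pkl',
+#                                         data_type='mu', method='nearest')
+
+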
+def get_rotd50_optim_testing(acceleration_x, time_step_x, acceleration_y, time_step_y, periods,
+                             percentile, damping=0.05, units="cm/s/s", method="Nigam-Jennings"):
+    '''Computes the GMRotDpp intensity measure (e.g. GMRotD50 for percentile=50)
+    from two horizontal acceleration components.'''
+
+    from intensity_measures import get_response_spectrum, rotate_horizontal
+
+    # Get the time-series corresponding to the SDOF
+    sax, _, x_a, _, _ = get_response_spectrum(acceleration_x,
+                                              time_step_x,
+                                              periods, damping,
+                                              units, method)
+    say, _, y_a, _, _ = get_response_spectrum(acceleration_y,
+                                              time_step_y,
+                                              periods, damping,
+                                              units, method)
+
+    angles = np.arange(0., 90., 1.)
+    max_a_theta = np.zeros([len(angles), len(periods)], dtype=float)
+    # unrotated (theta = 0) geometric mean
+    max_a_theta[0, :] = np.sqrt(np.max(np.fabs(x_a), axis=0) *
+                                np.max(np.fabs(y_a), axis=0))
+    # rotated geometric means for theta = 1, ..., 89 degrees
+    for iloc, theta in enumerate(angles[1:], 1):
+        rot_x, rot_y = rotate_horizontal(x_a, y_a, theta)
+        max_a_theta[iloc, :] = np.sqrt(np.max(np.fabs(rot_x), axis=0) *
+                                       np.max(np.fabs(rot_y), axis=0))
+
+    gmrotd = np.percentile(max_a_theta, percentile, axis=0)
+    return {"angles": angles,
+            "periods": periods,
+            "GMRotDpp": gmrotd,
+            "GeoMeanPerAngle": max_a_theta}
+###
+
+def set_style(whitegrid=False, scale=0.85):
+    sns.set(font_scale = scale)
+    sns.set_style('white')
+    if whitegrid: sns.set_style('whitegrid')
+    sns.set_context('talk', font_scale=scale)
+###
+
+
+def get_FFT(dt=None, data=None):
+    '''Returns the frequency axis and the amplitude spectrum (|FFT|*dt) of data.'''
+    from scipy.fft import fft
+
+    # Spectrum
+    df = 1.0/ dt
+    N = len(data)
+    f = np.linspace(0.0, 1.0/(2.0*dt), N//2)
+    spec = abs(fft(data))* dt
+    return f[1:], spec[:int(N/2)-1]
+###
+
+
+def prepare_station_file(xrange=(),yrange=(),zrange=(),n_xyz=(),USE_SOURCES_RECEIVERS_Z=True,\
+                         sta='EL',net='IF',file_stat='w'):
+    ''' prepares the station file STATIONS to get seismogram outputs'''
+    import numpy as np
+
+    try:
+        nx, ny, nz = n_xyz[0], n_xyz[1], n_xyz[2]
+        if USE_SOURCES_RECEIVERS_Z:
+            print ('Using z-coordinates, do not forget to set USE_SOURCES_RECEIVERS_Z=T in Par_file!')
+            print ('VERIFY file format: which column is z-coord?')
+        else:
+            print ('Using burial depth, do not forget to set USE_SOURCES_RECEIVERS_Z=F in Par_file!')
+            print ('z-coordinates in this file must be positive!')
+        ista = 0
+        with open('STATIONS', file_stat) as f:
+            for _x in np.linspace(xrange[0], xrange[1], nx):
+                for _y in np.linspace(yrange[0], yrange[1], ny):
+                    for _z in np.linspace(zrange[0], zrange[1], nz):
+                        name = sta+'%06d' % (ista)
+                        f.write('%s \t %s \t %.1f \t %.1f \t %s \t %s \n' \
+                                % (name, net, _y, _x, '0.0', _z) )
+                        ista += 1
+        print ('Total number of stations: ', ista)
+        print ('STATIONS file ready!')
+        print ('*')
+    except:
+        print ('Provide xrange=(xmin,xmax),yrange=(ymin,ymax),zrange=(zmin,zmax),n_xyz=(nx,ny,nz)')
+        print ()
+# ##
+
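+# Usage sketch (illustrative only; all values below are placeholders): writing a
+# 3 x 3 surface grid of receivers to a STATIONS file in the current directory.
+#
+#   prepare_station_file(xrange=(-20e3, 20e3), yrange=(-10e3, 10e3), zrange=(0.0, 0.0),
+#                        n_xyz=(3, 3, 1), USE_SOURCES_RECEIVERS_Z=True)
+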
+def get_JA19_spectrum(Mw=None, f=None):
+    ''' Ji and Archuleta (2020) spectrum with double corner
+    frequency. Here using only Mw>5.3 condition.'''
+    print ('get_JA19_spectrum: assumes Mw > 5.3 !')
+    fc1_JA19_2S = 2.375- 0.585* Mw # for M>5.3
+    fc1_JA19_2S = 10.0** fc1_JA19_2S
+    fc2_JA19_2S = 3.250- 0.5* Mw
+    fc2_JA19_2S = 10.0** fc2_JA19_2S
+    coeff = (1.0+ (f/ fc1_JA19_2S)** 4.0)** 0.25
+    coeff *= (1.0+ (f/ fc2_JA19_2S)** 4.0)** 0.25
+    spec_JA19_2S = 1.0/ coeff
+    return fc1_JA19_2S, fc2_JA19_2S, spec_JA19_2S
+##
+
+# taken from Huihui's (02/2019)
+def _read_binary_fault_file(filename, single=False, ndata=14):
+
+    # Precision
+    length = 8
+    if single: length = 4
+
+    if os.path.exists(filename):
+
+        BinRead = []
+        data = type("",(),{})()
+
+        with open(filename, 'rb') as fid:
+            for ii in range(ndata):
+                read_buf = fid.read(4)
+                number = struct.unpack('1i',read_buf)[0]
+                N = int(number/length)
+                read_buf = fid.read(number)
+                read_d = struct.unpack(str(N)+'f',read_buf)
+                read_d = np.array(read_d)
+                BinRead.append(read_d)
+                read_buf = fid.read(4)
+                number = struct.unpack('1i',read_buf)[0]
+            fid.close()
+
+        # assign the values to parameters
+        data.X = BinRead[0]/ 1.e3 # in km
+        data.Y = BinRead[1]/ 1.e3 # in km
+        data.Z = BinRead[2]/ 1.e3 # in km
+        data.Dx = BinRead[3]
+        data.Dz = BinRead[4]
+        data.Vx = BinRead[5]
+        data.Vz = BinRead[6]
+        data.Tx = BinRead[7] # in MPa
+        data.Ty = BinRead[8]
+        data.Tz = BinRead[9] # in MPa
+        data.S = BinRead[10]
+        data.Sg = BinRead[11] # in MPa
+        data.Trup = BinRead[12]
+        data.Tproz = BinRead[13]
+    else:
+        print ('No such file or directory!')
+        return
+    return data
+###
+
+
+# Konno-Ohmachi smoothening
+def ko(y,dx,bexp):
+
+    from math import pi, log10, sin
+
+    nx = len(y)
+    fratio = 10.0**(2.5/bexp)
+    ylis = np.zeros(nx) #np.arange( nx )
+    ylis[0] = y[0]
+
+    for ix in np.arange( 1,nx ):
+        fc = float(ix)*dx
+        fc1 = fc/fratio
+        fc2 = fc*fratio
+        ix1 = int(fc1/dx)
+        ix2 = int(fc2/dx) + 1
+        if ix1 <= 0: ix1 = 1
+        if ix2 >= nx: ix2 = nx
+        a1 = 0.0
+        a2 = 0.0
+        for j in np.arange( ix1,ix2 ):
+            if j != ix:
+                c1 = bexp* np.log10(float(j)* dx/ fc)
+                c1 = (sin(c1)/c1)**4
+                a2 = a2+c1
+                a1 = a1+c1*y[j]
+            else:
+                a2 = a2+1.0
+                a1 = a1+y[ix]
+        ylis[ix] = a1 / a2
+
+    for ix in np.arange( nx ):
+        y[ix] = ylis[ix]
+    return y
+###
+
+
+def compute_STF(t, all_SR, all_mu, dx=0.1e3, dz=0.1e3, pad=False):
+    ''' computes STF from slip rates of fault stations. NOT fault grid.
+ See also compute_STF_from_faultgrid in lib_ridgecrest.py''' + from scipy import fft + N = len(all_SR[0]) + STF = np.zeros(N) + elemsize = dx*dz + # STF + for _mu, SR in zip(all_mu, all_SR): + STF += SR* elemsize* _mu + dt = t[1]- t[0] + if pad: + STF = np.pad(STF, pad_width=(0,len(STF)), mode='minimum') + t = np.arange(len(STF))* dt + # Spectrum + df=1.0/ dt + N = len(t) + f = np.linspace(0.0, 1.0/(2.0*dt), N//2) + spec = abs(fft(STF))* dt + specD=spec[:int(N/2)-1] + specA=(2*np.pi*f[1:])**2*specD + return t, STF, f[1:], specD, specA +### + + +def get_seismo_fast_1component(self, sta, filtering=False, fmin=None, fmax=1.0, + is_binary=False,is_acceleration=False,suffix='H',compo='XX.semv'): + + from obspy.core import Trace + + + fname = self.directory+'/'+sta+'.'+suffix+compo + + if is_binary: + with open(fname,'rb') as f: Vx = np.fromfile(f, dtype=np.float32) + t = np.arange(len(Vx))* self.dt + else: + t = np.genfromtxt(fname, usecols=0) + Vx = np.genfromtxt(fname, usecols=1) + ### + tr_x = Trace() + tr_x.data = Vx + tr_x.stats.delta = t[1]-t[0] + tr_x.stats.starttime = t[0] + if filtering: tr_x.filter(type='lowpass', freq=fmax, corners=2, zerophase=True) + if fmin != None: tr_x.filter(type='highpass', freq=fmin, corners=2, zerophase=True) + if is_acceleration: tr_x.differentiate() + + return tr_x, max(abs(Vx)), max(abs(tr_x.data)) +### + + +def get_scec_velocity_profile(zcoords): + # depth coords in km + zandrews = [abs(depth)/1e3+ 0.0073215 for depth in zcoords] # in km +# print ('Min and max depth of zandrews: ', min(zandrews), max(zandrews)) + vs0 = np.zeros(len(zandrews)) + vs_not_corrected = np.zeros(len(zandrews)) + zandrews = np.array(zandrews) + + # ATTENTION TO THE ORDER + cdt = (zandrews >= 8.0) + vs0[cdt] = 2.927* 8.0**0.086 + + cdt = (zandrews < 8.0) + vs0[cdt] = 2.927* zandrews[cdt]**0.086 + + cdt = (zandrews < 4.0) + vs0[cdt] = 2.505* zandrews[cdt]**0.199 + + cdt = (zandrews < 0.19) + vs0[cdt] = 3.542* zandrews[cdt]**0.407 + + cdt = (zandrews < 0.03) + vs0[cdt] = 2.206* zandrews[cdt]**0.272 + + vs_not_corrected[:] = vs0[:]*1e3 + vp0_not_corrected = [ max(1.4+ 1.14*vs, 1.68*vs) for vs in vs_not_corrected] + vp0_not_corrected = np.array(vp0_not_corrected) + + # clamp surface Vs_min to 760 m/s and update Vp and rho + # assuming deeper points have already > 760 ! 
+# vs0 [zcoords == 0.0] = 760.0e-3 + vs0 [vs0 == min(vs0)] = 760.0e-3 + vp0 = [ max(1.4+ 1.14*vs, 1.68*vs) for vs in vs0] + vp0 = np.array(vp0) + rho0 = [2.4405+ 0.10271*vs for vs in vs0] + rho0 = np.array(rho0) + vp0 *= 1e3 + vs0 *= 1e3 + rho0 *= 1e3 + + return vp0, vs0, rho0, rho0*vs0*vs0 +### + + +### Scaling law of Wells& Coppersmith (94) +def get_WC94_scaling(Mw=7.0): + ''' Earthquake scaling law based on the WC94 + regression formulas of strike-slip events''' + + WC94 = {} + + # Surface rupture length + a = [5.16-0.13, 5.16, 5.16+0.13] + b = [1.12-0.08, 1.12, 1.12+0.08] + WC94['SRL'] = [10.0** ( (Mw-_a)/_b ) for _a, _b in zip(a,b) ] + + # Subsurface rupture length + a = [4.33-0.06, 4.33, 4.33+0.06] + b = [1.49-0.05, 1.49, 1.49+0.05] + WC94['RLD'] = [10.0** ( (Mw-_a)/_b ) for _a, _b in zip(a,b) ] + + # Rupture width + a = [3.80-0.17, 3.80, 3.80+0.17] + b = [2.59-0.18, 2.59, 2.59+0.18] + WC94['RW'] = [10.0** ( (Mw-_a)/_b ) for _a, _b in zip(a,b) ] + + # Rupture area + a = [3.98-0.07, 3.98, 3.98+0.07] + b = [1.02-0.03, 1.02, 1.02+0.03] + WC94['RA'] = [10.0** ( (Mw-_a)/_b ) for _a, _b in zip(a,b) ] + + # Max surface displacement + a = [6.81-0.05, 6.81, 6.81+0.05] + b = [0.78-0.06, 0.78, 0.78+0.06] + WC94['MD'] = [10.0** ( (Mw-_a)/_b ) for _a, _b in zip(a,b) ] + + # Ave surface displacement + a = [7.04-0.05, 7.04, 7.04+0.05] + b = [0.89-0.09, 0.89, 0.89+0.09] + WC94['AD'] = [10.0** ( (Mw-_a)/_b ) for _a, _b in zip(a,b) ] + + return WC94 +### + +### CLASS ### +class specfem3d (object): + + def __init__(self, directory='', n_fault=1, n_iter=1, itd=1, is_single=False,is_snapshot=False, info=True): + self.directory = directory+ '/' + self.fault = {} + if info: print ('Directory: ', self.directory); print (); + self.itd = itd + if is_snapshot: self.__read_snapshots(n_fault=n_fault, n_iter=n_iter, itd=itd, single=is_single,_info=info) + ### + + def __read_snapshots(self, n_fault=1, n_iter=1, itd=1, single=False, _info=True): + + if _info: print ('Number of snapshots: ', n_iter) + if _info: print ('Reading...') + # For each snapshot: + snap=0; all_data=[]; + for j in range(n_iter) : + u = str(int(snap)) + BinFile = self.directory+ 'Snapshot'+u+'_F'+str(int(n_fault))+'.bin' + if _info: print ('Binary file: ', BinFile) + data = _read_binary_fault_file (BinFile, single=single, ndata=14) + all_data.append(data) + snap+=itd + ### + self.fault['x'] = all_data[0].X + self.fault['y'] = all_data[0].Y + self.fault['z'] = all_data[0].Z + self.fault['Dx'] = [data.Dx for data in all_data] + self.fault['Dz'] = [data.Dz for data in all_data] + self.fault['Vx'] = [data.Vx for data in all_data] + self.fault['Vz'] = [data.Vz for data in all_data] + self.fault['Tx'] = [data.Tx for data in all_data] + self.fault['Ty'] = [data.Ty for data in all_data] + self.fault['Tz'] = [data.Tz for data in all_data] + self.fault['S'] = [data.S for data in all_data] + self.fault['Sg'] = [data.Sg for data in all_data] + self.fault['Trup'] = [data.Trup for data in all_data] + self.fault['Tproz'] = [data.Tproz for data in all_data] + print('***') + ### + + + def plot_final_slip(self, vertical_fault=True, cmap='magma',tmax=0.0, Nx=1000, Nz=1000, + Xcut=None,Zcut=None,ext=[],info=True,Ncmap=6,plot=True,\ + **kwargs): + + if not vertical_fault: print('Modify the script for non-vertical faults!!!') + + if tmax ==0.0: nsnaps = len(self.fault['Dx']) + else: nsnaps = int(tmax/self.dt/self.itd) + + if info: print ('Total number of used snapshots: ', nsnaps) + Dx_all = self.fault['Dx'][:nsnaps] + Dz_all = self.fault['Dz'][:nsnaps] + + Xall = 
self.fault['x'] + Y = self.fault['y'] + Zall = self.fault['z'] + + if Zcut==None: Zcut = min(Zall) + if Xcut==None: + Xcutmin, Xcutmax = min(Xall), max(Xall) + else: + Xcutmin, Xcutmax = Xcut[0], Xcut[1] + + cdt = (Y == 0.0) & (Zall >= Zcut) & (Xall >= Xcutmin) & (Xall <= Xcutmax) + X = Xall [cdt] + Z = Zall [cdt] + + if ext==[]: ext = [min(X), max(X), min(Z), max(Z)] + xi, zi = np.meshgrid(np.linspace(ext[0],ext[1], Nx), np.linspace(ext[2],ext[3],Nz)) + + D1 = Dx_all[len(Dx_all)-1] + D2 = Dz_all[len(Dz_all)-1] + D_net = [(_D1**2+ _D2**2)**0.5 for _D1,_D2 in zip(D1,D2) ] + D_net = np.array(D_net) + + slip_direction = kwargs.pop('slip_direction', 'along_strike') + if slip_direction == 'along_dip': + if info: print ('Using slip along dip! ') + D = D2 + vmin=min(map(min, Dz_all)); vmax=max(map(max, Dz_all)) + vmax = max(abs(vmin), vmax); vmin = -vmax; + elif slip_direction == 'both': + if info: print ('Using composite slip (both components))') + D = D_net + vmin=min(map(min, Dx_all)); vmax=max(map(max, Dx_all)) + vmax = max(abs(vmin), vmax); vmin=0.0 + elif slip_direction == 'along_strike': + if info: print ('Using slip along strike! ') + D = D1 + vmin=min(map(min, Dx_all)); vmax=max(map(max, Dx_all)) + vmax = max(abs(vmin), vmax); vmin=0.0 + # + + vmax = kwargs.pop('vmax', vmax) + + if info: + print ('*') + print ('ON FAULT PLANE ::') + print ('*') + print ('Along strike:') + print ('Max slip (m): ', max(D1) ) + print ('Ave slip (m): ', np.average(D1) ) + print ('*') + print ('Along dip:') + print ('Max slip (m): ', max(D2) ) + print ('Ave slip (m): ', np.average(D2) ) + print ('*') + print ('Both components:') + print ('Max slip (m): ', max(D_net) ) + print ('Ave slip (m): ', np.average(D_net) ) + print ('*') + + print ('*') + print ('ON SURFACE ::') + print ('*') + print ('Along strike:') + D_surf = D1 [ (abs(Zall)== min(abs(Zall))) ] + print ('Max slip (m): ', max(D_surf) ) + print ('Ave slip (m): ', np.average(D_surf) ) + print ('*') + print ('Along dip:') + D_surf = D2 [ (abs(Zall)== min(abs(Zall))) ] + print ('Max slip (m): ', max(D_surf) ) + print ('Ave slip (m): ', np.average(D_surf) ) + print ('*') + print ('Both components:') + D_surf = D_net [ (abs(Zall)== min(abs(Zall))) ] + + if not info: + D_surf = D_net [ (abs(Zall)== min(abs(Zall))) ] + cdt2 = (abs(Zall)== min(abs(Zall))) + x_surf = Xall[cdt2] + print ('Max slip (m): ', max(D_surf) ) + print ('Ave slip (m): ', np.average(D_surf) ) + print ('*') + ## + if plot: + plt.close('all') + fig = plt.figure(figsize=(6,4)) + cmap = plt.cm.get_cmap(cmap, Ncmap) + ### strike slip + D = D [ cdt] + y = gd( (X,Z), D, (xi,zi), method=kwargs.pop('interpolation', 'linear'), + fill_value=kwargs.pop('fill_value', 0.0)) + y = np.flipud(y) + ax = fig.add_subplot(111) + ax.set_title('Max slip on fault plane (m) = '+ '%.2f' % (max(D)) ) + ax.set_xlabel('Along strike (km)', fontsize=15) + ax.set_ylabel('Along dip (km)', fontsize=15) + im = ax.imshow(y, extent=ext, \ + vmin=vmin, vmax=vmax, cmap=cmap) + try: ax.plot(self.x_hypo, self.z_hypo, c='k', marker='*', alpha=0.5) + except: pass + c = plt.colorbar(im, fraction=0.046, pad=0.05, shrink=0.5, label='Strike slip (m)') + c.mappable.set_clim(vmin, vmax) + plt.tight_layout() + plt.show() + + return x_surf, D_surf + ### + + def plot_surface_slip(self,D_surf,x_surf,percent=5.0): + ''' plots surface slip variation''' + try: + cdt = [D_surf >= max(D_surf)*0.01*percent][0] + print ('min-max surface locations: ', min(x_surf[cdt]), max(x_surf[cdt])) + surfacelen = max(x_surf[cdt])- min(x_surf[cdt]) + print 
('Surface length: ', surfacelen) + print ('Ave. slip here: ', np.average(D_surf[cdt])) + print ('percentage of clipping: ', percent) + # + plt.figure(figsize=(6,3)) + plt.subplot(111) + plt.scatter(x_surf[cdt], D_surf[cdt], c='tomato', s=50) + plt.scatter(x_surf, D_surf, c='gray', alpha=0.1) + plt.scatter(self.x_hypo, 0.1, marker='*', c='gold', s=100) + plt.title('Surface slip (m) vs Fault strike (km)') + plt.axvline(x=self.x_hypo, c='gray', alpha=0.5, linestyle=':') + plt.grid() + except: + print ('prerequisite: SEM.plot_final_slip()') + print ('input format: D_surf,x_surf,percent=5.0') + print ('*') + ## + + def plot_snapshots(self, vertical_fault=True, n_contour=5, dt_snap=1.0, contour=False, + cmap='magma',_asp=3, data_type='D',**kwargs): + + if not vertical_fault: print('Modify the script for non-vertical faults!!!') + + Dx_all = self.fault[data_type+'x'] + Dz_all = self.fault[data_type+'z'] + + X = self.fault['x'] + Y = self.fault['y'] + Z = self.fault['z'] + + X = X [ (Y == 0.0) ] + ext = [min(X), max(X), min(Z), max(Z)] + xi, zi = np.meshgrid(np.linspace(ext[0],ext[1], 1e3), np.linspace(ext[2],ext[3],1e3)) + + vmin=min(map(min, Dx_all)); vmax=max(map(max, Dx_all)) + vzmin=min(map(min, Dz_all)); vzmax=max(map(max, Dz_all)) + print ('Min and max of overall strike slip (m): ', '%.0e %.0e' % (vmin, vmax) ) + print ('Min and max of overall dip slip (m): ', '%.0e %.0e' % (vzmin, vzmax)) + + vmax = max(abs(vmin), vmax); #vmin = -vmax + vzmax = max(abs(vzmin), vzmax); vzmin = -vzmax + + plt.close('all') + for n, (D, Dz)in enumerate( zip(Dx_all, Dz_all)): + print ('*') + print ('t (s) = ', n*dt_snap) + print ('STRIKE SLIP - Min and max: ', '%.0e %.0e' % (min(D), max(D)) ) + print ('DIP SLIP - Min and max: ', '%.0e %.0e' % (min(Dz), max(Dz)) ) + ### + + fig = plt.figure(figsize=(8,3.5)); set_style('whitegrid', scale=0.8) + + tit = 'Snapshot at time (s): '+ str(n*dt_snap) + plt.suptitle(tit) + + ### strike slip + D = D [ (Y== 0.0) ] + y = gd( (X,Z), D, (xi,zi), method='linear') + y = np.flipud(y) + + ax = fig.add_subplot(121, aspect=_asp) + ax.set_title('Strike slip (m)') + ax.set_xlabel('Along strike (km)') + ax.set_ylabel('Along dip (km)') + im = ax.imshow(y, extent=[min(X), max(X), min(Z), max(Z)], \ + vmin=vmin, vmax=vmax, cmap=cmap) + + levels = np.linspace(0.0, vmax, num=n_contour) + if contour: ax.contour(y, levels, colors='white', alpha=0.5, extent=[min(X), max(X), min(Z), max(Z)],\ + origin='upper') + + c = fig.colorbar(im, ax=ax, orientation='horizontal', fraction=.1, shrink=1, pad=0.25, format='%.0e') + c.mappable.set_clim(vmin, vmax) + + + ### dip slip + Dz = Dz [ (Y == 0.0) ] + y = gd( (X,Z), Dz, (xi,zi), method='linear') + y = np.flipud(y) + + ax = fig.add_subplot(122) + ax.set_title('Dip slip (m)') + ax.set_xlabel('Along strike (km)') + ax.set_ylabel('Along dip (km)') + + im = ax.imshow(y, extent=[min(X), max(X), min(Z), max(Z)], \ + vmin=vzmin, vmax=vzmax, cmap=cmap) + + levels = np.linspace(0.0, vzmax, num=n_contour) + if contour: ax.contour(y, levels, colors='white', alpha=0.5, extent=[min(X), max(X), min(Z), max(Z)],\ + origin='upper') + # c = plt.colorbar(im, fraction=0.046, pad=0.1,shrink=0.25) + c = fig.colorbar(im, ax=ax, orientation='horizontal', fraction=.1, shrink=1, pad=0.25, format='%.0e') + c.mappable.set_clim(vzmin, vzmax) + + plt.tight_layout() + plt.show() + ### + ### + + + def plot_snapshots_in1figure(self, vertical_fault=True, n_contour=5, contour=False, + cmap='magma',_asp=3,jump=0, nrows=3, ncols=2, figsize=(8,8), ext=[], \ + dt_snap=1.0,tmax=24.0, 
ylim=-20.0, _vmax=-1.0, \ + save=False, hypo=False, info=False, plot_cbar=False, + figname='',Ngrid=1000,xticks=[],yticks=[],\ + data_type='D'): + + if not vertical_fault: print('Modify the script for non-vertical faults!!!') + + if jump==0: jump = int(dt_snap) + print ('jump', jump) + print ('dt_snap ', dt_snap) + + if (nrows*ncols < int(tmax/dt_snap)): print('No space for all snapshots!') + + # plot time interval is jump + Dx_all = self.fault[data_type+'x'][::jump] + Dz_all = self.fault[data_type+'z'][::jump] + + X = self.fault['x'] + Y = self.fault['y'] + Z = self.fault['z'] + + X = X [ (Y == 0.0) ] # assuming fault is located at y=0 + if ext==[]: ext = [min(X), max(X), min(Z), max(Z)] + + print ('Snapshot extent:', ext) + xi, zi = np.meshgrid(np.linspace(ext[0],ext[1], Ngrid), np.linspace(ext[2],ext[3],Ngrid)) + + vmin=min(map(min, Dx_all)); vmax=max(map(max, Dx_all)) + vzmin=min(map(min, Dz_all)); vzmax=max(map(max, Dz_all)) + if info: print ('Min and max of overall strike slip (m): ', '%.0e %.0e' % (vmin, vmax) ) + if info: print ('Min and max of overall dip slip (m): ', '%.0e %.0e' % (vzmin, vzmax)) + + vmax = max(abs(vmin), vmax); #vmin = -vmax + vzmax = max(abs(vzmin), vzmax); vzmin = -vzmax + + if _vmax > 0.0: vmax=_vmax + print ('vmax here: ', vmax) + + plt.close('all') + jj=0 + fig = plt.figure(figsize=figsize); set_style(whitegrid=False, scale=0.8) + for n, (D, Dz)in enumerate( zip(Dx_all[1:], Dz_all[1:])): + t_sim = (n+1)* dt_snap + + if info: print ('*') + if info: print ('t (s) = ', t_sim) + if info: print ('STRIKE SLIP - Min and max: ', '%.0e %.0e' % (min(D), max(D)) ) + ### + + # Subplot order: vertical arrangement + habitus=[] + for iax, icol in enumerate(np.arange(ncols)): + habitus += list(np.arange(iax+1, (nrows-1)*ncols+iax+1+1, ncols)) + habitus = habitus[jj] + + + ### strike slip + D = D [ (Y== 0.0) ] + y = gd( (X,Z), D, (xi,zi), method='linear') + y = np.flipud(y) + + ### + ax = fig.add_subplot(nrows,ncols,habitus, aspect=_asp) + tit = '%.0f' % (t_sim)+ ' s' + ax.set_title(tit) + ax.set_ylim(ylim, 0.0) + ax.set_yticks(yticks); ax.set_xticks(xticks) + plt.yticks(fontsize=9); plt.xticks(fontsize=9) + + # hypocenter + if hypo: ax.plot(self.x_hypo, self.z_hypo, c='k', marker='*', alpha=0.5) + im = ax.imshow(y, extent=ext, vmin=vmin, vmax=vmax, cmap=cmap) + + levels = np.linspace(0.0, vmax, num=n_contour) + if jj>=9: contour = True # to only show rupture extent in the last snap (change limit jj if necess.) + if contour: ax.contour(y, levels, colors='white', alpha=1, extent=ext,\ + origin='upper', linewidths=[1]) + + print ('jj, t_sim', jj, t_sim) + + jj += 1 + if t_sim >= tmax: break + ### + + plt.tight_layout() + if save: fig.savefig(figname, dpi=300) + plt.show() + ### + + def read_all_fault_stations(self, search_tip='/x*y_*dat',is_pickled=False,rep_store='./STORE/'): + ''' reads and pickles fault station outputs. 
Returns (x,z) coordinates, time and STF.''' + import glob + import pandas as pd + # + if not is_pickled: + # read and pickle + filenames = glob.glob(self.directory+search_tip) + all_x, all_z, all_SR, all_slip = [],[],[],[] + all_tau_xy, all_tau_yz, all_tau_yy = [],[],[] + # get all filenames of fault stations + print ('Reading fault station outputs...') + for fname in filenames: + # coordinates + _beg = fname.find('x_')+2 + _end = fname.find('_y') + _xsta = float(fname[_beg:_end]) + + _beg = fname.find('y_')+2 + _end = fname.find('.dat') + _zsta = float(fname[_beg:_end]) + # x: along strike; z: along dip + data = pd.read_csv(fname, names=('t','Dx','Vx','tau_xy', 'Dz', 'Vz', 'tau_yz', 'tau_yy' ), \ + delim_whitespace=True, header=20) + # slip-rate fnc + _t = data['t'].values + _Vx = data['Vx'].values + _Vz = data['Vz'].values + _SR = (_Vx**2+ _Vz**2)** 0.5 + _Dx = data['Dx'].values + _Dz = data['Dz'].values + _slip = (_Dx**2+ _Dz**2)** 0.5 + _tau_xy = data['tau_xy'].values + _tau_yz = data['tau_yz'].values + _tau_yy = data['tau_yy'].values + all_x.append(_xsta); all_z.append(_zsta) + all_SR.append(_SR); all_slip.append(_slip) + all_tau_xy.append(_tau_xy) + all_tau_yz.append(_tau_yz) + all_tau_yy.append(_tau_yy) + # pickle + print ('Pickling ...') + try: os.makedirs(rep_store) + except: pass + np.save(rep_store+'/all_x.npy', all_x) + np.save(rep_store+'/all_z.npy', all_z) + np.save(rep_store+'/all_t.npy', _t) + np.save(rep_store+'/all_SR.npy', all_SR) + np.save(rep_store+'/all_slip.npy', all_slip) + np.save(rep_store+'/all_tau_xy.npy', all_tau_xy) + np.save(rep_store+'/all_tau_yz.npy', all_tau_yz) + np.save(rep_store+'/all_tau_yy.npy', all_tau_yy) + else: + print ('Opening pickle box ...') + all_x = np.load(rep_store+'/all_x.npy') + all_z = np.load(rep_store+'/all_z.npy') + _t = np.load(rep_store+'/all_t.npy') + all_SR = np.load(rep_store+'/all_SR.npy') + all_slip = np.load(rep_store+'/all_slip.npy') + all_tau_xy = np.load(rep_store+'/all_tau_xy.npy') + all_tau_yz = np.load(rep_store+'/all_tau_yz.npy') + all_tau_yy = np.load(rep_store+'/all_tau_yy.npy') + # + print ('Done!') + self.faultsta_x = all_x + self.faultsta_z = all_z + self.fault_stf_t = _t + self.fault_stf_SR = all_SR + self.faultsta_slip = all_slip + self.faultsta_tau_xy = all_tau_xy + self.faultsta_tau_yz = all_tau_yz + self.faultsta_tau_yy = all_tau_yy + ## + + # def plot_STF_and_spectrum(self,plot_source=True,jump_ko=10,\ + # pow_ko=20.0,fmax=10.0, \ + # is_smooth=True,is_padding=False,\ + # is_pickled=False,rep_store='./STORE/'): + # ''' computes STF and its spectrum. compute Mw. 
plots time and frequncy functions + # with Brune\'s and Ji and Archuleta's spectra.''' + # if not is_pickled: + # try: + # import numpy.ma as ma + # from scipy import fft + # xx, zz = np.array(self.faultsta_x), np.array(self.faultsta_z) + # t = self.fault_stf_t + # fault_mu = self.faultsta_mu + # dx, dz = max(np.diff( np.sort(xx)) )* 1e3, max(np.diff( np.sort(zz)) )* 1e3 + # elemsize = dx* dz + # print ('Element size, dx, dz (all in m): ', elemsize, dx, dz) + # # STF + # print ('Computing STF ...') + # slip_rates = np.array(self.fault_stf_SR) + # slips = np.array(self.faultsta_slip) + # STF = np.zeros(t.shape) + # moment = np.zeros(t.shape) + # for mu, slip_rate, slip in zip(fault_mu, slip_rates, slips): + # STF += slip_rate* elemsize* mu + # moment += slip* elemsize* mu + # # PADDING + # dt = t[1]- t[0] + # if is_padding: + # self.t_STF = np.arange(20*len(t))* dt + # self.STF = np.zeros(self.t_STF.shape[0]) + # self.STF[:len(STF)] = STF + # self.moment = np.zeros(self.t_STF.shape[0])+ max(moment) + # self.moment[:len(moment)] = moment + # else: + # self.STF = STF + # self.t_STF = t + # self.moment = moment + # # + # print ('Computing spectrum ...') + # # Spectrum + # df=1.0/ dt + # N = len(self.t_STF) + # _f = np.linspace(0.0, 1.0/(2.0*dt), N//2) + # spec = abs(fft.fft(self.STF))* dt + # specMoment = abs(fft.fft(self.moment ))* dt + # self.specf = _f[1:] + # self.specD = spec[:int(N/2)-1] + # self.specMoment = specMoment[:int(N/2)-1] + # Mw = (np.log10(self.specD[0])- 9.1)/ 1.5 + # print('Mw: ', Mw) + # self.M0 = self.specD[0] + # self.Mw = Mw + # # + # print ('Smoothening by konno-ohmachi ...') + # # smooth by konno-ohmachi + # specD_sm = self.specD + # if is_smooth: + # cdt = (self.specf <= fmax) + # f = self.specf[cdt][::jump_ko] + # specD_sm = ko(self.specD[cdt][::jump_ko], df, pow_ko) + # self.specD_sm = specD_sm + # self.specf_sm = f + # # brune's + # fc1, fc2, spec_JA19 = get_JA19_spectrum(Mw=self.Mw, f=self.specf) + # self.specD_ja = self.specD[0]*spec_JA19 + # fc = (fc1* fc2)** 0.5 + # print ('fc: ', fc) + # self.brune = self.specD[0]/(1.0+(self.specf/fc)**2) + # self.fc = np.array([fc,fc1,fc2]) + # # + # print ('Pickling ...') + # try: os.makedirs(rep_store) + # except: pass + # np.save(rep_store+'/t_STF.npy', self.t_STF) + # np.save(rep_store+'/STF.npy', self.STF) + # np.save(rep_store+'/moment.npy', self.moment ) + # np.save(rep_store+'/specMoment.npy', self.specMoment) + # np.save(rep_store+'/specf_sm.npy', self.specf_sm) + # np.save(rep_store+'/specD_sm.npy', self.specD_sm) + # np.save(rep_store+'/specD.npy', self.specD) + # np.save(rep_store+'/specf_ja.npy', self.specf) + # np.save(rep_store+'/specD_ja.npy', self.specD[0]*spec_JA19) + # np.save(rep_store+'/specD_brune.npy', self.brune) + # np.save(rep_store+'Mw.npy', np.array([self.Mw])) + # np.save(rep_store+'fc.npy', self.fc) + # except: + # print ('prerequisite: read_all_fault_stations') + # print ('input format: get_magnitude(self,all_x,all_z,t,all_SR,fault_mu,plot_source=True,jump_ko=10,\ + # pow_ko=20.0,fmax=10.0, \ + # is_smooth=True,is_padding=False)') + # else: + # print ('Opening pickle box ...') + # self.t_STF = np.load(rep_store+'/t_STF.npy') + # self.STF = np.load(rep_store+'/STF.npy') + # self.moment = np.load(rep_store+'/moment.npy') + # self.specMoment = np.load(rep_store+'/specMoment.npy') + # if is_smooth: self.specf_sm = np.load(rep_store+'/specf_sm.npy') + # if is_smooth: self.specD_sm = np.load(rep_store+'/specD_sm.npy') + # self.specD = np.load(rep_store+'/specD.npy') + # self.specf = 
np.load(rep_store+'/specf_ja.npy') + # self.specD_ja = np.load(rep_store+'/specD_ja.npy') + # self.brune = np.load(rep_store+'/specD_brune.npy') + # self.Mw = np.load(rep_store+'/Mw.npy') + # self.fc = np.load(rep_store+'/fc.npy') + # print('Mw: ', self.Mw) + # print('Done!') + # if plot_source: + # plt.close('all') + # print ('Plotting ...') + # plt.subplot(221) + # plt.xlim(-1,20) + # plt.plot(self.t_STF,self.STF,'k',lw=1) + # plt.grid() + # plt.xlabel('Time (s)'); plt.ylabel('Moment rate (Nm/s)') + # # + # plt.subplot(222) + # plt.xlim(1e-3,fmax) + # try: plt.loglog(self.specf_sm, self.specD_sm,'k') + # except: plt.loglog(self.specf, self.specD,'k') + # plt.loglog(self.specf, self.brune, c='royalblue', lw=1.0, linestyle='-.', label='w2 model') + # plt.loglog(self.specf, self.specD_ja, c='royalblue', lw=1.0, linestyle=':', label='JA19_2S') + # plt.axvline(x=self.fc[1], c='k', linestyle=':', alpha=0.5) + # plt.axvline(x=self.fc[2], c='k', linestyle=':', alpha=0.5) + # plt.xlabel('Frequency (Hz)') + # # + # plt.subplot(223) + # plt.plot(self.t_STF,self.moment,'k',lw=1) + # plt.grid() + # # + # plt.subplot(224) + # plt.loglog(self.specf,self.specMoment,'k') + # plt.grid() + # # + # plt.show() + # ## + + def plot_rupture_time(self, constep=1.0,contour=False,cmap='Blues',Ncmap=6, \ + tmax=0.0,xy_lim=None,vmin=0,vmax=10,**kwargs): + import pandas as pd + # read file + try: + # only for a single fault + fname = self.directory+'/RuptureTime_Fault1' + data = pd.read_csv(fname, names=('x','z','trupt'), \ + delim_whitespace=True, header=10) + Trupt = data['trupt'].values + Trupt_not_overwritten = Trupt.copy() + X = data['x'].values/1.e3 + Z = data['z'].values/-1.e3 + except: + print ('No RuptureTime_Fault file, reading from snapshots ...') + Trupt = self.fault['Trup'][-1] # from last snapshot file + X, Z = self.fault['x'], self.fault['z'] + print ('Done!') + else: + print ('ERROR in reading file!') + if tmax>0.0 : vmax = tmax + if 'tmax' in kwargs: vmax = kwargs['tmax'] + print ('Max of rupture time (s): ', '%.1f' % (vmax)) + # for plotting purposes + Trupt[Trupt >= vmax] = 0.0 + ext = [min(X), max(X), min(Z), max(Z)] + xi, zi = np.meshgrid(np.linspace(ext[0],ext[1],1000),np.linspace(ext[2],ext[3],1000)) + y = gd( (X,Z), Trupt, (xi,zi), method='linear') + y = np.flipud(y) + # + fig = plt.figure(figsize=(6,3)); set_style(whitegrid=False) + ax = fig.add_subplot(111) + ax.set_xlabel('Along strike (km)') + ax.set_ylabel('Along dip (km)') + if xy_lim==None: xy_lim=ext + ax.set_xlim(xy_lim[0], xy_lim[1]); ax.set_ylim(xy_lim[2], xy_lim[3]) + try: ax.scatter(self.x_hypo, self.z_hypo, marker='*', color='r', s=100) + except: pass + cmap = plt.cm.get_cmap(cmap, Ncmap) + im = ax.imshow(y, extent=ext,vmin=vmin, vmax=vmax, cmap=cmap,aspect='auto') + levels = np.linspace(vmin, vmax, num=int(vmax/constep)) + if contour: ax.contour(y, levels, colors='snow', alpha=0.5, extent=ext, origin='upper') + c = plt.colorbar(im, fraction=0.046, pad=0.1,shrink=0.75) + c.set_label('Rupture time (s)') + c.mappable.set_clim(vmin, vmax) + plt.tight_layout() + plt.show() + return + ## + + def get_rupture_speed(self,info=False,eps=0.5,is_Vrup_pickled=True,rep_store='./STORE/', \ + plot_rupture_speed=True,cmap='RdBu_r',Ncmap=6,xy_lim=None,\ + Nsample=1000,normalise_by_Vs=False,vmin=0.0,vmax=2.0,\ + aspect='auto'): + if not is_Vrup_pickled: + Vruptlist = [] + X, Z, Trupt = self.fault['x'], self.fault['z'], self.fault['Trup'][-1] # from last snapshot file + Vs = self.fault['Vs']/1e3 # km/s + # ~ to neglect nucleation process and 
non-broken parts + cdt = (Trupt > eps) + print ('Trupt[cdt].shape', Trupt[cdt].shape) + for i, (_trup, _Vs) in enumerate( zip(Trupt[cdt], Vs[cdt]) ): + _x = X[cdt][i] + _z = Z[cdt][i] + if info: print('i, _x, _z, _trup', i, _x, _z, _trup) + # to avoid too close points on nearly the same time contour + cdt1 = (Trupt < _trup- 0.25) + points_behind = (X[cdt1]-_x)**2+ (Z[cdt1]-_z)**2 + cdt2 = (points_behind == min(points_behind)) + if info: print ('closest point: ', X[cdt1][cdt2], Z[cdt1][cdt2] ) + dist = points_behind[cdt2]**0.5 + if info: print ('dist (km)', dist) + delta_t = _trup- Trupt[cdt1][cdt2] + if info: print ('delta_t (s)', delta_t) + Vruptlist.append ( max( dist/delta_t ) ) + if info: print ('Vrupt (km/s), Vrupt/Vs found: ', min(dist/delta_t), min(dist/delta_t)/_Vs) + if info: print ('*') + # + if max( dist/delta_t ) > 10.0: + print ('*') + print ('evaluated point: ', _x, _z, _trup) + print ('closest point: ', X[cdt1][cdt2], Z[cdt1][cdt2] ) + print ('dist', dist) + print ('delta_t', delta_t) + print ('Vrupt found: ', (dist/delta_t)) + print ('*') + Vrupt = np.zeros(Trupt.shape) + Vrupt[cdt] = np.array(Vruptlist) + self.Vrup = Vrupt # in km/s + self.Vrup_normalised = self.Vrup/ Vs + print ('Pickling ...') + try: os.makedirs(rep_store) + except: pass + np.save(rep_store+'/Vrup.npy', self.Vrup) + np.save(rep_store+'/Vrup_normalised.npy', self.Vrup_normalised) + else: + print('Opening pickle box ...') + print ('in the folder ', rep_store) + try: + self.Vrup = np.load(rep_store+'/Vrup.npy') + self.Vrup_normalised = np.load(rep_store+'/Vrup_normalised.npy') + except: + print('No \'Vrup.npy\' or \'Vrup_normalised.npy\' data found?') + print ('Rupture speed ready!') + + if plot_rupture_speed: + X, Z = self.fault['x'], self.fault['z'] + cmap = plt.cm.get_cmap(cmap, Ncmap) + # + data = self.Vrup + if normalise_by_Vs: data = self.Vrup_normalised + ext = [min(X), max(X), min(Z), max(Z)] + xi, zi = np.meshgrid(np.linspace(ext[0],ext[1],Nsample),np.linspace(ext[2],ext[3],Nsample)) + y = gd( (X,Z), data, (xi,zi), method='linear') + y = np.flipud(y) + # + print ('Plotting ...') + plt.close('all') + fig = plt.figure(figsize=(6,3)) + ax = fig.add_subplot(111) + ax.set_xlabel('Along strike (km)') + ax.set_ylabel('Along dip (km)') + if xy_lim==None: xy_lim=ext + ax.set_ylim(xy_lim[2],xy_lim[3]) + ax.set_xlim(xy_lim[0],xy_lim[1]) + try: ax.scatter(self.x_hypo, self.z_hypo, marker='*', color='r', s=100) + except: pass + im = ax.imshow(y, extent=ext, vmin=vmin, vmax=vmax, cmap=cmap, aspect=aspect) + c = plt.colorbar(im, fraction=0.046, pad=0.1,shrink=0.75) + if normalise_by_Vs: c.set_label('Rupture speed (Vs)') + else: c.set_label('Rupture speed (km/s)') + c.mappable.set_clim(vmin, vmax) + plt.tight_layout() + plt.show() + # + return + ## + + + def plot_stress_drop(self,Nsample=1000,cmap='inferno',Ncmap=4,vmin=0,vmax=20): + ''' calculates and plots stress drop by using snapshot data.''' + + try: + from scipy.interpolate import griddata as gd + + # stress drop = initial stress - final stress + self.stress_drop = self.fault['Tx'][0]- self.fault['Tx'][-1] + self.stress_drop /= 1e6 # MPa + # average stress drop on ruptured parts + cdt = (self.stress_drop > 0.0) + data = self.stress_drop[cdt] + X = self.fault['x'][cdt] + Z = self.fault['z'][cdt] + print ('Average stress drop (MPa): ', np.average(data)) + print ('Max point location (x,z): ',X [data==max(data)], Z [data==max(data)]) + # interpolate + ext = [min(X), max(X), min(Z), max(Z)] + xi, zi = np.meshgrid(np.linspace(ext[0],ext[1], Nsample), 
np.linspace(ext[2],ext[3],Nsample)) + y = gd( (X,Z), data, (xi,zi), method='linear') + y = np.flipud(y) + # + fig = plt.figure(figsize=(6,3)) + ax = fig.add_subplot(111, aspect='auto') + ax.set_xlabel('Along strike (km)') + ax.set_ylabel('Along dip (km)') + cmap = plt.cm.get_cmap(cmap, Ncmap) # 11 discrete colors + # + im = ax.imshow(y, extent=ext, vmin=vmin, vmax=vmax, cmap=cmap) + c = plt.colorbar(im, fraction=0.046, pad=0.05, shrink=0.5, label='Stress drop (MPa)') + c.mappable.set_clim(vmin, vmax) + plt.grid() + except: + print ('prerequisite: self.fault data') + print ('input format: plot_stress_drop(self,Nsample=1000,cmap=\'inferno\',Ncmap=4,vmin=0,vmax=20)') + ## + + + def plot_rupture_front(self, **kwargs): + + # Final state + Trupt = self.fault['Trup'][-1] + Tproz = self.fault['Tproz'][-1] + X = self.fault['x'] + Y = self.fault['y'] + Z = self.fault['z'] + + # strike-slip fault + # from top + # Only the horizontal fault line + X = X [ (Z == 0.0) ] + Trupt = Trupt [ (Z == 0.0)] + Tproz = Tproz [ (Z == 0.0)] + Xs, Tps = zip(*sorted(zip(X,Tproz))) + Xs, Ts = zip(*sorted(zip(X,Trupt))) + + max_arrival = X [(Trupt==0.0) & (X > 0.0)] + maxpt = max(X) + if len(max_arrival) > 0: maxpt = min(max_arrival) + xmax = max(X); xmin = min(X) + if 'xmax' in kwargs: xmax= kwargs['xmax'] + if 'xmin' in kwargs: xmin= kwargs['xmin'] + + ymax = max(max(Ts), max(Tps)); ymin = 0.0 + if 'ymax' in kwargs: ymax = kwargs['ymax'] + if 'ymin' in kwargs: ymin = kwargs['ymin'] + + ### + plt.close('all') + fig = plt.figure(); set_style(whitegrid=True, scale=0.8) + ax = fig.add_subplot(111) + ax.set_xlabel('Horizontal distance (km)') + ax.set_ylabel('Time (s)') + ax.set_xlim(xmin, xmax); ax.set_ylim(ymin, ymax) + ax.plot(Xs, Ts, color='k', label='Front') + ax.plot(Xs, Tps, color='royalblue', label='Tail') + tit = 'Rupture front reaches to \n a max distance of '+ str('%.1f' % (maxpt)) + ax.set_title(tit) + ax.legend(loc='best') + plt.tight_layout(); plt.show() + ### + + def plot_fault_station_data_from_snaphots(self, xpt=0.0, ypt=0.0, zpt=0.0): + + X = self.fault['x'] + Y = self.fault['y'] + Z = self.fault['z'] + Dx = self.fault['Dx'] + Dz = self.fault['Dz'] + Vx = self.fault['Vx'] + Vz = self.fault['Vz'] + Tx = self.fault['Tx'] + Ty = self.fault['Ty'] + Tz = self.fault['Tz'] + Sg = self.fault['Sg'] + + # Nearest station + dist = [(_x-xpt)**2+ (_y-ypt)**2+ (_z-zpt)**2 for _x,_y,_z in zip(X,Y,Z)] + idx = np.where(dist == min(dist))[0][0] + print('Found coordinates (x,y,z): ', X[idx], Y[idx], Z[idx]) + print('Index: ', idx) + + plt.close('all') + fig = plt.figure(figsize=(10,6)); set_style(whitegrid=True, scale=0.8) + plt.subplots_adjust(hspace=0.45, wspace=0.42, left=0.1, right=0.960) + + ax = fig.add_subplot(231) + ax.set(title='Strike-slip (m)') + ax.plot([D[idx] for D in Dx], c='k') + + ax = fig.add_subplot(234) + ax.set(title='Dip-slip (m)') + ax.plot([D[idx] for D in Dz], c='k') + + ax = fig.add_subplot(232) + ax.set(title='Vx (m/s)') + ax.plot([V[idx] for V in Vx], c='k') + + ax = fig.add_subplot(235) + ax.set(title='Vz (m/s)') + ax.plot([V[idx] for V in Vz], c='k') + + ax = fig.add_subplot(233) + ax.set(title='Stress (MPa)') + ax.plot([T[idx]/1.e6 for T in Tx], label='Tx', color='red') + ax.plot([T[idx]/1.e6 for T in Ty], label='Ty', color='b') + ax.plot([-T[idx]/1.e6 for T in Tz], label='-Tz', color='k') + ax.legend(loc='lower right', bbox_to_anchor=(1.0,-1.4)) + plt.show() + ### + + def get_and_plot_seismo(self, stations, filtering=False, fmin=0.0, fmax=1.0, + plot_spectogram=False, plot_timehistories=True, 
+ is_binary=False, is_acceleration=False,suffix='H'): + + from obspy.core import Trace + + + # change spectogram colorbar plot + for sta in stations: + + plt.close('all') + + # x direction + fname = self.directory+'/'+sta+'.'+suffix+'XX.semv' + if is_binary: + with open(fname,'rb') as f: Vx = np.fromfile(f, dtype=np.float32) + t = np.arange(len(Vx))* self.dt + else: + t = np.genfromtxt(fname, usecols=0) + Vx = np.genfromtxt(fname, usecols=1) + tr_x = Trace() + tr_x.data = Vx + tr_x.time = t + tr_x.stats.delta = t[1]-t[0] + tr_x.stats.starttime = t[0] + if filtering: tr_x.filter(type='lowpass', freq=fmax, corners=2, zerophase=True) + if is_acceleration: tr_x.differentiate() + + # y direction + fname = self.directory+'/'+sta+'.'+suffix+'XY.semv' + if is_binary: + with open(fname,'rb') as f: Vy = np.fromfile(f, dtype=np.float32) + else: + Vy = np.genfromtxt(fname, usecols=1) + tr_y = Trace() + tr_y.data = Vy + tr_y.time = t + tr_y.stats.delta = t[1]-t[0] + tr_y.stats.starttime = t[0] + if filtering: tr_y.filter(type='lowpass', freq=fmax, corners=2, zerophase=True) + if is_acceleration: tr_y.differentiate() + + # z direction + fname = self.directory+'/'+sta+'.'+suffix+'XZ.semv' + if is_binary: + with open(fname,'rb') as f: Vz = np.fromfile(f, dtype=np.float32) + else: + Vz = np.genfromtxt(fname, usecols=1) + tr_z = Trace() + tr_z.data = Vz + tr_z.time = t + tr_z.stats.delta = t[1]-t[0] + tr_z.stats.starttime = t[0] + if filtering: tr_z.filter(type='lowpass', freq=fmax, corners=2, zerophase=True) + if is_acceleration: tr_z.differentiate() + + if plot_timehistories: + fig = plt.figure(figsize=(8,4)); #set_style(whitegrid=True, scale=0.8) + plt.subplots_adjust(top=0.8) + + ax = fig.add_subplot(311) + tit = 'Velocity-time histories (m/s)' + if is_acceleration: tit='Acceleration (m/s/s)' + ax.set_title(tit) + ax.set_ylabel('x') + ax.plot(t, tr_x.data, color='k', lw=0.7) + ax = fig.add_subplot(312) + ax.set_ylabel('y') + ax.plot(t, tr_y.data, color='k', lw=0.7) + ax = fig.add_subplot(313) + ax.set_ylabel('z') + ax.plot(t, tr_z.data, color='k', lw=0.7) + plt.tight_layout(); plt.show() + + if plot_spectogram: + plt.close('all') + tmin, tmax = min(t), max(t) + print (tmin, tmax) + fig = plt.figure(); set_style(whitegrid=True, scale=0.7) + ax1 = fig.add_subplot(321) + tr_x.spectrogram(axes=ax1) + ax1.set_ylim(fmin, fmax) + ax1.set_xlim(tmin, tmax) + ax1.set_ylabel('Frequency (Hz)') + ax2 = fig.add_subplot(322) + mappable = ax1.images[0] + plt.colorbar(mappable=mappable, cax=ax) + + ax1 = fig.add_subplot(323) + tr_y.spectrogram(axes=ax1) + ax1.set_ylim(fmin, fmax) + ax1.set_xlim(tmin, tmax) + ax1.set_ylabel('Frequency (Hz)') + ax2 = fig.add_subplot(324) + mappable = ax1.images[0] + plt.colorbar(mappable=mappable, cax=ax2) + + ax1 = fig.add_subplot(325) + tr_z.spectrogram(axes=ax1) + ax1.set_ylim(fmin, fmax) + ax1.set_xlim(tmin, tmax) + ax1.set_ylabel('Frequency (Hz)') + ax1.set_xlabel('Time (s)') + ax2 = fig.add_subplot(326) + mappable = ax1.images[0] + plt.colorbar(mappable=mappable, cax=ax2) + + plt.tight_layout(); plt.show() + ### + + return tr_x, tr_y, tr_z + ### + + + def plot_fault_station_data(self, filename=None): + + import pandas as pd + + # x: along strike; z: along dip + fname = self.directory+ filename + data = pd.read_csv(fname, names=('t','Dx','Vx','tau_xy', 'Dz', 'Vz', 'tau_yz', 'tau_yy' ), \ + delim_whitespace=True, header=20) + t = data['t'].values + Dx = data['Dx'].values + Vx = data['Vx'].values + tau_xy = data['tau_xy'].values + Dz = data['Dz'].values + Vz = data['Vz'].values + 
tau_yz = data['tau_yz'].values + tau_yy = data['tau_yy'].values + + plt.close('all') + fig = plt.figure(figsize=(10,6)); set_style(whitegrid=True, scale=0.8) + plt.subplots_adjust(hspace=0.45, wspace=0.42, left=0.1, right=0.960) + + tit = 'Fault station (x,y) '+ filename+'\n' + fig.suptitle(tit) + ax = fig.add_subplot(231) + ax.set(title='Strike-slip (m)') + ax.plot(t, Dx, c='k') + + ax = fig.add_subplot(234) + ax.set(title='Dip-slip (m)') + ax.plot(t, Dz, c='k') + + ax = fig.add_subplot(232) + ax.set(title='Vx (m/s)') + ax.plot(t, Vx, c='k') + + ax = fig.add_subplot(235) + ax.set(title='Vz (m/s)') + ax.plot(t, Vz, c='k') + + ax = fig.add_subplot(233) + ax.set(title='Stress (MPa)') + ax.plot(tau_xy, label=r'$\tau_{xy}$', color='red') + ax.plot(tau_yz, label=r'$\tau_{yz}$', color='b') + ax.plot(-tau_yy, label=r'$-\tau_{yy}$', color='k') + ax.legend(loc='lower right', bbox_to_anchor=(1.0,-1.4)) + plt.show() + ### + + + + def plot_slip_along_fault(self, search_tip='/*y_0.0*dat'): + + ''' I use this subroutine to plot surface slip''' + + import glob + import pandas as pd + + filenames = glob.glob(self.directory+search_tip) + strike_slip = []; dip_slip = []; position = [] + + # get all filenames of fault stations + for fname in filenames: + # x: along strike; z: along dip + data = pd.read_csv(fname, names=('t','Dx','Vx','tau_xy', 'Dz', 'Vz', 'tau_yz', 'tau_yy' ), \ + delim_whitespace=True, header=20) + + niter = len(data['Dx'].values) + Dx = data['Dx'].values[:niter] + Dz = data['Dz'].values[:niter] + + dum = max(abs(Dx)) + strike_slip.append(dum* np.sign(dum)) + dum = max(abs(Dz)) + dip_slip.append(dum* np.sign(dum)) + + # where this station is: + num1 = fname.find('x_'); + num2 = fname.find('_y'); + position.append(float(fname[num1+2:num2])) + ### + + # Order lists by strike position + ind = sorted(range(len(position)), key=lambda k: position[k]) + strike_slip = np.array(strike_slip) + dip_slip = np.array(dip_slip) + position = np.array(position) + set_style(whitegrid=True) + ### + + # Plot + plt.close('all') + plt.plot(position[ind], strike_slip[ind], c='red', label='along strike') + plt.plot(position[ind], dip_slip[ind], c='k', label='along dip') + plt.xlabel('Fault strike (km)') + plt.ylabel('Surface slip (m)') + plt.tight_layout() + plt.show() + ### + + + def get_ground_motion_data(self,net=None,suffix='C',f_LP=3.0, f_HP=None, infoint=100,\ + is_pickled=False,rep_store='./STORE/', \ + is_acceleration=False,ista_beg=0,ista_end=None): + + """reads output seismograms and computes PGV and PGV after LP filtering. 
+ stores GMM into dataframe and seismograms in np array.""" + + import pandas as pd + + if not is_pickled: + # Stations + fname = self.directory+'/output_list_stations.txt' + stas = np.genfromtxt(fname,usecols=0,dtype=None,encoding='utf-8') + nets = np.genfromtxt(fname,usecols=1,dtype=None,encoding='utf-8') + xx = np.genfromtxt(fname, usecols=2) # along strike + yy = np.genfromtxt(fname, usecols=3) # off-fault + zz = np.genfromtxt(fname, usecols=4) + # make dataframe for seismograms + df = pd.DataFrame() + names = [net+ '.'+ sta for sta, net in zip(stas, nets)] + df['name'] = names + df['x'] = xx/ 1e3 + df['y'] = yy/ 1e3 + df['z'] = zz/ 1e3 + # epicentral distance (model 1) + self.y_hypo = 0.0 + try: + _dist = (df['x'].values- self.x_hypo)**2 + (df['y'].values- self.y_hypo)**2 + df['r_epi'] = _dist** 0.5 + except: + print('Hypocenter location is required for r_epi!') + # + print ('Computing PGV and PGA ...') + PGA_pol, PGV_pol, PGAs, PGVs = [], [], [], [] + sismos = [] + pol = np.array( ['x','y','z']) + sismos_x, sismos_y, sismos_z = [], [], [] + for i, sta in enumerate(df['name'].values[:]): + + mod = int(len(df)*infoint/ 100) + # print ('i, mod: ', i, mod) + if (i % mod == 0): print ('i, % done: ', i, '%.0f' % (i/mod*infoint) ) + + # acceleration x,y,z + tr_x, PGVx, PGAx = get_seismo_fast_1component(self, sta, filtering=True, fmax=f_LP, fmin=f_HP, + is_binary=True, is_acceleration=is_acceleration,suffix=suffix, compo='XX.semv') + tr_y, PGVy, PGAy = get_seismo_fast_1component(self, sta, filtering=True, fmax=f_LP, fmin=f_HP, + is_binary=True, is_acceleration=is_acceleration,suffix=suffix, compo='XY.semv') + tr_z, PGVz, PGAz = get_seismo_fast_1component(self, sta, filtering=True, fmax=f_LP, fmin=f_HP, + is_binary=True, is_acceleration=is_acceleration,suffix=suffix, compo='XZ.semv') + _PGAs = np.array( [PGAx, PGAy, PGAz] ) + _PGA = max(_PGAs) + _pol = pol[_PGAs == _PGA] + PGAs.append(_PGA) + PGA_pol.append(_pol) + + _PGVs = np.array( [PGVx, PGVy, PGVz] ) + _PGV = max(_PGVs) + _pol = pol[_PGVs == _PGV] + PGVs.append(_PGV) + PGV_pol.append(_pol) + + sismos_x.append(tr_x.data) # velocities + sismos_y.append(tr_y.data) # velocities + sismos_z.append(tr_z.data) # velocities + # + df['PGA (g)'] = [pga/9.81 for pga in PGAs] + df['PGV (m/s)'] = PGVs + df['PGA_polar'] = PGA_pol + df['PGV_polar'] = PGV_pol + NT = sismos_x[0].shape[0] + Nsta = len(sismos_x) + sismos_store = np.zeros((Nsta, NT, 3)) + sismos_store[:,:, 0] = np.array(sismos_x) + sismos_store[:,:, 1] = np.array(sismos_y) + sismos_store[:,:, 2] = np.array(sismos_z) + print ('Pickling ...') + try: os.makedirs(rep_store) + except: pass + df.to_pickle(rep_store+'/df_ground_motion') + np.save(rep_store+'/sismos_store.npy',sismos_store) + else: + print ('Opening pickle box ...') + df = pd.read_pickle(rep_store+'/df_ground_motion') + sismos_store = np.load(rep_store+'/sismos_store.npy') + ## + print ('Done!') + return df, sismos_store + ## + + + + def plot_PGV_and_polarisation(self,df,Ncmap=21,cmap='rainbow',vmin=0,vmax=3): + + fig = plt.figure(figsize=(6,6)) + ax = plt.subplot(211) + cmap = plt.cm.get_cmap(cmap,Ncmap) + im = plt.scatter(df['x'], df['y'], c=df['PGV (m/s)'], cmap=cmap, vmin=vmin,vmax=vmax) + plt.scatter(self.x_hypo, 0.0, marker='*', color='snow') + plt.title('Top view -- PGV (m/s)') + plt.ylabel('Off-fault distance (km)') + cbar_ax = fig.add_axes([0.85, 0.6, 0.025, 0.25]) + fig.colorbar(im, cax=cbar_ax) + ## + plt.subplot(212, sharex=ax) + cdt = (df['PGV_polar'] == 'x') + im = plt.scatter(df[cdt]['x'], df[cdt]['y'], c='peru', 
label='FP', alpha=0.75) + # + cdt = (df['PGV_polar'] == 'y') + im = plt.scatter(df[cdt]['x'], df[cdt]['y'], c='violet', label='FN', alpha=0.75) + # + cdt = (df['PGV_polar'] == 'z') + im = plt.scatter(df[cdt]['x'], df[cdt]['y'], c='k', label='Z') + plt.legend(bbox_to_anchor=(1, 0.5)) + plt.hlines(y=0, xmin=-20,xmax=25, color='k') + plt.scatter(self.x_hypo, 0.0, marker='*', color='snow') + plt.title('Top view -- PGV polarisation') + plt.xlabel('Along strike (km)'); plt.ylabel('Off-fault distance (km)') + fig.subplots_adjust(right=0.8, hspace=0.3) + # +### + def plot_PGA_and_polarisation(self,df,Ncmap=21,cmap='rainbow',vmin=0,vmax=0.1): + + fig = plt.figure(figsize=(6,6)) + ax = plt.subplot(211) + cmap = plt.cm.get_cmap(cmap,Ncmap) + im = plt.scatter(df['x'], df['y'], c=df['PGA (g)'], cmap=cmap, vmin=vmin,vmax=vmax) + plt.scatter(self.x_hypo, 0.0, marker='*', color='snow') + plt.title('Top view -- PGV (m/s)') + plt.ylabel('Off-fault distance (km)') + cbar_ax = fig.add_axes([0.85, 0.6, 0.025, 0.25]) + fig.colorbar(im, cax=cbar_ax) + ## + plt.subplot(212, sharex=ax) + cdt = (df['PGA_polar'] == 'x') + im = plt.scatter(df[cdt]['x'], df[cdt]['y'], c='peru', label='FP', alpha=0.75) + # + cdt = (df['PGA_polar'] == 'y') + im = plt.scatter(df[cdt]['x'], df[cdt]['y'], c='violet', label='FN', alpha=0.75) + # + cdt = (df['PGA_polar'] == 'z') + im = plt.scatter(df[cdt]['x'], df[cdt]['y'], c='k', label='Z') + plt.legend(bbox_to_anchor=(1, 0.5)) + plt.hlines(y=0, xmin=-20,xmax=25, color='k') + plt.scatter(self.x_hypo, 0.0, marker='*', color='snow') + plt.title('Top view -- PGA polarisation') + plt.xlabel('Along strike (km)'); plt.ylabel('Off-fault distance (km)') + fig.subplots_adjust(right=0.8, hspace=0.3) + # +### + + + + def plot_STF_and_spectrum(self, jump_ko=10,\ + pow_ko=20.0,fmax=10.0, lims=[-0.1,20.0], \ + is_smooth=True,is_padding=False, rep_store='./STORE/', is_plot=True): + ''' computes STF and its spectrum. requires STF and dt. compute spectrum + with Brune\'s and Ji and Archuleta's spectra.''' + from scipy import fft + + print ("*** TO CORRECT: for overstressed nucleation, correct initialpeak in STF! 
***") + + # use padding only for spectrum calculation + dt = self.dt + self.time = np.arange(len(self.STF))* dt + if is_padding: + time = np.arange(20*len(self.STF))* dt + STF = np.zeros(time.shape) + STF[:len(self.STF)] = self.STF + else: + STF = self.STF + time = np.arange(len(self.STF))* dt + # + print ('Computing spectrum ...') + # Spectrum + df = 1.0/ dt + N = len(STF) + _f = np.linspace(0.0, 1.0/(2.0*dt), N//2) + spec = abs(fft.fft(STF))* dt + freq = _f[1:] + self.spec_moment_rate = spec[:int(N/2)-1] + self.freq_source = freq + self.M0 = self.spec_moment_rate[0] + self.Mw = (np.log10(self.M0)- 9.1)/ 1.5 + print('Mw: ', self.Mw) + # + if is_smooth: + print ('Smoothening by konno-ohmachi ...') + cdt = (freq <= fmax) + f_sm = freq[cdt][::jump_ko] + self.spec_moment_rate_sm = ko(self.spec_moment_rate[cdt][::jump_ko], df, pow_ko) + self.freq_source_sm = f_sm + # brune's and ji & archuleta + fc1, fc2, spec_JA19 = get_JA19_spectrum(Mw=self.Mw, f=self.freq_source) + self.specD_ja = self.M0* spec_JA19 + fc = (fc1* fc2)** 0.5 + print ('fc (geometric mean): ', '%.2f' %(fc) ) + self.brune = self.M0/ (1.0+(self.freq_source/ fc)**2) + # + print ('Pickling ...') + try: os.makedirs(rep_store) + except: pass + np.save(rep_store+'/STF.npy', self.STF) + np.save(rep_store+'/time.npy', self.time) + np.save(rep_store+'/freq_source.npy', self.freq_source) + np.save(rep_store+'/spec_brune.npy', self.brune) + np.save(rep_store+'/spec_JiArchuleta.npy', self.specD_ja) + np.save(rep_store+'/spec_moment_rate.npy', self.spec_moment_rate) + try: np.save(rep_store+'/spec_moment_rate_sm.npy', self.spec_moment_rate_sm) + except: pass + try: np.save(rep_store+'/freq_source_sm.npy', self.freq_source_sm) + except: pass + print ('*') + # + if is_plot: + plt.close('all') + print ('Plotting ...') + plt.figure(figsize=(8,4)) + plt.subplot(121) + plt.xlim(lims[0],lims[1]) + plt.plot(self.time, self.STF,'k',lw=1) + plt.grid() + plt.xlabel('Time (s)'); plt.ylabel('Moment rate (Nm/s)') + # + plt.subplot(122) + plt.xlim(1e-3,fmax) + try: plt.loglog(self.freq_source_sm, self.spec_moment_rate_sm,'k', lw=1) + except: plt.loglog(self.freq_source, self.spec_moment_rate, 'k', lw=1) + plt.loglog(self.freq_source, self.brune, c='gray', lw=1.0, linestyle='-.', label='w2 model') + plt.loglog(self.freq_source, self.specD_ja, c='gray', lw=1.0, linestyle=':', label='JA19_2S') + plt.axvline(x=fc1, c='pink', linestyle='-',lw=2, alpha=0.5) + plt.axvline(x=fc2, c='pink', linestyle='-',lw=2, alpha=0.5) + plt.xlabel('Frequency (Hz)') + plt.grid(True, which="both", ls=":", alpha=0.5) + plt.tight_layout() + plt.show() + ### \ No newline at end of file diff --git a/utils/dynamic_rupture/JUPYTER_LIB/lib_ridgecrest.py b/utils/dynamic_rupture/JUPYTER_LIB/lib_ridgecrest.py new file mode 100755 index 000000000..ebb1b58bc --- /dev/null +++ b/utils/dynamic_rupture/JUPYTER_LIB/lib_ridgecrest.py @@ -0,0 +1,987 @@ +# by Elif Oral (elifo@caltech.edu) +# Scripts for Ridgecrest analyses + + +import pandas as pd +import numpy as np +import matplotlib.pyplot as plt +import sys +import os +from Class_SPECFEM3D import get_FFT, ko + +## + +def write_out_velocity_profile_data(my_Vs, my_Vp, info=True,rep=None): + from scipy.io import FortranFile + # + try: os.makedirs(rep) + except: pass + # + for iproc, (_vs, _vp) in enumerate(zip(my_Vs, my_Vp)): + nglob = len(_vs) + if info: print ('Processor = ', iproc) + if info: print ('Number of points = ', nglob) + fout = rep+ 'proc' + fout += '%06d'% iproc+ '_user_velocity_profile2.bin' + if info: print ('Filename = ', fout) + 
print ('Writing ...') + f = FortranFile(fout, 'w') + f.write_record(np.array([nglob], dtype=np.int32)) + f.write_record(np.array(_vs).reshape((nglob,1)).T) + f.write_record(np.array(_vp).reshape((nglob,1)).T) + f.close() + print ('done!') + print (' ') + print('*') + return +### + + +def read_specfem3d_database_coords(iproc=None,rep=None): + from scipy.io import FortranFile + # + fname = rep+'proc'+'%06d' % (iproc)+'_x.bin' + f = FortranFile(fname, 'r' ) + xproc = f.read_reals( dtype='float32' ) + fname = rep+'proc'+'%06d' % (iproc)+'_y.bin' + f = FortranFile(fname, 'r' ) + yproc = f.read_reals( dtype='float32' ) + fname = rep+'proc'+'%06d' % (iproc)+'_z.bin' + f = FortranFile(fname, 'r' ) + zproc = f.read_reals( dtype='float32' ) + print ('iproc: ', iproc) + print ('len(xproc), len(yproc), len(zproc):', len(xproc), len(yproc), len(zproc)) + print ('*') + return xproc, yproc, zproc +### + + +def plot_2d_slice(xx,zz,data,Nx=500,Nz=500,cmap='rainbow_r',Ncmap=11,vmin=2.4e3,vmax=3.8e3,\ + x_hypo=0.0,d_hypo=0.0,xlim=None,ylim=None): + from scipy.interpolate import griddata as gd + import colorcet + + cmap = plt.cm.get_cmap(cmap, Ncmap) # 11 discrete colors + ext = [min(xx), max(xx), min(zz), max(zz)] + xi, zi = np.meshgrid(np.linspace(ext[0],ext[1], Nx), np.linspace(ext[2],ext[3],Nz)) + y = gd( (xx,zz), data, (xi,zi), method='linear') + + fig = plt.figure(figsize=(8,3)) + im = plt.imshow(y, extent=ext,vmin=vmin, vmax=vmax, cmap=cmap, origin='lower', aspect='auto') + plt.scatter(x_hypo, -d_hypo, marker='*', c='k') + + c = plt.colorbar(im, fraction=0.046, pad=0.05, shrink=0.75, label='Vs (m/s)') + c.mappable.set_clim(vmin, vmax) + plt.ylim(ylim[0],ylim[1]); plt.xlim(xlim[0],xlim[1]) +### + + +def get_FFT_data(sismos_store=None,sta_index=[],dt=None,fmax_ko=None,jump=1,pow_ko=20.0,is_smooth=True,\ + rep_store='./STORE/',is_stored=False): + ''' requires sismos_store and computes and stores FFT of given time histories.''' + + import numpy as np + + if not is_stored: + FFT_xlist, FFT_ylist, FFT_zlist = [], [], [] + FFT_xlist_sm, FFT_ylist_sm, FFT_zlist_sm = [], [], [] + # station loop + for ista in sta_index: + # time data + Vx, Vy, Vz = sismos_store[ista,:,0], sismos_store[ista,:,1], sismos_store[ista,:,2] + # FFT + f, FFT_x = get_FFT(dt=dt, data=Vx) + f, FFT_y = get_FFT(dt=dt, data=Vy) + f, FFT_z = get_FFT(dt=dt, data=Vz) + # local store + FFT_xlist.append(FFT_x) + FFT_ylist.append(FFT_y) + FFT_zlist.append(FFT_z) + # smooth by ko + if is_smooth: + cdt =(f <= fmax_ko) + f_sm = f[cdt][::jump] + df_sm = f_sm[1]- f_sm[0] + FFT_x_sm = ko(FFT_x[cdt][::jump], df_sm, pow_ko) + FFT_y_sm = ko(FFT_y[cdt][::jump], df_sm, pow_ko) + FFT_z_sm = ko(FFT_z[cdt][::jump], df_sm, pow_ko) + #local store + FFT_xlist_sm.append(FFT_x_sm) + FFT_ylist_sm.append(FFT_y_sm) + FFT_zlist_sm.append(FFT_z_sm) + # + NF = FFT_xlist[0].shape[0] + nsta = len(FFT_xlist) + # store raw data + FFT_store = np.zeros((nsta, NF, 3)) + FFT_store[:,:,0] = np.array(FFT_xlist) + FFT_store[:,:,1] = np.array(FFT_ylist) + FFT_store[:,:,2] = np.array(FFT_zlist) + # store smooth data + if is_smooth: + NF = FFT_xlist_sm[0].shape[0] + FFT_sm_store = np.zeros((nsta, NF, 3)) + FFT_sm_store[:,:,0] = np.array(FFT_xlist_sm) + FFT_sm_store[:,:,1] = np.array(FFT_ylist_sm) + FFT_sm_store[:,:,2] = np.array(FFT_zlist_sm) + # + # write store + np.save(rep_store+'/FFT_store.npy', FFT_store) + np.save(rep_store+'/FFT_store_freq.npy', f) + if is_smooth: + np.save(rep_store+'/FFT_sm_store.npy', FFT_sm_store) + np.save(rep_store+'/FFT_sm_store_freq.npy', f_sm) + return 
f, FFT_store, f_sm, FFT_sm_store + else: + return f, FFT_store + else: + print ('Opening pickle box ...') + FFT_store = np.load(rep_store+'/FFT_store.npy') + f = np.load(rep_store+'FFT_store_freq.npy') + try: + FFT_sm_store = np.load(rep_store+'/FFT_sm_store.npy') + f_sm = np.load(rep_store+'FFT_sm_store_freq.npy') + print ('Done!') + return f, FFT_store, f_sm, FFT_sm_store + except: + print('No smooth FFT found!') + return f, FFT_store +### + + +def plot_fabians_stransform(data=None,dt=None,tit='',vmin=-3,vmax=4,tmin=0.0,tmax=20.0, \ + fmin=0.0,fmax=5.0, + cmap=None,figname='',y_hline=None,\ + fig_store='./FIGURES/',fig_name='Stransform.pdf'): + + import numpy as np + import matplotlib.pyplot as p + import seaborn as sns + import librosa + import os + + t = np.linspace(0, len(data)*dt, num=len(data)) + print ('plot_fabians_stransform: min, max velocity: ', min(data), max(data)) + + # Time-window parameters + tw = 512* dt* 2 + # Computing STFT spectrogram + sr = 1.0/ dt + win = int(tw/ dt) + nfft = 2**(win.bit_length()+ 0) + hop = int((tw/4) / dt) + D = np.abs( librosa.stft(data, n_fft=nfft, win_length=win, hop_length=hop) ) + print('min max of D', D.min(), D.max()) + print('min max of log10(D)', np.log10(D).min(), np.log10(D).max()) + p.rcParams['font.size'] = 12 + p.rcParams['mathtext.fontset'] = 'stix' + p.rcParams['font.family'] = ['STIXGeneral'] + fig = p.figure(figsize=(8,8/1.618)) + p.subplots_adjust(left=0.2, bottom=0.1, right=0.85, top=0.9, hspace=0.3, wspace=0.3) + sns.set_style('ticks') + # + ax = p.subplot2grid((3,1), (0,0), rowspan=1, colspan=1) + p.suptitle(fig_name) + p.plot(t, data, lw=0.5, color='k') + p.xlim(tmin, tmax) + p.ylabel('Velocity \n (m/s)') + p.title(tit, fontsize=12) + p.ticklabel_format(style='sci', axis='y', scilimits=(0,0)) + p.setp(ax.get_xticklabels(), visible=False) + # + ax = p.subplot2grid((3,1), (1,0), rowspan=2, colspan=1) + im = p.imshow(np.log10(D), interpolation='bilinear', origin='lower', aspect='auto', \ + vmin=vmin, vmax=vmax, \ + extent=(t.min(),t.max(),0,50), cmap=cmap) + p.xlim(tmin, tmax); p.ylim(fmin, fmax) + p.xlabel('Time (s)'); p.ylabel('Frequency (Hz)') + if y_hline != None: p.axhline(y=y_hline, linestyle=':',color='k') + # cbar + cbar_ax = fig.add_axes([0.9, 0.25, 0.02, 0.25]) + fig.colorbar(im, cax=cbar_ax, format='%.1f',shrink=0.75,label='log(|STFT|) (m)') + sns.despine() + try: os.makedirs(fig_store) + except: pass + print ('Saving figure as ', fig_store+fig_name+'.pdf') + fig.savefig(fig_store+fig_name+'.pdf') + p.close() + return +### + + +def get_observation(st, df_obs, sta=None, cha=None): + cdt = (df_obs['Station'] == sta) & (df_obs['Channel'] == cha) + ista_obs = df_obs[cdt]['Index'].values[0] + tr = st[ista_obs] + print ('ista_obs', ista_obs, tr.stats.station, tr.stats.channel) + return tr.times(), tr.data +### + + +def plot_comparison_2_recordings(sismos_store,df_selected,t=None,\ + st_recordings=None,f_LP=3,f_HP=1,\ + angle=50.0,rotate=True,\ + fig_store='./FIGURES/', fig_name='figure',\ + xlim=None,t_shift=0): + from obspy import read + import pandas as pd + import numpy as np + import matplotlib.pyplot as plt + import os + + # PREPARE RECORDINGS + # filter + st = read(st_recordings) + st.filter('highpass', freq=f_HP) + st.filter('lowpass', freq=f_LP) + # + df_obs = pd.DataFrame() + df_obs['Station'] = [tr.stats.station for tr in st] + df_obs['Channel'] = [tr.stats.channel[2] for tr in st] + df_obs['Index'] = [ii for ii, tr in enumerate(st)] + # COMPARE + for ista in df_selected.index: + print (ista, 
df_selected.name[ista]) + name = df_selected.name[ista][3:] + #get synthetics + Vx, Vy, Vz = sismos_store[ista,:,0],sismos_store[ista,:,1],sismos_store[ista,:,2] + # get recordings + to, Vobs_E = get_observation(st, df_obs, sta=name, cha='E') + to, Vobs_N = get_observation(st, df_obs, sta=name, cha='N') + to, Vobs_Z = get_observation(st, df_obs, sta=name, cha='Z') + to -= t_shift + # rotate + FP, FN = Vobs_E, Vobs_N + if rotate: + FN = np.cos(np.deg2rad(angle))* Vobs_E+ np.sin(np.deg2rad(angle))* Vobs_N + FP = np.cos(np.deg2rad(angle))* Vobs_E- np.sin(np.deg2rad(angle))* Vobs_N + # plot + fig_name = 'Velocities_compare_real_'+name + fig = plt.figure(figsize=(8,4)) + plt.subplot(311) + plt.xlim(xlim[0],xlim[1]) + plt.plot(t, Vx, 'r', lw=1.5) + plt.plot(to, FP, 'k', lw=0.5) + plt.grid() + # + plt.subplot(312) + plt.xlim(xlim[0],xlim[1]) + plt.plot(t, Vy, 'r', lw=1.5) + plt.plot(to, FN, 'k', lw=0.5) + plt.grid() + # + plt.subplot(313) + plt.xlim(xlim[0],xlim[1]) + plt.plot(t, Vz, 'r', lw=1.5) + plt.plot(to, Vobs_Z, 'k', lw=0.5) + plt.grid() + # + plt.suptitle('Station '+ name) + plt.tight_layout() + try: os.makedirs(fig_store) + except: pass + fig.savefig(fig_store+fig_name+'.pdf') + plt.close('all') +### + + +def plot_time_comparison_2_synthetics(df_selected,data1=None, t1=None, data2=None,t2=None,\ + fig_store='./FIGURES/', fig_name='figure',\ + xlim=None,label1='Model 1',label2='Model 2'): + from obspy import read + import pandas as pd + import numpy as np + import matplotlib.pyplot as plt + + # COMPARE: index is different than time-dataframe! + for ista in df_selected.index: + name = df_selected.name[ista][3:] + print (ista, name) + #get synthetics + data_x1, data_y1, data_z1 = data1[ista,:,0], data1[ista,:,1], data1[ista,:,2] + data_x2, data_y2, data_z2 = data2[ista,:,0], data2[ista,:,1], data2[ista,:,2] + # plot + fig_name = 'Velocity_compare_other_model_'+name + fig = plt.figure(figsize=(8,4)) + plt.subplot(311) + plt.xlim(xlim[0],xlim[1]) + plt.plot(t1, data_x1, 'royalblue', lw=1) + plt.plot(t2, data_x2, 'r', lw=1) + plt.grid() + plt.xlabel('Time (s)') + # + plt.subplot(312) + plt.xlim(xlim[0],xlim[1]) + plt.plot(t1, data_y1, 'royalblue', lw=1) + plt.plot(t2, data_y2, 'r', lw=1) + plt.grid() + plt.xlabel('Time (s)') + plt.ylabel('Velocity (m/s)') + # + plt.subplot(313) + plt.xlim(xlim[0],xlim[1]) + plt.plot(t1, data_z1, 'royalblue', lw=1, label=label1) + plt.plot(t2, data_z2, 'r', lw=1, label=label2) + plt.xlabel('Time (s)') + plt.legend() + plt.grid() + # + plt.suptitle('Station '+ name) + try: os.makedirs(fig_store) + except: pass + fig.savefig(fig_store+fig_name+'.pdf') + plt.close('all') +### + + +def plot_FFT_comparison_2_recordings(FFT_sm,df_selected,f=None,\ + st_recordings=None,f_LP=3,f_HP=1,\ + angle=50.0,rotate=True,\ + fig_store='./FIGURES/', fig_name='figure',\ + xlim=None,ylim=(1e-5,1e1),twin=None,t_shift=0, \ + is_smooth=True,jump=1,\ + fmax_ko=10.0,pow_ko=20): + from obspy import read + import pandas as pd + import numpy as np + import matplotlib.pyplot as plt + import os + + # PREPARE RECORDINGS + # filter + st = read(st_recordings) + # st.filter('highpass', freq=f_HP) + # st.filter('lowpass', freq=f_LP) + # + df_obs = pd.DataFrame() + df_obs['Station'] = [tr.stats.station for tr in st] + df_obs['Channel'] = [tr.stats.channel[2] for tr in st] + df_obs['Index'] = [ii for ii, tr in enumerate(st)] + # COMPARE: index is different than time-dataframe! 
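+    # Per-station comparison: the recording is windowed to 'twin', the E/N horizontals are
+    # optionally rotated by 'angle' (degrees) into the components labelled FN and FP,
+    #   FN = cos(angle)*E + sin(angle)*N ,  FP = cos(angle)*E - sin(angle)*N ,
+    # and the recorded spectra are then computed and Konno-Ohmachi-smoothed before being
+    # plotted against the pre-smoothed synthetic spectra in FFT_sm. The geographic meaning
+    # of 'angle' (e.g. a fault-strike azimuth) is assumed to be handled by the caller.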
+ for ista in range(len(df_selected)): + print (ista, df_selected.name[ista]) + name = df_selected.name[ista][3:] + #get synthetics + FFT_x, FFT_y, FFT_z = FFT_sm[ista,:,0],FFT_sm[ista,:,1],FFT_sm[ista,:,2] + # get recordings + to, Vobs_E = get_observation(st, df_obs, sta=name, cha='E') + to, Vobs_N = get_observation(st, df_obs, sta=name, cha='N') + to, Vobs_Z = get_observation(st, df_obs, sta=name, cha='Z') + to -= t_shift + cdt = (to >= twin[0]) & (to <= twin[1]) + # rotate + FP, FN = Vobs_E[cdt], Vobs_N[cdt] + if rotate: + FN = np.cos(np.deg2rad(angle))* Vobs_E[cdt]+ np.sin(np.deg2rad(angle))* Vobs_N[cdt] + FP = np.cos(np.deg2rad(angle))* Vobs_E[cdt]- np.sin(np.deg2rad(angle))* Vobs_N[cdt] + # FFT + fo_x, FFTo_x = get_FFT(dt=to[1]-to[0], data=FP) + fo_y, FFTo_y = get_FFT(dt=to[1]-to[0], data=FN) + fo_z, FFTo_z = get_FFT(dt=to[1]-to[0], data=Vobs_Z[cdt]) + # smooth by ko + cdt2 =(fo_x <= fmax_ko) + fo_x_sm = fo_x[cdt2][::jump] + df_sm = fo_x_sm[1]- fo_x_sm[0] + # assuming x,y,z arrays have the same len. + FFT_x_sm = ko(FFTo_x[cdt2][::jump], df_sm, pow_ko) + FFT_y_sm = ko(FFTo_y[cdt2][::jump], df_sm, pow_ko) + FFT_z_sm = ko(FFTo_z[cdt2][::jump], df_sm, pow_ko) + + # plot + fig_name = 'FFT_compare_real_'+name + fig = plt.figure(figsize=(8,4)) + plt.subplot(131) + plt.xlim(xlim[0],xlim[1]); plt.ylim(ylim[0],ylim[1]) + plt.loglog(f, FFT_x, 'r', lw=1.5) + plt.loglog(fo_x_sm, FFT_x_sm, 'k', lw=0.5) + plt.grid() + plt.xlabel('Frequency (Hz)') + plt.ylabel('Fourier amplitude (m/s*s)') + # + plt.subplot(132) + plt.xlim(xlim[0],xlim[1]); plt.ylim(ylim[0],ylim[1]) + plt.loglog(f, FFT_y, 'r', lw=1.5) + plt.loglog(fo_x_sm, FFT_y_sm, 'k', lw=0.5) + plt.grid() + plt.xlabel('Frequency (Hz)') + # + plt.subplot(133) + plt.xlim(xlim[0],xlim[1]); plt.ylim(ylim[0],ylim[1]) + plt.loglog(f, FFT_z, 'r', lw=1.5,label='Syn.') + plt.loglog(fo_x_sm, FFT_z_sm, 'k', lw=0.5, label='Rec.') + plt.xlabel('Frequency (Hz)') + plt.legend() + plt.grid() + # + plt.suptitle('Station '+ name) + try: os.makedirs(fig_store) + except: pass + fig.savefig(fig_store+fig_name+'.pdf') + plt.close('all') +### + + +def plot_FFT_comparison_2_synthetics(df_selected,FFT1=None, f1=None, FFT2=None,f2=None,\ + fig_store='./FIGURES/', fig_name='figure',\ + xlim=None,label1='Model1',label2='Model2'): + from obspy import read + import pandas as pd + import numpy as np + import matplotlib.pyplot as plt + import os + + Nsta = FFT1.shape[0] + + # COMPARE: index is different than time-dataframe! + for ista in range(Nsta): + name = df_selected.name[ista][3:] + print (ista, name) + #get synthetics + FFT_x1, FFT_y1, FFT_z1 = FFT1[ista,:,0],FFT1[ista,:,1],FFT1[ista,:,2] + FFT_x2, FFT_y2, FFT_z2 = FFT2[ista,:,0],FFT2[ista,:,1],FFT2[ista,:,2] + + # plot + fig_name = 'FFT_compare_other_models_'+name + fig = plt.figure(figsize=(8,4)) + plt.subplot(131) + plt.xlim(xlim[0],xlim[1]) + plt.loglog(f1, FFT_x1, c='royalblue', lw=1.) 
+ plt.loglog(f2, FFT_x2, c='r', lw=1.0) + plt.grid() + plt.xlabel('Frequency (Hz)') + plt.ylabel('Fourier amplitude (m/s*s)') + # + plt.subplot(132) + plt.xlim(xlim[0],xlim[1]) + plt.loglog(f1, FFT_y1, c='royalblue', lw=1) + plt.loglog(f2, FFT_y2, c='r', lw=1) + plt.grid() + plt.xlabel('Frequency (Hz)') + # + plt.subplot(133) + plt.xlim(xlim[0],xlim[1]) + plt.loglog(f1, FFT_z1, c='royalblue', lw=1, label=label1) + plt.loglog(f2, FFT_z2, c='r', lw=1, label=label2) + plt.xlabel('Frequency (Hz)') + plt.legend() + plt.grid() + # + try: os.makedirs(fig_store) + except: pass + fig.savefig(fig_store+fig_name+'.pdf') + plt.close('all') +### + + +def get_javiers_grid(directory='',mu_value=None,flength=None,fwidth=None,show_fig=False): + ''' reads outputs of Javier\'s code ''' + filename = directory+ 'tau3d.d' + xgrid = np.genfromtxt(filename, usecols=0)*1e3- flength*1e3/2.0 + zgrid = np.genfromtxt(filename, usecols=1)*1e3- fwidth*1e3 + tau_xy = np.genfromtxt(filename, usecols=2)/mu_value + tau_yz = np.genfromtxt(filename, usecols=3)/mu_value + print ('len(xgrid)', len(xgrid)) + print ('File is read : ', filename); print('***') + if show_fig: + fig = plt.figure(figsize=(8,4)); + ax = fig.add_subplot(111) + ax.scatter(xgrid/1e3, zgrid/1e3, s=0.1, c='k') + ax.set_title('Prefered grid after translation') + ax.set_xlabel('Along strike (km)'); ax.set_ylabel('Along dip (km)') + plt.show() + return xgrid,zgrid,tau_xy,tau_yz +### + + +def read_specfem3d_db_files(xjavier=None,zjavier=None,info=True,double_precision=False,\ + reper='',show_fig=False,nproc=1): + + ''' Read fault database (.txt files) of specfem3d''' + + if double_precision: ff = np.float64 + else: ff = np.float32 # this condition is not tested. + filenames = [reper+'proc'+'%06d'% proc+ '_fault_db.txt' for proc in range(nproc)] + print ('Reading database files ...'); + # + x_per_proc, y_per_proc, z_per_proc, nglob_per_proc, procnums = [],[],[],[],[] + for proc, filename in enumerate(filenames): + with open(filename) as f: # open the file for reading + iflt = 0 + lines = f.readlines() + nfault = int(lines[iflt].split()[2]) + _, _, _, _, nspec, nglob, ngll = lines[iflt+2].split()[:] + nspec = int(nspec); nglob = int(nglob); ngll = int(ngll) + if nglob > 0: + if info: print ('Processor = ', proc) + if info: print ('Filename = ', filename) + if info: print ('nglob = ', nglob) + i_start = iflt+4 + dummy = lines[i_start:i_start+ 2*nglob] + iglobs = [int(dum.split()[0]) for dum in dummy] + xcoord = [float(dum.split()[1]) for dum in dummy] + ycoord = [float(dum.split()[2]) for dum in dummy] + zcoord = [float(dum.split()[3]) for dum in dummy] + nglob_per_proc.append(nglob) + x_per_proc.append(xcoord) + y_per_proc.append(ycoord) + z_per_proc.append(zcoord) + procnums.append(proc) + print ('*') + f.close() + print ('Done!'); + # flast lists of coordinate + xcoords = [item for sublist in x_per_proc for item in sublist] + ycoords = [item for sublist in y_per_proc for item in sublist] + zcoords = [item for sublist in z_per_proc for item in sublist] + # Check coordinates shape + print (len(xcoords), len(ycoords), len(zcoords) ) + print('***') + if show_fig: + # plot (in red: specfem3d; in black:Javier's) + fig = plt.figure(figsize=(8,4)); + ax = fig.add_subplot(111) + # specfem3d grid + ax.scatter( np.array(xcoords)/1e3, np.array(zcoords)/1e3, s=0.1, c='r') + # prefered grid to see if any SEM node is outside the prefered grid: + try: ax.scatter(xjavier/1e3,zjavier/1e3, s=0.1, c='k') + except: pass + ax.set_xlabel('Along strike (km)'); ax.set_ylabel('Along dip 
(km)') + ax.set_title('Specfem3d fault grid (in red) vs Javier (in black)') + plt.show() + return xcoords,ycoords,zcoords,x_per_proc, y_per_proc, z_per_proc, nglob_per_proc,procnums +### + + +def get_effective_stress_profile(zgrid, show_fig=False, taper_below_cutoff=False,zcreep=15.0,minval=0.5,\ + is_constant_stress=False,val=100.0): + ''' after Shebalin and Narteau (2017), effective stress by depth, suitable for CA.''' + if is_constant_stress: + P_eff = [-val for z in zgrid] + else: + # pore pressure + # eff_stress = (1- lambda) litho_stress + rho_r, rho_w = 2.3e3, 1e3 + L, z_c, eps = 1e3, 2e3, 0.1 + + lambda_z = [] + for z in zgrid: + # assuming max depth value = 0.0 (surface = 0.0) + _ratio = rho_w/rho_r + if abs(z) < z_c: + lambda_z.append(_ratio) + else: + _dum = (1.0-eps)+ (_ratio- (1.0-eps))* np.exp((z_c-abs(z))/ L) + lambda_z.append(_dum) + # + P_litho = [rho_r*9.81*z/1e6 for z in zgrid] + P_pore = [_lam*p for _lam, p in zip(lambda_z, P_litho)] + P_eff = [(1.0-_lam)*p for _lam, p in zip(lambda_z, P_litho)] + # + if taper_below_cutoff: + P_eff = np.array(P_eff) + dist = abs((zcreep*1e3- abs(zgrid)) ) # in m + cdt = (dist == min(dist)) + val = -P_eff[cdt][0] # value at cut-off depth zcreep + # exponential decay by depth below cut-off depth + cdt2 = (abs(zgrid)/1e3 > zcreep) + P_eff_tapered = np.array(P_eff) + dist = ( abs(zgrid)/1e3- zcreep ) # positive an in km + P_eff_tapered[cdt2] = -val*np.exp(-0.5* dist[cdt2]) + P_eff_tapered[abs(P_eff_tapered) < minval ] = -minval + P_eff = P_eff_tapered + # + if show_fig: + plt.close('all') + plt.figure(figsize=(4,4)) + plt.subplot(111) + plt.grid() + try: + plt.plot(-np.array(P_litho), -np.array(zgrid)/1e3, c='k', label='Litho') + plt.scatter(-np.array(P_pore), -np.array(zgrid)/1e3, c='blue', label='Pore', s=1) + except: + pass + plt.scatter(-np.array(P_eff), -np.array(zgrid)/1e3, c='r', label='Eff', s=1) + plt.legend() + plt.ylabel('Depth (km)') + plt.xlabel('Stress (MPa)') + plt.gca().invert_yaxis() + fig = plt.gcf() + plt.show() + # + return P_eff +### + + +def scale_javiers_stress_values(mu_0=None,mu_s=None,mu_d=None,P_eff=None,tau_xy=None,tau_yz=None): + # tau_xy_scaled and tau_yz are the parameters + # that I use to interpolate and write out. + tau_xy_scaled, stress_drop_inferred, mu_0_inferred,mu_0_orig = [],[],[],[] + mu_max_allowed = mu_s- 0.10* (mu_s- mu_d) + print ('mu_max_allowed: ', mu_max_allowed) + # tau_xy: shear stress along strike + for sigma_n, S_point in zip(P_eff, tau_xy) : + # (mu_0+ S_point* (mu_0- mu_d) ) + # this operation gives initial shear stress + # based on my interpretation of Javier's code's outputs + # _tau = abs(sigma_n)* (mu_0+ S_point* (mu_0- mu_d) ) + # + # Avoid mu=mu_sta by forcing max to mu_max_allowed + _mu = min((mu_0+ S_point* (mu_0- mu_d)), mu_max_allowed) + _tau = abs(sigma_n)* _mu + mu_0_inferred.append(_mu) + tau_xy_scaled.append(_tau) + mu_0_orig.append(mu_0+ S_point* (mu_0- mu_d)) + _stress_drop = _tau- mu_d* abs(sigma_n) + stress_drop_inferred.append(_stress_drop) + # + # tau_yz: shear stress along dip + # tau_yz_scaled = [abs(sigma_n)* (S_point* (_mu_0- mu_d)) for sigma_n, S_point, _mu_0 in zip(sigma_litho,tau_yz,mu_0_inferred ) ] + # ALSO TO TEST THIS CHANGE! 
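+    # Along-dip scaling used below: tau_yz_scaled_i = |P_eff_i| * S_i * (mu_0 - mu_d),
+    # i.e. the same (mu_0 - mu_d) normalisation as along strike, but without the static
+    # mu_0*|sigma_n| contribution and without the mu_max_allowed cap. Whether the capped
+    # mu_0_inferred should be used here instead (as in the commented-out line above) is
+    # left as an alternative to test.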
+ tau_yz_scaled = [abs(sigma_n)* (S_point* (mu_0- mu_d)) for sigma_n, S_point in zip(P_eff, tau_yz ) ] + # Normal stress + tau_yy = [-abs(sigma_n) for sigma_n in P_eff] + zzmin, zzmax = min(tau_yz_scaled), max(tau_yz_scaled) + # + return np.array(tau_yy),np.array(tau_xy_scaled),np.array(tau_yz_scaled),\ + np.array(stress_drop_inferred),np.array(mu_0_inferred),np.array(mu_0_orig) +### + + +def plot_javiers_scaled_values(xgrid=None,zgrid=None,stress_drop=None,mu0=None,xhypo=None,zhypo=None,Nx=None,Nz=None,\ + mu_s=None,mu_d=None,tau_xy=None,tau_yz=None): + + ext = [min(xgrid), max(xgrid), min(zgrid), max(zgrid)] + print ( 'Min and max stress drop ', min(stress_drop), max(stress_drop) ) + print ( 'Average stress drop: ', np.average(stress_drop)) + # + plt.close('all') + fig = plt.figure(figsize=(8,6)) + _asp = 'auto';_cmap = 'magma';aspect = 1.0 + # + ax1 = fig.add_subplot(221, aspect=aspect) + ax1.scatter(xhypo, zhypo, c='gray', marker='*') + extkm = [ee/1e3 for ee in ext] + im1 = ax1.imshow(stress_drop.reshape(Nx,Nz).T, origin='lower', aspect=aspect, extent=extkm, \ + cmap=_cmap, interpolation='bilinear' ); + c1 = fig.colorbar(im1, ax=ax1, orientation='horizontal', fraction=.1, shrink=0.5, label='Stress drop (MPa)') + c1.mappable.set_clim(0.0, 10.0) + # + ax1 = fig.add_subplot(222, aspect=aspect) + ax1.set_title('Initial stress ratio (mu)') + ax1.scatter(xhypo, zhypo, c='snow', marker='*') + im1 = ax1.imshow(mu0.reshape(Nx,Nz).T, origin='lower', aspect=aspect, extent=ext, \ + cmap='viridis', interpolation='bilinear' ); + c1 = fig.colorbar(im1, ax=ax1, orientation='horizontal', fraction=.1, shrink=0.75, pad=0.1) + c1.mappable.set_clim(mu_d, mu_s) + # + ax1 = fig.add_subplot(223, aspect=aspect) + ax1.set_title('Shear stress along strike') + im1 = ax1.imshow(tau_xy.reshape(Nx,Nz).T, origin='lower', aspect=aspect, extent=ext, \ + cmap=_cmap, interpolation='bilinear' ); + c1 = fig.colorbar(im1, ax=ax1, orientation='horizontal', fraction=.1, shrink=0.25) + # + ax1 = fig.add_subplot(224, aspect=aspect) + ax1.set_title('Shear stress along dip') + im1 = ax1.imshow(tau_yz.reshape(Nx,Nz).T, origin='lower', aspect=aspect, extent=ext, \ + cmap=_cmap, interpolation='bilinear' ); + c1 = fig.colorbar(im1, ax=ax1, orientation='horizontal', fraction=.1, shrink=0.25) + plt.tight_layout() +# fig.savefig('/Users/elifo/Desktop/stress.png', dpi=300) +### + + +# Make a function of 2D interpolation +def make_grid(x=None,z=None,x_db=None,z_db=None): + points = np.vstack([x, z]).T + print ('Checking input array sizes: ', x.shape, z.shape, points.shape) + # make a mesh grid with SEM nodes + xx = sorted(list(set((x_db)))) + xx = np.array(xx) + zz = sorted(list(set((z_db))) ) + zz = np.array(zz) + x_database, z_database = np.meshgrid(xx, zz) + print ('mesh grid size: ', x_database.shape, z_database.shape) + return x_database, z_database, points +### + + +def interpolate_data(x_database, z_database, points, raw_data=None, interpolation='nearest'): + from scipy.interpolate import griddata + # input: values; output:values_int_xy + values = raw_data; values_int = raw_data; + print ('Interpolation ...') + if interpolation == 'nearest': + values_int = griddata(points, values, (x_database, z_database), method='nearest') + # points: the prefered fault grid (Javier's code's output) that we prepared before SEM simulation + # values: stress values corresponding to points of Javier's grid + # (x_database, z_database): 3D code's grid + # dont set fill_value to get Nan + elif interpolation == 'linear': + value_int_linear = 
griddata(points, values, (x_database, z_database), method='linear') + value_int_nearest = griddata(points, values, (x_database, z_database), method='nearest') + value_int_linear[np.isnan(value_int_linear)] = value_int_nearest[np.isnan(value_int_linear)] + values_int = value_int_linear + print ('Done!') + return values_int +### + + +def plot2compare_before_after_interpolation(raw_data=None, int_data=None,interpolation='',N=None, + xgrid=None,zgrid=None,x_db=None,z_db=None): + + fig = plt.figure(figsize=(8,6)) + _asp = 0.75; _cmap = 'magma';aspect = 3 + Nx, Nz = N[0], N[1] + # + ext1 = [min(xgrid), max(xgrid), min(zgrid), max(zgrid)] + ext2 = [np.amin(x_db), np.amax(x_db), np.amin(z_db), np.amax(z_db)] + print ('Extent 1:', ext1 ) + print ('Extent 2:', ext2 ) + ### BEFORE + ax1 = fig.add_subplot(211, aspect=aspect) + ax1.set_title('Before interpolation') + ax1.set_ylim(-25e3,0.0); ax1.set_xlim(-37.5e3,37.5e3) + im1 = ax1.imshow(raw_data.reshape(Nx,Nz).T, origin='lower', aspect=_asp, extent=ext1, \ + cmap=_cmap, interpolation=interpolation ); + ### AFTER + ax2 = fig.add_subplot(212, aspect=_asp) + ax2.set_title('After interpolation ') + ax2.set_ylim(-25e3,0.0);ax2.set_xlim(-37.5e3,37.5e3) + jump=1 + im2=ax2.scatter(x_db[::jump], z_db[::jump], c=int_data[::jump], cmap='magma',s=0.25) + c1 = fig.colorbar(im1, ax=ax1, orientation='horizontal', fraction=.1, shrink=0.5) + c2 = fig.colorbar(im2, ax=ax2, orientation='horizontal', fraction=.1, shrink=0.5) + plt.tight_layout(); plt.show() +### + + +def get_dict_value(dct, number): + if number in dct.keys(): return dct[number] + else: print ('** NOT FOUND **', number) +### + + +def write_out_stress_files(write=True, double_precision=False,info=True,nproc=None,reper='',\ + x_per_proc=None,z_per_proc=None,nglob_per_proc=None,\ + tau_xy=None,tau_yz=None,tau_yy=None,x_db=None,z_db=None,procnums=None): + + import os + try: os.makedirs(reper) + except: print ('Out directory exists!') + + x_debug, z_debug, xy_debug, yz_debug, yy_debug = [],[],[],[],[] + if double_precision: ff=np.float64 + else: ff=np.float32 + + # making a dictionary for point-coordinate:stress value to save time + keys = [(_x,_y) for _x, _y in zip(x_db.flatten(), z_db.flatten())] + data = tau_xy.flatten() + dict_tauxy= dict(zip(keys, data)) + # + data = tau_yz.flatten() + dict_tauyz= dict(zip(keys, data)) + # + data = tau_yy.flatten() + dict_tauyy= dict(zip(keys, data)) + # + for nglob, _x_all, _z_all, iproc in zip(nglob_per_proc, x_per_proc, z_per_proc, procnums): + fout = reper+ 'proc' + fout += '%06d'% iproc+ '_fault_init_stress.bin' + if info: print ('Processor = ', iproc) + if info: print ('Filename = ', fout) + # remove repeated coordinates here + coords_non_repeated = [(x,z) for x, z in zip(_x_all, _z_all)] + coords_non_repeated = list(dict.fromkeys(coords_non_repeated)) + _x = [x for (x,z) in coords_non_repeated ] + _z = [z for (x,z) in coords_non_repeated ] + # + tau_xy_proc,tau_yz_proc,tau_yy_proc = [],[],[] + tau_xy_proc = [get_dict_value(dict_tauxy, (x,z)) for x,z in zip(_x, _z)] + tau_yz_proc = [get_dict_value(dict_tauyz, (x,z)) for x,z in zip(_x, _z)] + tau_yy_proc = [get_dict_value(dict_tauyy, (x,z)) for x,z in zip(_x, _z)] + # np array + data_tau = np.zeros([nglob, 3], dtype=ff) + data_tau[:,0] = np.array(tau_xy_proc) + data_tau[:,1] = np.array(tau_yz_proc) + data_tau[:,2] = np.array(tau_yy_proc) + # + x_debug.append(_x) + z_debug.append(_z) + xy_debug.append(data_tau[:,0]) + yz_debug.append(data_tau[:,1]) + yy_debug.append(data_tau[:,2]) + if info: print ('Number of points 
= ', nglob)
+        if info: print ('Data size = ', len(_x))
+        if (len(_x) != len(_z)) or (len(_x) != len(tau_xy_proc)) or (len(_x) != len(tau_yz_proc)):
+            print ('PROBLEM!!!')
+            print ('Grid points of processor do not match interpolated value array')
+            break
+        print ('Writing ...')
+        if write:
+            numRows = nglob
+            numRowArr = np.array([numRows], dtype=ff)
+            # I - number of points, nglob
+            fileObj = open(fout, 'wb')
+            numRowArr.tofile(fileObj)
+            # II - tau_xy and tau_yz, and tau_yy
+            for i in range(numRows):
+                lineArr = data_tau[i,:]
+                lineArr.tofile(fileObj)
+            fileObj.close()
+            print ('done!')
+        print ()
+    print('***')
+    return
+###
+
+
+def read_STF_file(filename='',NGLOB=None,NT=None):
+    import numpy as np
+
+    try:
+        with open(filename, 'rb') as fid:
+            data_array = np.fromfile(fid, np.float32)
+    except:
+        print ('STF file does not exist: ', filename)
+        return None
+    #
+    print ('length of read array: ', len(data_array))
+    print ('expected length (nglob*nt): ', NGLOB*NT)
+    data = np.zeros((NGLOB,NT))
+    for it in range(NT):
+        index1 = it* NGLOB
+        index2 = (it+1)*NGLOB
+        data[:,it] = data_array[index1:index2]
+    #
+    print ('*')
+    return data
+###
+
+
+def check_repeating_fault_nodes(self):
+    print ('check_repeating_fault_nodes ...')
+    # remove repeating nodes
+    nx, nz = len( set(list(self.fault['x'])) ), len( set(list(self.fault['z'])) )
+
+    print ('Non-repeating dimensions (nx,nz): ', nx, nz)
+    print ('nx*nz: ', nx*nz)
+    counted,repeated,index_2keep,index_repeateds = [],[],[],[]
+    for ii, (_x, _z) in enumerate( zip(self.fault['x'], self.fault['z']) ):
+        if (_x,_z) in counted:
+            repeated.append((_x, _z) )
+            index_repeateds.append(ii)
+        else:
+            counted.append( (_x, _z) )
+            index_2keep.append(ii)
+    ##
+    print ('unique, repeated, total: ', len(counted), len(repeated), len(counted)+len(repeated))
+    return nx,nz,index_2keep
+###
+
+
+def get_STF(rep='',nproc=1,info=True):
+    '''Reads the per-processor STF files and assembles them.'''
+    import numpy as np
+
+    STF_per_proc = []
+    for iproc in range(nproc):
+        filename = rep+'proc_'+'%05i' % (iproc)+ '_STF.dat'
+        if info: print ('iproc, filename: ', iproc, filename)
+        try:
+            STF_per_proc.append(np.genfromtxt(filename, usecols=0))
+        except:
+            pass
+        if info: print ('*')
+    ##
+    STF_all = np.sum ( np.array(STF_per_proc), axis=0 )
+    print ('Shape of STF_all: ', STF_all.shape)
+    print ('Done!')
+    return STF_all
+###
+
+
+# def plot_STF_and_spectrum(self, jump_ko=10,\
+#             pow_ko=20.0,fmax=10.0, lims=[-0.1,20.0], \
+#             is_smooth=True,is_padding=False, rep_store='./STORE/', is_plot=True):
+#     ''' computes STF and its spectrum. requires STF and dt.
compute spectrum +# with Brune\'s and Ji and Archuleta's spectra.''' +# from scipy import fft +# sys.path.append('./') +# from Class_SPECFEM3D import get_FFT,ko + + +# # use padding only for spectrum calculation +# dt = self.dt +# self.time = np.arange(len(self.STF))* dt +# if is_padding: +# time = np.arange(20*len(self.STF))* dt +# STF = np.zeros(time.shape) +# STF[:len(self.STF)] = self.STF +# else: +# STF = self.STF +# time = np.arange(len(self.STF))* dt +# # +# print ('Computing spectrum ...') +# # Spectrum +# df = 1.0/ dt +# N = len(STF) +# _f = np.linspace(0.0, 1.0/(2.0*dt), N//2) +# spec = abs(fft.fft(STF))* dt +# freq = _f[1:] +# self.spec_moment_rate = spec[:int(N/2)-1] +# self.freq_source = freq +# self.M0 = self.spec_moment_rate[0] +# self.Mw = (np.log10(self.M0)- 9.1)/ 1.5 +# print('Mw: ', self.Mw) +# # +# if is_smooth: +# print ('Smoothening by konno-ohmachi ...') +# cdt = (freq <= fmax) +# f_sm = freq[cdt][::jump_ko] +# self.spec_moment_rate_sm = ko(self.spec_moment_rate[cdt][::jump_ko], df, pow_ko) +# self.freq_source_sm = f_sm +# # brune's and ji & archuleta +# fc1, fc2, spec_JA19 = get_JA19_spectrum(Mw=self.Mw, f=self.freq_source) +# self.specD_ja = self.M0* spec_JA19 +# fc = (fc1* fc2)** 0.5 +# print ('fc (geometric mean): ', '%.2f' %(fc) ) +# self.brune = self.M0/ (1.0+(self.freq_source/ fc)**2) +# # +# print ('Pickling ...') +# try: os.makedirs(rep_store) +# except: pass +# np.save(rep_store+'/STF.npy', self.STF) +# np.save(rep_store+'/time.npy', self.time) +# np.save(rep_store+'/freq_source.npy', self.freq_source) +# np.save(rep_store+'/spec_brune.npy', self.brune) +# np.save(rep_store+'/spec_JiArchuleta.npy', self.specD_ja) +# np.save(rep_store+'/spec_moment_rate.npy', self.spec_moment_rate) +# try: np.save(rep_store+'/spec_moment_rate_sm.npy', self.spec_moment_rate_sm) +# except: pass +# try: np.save(rep_store+'/freq_source_sm.npy', self.freq_source_sm) +# except: pass +# print ('*') +# # +# if is_plot: +# plt.close('all') +# print ('Plotting ...') +# plt.figure(figsize=(8,4)) +# plt.subplot(121) +# plt.xlim(lims[0],lims[1]) +# plt.plot(self.time, self.STF,'k',lw=1) +# plt.grid() +# plt.xlabel('Time (s)'); plt.ylabel('Moment rate (Nm/s)') +# # +# plt.subplot(122) +# plt.xlim(1e-3,fmax) +# try: plt.loglog(self.freq_source_sm, self.spec_moment_rate_sm,'k', lw=1) +# except: plt.loglog(self.freq_source, self.spec_moment_rate, 'k', lw=1) +# plt.loglog(self.freq_source, self.brune, c='gray', lw=1.0, linestyle='-.', label='w2 model') +# plt.loglog(self.freq_source, self.specD_ja, c='gray', lw=1.0, linestyle=':', label='JA19_2S') +# plt.axvline(x=fc1, c='pink', linestyle='-',lw=2, alpha=0.5) +# plt.axvline(x=fc2, c='pink', linestyle='-',lw=2, alpha=0.5) +# plt.xlabel('Frequency (Hz)') +# plt.grid(True, which="both", ls=":", alpha=0.5) +# plt.tight_layout() +# plt.show() +# return self +# ### \ No newline at end of file
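+
+
+# Minimal end-to-end sketch of the stress-preparation workflow above (read Javier's grid,
+# build an effective normal-stress profile, scale the stresses, interpolate them onto the
+# SPECFEM3D fault nodes and write the per-processor binary files). All paths, 'NPROC' and
+# parameter values below are placeholders/assumptions for illustration only; adapt them to
+# your own setup before running.
+if __name__ == '__main__':
+    NPROC = 4                               # assumed number of MPI ranks
+    MU, FLENGTH, FWIDTH = 30e9, 75.0, 25.0  # assumed shear modulus (Pa) and fault size (km)
+    javier_dir = './JAVIER_OUTPUT/'         # assumed directory containing tau3d.d
+    db_dir = './DATABASES_MPI/'             # assumed directory with proc*_fault_db.txt
+    out_dir = './INIT_STRESS/'              # output directory for *_fault_init_stress.bin
+    # 1. Preferred (Javier) grid and normalised stresses
+    xgrid, zgrid, tau_xy, tau_yz = get_javiers_grid(directory=javier_dir, mu_value=MU,
+                                                    flength=FLENGTH, fwidth=FWIDTH)
+    # 2. SPECFEM3D fault nodes, per processor
+    xcoords, ycoords, zcoords, x_per_proc, y_per_proc, z_per_proc, nglob_per_proc, procnums = \
+        read_specfem3d_db_files(xjavier=xgrid, zjavier=zgrid, reper=db_dir, nproc=NPROC, info=False)
+    # 3. Depth-dependent effective normal stress (MPa) on Javier's grid
+    P_eff = get_effective_stress_profile(np.array(zgrid))
+    # 4. Scale shear and normal stresses (mu_0, mu_s, mu_d values are illustrative only)
+    tau_yy, tau_xy_s, tau_yz_s, _, _, _ = scale_javiers_stress_values(
+        mu_0=0.55, mu_s=0.6, mu_d=0.2, P_eff=P_eff, tau_xy=tau_xy, tau_yz=tau_yz)
+    # 5. Interpolate from Javier's grid onto a mesh built from the SEM fault nodes
+    x_mesh, z_mesh, points = make_grid(x=xgrid, z=zgrid,
+                                       x_db=np.array(xcoords), z_db=np.array(zcoords))
+    tau_xy_i = interpolate_data(x_mesh, z_mesh, points, raw_data=tau_xy_s)
+    tau_yz_i = interpolate_data(x_mesh, z_mesh, points, raw_data=tau_yz_s)
+    tau_yy_i = interpolate_data(x_mesh, z_mesh, points, raw_data=tau_yy)
+    # 6. Write one initial-stress binary per processor
+    write_out_stress_files(nproc=NPROC, reper=out_dir, x_per_proc=x_per_proc,
+                           z_per_proc=z_per_proc, nglob_per_proc=nglob_per_proc,
+                           tau_xy=tau_xy_i, tau_yz=tau_yz_i, tau_yy=tau_yy_i,
+                           x_db=x_mesh, z_db=z_mesh, procnums=procnums)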