diff --git a/nzgmdb/calculation/distances.py b/nzgmdb/calculation/distances.py index 39c81e9..9adf2f2 100644 --- a/nzgmdb/calculation/distances.py +++ b/nzgmdb/calculation/distances.py @@ -4,25 +4,60 @@ """ import functools +import json import multiprocessing as mp from collections import defaultdict from pathlib import Path -from typing import Optional +from typing import Optional, TypedDict +import fiona import numpy as np import pandas as pd from obspy.clients.fdsn import Client as FDSN_Client from pyproj import Transformer -from shapely.geometry import Point -from shapely.geometry.polygon import LineString, Polygon +from shapely.geometry import LineString, Point, Polygon +from cmt_solutions import cmt_data from nzgmdb.CCLD import ccldpy +from nzgmdb.data_retrieval import tect_domain from nzgmdb.management import config as cfg from nzgmdb.management import file_structure from nzgmdb.management.data_registry import NZGMDB_DATA, REGISTRY from oq_wrapper import estimations from qcore import coordinates, geo, grid, src_site_dist -from source_modelling import srf +from source_modelling import magnitude_scaling, srf + + +class FocalMechanism(TypedDict): + """ + Represents the geometric and spatial parameters of a crustal domain + focal mechanism. 
+ """ + + strike: float + """The strike angle of the fault in degrees.""" + dip: float + """The dip angle of the fault in degrees.""" + rake: float + """The rake angle of the fault in degrees.""" + ztor: float + """The depth to the top of the rupture in km.""" + dbottom: float + """The depth to the bottom of the rupture in km.""" + length: float + """The length of the fault along strike in km.""" + dip_dist: float + """The width of the fault down dip in km.""" + hyp_lat: float + """The latitude of the hypocentre.""" + hyp_lon: float + """The longitude of the hypocentre.""" + hyp_depth: float + """The depth of the hypocentre in km.""" + hyp_strike: float + """The hypocentre along-strike position (0 - 1).""" + hyp_dip: float + """The hypocentre down-dip position (0 - 1).""" def calc_fnorm_slip( @@ -65,39 +100,6 @@ def calc_fnorm_slip( return fnorm, slip -def get_domain_focal( - domain_no: int, domain_focal_df: pd.DataFrame -) -> tuple[int, int, int]: - """ - Get the focal mechanism for a given domain - If not found, return the default values - Strike: 220 - Rake: 45 - Dip: 90 - - Parameters - ---------- - domain_no : int - The domain number - domain_focal_df : pd.DataFrame - The domain focal data - - Returns - ------- - strike : int - The strike angle of the fault in degrees - rake : int - The rake angle of the fault in degrees - dip : int - The dip angle of the fault in degrees - """ - if domain_no == 0: - return 220, 45, 90 - else: - domain = domain_focal_df[domain_focal_df.Domain_No == domain_no].iloc[0] - return domain.strike, domain.rake, domain.dip - - def run_ccld_simulation( event_id: str, event_row: pd.Series, @@ -108,7 +110,7 @@ def run_ccld_simulation( strike2: float = None, dip2: float = None, rake2: float = None, -): +) -> FocalMechanism: """ Run the CCLD simulation for an event. 
Uses default values for the number of simulations based on the tectonic class mentioned @@ -137,30 +139,8 @@ def run_ccld_simulation( Returns ------- - dict - A dictionary containing the following keys: - 'strike' : float - The strike angle of the fault in degrees - 'dip' : float - The dip angle of the fault in degrees - 'rake' : float - The rake angle of the fault in degrees - 'ztor' : float - The depth to the top of the rupture in km - 'dbottom' : float - The depth to the bottom of the rupture in km - 'length' : float - The length of the fault along strike in km - 'dip_dist' : float - The width of the fault down dip in km - 'hyp_lat' : float - The latitude of the hypocentre - 'hyp_lon' : float - The longitude of the hypocentre - 'hyp_strike' : float - The hypocentre along-strike position (0 - 1) - 'hyp_dip' : float - The hypocentre down-dip position (0 - 1) + FocalMechanism + A dictionary containing the calculated fault geometry and hypocentre parameters. """ ccdl_tect_class = ccldpy.TECTONIC_MAPPING[event_row.tect_class] # Extra check for undetermined tectonic class @@ -213,13 +193,116 @@ def run_ccld_simulation( } +def get_crustal_domain_focal( + event_id: str, + event_row: pd.Series, + nz_mech: dict, + length_bin: str, + domain_no_backup: int, + domain_focal_df: pd.DataFrame, +) -> FocalMechanism: + """ + Select the appropriate nodal plane from the Crustal domain focal mechanism data. + If both cases are the same, select the highest probability and run CCLD simulations for that plane. + If the cases are different, select the highest probability from each case and run CCLD simulations for both planes. + If the domain number is not found, use the backup focal mechanism. 
+
+    Parameters
+    ----------
+    event_id : str
+        The event id
+    event_row : pd.Series
+        The event row from the earthquake source table
+    nz_mech : dict
+        The domain focal mechanism data
+    length_bin : str
+        The length bin to use for the focal mechanism selection
+    domain_no_backup : int
+        The domain number for the backup focal mechanism.
+    domain_focal_df : pd.DataFrame
+        The focal mechanism data for the different domains.
+
+    Returns
+    -------
+    FocalMechanism
+        A dictionary containing the calculated fault geometry and hypocentre parameters.
+    """
+    try:
+        domain_model = nz_mech[event_row["domain_no"]]
+    except KeyError:
+        # Use the backup focal mechanism
+        # NOTE: get_backup_focal_mechanism returns (strike, rake, dip), so unpack
+        # in that order (matches the Interface/Slab/else call sites below).
+        strike, rake, dip = get_backup_focal_mechanism(
+            domain_no_backup, domain_focal_df
+        )
+        return run_ccld_simulation(event_id, event_row, strike, dip, rake, "D")
+
+    case1 = domain_model["case1"][length_bin]
+    case2 = domain_model["case2"][length_bin]
+
+    # Check if the cases are the same
+    cases_equal = (
+        case1["strikeAn"] == case2["strikeAn"]
+        and case1["dipAn"] == case2["dipAn"]
+        and case1["rakeAn"] == case2["rakeAn"]
+        and case1["prob"] == case2["prob"]
+    )
+
+    # Select the highest probability for case 1
+    idx_max = int(np.argmax(case1["prob"]))
+    strike = float(case1["strikeAn"][idx_max])
+    dip = float(case1["dipAn"][idx_max])
+    rake = float(case1["rakeAn"][idx_max])
+
+    if cases_equal:
+        # Compute the CCLD Simulations for the event
+        ccld_info = run_ccld_simulation(event_id, event_row, strike, dip, rake, "D")
+    else:
+        # Select the highest probability for case 2
+        case2_idx_max = int(np.argmax(case2["prob"]))
+        strike2 = float(case2["strikeAn"][case2_idx_max])
+        dip2 = float(case2["dipAn"][case2_idx_max])
+        rake2 = float(case2["rakeAn"][case2_idx_max])
+        # Compute the CCLD Simulations for the event with both possible planes
+        ccld_info = run_ccld_simulation(
+            event_id, event_row, strike, dip, rake, "C", strike2, dip2, rake2
+        )
+    return ccld_info
+
+
+def get_backup_focal_mechanism(
+    domain_no_backup: 
int, domain_focal_df: pd.DataFrame +) -> tuple[float, float, float]: + """ + Retrieves the backup focal mechanism. + + Parameters + ---------- + domain_no_backup : int + The domain number for the backup focal mechanism. + domain_focal_df : pd.DataFrame + The focal mechanism data for the different domains. + + Returns + ------- + tuple + A tuple containing the strike, rake, and dip angles. + """ + if domain_no_backup == 0: + return 220, 45, 90 + domain = domain_focal_df[domain_focal_df.Domain_No == domain_no_backup].iloc[0] + return domain.strike, domain.rake, domain.dip + + def get_nodal_plane_info( event_id: str, event_row: pd.Series, - geonet_cmt_df: pd.DataFrame, - modified_cmt_df: pd.DataFrame, + cmt_df: pd.DataFrame, domain_focal_df: pd.DataFrame, srf_files: dict, + hik_objs: np.ndarray, + puy_objs: np.ndarray, + nz_mech: dict, + slab_faulting_geo: dict, ) -> dict: """ Determine the correct nodal plane for the event @@ -237,14 +320,20 @@ def get_nodal_plane_info( The event id event_row : pd.Series The event row from the earthquake source table - geonet_cmt_df : pd.DataFrame - The Geonet CMT data - modified_cmt_df : pd.DataFrame - The modified CMT data for the correct nodal plane + cmt_df : pd.DataFrame + The Centroid Moment Tensor data for New Zealand events domain_focal_df : pd.DataFrame - The focal mechanism data for the different domains + The focal mechanism data for the different domains as a backup srf_files : dict The srf files for specific events + hik_objs : np.ndarray + The Hikurangi RBF objects for strike and dip interpolation as well as the Hikurangi footprint for Crustal events + puy_objs : np.ndarray + The Puysegur RBF objects for strike and dip interpolation as well as the Puysegur footprint for Crustal events + nz_mech : dict + The domain focal mechanism data for each domain + slab_faulting_geo : dict + The slab faulting geometry data for both Hikurangi and Puysegur Returns ------- @@ -275,6 +364,10 @@ def get_nodal_plane_info( nodal_plane_info 
= defaultdict(lambda: None) ccld_info = None + # Split the cmt data into reviewed and unreviewed data + reviewed_cmt_data = cmt_df[cmt_df["reviewed"]] + unreviewed_cmt_data = cmt_df[~cmt_df["reviewed"]] + # Check if the event_id is in the srf_files if event_id in srf_files: # Read the srf file to determine the nodal plane information @@ -371,20 +464,19 @@ def get_nodal_plane_info( nodal_plane_info["hyp_dip"] = dip_idx / (ndip - 1) nodal_plane_info["plane_index"] = plane_index - elif event_id in modified_cmt_df.PublicID.values: - # Event is in the modified CMT data + elif event_id in reviewed_cmt_data.PublicID.values: + # Event is in the reviewed CMT data nodal_plane_info["f_type"] = "cmt" - cmt = modified_cmt_df[modified_cmt_df.PublicID == event_id].iloc[0] + cmt = reviewed_cmt_data[reviewed_cmt_data.PublicID == event_id].iloc[0] # Compute the CCLD Simulations for the event ccld_info = run_ccld_simulation( event_id, event_row, cmt.strike1, cmt.dip1, cmt.rake1, "A" ) - elif event_id in geonet_cmt_df.PublicID.values: - # Event is in the Geonet CMT data - # Need to determine the correct plane + elif event_id in unreviewed_cmt_data.PublicID.values: + # Event is in the Geonet CMT data, however it has not been reviewed nodal_plane_info["f_type"] = "cmt_unc" - cmt = geonet_cmt_df[geonet_cmt_df.PublicID == event_id].iloc[0] + cmt = unreviewed_cmt_data[unreviewed_cmt_data.PublicID == event_id].iloc[0] # Compute the CCLD Simulations for the event ccld_info = run_ccld_simulation( @@ -401,11 +493,105 @@ def get_nodal_plane_info( else: # Event is not found in any of the datasets # Use the domain focal - nodal_plane_info["f_type"] = "domain" - strike, rake, dip = get_domain_focal(event_row["domain_no"], domain_focal_df) + hik_strike_rbf, hik_dip_rbf, hik_footprint = hik_objs + puy_strike_rbf, puy_dip_rbf, puy_footprint = puy_objs + domain_no_backup = event_row["domain_no_backup"] + + if event_row["tect_class"] == "Crustal": + # First assume strike-slip to estimate length + 
length = magnitude_scaling.leonard_magnitude_to_length(event_row.mag, 15) + length_bin = ">45" if length > 45.0 else ">15" + + ccld_info = get_crustal_domain_focal( + event_id, + event_row, + nz_mech, + length_bin, + domain_no_backup, + domain_focal_df, + ) - # Compute the CCLD Simulations for the event - ccld_info = run_ccld_simulation(event_id, event_row, strike, dip, rake, "D") + # Check the new length to see if a different length bin should be used + new_length = ccld_info["length"] + new_length_bin = ">45" if new_length > 45.0 else ">15" + if new_length_bin != length_bin: + # Recompute with the new length bin + ccld_info = get_crustal_domain_focal( + event_id, + event_row, + nz_mech, + new_length_bin, + domain_no_backup, + domain_focal_df, + ) + elif event_row["tect_class"] == "Interface": + lat, lon = event_row["lat"], event_row["lon"] + + rake = None + # Check which subduction zone the event is in + if hik_footprint.contains(Point(lon, lat)): + strike = float(np.squeeze(hik_strike_rbf([[lon, lat]]))) + dip = float(np.squeeze(hik_dip_rbf([[lon, lat]]))) + elif puy_footprint.contains(Point(lon, lat)): + strike = float(np.squeeze(puy_strike_rbf([[lon, lat]]))) + dip = float(np.squeeze(puy_dip_rbf([[lon, lat]]))) + else: + strike, rake, dip = get_backup_focal_mechanism( + domain_no_backup, domain_focal_df + ) + # Check for infinite values + if not np.isfinite(strike) or not np.isfinite(dip): + strike, rake, dip = get_backup_focal_mechanism( + domain_no_backup, domain_focal_df + ) + rake = 90.0 if rake is None else rake + + # Run ccld to get length, width, ztor, dbottom + ccld_info = run_ccld_simulation(event_id, event_row, strike, dip, rake, "D") + + elif event_row["tect_class"] == "Slab": + lat, lon = event_row["lat"], event_row["lon"] + # Check which zone the event is in + if hik_footprint.contains(Point(lon, lat)): + tbl = slab_faulting_geo["hik"] + elif puy_footprint.contains(Point(lon, lat)): + tbl = slab_faulting_geo["puy"] + else: + strike, rake, dip = 
get_backup_focal_mechanism( + domain_no_backup, domain_focal_df + ) + # Run ccld to get length, width, ztor, dbottom + ccld_info = run_ccld_simulation( + event_id, event_row, strike, dip, rake, "D" + ) + nodal_plane_info.update(ccld_info) + return nodal_plane_info + + # Find the closest point in the table + depth_bins = [int(b) for b in tbl.keys()] + bin = np.array( + tbl[ + str( + depth_bins[ + np.argmin(np.abs(np.array(depth_bins) - event_row["depth"])) + ] + ) + ] + ) + # Select the highest probability (doesn't matter which case as they are the same) + idx_max = int(np.argmax(bin[:, 3])) + strike = float(bin[idx_max, 0]) % 360 + dip = float(bin[idx_max, 1]) + rake = float(bin[idx_max, 2]) + + # Run ccld to get length, width, ztor, dbottom + ccld_info = run_ccld_simulation(event_id, event_row, strike, dip, rake, "D") + + else: + strike, rake, dip = get_backup_focal_mechanism( + domain_no_backup, domain_focal_df + ) + ccld_info = run_ccld_simulation(event_id, event_row, strike, dip, rake, "D") if ccld_info is not None: # Update the nodal plane info with the ccld info @@ -418,11 +604,14 @@ def compute_distances_for_event( event_row: pd.Series, im_df: pd.DataFrame, station_df: pd.DataFrame, - modified_cmt_df: pd.DataFrame, - geonet_cmt_df: pd.DataFrame, + cmt_df: pd.DataFrame, domain_focal_df: pd.DataFrame, taupo_polygon: Polygon, srf_files: dict, + hik_objs: np.ndarray, + puy_objs: np.ndarray, + nz_mech: dict, + slab_faulting_geo: dict, ) -> tuple[Optional[pd.DataFrame], Optional[pd.DataFrame]]: """ Compute the distances for a given event @@ -435,16 +624,22 @@ def compute_distances_for_event( The full IM data from the catalogue station_df : pd.DataFrame The full station data - modified_cmt_df : pd.DataFrame - The modified CMT data for the correct nodal plane - geonet_cmt_df : pd.DataFrame - The Geonet CMT data + cmt_df : pd.DataFrame + The Centroid Moment Tensor data domain_focal_df : pd.DataFrame The focal mechanism data for the different domains taupo_polygon : 
Polygon The Taupo VZ polygon srf_files : dict The srf files for specific events + hik_objs : np.ndarray + The Hikurangi RBF objects for strike and dip interpolation as well as the Hikurangi footprint for Crustal events + puy_objs : np.ndarray + The Puysegur RBF objects for strike and dip interpolation as well as the Puysegur footprint for Crustal events + nz_mech : dict + The domain focal mechanism data for each domain + slab_faulting_geo : dict + The slab faulting geometry data for both Hikurangi and Puysegur Returns ------- @@ -473,10 +668,13 @@ def compute_distances_for_event( nodal_plane_info = get_nodal_plane_info( event_id, event_row, - geonet_cmt_df, - modified_cmt_df, + cmt_df, domain_focal_df, srf_files, + hik_objs, + puy_objs, + nz_mech, + slab_faulting_geo, ) ( strike, @@ -992,14 +1190,8 @@ def calc_distances(main_dir: Path, n_procs: int = 1): # Get the flatfile directory flatfile_dir = file_structure.get_flatfile_dir(main_dir) - # Get the modified CMT data - modified_cmt_df = pd.read_csv( - NZGMDB_DATA.fetch("GeoNet_CMT_solutions_20201129_PreferredNodalPlane_v1.csv") - ) - - # Get the regular CMT data - config = cfg.Config() - geonet_cmt_df = pd.read_csv(config.get_value("cmt_url"), dtype={"PublicID": str}) + # Get the CMT solutions data + cmt_df = cmt_data.get_cmt_data() # Load the eq source table event_df = pd.read_csv( @@ -1007,6 +1199,31 @@ def calc_distances(main_dir: Path, n_procs: int = 1): dtype={"evid": str}, ) + NZGMDB_DATA.fetch("TectonicDomains_Feb2021_8_NZTM.shp") + NZGMDB_DATA.fetch("TectonicDomains_Feb2021_8_NZTM.dbf") + NZGMDB_DATA.fetch("TectonicDomains_Feb2021_8_NZTM.shx") + with fiona.open( + Path(NZGMDB_DATA.abspath) / "TectonicDomains_Feb2021_8_NZTM.shp" + ) as collection: + shapes = list(collection) + + fallback_domain_values = tect_domain.find_domain_from_shapes( + event_df.loc[:, ["lat", "lon"]], + shapes, + ) + + # Add the fallback domain values to the event df + event_df["domain_no_backup"] = fallback_domain_values.loc[:, 
"domain_no"].values + + hik_objs = np.load(NZGMDB_DATA.fetch("hik_focmec.npy"), allow_pickle=True)[()] + puy_objs = np.load(NZGMDB_DATA.fetch("puy_focmec.npy"), allow_pickle=True)[()] + + with open(NZGMDB_DATA.fetch("slab-faulting2.json"), "r", encoding="utf-8") as f: + slab_faulting_geo = json.load(f) + + with open(NZGMDB_DATA.fetch("nzfocmecmod.json"), "r", encoding="utf-8") as f: + nz_mech = json.load(f) + # Get the focal domain domain_focal_df = pd.read_csv( NZGMDB_DATA.fetch("focal_mech_tectonic_domain_v1.csv"), @@ -1019,6 +1236,7 @@ def calc_distances(main_dir: Path, n_procs: int = 1): tvz_points = tect_domain_points[tect_domain_points.domain_no == 4][ ["latitude", "longitude"] ] + config = cfg.Config() ll_num = config.get_value("ll_num") nztm_num = config.get_value("nztm_num") wgs2nztm = Transformer.from_crs(ll_num, nztm_num) @@ -1074,11 +1292,14 @@ def calc_distances(main_dir: Path, n_procs: int = 1): compute_distances_for_event, im_df=im_df, station_df=station_df, - modified_cmt_df=modified_cmt_df, - geonet_cmt_df=geonet_cmt_df, + cmt_df=cmt_df, domain_focal_df=domain_focal_df, taupo_polygon=taupo_polygon, srf_files=srf_files, + hik_objs=hik_objs, + puy_objs=puy_objs, + nz_mech=nz_mech, + slab_faulting_geo=slab_faulting_geo, ), [row for idx, row in event_df.iterrows()], ) @@ -1092,6 +1313,9 @@ def calc_distances(main_dir: Path, n_procs: int = 1): # Merge the extra event data with the event data event_df = pd.merge(event_df, extra_event_data, on="evid", how="right") + # Remove the domain_no_backup column + event_df = event_df.drop(columns=["domain_no_backup"]) + # Save the results propagation_data.to_csv( flatfile_dir / file_structure.PreFlatfileNames.PROPAGATION_TABLE, index=False diff --git a/nzgmdb/data_retrieval/sites.py b/nzgmdb/data_retrieval/sites.py index dc03963..113b966 100644 --- a/nzgmdb/data_retrieval/sites.py +++ b/nzgmdb/data_retrieval/sites.py @@ -86,15 +86,16 @@ def create_site_table_response() -> pd.DataFrame: on="sta", how="left", ) + # 
Fill Elevation NaN values from sta_df + merged_df["elev"] = merged_df["Elevation"].combine_first(merged_df["elev"]) # Specify the required files for fiona - NZGMDB_DATA.fetch("TectonicDomains_Feb2021_8_NZTM.shp") - NZGMDB_DATA.fetch("TectonicDomains_Feb2021_8_NZTM.dbf") - NZGMDB_DATA.fetch("TectonicDomains_Feb2021_8_NZTM.shx") + NZGMDB_DATA.fetch("nt_domains_kiran.shp") + NZGMDB_DATA.fetch("nt_domains_kiran.dbf") + NZGMDB_DATA.fetch("nt_domains_kiran.shx") # Shape file for determining neotectonic domain - shapes = list( - fiona.open(Path(NZGMDB_DATA.abspath) / "TectonicDomains_Feb2021_8_NZTM.shp") - ) + with fiona.open(Path(NZGMDB_DATA.abspath) / "nt_domains_kiran.shp") as collection: + shapes = list(collection) tect_merged_df = tect_domain.find_domain_from_shapes(merged_df, shapes) # Rename the domain column diff --git a/nzgmdb/data_retrieval/tect_domain.py b/nzgmdb/data_retrieval/tect_domain.py index fa8b13d..1c217b6 100644 --- a/nzgmdb/data_retrieval/tect_domain.py +++ b/nzgmdb/data_retrieval/tect_domain.py @@ -11,6 +11,7 @@ import pandas as pd from pyproj import Transformer +from cmt_solutions import cmt_data from nzgmdb.management import config as cfg from nzgmdb.management.data_registry import NZGMDB_DATA from qcore import geo, point_in_polygon @@ -473,18 +474,16 @@ def add_tect_domain( The number of processes to use """ # Specify the required files for fiona - NZGMDB_DATA.fetch("TectonicDomains_Feb2021_8_NZTM.shp") - NZGMDB_DATA.fetch("TectonicDomains_Feb2021_8_NZTM.dbf") - NZGMDB_DATA.fetch("TectonicDomains_Feb2021_8_NZTM.shx") + NZGMDB_DATA.fetch("nt_domains_kiran.shp") + NZGMDB_DATA.fetch("nt_domains_kiran.dbf") + NZGMDB_DATA.fetch("nt_domains_kiran.shx") # Shape file for determining neotectonic domain - shapes = list( - fiona.open(Path(NZGMDB_DATA.abspath) / "TectonicDomains_Feb2021_8_NZTM.shp") - ) + with fiona.open(Path(NZGMDB_DATA.abspath) / "nt_domains_kiran.shp") as collection: + shapes = list(collection) - # Read the geonet CMT and event data - 
config = cfg.Config() - geonet_cmt_df = pd.read_csv(config.get_value("cmt_url"), dtype={"evid": str}) + # Read the CMT and event data + cmt_df = cmt_data.get_cmt_data() event_df = pd.read_csv(event_csv_ffp, dtype={"evid": str}) # Merge in the Reyners Catalogue data for relocations @@ -493,8 +492,8 @@ def add_tect_domain( ) event_df = merge_reyners_catalogue_on_events(event_df, reyners_catalogue_df) - # Replace the geonet CMT data on the event data (override the reyners relocations) - event_df = replace_cmt_data_on_event(event_df, geonet_cmt_df) + # Replace the CMT data on the event data (override the reyners relocations) + event_df = replace_cmt_data_on_event(event_df, cmt_df) # Merge the NZSMDB data event_df = merge_NZSMDB_flatfile_on_events(event_df) diff --git a/nzgmdb/management/data_registry.py b/nzgmdb/management/data_registry.py index 9e9374e..56703a4 100644 --- a/nzgmdb/management/data_registry.py +++ b/nzgmdb/management/data_registry.py @@ -24,6 +24,13 @@ "TectonicDomains_Feb2021_8_NZTM.shx": "sha256:9ca42e270e5604f4740b3d99ea6a72daa75eaae0e6c6bda3c4e5c86a72369403", "TectonicDomains_Feb2021_8_NZTM.dbf": "sha256:534c644c5d4d6a08106752e2fe33894bc820645efa017e0137ef5b8d44e8200c", "tectonic_domain_polygon_points.csv": "sha256:a54c30a4e68bb078c6ce9d99bd30f902a9627d57fd39392b403064de61b1f1a4", + "nt_domains_kiran.dbf": "sha256:97e86ca9b8ea50ea0d8eec32025513a1ec0026a602a7bae2777999f6ba9f7ac4", + "nt_domains_kiran.shp": "sha256:edfa412b9395258a1ca0e9de61070108567b427ca59d309e09c531b462ccd490", + "nt_domains_kiran.shx": "sha256:721a99e549961dcc22a342750f8bd9d62946a73dfbf6a1dc4e9ba8a11d578ac0", + "hik_focmec.npy": "sha256:3a13e4c31b99b80da9976e2cb38b5f0f69cccf860140bea0598f32f9ed075767", + "puy_focmec.npy": "sha256:6493cba83722cdbea24ceafadd192d8b7a9e5160423e0b5fef3357748cf3e66f", + "nzfocmecmod.json": "sha256:1612c364b0ddf6251bb4044c3cde1a36b36e672525465e71cf0e4007bc97b446", + "slab-faulting2.json": 
"sha256:debe5e1a082716dd9d3deaad79fc859de7aeaaf4a41e40632f26a6593b020c83", "brendon_set.csv": "sha256:a62d8c69f7f7bc6ccf0318a113a152befb35cfb21c56ab4806c58f24f221ce2e", "lee_large.csv": "sha256:e4077746552937cd0a48be723b0188d33d4a73c2364716f085c0cd55ab78ec58", "lee_small.csv": "sha256:b274bc99fcacb2747623f754b92d594c3b6729cfcfd0d046365ad3c6fba706c1", @@ -42,6 +49,13 @@ "TectonicDomains_Feb2021_8_NZTM.dbf": "https://www.dropbox.com/scl/fi/o2z83zbg3yvxq8yqiwkqk/TectonicDomains_Feb2021_8_NZTM.dbf?rlkey=o4imoxtoj8psa3ndvryqrx2qk&st=qvcwp7hj&dl=1", "TectonicDomains_Feb2021_8_NZTM.shp": "https://www.dropbox.com/scl/fi/69qwsa48gvtzx056vqirs/TectonicDomains_Feb2021_8_NZTM.shp?rlkey=qjkzra3xu0z9xjkb3dcji4max&st=kfsh8gaw&dl=1", "TectonicDomains_Feb2021_8_NZTM.shx": "https://www.dropbox.com/scl/fi/3qyw5v3cw0dzi41d6wnp8/TectonicDomains_Feb2021_8_NZTM.shx?rlkey=ducg84xqhi2ua6ku2qhbiutrg&st=flmk11e0&dl=1", + "nt_domains_kiran.dbf": "https://www.dropbox.com/scl/fi/eexo36xpy8zw4m4kqua5i/nt_domains_kiran.dbf?rlkey=79buc3tge8glyerhcv1mvhur0&st=rjozdfq5&dl=1", + "nt_domains_kiran.shp": "https://www.dropbox.com/scl/fi/szdddwmefe19wcpgjpmcr/nt_domains_kiran.shp?rlkey=4vfrmtvbh17xezu9bovw94y7m&st=ohbz9bcw&dl=1", + "nt_domains_kiran.shx": "https://www.dropbox.com/scl/fi/17vh8bzrr0ahbxx3p49d4/nt_domains_kiran.shx?rlkey=mf5f5ibq0tiju0826anx7i18o&st=8obiawck&dl=1", + "hik_focmec.npy": "https://www.dropbox.com/scl/fi/1fn9yy68kacot6zsrhhan/hik_focmec.npy?rlkey=sgr1p2jlr4r4fbxegszzjfbep&st=12uqjyid&dl=1", + "puy_focmec.npy": "https://www.dropbox.com/scl/fi/9d0ycs5387lmdw3izkobv/puy_focmec.npy?rlkey=ognpq4lxr3bxxhw5iify8r9lt&st=zbrd62da&dl=1", + "nzfocmecmod.json": "https://www.dropbox.com/scl/fi/5zhftxo8uyfzzr6fj5kiz/nzfocmecmod.json?rlkey=a6hs5kgcqeagk4mosrg7akz13&st=o8xxr9v0&dl=1", + "slab-faulting2.json": "https://www.dropbox.com/scl/fi/u6o5jiuz9dwh1bl666nxj/slab-faulting2.json?rlkey=q7gzx6j8hmo61ic13co28vftj&st=hjnu7akb&dl=1", "2013p543824.srf": 
"https://www.dropbox.com/scl/fi/013i860bt3os1k80leai5/2013p543824.srf?rlkey=ww3clc60gud4sycwfzmgopvix&st=jpu99tse&dl=1", "2013p613797.srf": "https://www.dropbox.com/scl/fi/7wx7glahf8cufo1g7umzc/2013p613797.srf?rlkey=1x9e5yb2nkf5468rs0bogm8ev&st=dut8d2uw&dl=1", "2016p661332.srf": "https://www.dropbox.com/scl/fi/pp7s6iqilrdgbrs00wi55/2016p661332.srf?rlkey=bhgcddk3bcdpxoycorlhp2in6&st=b8o2bk4w&dl=1", diff --git a/pyproject.toml b/pyproject.toml index 5b7fbd9..ffe0199 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,7 +50,7 @@ ignore = [ convention = "numpy" [tool.ruff.lint.isort] -known-first-party = ["qcore", "oq_wrapper", "IM", "velocity_modelling", "source_modelling"] +known-first-party = ["qcore", "oq_wrapper", "IM", "velocity_modelling", "source_modelling", "cmt_solutions"] [tool.ruff.lint.per-file-ignores] # Ignore no docstring in __init__.py diff --git a/requirements.txt b/requirements.txt index 35add91..e987d2c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,3 +19,4 @@ source_modelling>=2025.12.1, openquake.engine==3.23.2, im-calculation>=2025.12.3, velocity-modelling>=2025.12.1 +cmt-solutions @ git+https://github.com/ucgmsim/cmt_solutions.git \ No newline at end of file diff --git a/wiki/Add-Tectonic-Domain.md b/wiki/Add-Tectonic-Domain.md index a1634c7..3d6c0c0 100644 --- a/wiki/Add-Tectonic-Domain.md +++ b/wiki/Add-Tectonic-Domain.md @@ -46,11 +46,12 @@ Relocations are applied from the Reyners Catalogue to improve location accuracy: - Sets `reloc` field to "reyners" for successfully relocated events ### 🔹 2. 
Centroid Moment Tensor (CMT) Solutions -GeoNet CMT location solutions are applied, which override Reyners relocations when overlapping: +GeoNet and John Townend CMT location solutions are applied, which override Reyners relocations when overlapping: -**GeoNet CMT Solutions** +**CMT Solutions** -- Applies Centroid Moment Tensor (CMT) solutions from GeoNet +- Applies Centroid Moment Tensor (CMT) solutions from GeoNet and John Townend study +- Overrides previous relocations (including Reyners) when CMT solutions are available - Updates source parameters: mag (Mw), lat, lon, depth - Sets metadata fields: mag_type="Mw", mag_method="CMT", loc_type="CMT", loc_grid="CMT", reloc="no" @@ -128,7 +129,6 @@ The enhanced earthquake source table contains all original fields plus: The step uses several configuration parameters from `config.yaml`: - **nzsmdb_url**: URL for NZ Strong Motion Database flatfile -- **cmt_url**: URL for GeoNet CMT solutions - **ll_num**: WGS84 coordinate system identifier - **nztm_num**: NZTM coordinate system identifier diff --git a/wiki/Calculate-Distances.md b/wiki/Calculate-Distances.md index 200dd6b..7dfd252 100644 --- a/wiki/Calculate-Distances.md +++ b/wiki/Calculate-Distances.md @@ -190,12 +190,12 @@ The system determines the correct nodal plane through the following hierarchy: - Extract nodal plane parameters and SRF points - Calculate weighted average of strike, dip, rake based on plane areas -2. **Check Modified CMT Solutions** (Custom review for most likely nodal plane): +2. **Check Reviewed CMT Solutions** (Custom review for most likely nodal plane): - Use predetermined preferred nodal plane - Extract strike, dip, rake values - Apply CCLD Method A -3. **Check Standard CMT Solutions**: Search GeoNet CMT catalogue: +3. **Check Standard CMT Solutions**: Search CMT catalogue: - Apply CCLD Method C with both nodal planes 4. 
**Use Domain Default**: For events without CMT solutions: @@ -244,9 +244,6 @@ Key parameters from `config.yaml` that control distance calculations: ### 🔹 Fault Discretisation - `points_per_km`: Resolution for SRF point generation (default: typically 2-4 points/km) -### 🔹 External Data Sources -- `cmt_url`: URL for GeoNet CMT solutions catalogue - --- ## 📦 Output