diff --git a/openquake/commands/consequences.py b/openquake/commands/consequences.py new file mode 100644 index 000000000000..b63ce6b8e29b --- /dev/null +++ b/openquake/commands/consequences.py @@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# vim: tabstop=4 shiftwidth=4 softtabstop=4 +# +# Copyright (C) 2017-2023 GEM Foundation +# +# OpenQuake is free software: you can redistribute it and/or modify it +# under the terms of the GNU Affero General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# OpenQuake is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with OpenQuake. If not, see . +import os +import csv +import numpy as np +from openquake.baselib import performance +from openquake.commonlib import datastore +import pandas as pd + +CD = os.path.join(os.path.dirname(__file__), os.pardir, 'risklib', 'data') +square_footage_file = os.path.join( + CD, 'Hazus_Consequence_Parameters_SquareFootage.csv') +collapse_rate_file = os.path.join( + CD, 'Hazus_Consequence_Parameters_CollapseRates.csv') +interruption_time_file = os.path.join( + CD, 'Hazus_Consequence_Parameters_InterruptionTimeMultipliers.csv') +casualty_rate_file = os.path.join( + CD, 'Hazus_Consequence_Parameters_IndoorCasualtyRates_%s.csv') +repair_time_file = os.path.join( + CD, 'Hazus_Consequence_Parameters_BuildingRepairTime.csv') +recovery_time_file = os.path.join( + CD, 'Hazus_Consequence_Parameters_BuildingRecoveryTime.csv') +debris_unitweight_bwo_file = os.path.join( + CD, 'Hazus_Consequence_Parameters_Debris_UnitWeight_BWO.csv') +debris_unitweight_rcs_file = os.path.join( + CD, 'Hazus_Consequence_Parameters_Debris_UnitWeight_RCS.csv') +debris_bwo_structural_file = os.path.join( + CD, 'Hazus_Consequence_Parameters_Debris_BWO_Structural.csv') +debris_bwo_nonstructural_file = os.path.join( + CD, 'Hazus_Consequence_Parameters_Debris_BWO_Nonstructural.csv') +debris_rcs_structural_file = os.path.join( + CD, 'Hazus_Consequence_Parameters_Debris_RCS_Structural.csv') +debris_rcs_nonstructural_file = os.path.join( + CD, 'Hazus_Consequence_Parameters_Debris_RCS_Nonstructural.csv') + + +def read_square_footage(square_footage_file): + square_footage_df = pd.read_csv(square_footage_file, index_col=0) + return square_footage_df + + +# NOTE: unused (if needed, we must change it to read from csv) +# def read_repair_ratio_str(xlsx): +# repair_ratio_str_df = pd.read_excel( +# xlsx, sheet_name="Structural Repair Ratios", skiprows=2, index_col=0) +# repair_ratio_str_df.index.name = "Occupancy" +# repair_ratio_str_df.rename_axis( +# "Structural Damage State", axis="columns", inplace=True) +# return repair_ratio_str_df/100 + + +# def read_repair_ratio_nsa(xlsx): +# repair_ratio_nsa_df = pd.read_excel( +# xlsx, sheet_name="NonstrAccel Repair Ratios", +# skiprows=2, index_col=0) +# repair_ratio_nsa_df.index.name = "Occupancy" +# repair_ratio_nsa_df.rename_axis( +# "Acceleration Sensitive Non-structural Damage State", +# axis="columns", inplace=True) +# return repair_ratio_nsa_df/100 + + +# def read_repair_ratio_nsd(xlsx): +# repair_ratio_nsd_df = pd.read_excel( +# xlsx, sheet_name="NonstrDrift Repair Ratios", +# skiprows=2, index_col=0) +# repair_ratio_nsd_df.index.name = "Occupancy" +# 
repair_ratio_nsd_df.rename_axis( +# "Drift Sensitive Non-structural Damage State", +# axis="columns", inplace=True) +# return repair_ratio_nsd_df/100 + + +# def read_repair_ratio_con(xlsx): +# repair_ratio_con_df = pd.read_excel( +# xlsx, sheet_name="Contents Damage Ratios", skiprows=2, index_col=0) +# repair_ratio_con_df.index.name = "Occupancy" +# repair_ratio_con_df.rename_axis( +# "Acceleration Sensitive Non-structural Damage State", +# axis="columns", inplace=True) +# return repair_ratio_con_df/100 + + +def read_collapse_rate(collapse_rate_file): + collapse_rate_df = pd.read_csv( + collapse_rate_file, index_col=0) + return collapse_rate_df/100 + + +def read_casualty_rate_in(casualty_rate_file): + casualty_rate_in_df = pd.read_csv( + casualty_rate_file, index_col=0) + return casualty_rate_in_df/100 + + +# NOTE: unused (if needed, we must change it to read from csv) +# def read_casualty_rate_out(xlsx): +# casualty_rate_out_df = pd.read_excel( +# xlsx, sheet_name="Outdoor Casualty Rates", +# skiprows=1, index_col=0, header=[0, 1]) +# casualty_rate_out_df.index.name = "Building Type" +# casualty_rate_out_df.columns.names = ["Damage State", "Severity Level"] +# return casualty_rate_out_df/100 + + +def read_debris(debris_file): + debris_df = pd.read_csv( + debris_file, index_col=0) + debris_df.index.name = "taxonomy" + return debris_df + + +def read_repair_time(repair_time_file): + repair_time_df = pd.read_csv( + repair_time_file, index_col=0) + repair_time_df.index.name = "Occupancy" + repair_time_df.rename_axis( + "Structural Damage State", axis="columns", inplace=True) + return repair_time_df + + +def read_recovery_time(recovery_time_file): + recovery_time_df = pd.read_csv( + recovery_time_file, index_col=0) + recovery_time_df.index.name = "Occupancy" + recovery_time_df.rename_axis( + "Structural Damage State", axis="columns", inplace=True) + return recovery_time_df + + +def read_interruption_time(interruption_time_file): + interruption_time_df = pd.read_csv( + interruption_time_file, index_col=0) + interruption_time_df.index.name = "Occupancy" + interruption_time_df.rename_axis( + "Structural Damage State", axis="columns", inplace=True) + return interruption_time_df + + +read_params = { + "Square Footage": read_square_footage, + # "Structural Repair Ratios": read_repair_ratio_str, # unused + # "NonstrAccel Repair Ratios": read_repair_ratio_nsa, # unused + # "NonstrDrift Repair Ratios": read_repair_ratio_nsd, # unused + # "Contents Damage Ratios": read_repair_ratio_con, # unused + "Collapse Rates": read_collapse_rate, + "Indoor Casualty Rates": read_casualty_rate_in, + # "Outdoor Casualty Rates": read_casualty_rate_out, # unused + "Debris": read_debris, + "Building Repair Time": read_repair_time, + "Building Recovery Time": read_recovery_time, + "Interruption Time Multipliers": read_interruption_time, +} + + +def calculate_consequences(calc_id, output_dir): + calc_id = datastore.get_last_calc_id() if calc_id == -1 else int(calc_id) + dstore = datastore.read(calc_id) + lt = 0 # structural damage + stat = 0 # damage state mean values + num_rlzs = len(dstore["weights"]) + assetcol = dstore['assetcol'] + taxonomies = assetcol.tagcol.taxonomy + + # Read the asset damage table from the calculation datastore + calculation_mode = dstore['oqparam'].calculation_mode + if calculation_mode == 'scenario_damage': + damages = dstore['damages-rlzs'] + elif calculation_mode == 'classical_damage': + damages = dstore['damages-stats'] + else: + print("Consequence calculations not supported for ", 
calculation_mode) + return + + # Read the various consequences tables from the spreadsheet + # square_footage_df = read_params["Square Footage"](xlsx) + square_footage_df = read_params["Square Footage"](square_footage_file) + + # NOTE: unused (if needed, we must change it to read from csv) + # repair_ratio_str_df = read_params["Structural Repair Ratios"](xlsx) + # repair_ratio_nsa_df = read_params["NonstrAccel Repair Ratios"](xlsx) + # repair_ratio_nsd_df = read_params["NonstrDrift Repair Ratios"](xlsx) + # repair_ratio_con_df = read_params["Contents Damage Ratios"](xlsx) + + collapse_rate_df = read_params["Collapse Rates"](collapse_rate_file) + + severity_levels = ["Severity1", "Severity2", "Severity3", "Severity4"] + casualty_rate_in = {} + for severity_level in severity_levels: + casualty_rate_in_df = read_params["Indoor Casualty Rates"]( + casualty_rate_file % severity_level) + casualty_rate_in[severity_level] = casualty_rate_in_df + + # NOTE: unused (if needed, we must change it to read from csv) + # casualty_rate_out_df = read_params["Outdoor Casualty Rates"](xlsx) + + repair_time_df = read_params["Building Repair Time"](repair_time_file) + + recovery_time_df = read_params["Building Recovery Time"]( + recovery_time_file) + + interruption_time_df = read_params["Interruption Time Multipliers"]( + interruption_time_file) + + debris_brick_wood_pct_structural_df = read_params["Debris"]( + debris_bwo_structural_file) + debris_brick_wood_pct_nonstructural_df = read_params["Debris"]( + debris_bwo_nonstructural_file) + debris_concrete_steel_pct_structural_df = read_params["Debris"]( + debris_rcs_structural_file) + debris_concrete_steel_pct_nonstructural_df = read_params["Debris"]( + debris_rcs_nonstructural_file) + + unit_weight_bwo_df = read_params["Debris"]( + debris_unitweight_bwo_file) + unit_weight_rcs_df = read_params["Debris"]( + debris_unitweight_rcs_file) + + # Initialize lists / dicts to store the asset level casualty estimates + casualties_day = { + "Severity 1": 0, "Severity 2": 0, "Severity 3": 0, "Severity 4": 0} + casualties_night = { + "Severity 1": 0, "Severity 2": 0, "Severity 3": 0, "Severity 4": 0} + casualties_transit = { + "Severity 1": 0, "Severity 2": 0, "Severity 3": 0, "Severity 4": 0} + + for rlzi in range(num_rlzs): + print("Processing realization {} of {}".format(rlzi+1, num_rlzs)) + filename = os.path.join( + output_dir, f"consequences-rlz-{rlzi:03}_{calc_id}.csv") + with open(filename, 'w') as f: + writer = csv.writer(f) + # Write the header row to the csv file + writer.writerow( + ["asset_ref", "number_of_buildings", + "value_structural", "value_nonstructural", "value_contents", + "occupants_day", "occupants_night", "occupants_transit", + "collapse_ratio", "mean_repair_time", + "mean_recovery_time", "mean_interruption_time", + "casualties_day_severity_1", "casualties_day_severity_2", + "casualties_day_severity_3", "casualties_day_severity_4", + "casualties_night_severity_1", "casualties_night_severity_2", + "casualties_night_severity_3", "casualties_night_severity_4", + "casualties_transit_severity_1", + "casualties_transit_severity_2", + "casualties_transit_severity_3", + "casualties_transit_severity_4", + "sc_Displ3", "sc_Displ30", "sc_Displ90", + "sc_Displ180", "sc_Displ360", + "sc_BusDispl30", "sc_BusDispl90", + "sc_BusDispl180", "sc_BusDispl360", + "debris_brick_wood_tons", "debris_concrete_steel_tons"]) + + for asset in assetcol: + asset_ref = asset['id'].decode() + asset_occ, asset_typ, code_level = taxonomies[ + asset['taxonomy']].split('-') + if 
calculation_mode == 'scenario_damage': + # Note: engine versions <3.10 require an additional + # 'stat' variable as the previous output includes mean and + # stddev fields + # asset_damages = damages[asset['ordinal'], rlzi, lt, stat] + asset_damages = damages[asset['ordinal'], rlzi, lt] + elif calculation_mode == 'classical_damage': + asset_damages = damages[asset['ordinal'], stat, rlzi] + asset_damages = [max(0, d) for d in asset_damages] + + # discarding 'no damage' + asset_damages = asset_damages[1:] + + asset_damage_ratios = [d/asset['value-number'] + for d in asset_damages] + + # Repair and recovery time estimates + # Hazus tables 15.9, 15.10, 15.11 + repair_time = np.dot( + asset_damage_ratios, repair_time_df.loc[asset_occ]) + recovery_time = np.dot( + asset_damage_ratios, recovery_time_df.loc[asset_occ]) + interruption_time = np.dot( + asset_damage_ratios, + recovery_time_df.loc[asset_occ] * interruption_time_df.loc[ + asset_occ]) + + # Debris weight estimates + # Hazus tables 12.1, 12.2, 12.3 + unit_weight_bwo = unit_weight_bwo_df.loc[asset_typ] + unit_weight_rcs = unit_weight_rcs_df.loc[asset_typ] + weight_brick_wood = ( + unit_weight_bwo + * square_footage_df.loc[asset_occ].values[0] / 1000 + * asset['value-number']) + weight_concrete_steel = ( + unit_weight_rcs + * square_footage_df.loc[asset_occ].values[0] / 1000 + * asset['value-number']) + + debris_brick_wood_pct_structural = \ + debris_brick_wood_pct_structural_df.loc[asset_typ] + debris_brick_wood_pct_nonstructural = \ + debris_brick_wood_pct_nonstructural_df.loc[asset_typ] + debris_concrete_steel_pct_structural = \ + debris_concrete_steel_pct_structural_df.loc[asset_typ] + debris_concrete_steel_pct_nonstructural = \ + debris_concrete_steel_pct_nonstructural_df.loc[asset_typ] + + debris_brick_wood_str = weight_brick_wood[ + "structural"] * np.dot( + asset_damage_ratios, + debris_brick_wood_pct_structural / 100) + debris_brick_wood_nst = weight_brick_wood[ + "nonstructural"] * np.dot( + asset_damage_ratios, + debris_brick_wood_pct_nonstructural / 100) + debris_concrete_steel_str = weight_concrete_steel[ + "structural"] * np.dot( + asset_damage_ratios, + debris_concrete_steel_pct_structural / 100) + debris_concrete_steel_nst = weight_concrete_steel[ + "nonstructural"] * np.dot( + asset_damage_ratios, + debris_concrete_steel_pct_nonstructural / 100) + + debris_brick_wood = ( + debris_brick_wood_str + debris_brick_wood_nst) + debris_concrete_steel = ( + debris_concrete_steel_str + debris_concrete_steel_nst) + + # Estimate number of displaced occupants based on heuristics + # provided by Murray + sc_Displ3 = ( + asset["occupants_night"] + if recovery_time > 3 and recovery_time < 30 else 0) + sc_Displ30 = ( + asset["occupants_night"] if recovery_time > 30 else 0) + sc_Displ90 = ( + asset["occupants_night"] if recovery_time > 90 else 0) + sc_Displ180 = ( + asset["occupants_night"] if recovery_time > 180 else 0) + sc_Displ360 = ( + asset["occupants_night"] if recovery_time > 360 else 0) + sc_BusDispl30 = ( + asset["occupants_day"] if recovery_time > 30 else 0) + sc_BusDispl90 = ( + asset["occupants_day"] if recovery_time > 90 else 0) + sc_BusDispl180 = ( + asset["occupants_day"] if recovery_time > 180 else 0) + sc_BusDispl360 = ( + asset["occupants_day"] if recovery_time > 360 else 0) + + # Split complete damage state into collapse and non-collapse + # This distinction is then used for the casualty estimates + # Collapse rates given complete damage are from Hazus table + # 13.8 + collapse_rate = 
collapse_rate_df.loc[asset_typ].values[0] + dmg = { + "Slight Damage": asset_damage_ratios[0], + "Moderate Damage": asset_damage_ratios[1], + "Extensive Damage": asset_damage_ratios[2], + "Complete Damage (No Collapse)": ( + asset_damage_ratios[3] * (1 - collapse_rate)), + "Complete Damage (With Collapse)": ( + asset_damage_ratios[3] * collapse_rate) + } + collapse_ratio = dmg["Complete Damage (With Collapse)"] + collapse_ratio_str = "{:.2e}".format( + collapse_ratio) if collapse_ratio else '0' + + # Estimate casualties (day/night/transit) at four + # severity levels + # Hazus tables 13.3, 13.4, 13.5, 13.6, 13.7 + for severity_level in severity_levels: + casualty_ratio = np.dot( + list(dmg.values()), + casualty_rate_in[severity_level].loc[asset_typ]) + casualties_day[severity_level] = ( + casualty_ratio * asset["occupants_day"]) + casualties_night[severity_level] = ( + casualty_ratio * asset["occupants_night"]) + casualties_transit[severity_level] = ( + casualty_ratio * asset["occupants_transit"]) + + # Write all consequence estimates for this asset to the csv + # file + writer.writerow( + [asset_ref, + "{0:,.1f}".format(asset['value-number']), + "{0:,.1f}".format(asset["value-structural"]), + "{0:,.1f}".format(asset["value-nonstructural"]), + "{0:,.1f}".format(asset["value-contents"]), + "{0:,.1f}".format(asset["occupants_day"]), + "{0:,.1f}".format(asset["occupants_night"]), + "{0:,.1f}".format(asset["occupants_transit"]), + collapse_ratio_str, + "{0:,.1f}".format(repair_time), + "{0:,.1f}".format(recovery_time), + "{0:,.1f}".format(interruption_time), + "{0:,.2f}".format(casualties_day["Severity1"]), + "{0:,.2f}".format(casualties_day["Severity2"]), + "{0:,.2f}".format(casualties_day["Severity3"]), + "{0:,.2f}".format(casualties_day["Severity4"]), + "{0:,.2f}".format(casualties_night["Severity1"]), + "{0:,.2f}".format(casualties_night["Severity2"]), + "{0:,.2f}".format(casualties_night["Severity3"]), + "{0:,.2f}".format(casualties_night["Severity4"]), + "{0:,.2f}".format(casualties_transit["Severity1"]), + "{0:,.2f}".format(casualties_transit["Severity2"]), + "{0:,.2f}".format(casualties_transit["Severity3"]), + "{0:,.2f}".format(casualties_transit["Severity4"]), + "{0:,.1f}".format(sc_Displ3), + "{0:,.1f}".format(sc_Displ30), + "{0:,.1f}".format(sc_Displ90), + "{0:,.1f}".format(sc_Displ180), + "{0:,.1f}".format(sc_Displ360), + "{0:,.1f}".format(sc_BusDispl30), + "{0:,.1f}".format(sc_BusDispl90), + "{0:,.1f}".format(sc_BusDispl180), + "{0:,.1f}".format(sc_BusDispl360), + "{0:,.1f}".format(debris_brick_wood), + "{0:,.1f}".format(debris_concrete_steel), + ]) + print(f'Saved {filename}') + + +def main(calc_id: int = -1, + output_dir='.'): + """ + Compute Hazus-based consequence estimates (casualties, displaced occupants, repair/recovery/interruption times and debris) for a scenario_damage or classical_damage calculation and write one CSV file per realization in the output directory + """ + with performance.Monitor('consequences', measuremem=True) as mon: + calculate_consequences(calc_id, output_dir) + if mon.duration > 1: + print(mon) + + +main.calc_id = 'number of the calculation' +main.output_dir = 'directory where to write the consequence data' diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters.xlsx b/openquake/risklib/data/Hazus_Consequence_Parameters.xlsx new file mode 100644 index 000000000000..edca8332d01b Binary files /dev/null and b/openquake/risklib/data/Hazus_Consequence_Parameters.xlsx differ diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters_BuildingRecoveryTime.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_BuildingRecoveryTime.csv new file mode 100644 index 000000000000..35928c6b0bd8 --- /dev/null +++
b/openquake/risklib/data/Hazus_Consequence_Parameters_BuildingRecoveryTime.csv @@ -0,0 +1,34 @@ +occupancy,slight,moderate,extreme,complete +RES1,5,120,360,720 +RES2,5,20,120,240 +RES3A,10,120,480,960 +RES3B,10,120,480,960 +RES3C,10,120,480,960 +RES3D,10,120,480,960 +RES3E,10,120,480,960 +RES3F,10,120,480,960 +RES4,10,90,360,480 +RES5,10,90,360,480 +RES6,10,120,480,960 +COM1,10,90,270,360 +COM2,10,90,270,360 +COM3,10,90,270,360 +COM4,20,90,360,480 +COM5,20,90,180,360 +COM6,20,135,540,720 +COM7,20,135,270,540 +COM8,20,90,180,360 +COM9,20,90,180,360 +COM10,5,60,180,360 +IND1,10,90,240,360 +IND2,10,90,240,360 +IND3,10,90,240,360 +IND4,10,90,240,360 +IND5,20,135,360,540 +IND6,10,60,160,320 +AGR1,2,20,60,120 +REL1,5,120,480,960 +GOV1,10,90,360,480 +GOV2,10,60,270,360 +EDU1,10,90,360,480 +EDU2,10,120,480,960 diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters_BuildingRepairTime.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_BuildingRepairTime.csv new file mode 100644 index 000000000000..c4cddc559a57 --- /dev/null +++ b/openquake/risklib/data/Hazus_Consequence_Parameters_BuildingRepairTime.csv @@ -0,0 +1,34 @@ +occupancy,slight,moderate,extreme,complete +RES1,2,30,90,180 +RES2,2,10,30,60 +RES3A,5,30,120,240 +RES3B,5,30,120,240 +RES3C,5,30,120,240 +RES3D,5,30,120,240 +RES3E,5,30,120,240 +RES3F,5,30,120,240 +RES4,5,30,120,240 +RES5,5,30,120,240 +RES6,5,30,120,240 +COM1,5,30,90,180 +COM2,5,30,90,180 +COM3,5,30,90,180 +COM4,5,30,120,240 +COM5,5,30,90,180 +COM6,10,45,180,360 +COM7,10,45,180,240 +COM8,5,30,90,180 +COM9,5,30,120,240 +COM10,2,20,80,160 +IND1,10,30,120,240 +IND2,10,30,120,240 +IND3,10,30,120,240 +IND4,10,30,120,240 +IND5,20,45,180,360 +IND6,5,20,80,160 +AGR1,2,10,30,60 +REL1,10,30,120,240 +GOV1,10,30,120,240 +GOV2,5,20,90,180 +EDU1,10,30,120,240 +EDU2,10,45,180,360 diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters_CollapseRates.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_CollapseRates.csv new file mode 100644 index 000000000000..b14e022accdb --- /dev/null +++ b/openquake/risklib/data/Hazus_Consequence_Parameters_CollapseRates.csv @@ -0,0 +1,39 @@ +taxonomy,percentage_of_collapse_given_complete_damage +W1,3 +W2,3 +W3,3 +W4,3 +S1L,8 +S1M,5 +S1H,3 +S2L,8 +S2M,5 +S2H,3 +S3,3 +S4L,8 +S4M,5 +S4H,3 +S5L,8 +S5M,5 +S5H,3 +C1L,13 +C1M,10 +C1H,5 +C2L,13 +C2M,10 +C2H,5 +C3L,15 +C3M,13 +C3H,10 +PC1,15 +PC2L,15 +PC2M,13 +PC2H,10 +RM1L,13 +RM1M,10 +RM2L,13 +RM2M,10 +RM2H,5 +URML,15 +URMM,15 +MH,3 diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_BWO_Nonstructural.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_BWO_Nonstructural.csv new file mode 100644 index 000000000000..b199ad12c8d6 --- /dev/null +++ b/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_BWO_Nonstructural.csv @@ -0,0 +1,39 @@ +taxonomy,slight,moderate,extensive,complete +W1,2,8,35,100 +W2,2,10,40,100 +W3,2,10,40,100 +W4,2,8,35,100 +S1L,1,7,35,100 +S1M,1,7,35,100 +S1H,1,7,35,100 +S2L,0,0,0,100 +S2M,0,0,0,100 +S2H,0,0,0,100 +S3,0,0,0,100 +S4L,1,7,35,100 +S4M,1,7,35,100 +S4H,1,7,35,100 +S5L,1,7,35,100 +S5M,1,7,35,100 +S5H,1,7,35,100 +C1L,1,7,35,100 +C1M,1,7,35,100 +C1H,1,7,35,100 +C2L,1,7,35,100 +C2M,1,7,35,100 +C2H,1,7,35,100 +C3L,1,7,35,100 +C3M,1,7,35,100 +C3H,1,7,35,100 +PC1,2,11,42,100 +PC2L,1,7,35,100 +PC2M,1,7,35,100 +PC2H,1,7,35,100 +RM1L,2,10,40,100 +RM1M,2,10,40,100 +RM2L,1,7,35,100 +RM2M,1,7,35,100 +RM2H,1,7,35,100 +URML,2,12,45,100 +URMM,2,12,45,100 +MH,2,8,35,100 diff --git 
a/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_BWO_Structural.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_BWO_Structural.csv new file mode 100644 index 000000000000..36b0421eaf8b --- /dev/null +++ b/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_BWO_Structural.csv @@ -0,0 +1,39 @@ +taxonomy,slight,moderate,extensive,complete +W1,0,5,34,100 +W2,0,6,33,100 +W3,0,6,33,100 +W4,0,5,34,100 +S1L,0,0,0,100 +S1M,0,0,0,100 +S1H,0,0,0,100 +S2L,0,0,0,100 +S2M,0,0,0,100 +S2H,0,0,0,100 +S3,0,0,0,100 +S4L,0,0,0,100 +S4M,0,0,0,100 +S4H,0,0,0,100 +S5L,5,25,60,100 +S5M,5,25,60,100 +S5H,5,25,60,100 +C1L,0,0,0,100 +C1M,0,0,0,100 +C1H,0,0,0,100 +C2L,0,0,0,100 +C2M,0,0,0,100 +C2H,0,0,0,100 +C3L,5,25,60,100 +C3M,5,25,60,100 +C3H,5,25,60,100 +PC1,0,6,32,100 +PC2L,0,0,0,100 +PC2M,0,0,0,100 +PC2H,0,0,0,100 +RM1L,3.5,20,50,100 +RM1M,3.5,20,50,100 +RM2L,5,25,60,100 +RM2M,5,25,60,100 +RM2H,5,25,60,100 +URML,5,25,55,100 +URMM,5,25,55,100 +MH,0,5,33,100 diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_RCS_Nonstructural.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_RCS_Nonstructural.csv new file mode 100644 index 000000000000..a142d8d4c513 --- /dev/null +++ b/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_RCS_Nonstructural.csv @@ -0,0 +1,39 @@ +taxonomy,slight,moderate,extensive,complete +W1,0,0,0,100 +W2,0,10,28,100 +W3,0,10,28,100 +W4,0,0,0,100 +S1L,0.1,8,28,100 +S1M,0.1,8,28,100 +S1H,0.1,8,28,100 +S2L,0.1,8,28,100 +S2M,0.1,8,28,100 +S2H,0.1,8,28,100 +S3,0,10,30,100 +S4L,0.1,10,30,100 +S4M,0.1,10,30,100 +S4H,0.1,10,30,100 +S5L,0.1,10,30,100 +S5M,0.1,10,30,100 +S5H,0.1,10,30,100 +C1L,0.1,8,28,100 +C1M,0.1,8,28,100 +C1H,0.1,8,28,100 +C2L,0.1,10,30,100 +C2M,0.1,10,30,100 +C2H,0.1,10,30,100 +C3L,0.1,10,30,100 +C3M,0.1,10,30,100 +C3H,0.1,10,30,100 +PC1,0.1,10,30,100 +PC2L,0.1,9,30,100 +PC2M,0.1,9,30,100 +PC2H,0.1,9,30,100 +RM1L,0.1,10,30,100 +RM1M,0.1,10,31,100 +RM2L,0.1,9,30,100 +RM2M,0.1,9,30,100 +RM2H,0.1,9,30,100 +URML,0,10,29,100 +URMM,0,10,29,100 +MH,0,0,0,100 diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_RCS_Structural.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_RCS_Structural.csv new file mode 100644 index 000000000000..7e19874f9218 --- /dev/null +++ b/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_RCS_Structural.csv @@ -0,0 +1,39 @@ +taxonomy,slight,moderate,extensive,complete +W1,0,3,27,100 +W2,0,2,25,100 +W3,0,2,25,100 +W4,0,3,27,100 +S1L,0,4,30,100 +S1M,0,4,30,100 +S1H,0,4,30,100 +S2L,0,4,30,100 +S2M,0,4,30,100 +S2H,0,4,30,100 +S3,0,5,30,100 +S4L,2,10,40,100 +S4M,2,10,40,100 +S4H,2,10,40,100 +S5L,0,4,30,100 +S5M,0,4,30,100 +S5H,0,4,30,100 +C1L,0,5,33,100 +C1M,0,5,33,100 +C1H,0,5,33,100 +C2L,1,8,35,100 +C2M,1,8,35,100 +C2H,1,8,35,100 +C3L,0,4,32,100 +C3M,0,4,32,100 +C3H,0,4,32,100 +PC1,2,10,35,100 +PC2L,2,7,35,100 +PC2M,2,7,35,100 +PC2H,2,7,35,100 +RM1L,0,3,25,100 +RM1M,0,3,25.5,100 +RM2L,0,3,30.5,100 +RM2M,0,3,30.5,100 +RM2H,0,3,30.5,100 +URML,0,2,25,100 +URMM,0,2,25,100 +MH,0,3,27,100 diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_UnitWeight_BWO.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_UnitWeight_BWO.csv new file mode 100644 index 000000000000..85e76959d527 --- /dev/null +++ b/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_UnitWeight_BWO.csv @@ -0,0 +1,39 @@ +taxonomy,structural,nonstructural +W1,6.5,12.1 +W2,4,8.1 +W3,4,8.1 +W4,6.5,12.1 +S1L,0,5.3 +S1M,0,5.3 +S1H,0,5.3 +S2L,0,5.3 +S2M,0,5.3 
+S2H,0,5.3 +S3,0,0 +S4L,0,5.3 +S4M,0,5.3 +S4H,0,5.3 +S5L,20,5.3 +S5M,20,5.3 +S5H,20,5.3 +C1L,0,5.3 +C1M,0,5.3 +C1H,0,5.3 +C2L,0,5.3 +C2M,0,5.3 +C2H,0,5.3 +C3L,20,5.3 +C3M,20,5.3 +C3H,20,5.3 +PC1,5.5,5.3 +PC2L,0,5.3 +PC2M,0,5.3 +PC2H,0,5.3 +RM1L,17.5,5.3 +RM1M,17.5,5.3 +RM2L,17.5,5.3 +RM2M,24.5,5.3 +RM2H,24.5,5.3 +URML,35,10.5 +URMM,35,10.5 +MH,10,18 diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_UnitWeight_RCS.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_UnitWeight_RCS.csv new file mode 100644 index 000000000000..ae9dc02a35c8 --- /dev/null +++ b/openquake/risklib/data/Hazus_Consequence_Parameters_Debris_UnitWeight_RCS.csv @@ -0,0 +1,39 @@ +taxonomy,structural,nonstructural +W1,15,0 +W2,15,1 +W3,15,1 +W4,15,0 +S1L,44,5 +S1M,44,5 +S1H,44,5 +S2L,44,5 +S2M,44,5 +S2H,44,5 +S3,67,1.5 +S4L,65,4 +S4M,65,4 +S4H,65,4 +S5L,45,4 +S5M,45,4 +S5H,45,4 +C1L,98,4 +C1M,98,4 +C1H,98,4 +C2L,112,4 +C2M,112,4 +C2H,112,4 +C3L,90,4 +C3M,90,4 +C3H,90,4 +PC1,40,1.5 +PC2L,100,4 +PC2M,100,4 +PC2H,100,4 +RM1L,28,4 +RM1M,28,4 +RM2L,78,4 +RM2M,78,4 +RM2H,78,4 +URML,41,4 +URMM,41,4 +MH,22,0 diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters_IndoorCasualtyRates_Severity1.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_IndoorCasualtyRates_Severity1.csv new file mode 100644 index 000000000000..c32c9d87bdb6 --- /dev/null +++ b/openquake/risklib/data/Hazus_Consequence_Parameters_IndoorCasualtyRates_Severity1.csv @@ -0,0 +1,39 @@ +taxonomy,slight,moderate,extreme,complete,collapse +W1,0.05,0.25,1,5,40 +W2,0.05,0.20,1,5,40 +W3,0.05,0.20,1,5,40 +W4,0.05,0.25,1,5,40 +S1L,0.05,0.20,1,5,40 +S1M,0.05,0.20,1,5,40 +S1H,0.05,0.20,1,5,40 +S2L,0.05,0.20,1,5,40 +S2M,0.05,0.20,1,5,40 +S2H,0.05,0.20,1,5,40 +S3,0.05,0.20,1,5,40 +S4L,0.05,0.25,1,5,40 +S4M,0.05,0.25,1,5,40 +S4H,0.05,0.25,1,5,40 +S5L,0.05,0.20,1,5,40 +S5M,0.05,0.20,1,5,40 +S5H,0.05,0.20,1,5,40 +C1L,0.05,0.25,1,5,40 +C1M,0.05,0.25,1,5,40 +C1H,0.05,0.25,1,5,40 +C2L,0.05,0.25,1,5,40 +C2M,0.05,0.25,1,5,40 +C2H,0.05,0.25,1,5,40 +C3L,0.05,0.20,1,5,40 +C3M,0.05,0.20,1,5,40 +C3H,0.05,0.20,1,5,40 +PC1,0.05,0.25,1,5,40 +PC2L,0.05,0.25,1,5,40 +PC2M,0.05,0.25,1,5,40 +PC2H,0.05,0.25,1,5,40 +RM1L,0.05,0.20,1,5,40 +RM1M,0.05,0.20,1,5,40 +RM2L,0.05,0.20,1,5,40 +RM2M,0.05,0.20,1,5,40 +RM2H,0.05,0.20,1,5,40 +URML,0.05,0.35,2,10,40 +URMM,0.05,0.35,2,10,40 +MH,0.05,0.25,1,5,40 diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters_IndoorCasualtyRates_Severity2.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_IndoorCasualtyRates_Severity2.csv new file mode 100644 index 000000000000..f8d855c90ccd --- /dev/null +++ b/openquake/risklib/data/Hazus_Consequence_Parameters_IndoorCasualtyRates_Severity2.csv @@ -0,0 +1,39 @@ +taxonomy,slight,moderate,extreme,complete,collapse +W1,0,0.030,0.1,1,20 +W2,0,0.025,0.1,1,20 +W3,0,0.025,0.1,1,20 +W4,0,0.030,0.1,1,20 +S1L,0,0.025,0.1,1,20 +S1M,0,0.025,0.1,1,20 +S1H,0,0.025,0.1,1,20 +S2L,0,0.025,0.1,1,20 +S2M,0,0.025,0.1,1,20 +S2H,0,0.025,0.1,1,20 +S3,0,0.025,0.1,1,20 +S4L,0,0.030,0.1,1,20 +S4M,0,0.030,0.1,1,20 +S4H,0,0.030,0.1,1,20 +S5L,0,0.025,0.1,1,20 +S5M,0,0.025,0.1,1,20 +S5H,0,0.025,0.1,1,20 +C1L,0,0.030,0.1,1,20 +C1M,0,0.030,0.1,1,20 +C1H,0,0.030,0.1,1,20 +C2L,0,0.030,0.1,1,20 +C2M,0,0.030,0.1,1,20 +C2H,0,0.030,0.1,1,20 +C3L,0,0.025,0.1,1,20 +C3M,0,0.025,0.1,1,20 +C3H,0,0.025,0.1,1,20 +PC1,0,0.030,0.1,1,20 +PC2L,0,0.030,0.1,1,20 +PC2M,0,0.030,0.1,1,20 +PC2H,0,0.030,0.1,1,20 +RM1L,0,0.025,0.1,1,20 +RM1M,0,0.025,0.1,1,20 +RM2L,0,0.025,0.1,1,20 +RM2M,0,0.025,0.1,1,20 
+RM2H,0,0.025,0.1,1,20 +URML,0,0.400,0.2,2,20 +URMM,0,0.400,0.2,2,20 +MH,0,0.030,0.1,1,20 diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters_IndoorCasualtyRates_Severity3.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_IndoorCasualtyRates_Severity3.csv new file mode 100644 index 000000000000..86e773cf27df --- /dev/null +++ b/openquake/risklib/data/Hazus_Consequence_Parameters_IndoorCasualtyRates_Severity3.csv @@ -0,0 +1,39 @@ +taxonomy,slight,moderate,extreme,complete,collapse +W1,0,0,0.001,0.01,3 +W2,0,0,0.001,0.01,5 +W3,0,0,0.001,0.01,5 +W4,0,0,0.001,0.01,3 +S1L,0,0,0.001,0.01,5 +S1M,0,0,0.001,0.01,5 +S1H,0,0,0.001,0.01,5 +S2L,0,0,0.001,0.01,5 +S2M,0,0,0.001,0.01,5 +S2H,0,0,0.001,0.01,5 +S3,0,0,0.001,0.01,3 +S4L,0,0,0.001,0.01,5 +S4M,0,0,0.001,0.01,5 +S4H,0,0,0.001,0.01,5 +S5L,0,0,0.001,0.01,5 +S5M,0,0,0.001,0.01,5 +S5H,0,0,0.001,0.01,5 +C1L,0,0,0.001,0.01,5 +C1M,0,0,0.001,0.01,5 +C1H,0,0,0.001,0.01,5 +C2L,0,0,0.001,0.01,5 +C2M,0,0,0.001,0.01,5 +C2H,0,0,0.001,0.01,5 +C3L,0,0,0.001,0.01,5 +C3M,0,0,0.001,0.01,5 +C3H,0,0,0.001,0.01,5 +PC1,0,0,0.001,0.01,5 +PC2L,0,0,0.001,0.01,5 +PC2M,0,0,0.001,0.01,5 +PC2H,0,0,0.001,0.01,5 +RM1L,0,0,0.001,0.01,5 +RM1M,0,0,0.001,0.01,5 +RM2L,0,0,0.001,0.01,5 +RM2M,0,0,0.001,0.01,5 +RM2H,0,0,0.001,0.01,5 +URML,0,0.001,0.002,0.02,5 +URMM,0,0.001,0.002,0.02,5 +MH,0,0,0.001,0.01,3 diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters_IndoorCasualtyRates_Severity4.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_IndoorCasualtyRates_Severity4.csv new file mode 100644 index 000000000000..130a189ff0ca --- /dev/null +++ b/openquake/risklib/data/Hazus_Consequence_Parameters_IndoorCasualtyRates_Severity4.csv @@ -0,0 +1,39 @@ +taxonomy,slight,moderate,extreme,complete,collapse +W1,0,0,0.001,0.01,5 +W2,0,0,0.001,0.01,10 +W3,0,0,0.001,0.01,10 +W4,0,0,0.001,0.01,5 +S1L,0,0,0.001,0.01,10 +S1M,0,0,0.001,0.01,10 +S1H,0,0,0.001,0.01,10 +S2L,0,0,0.001,0.01,10 +S2M,0,0,0.001,0.01,10 +S2H,0,0,0.001,0.01,10 +S3,0,0,0.001,0.01,5 +S4L,0,0,0.001,0.01,10 +S4M,0,0,0.001,0.01,10 +S4H,0,0,0.001,0.01,10 +S5L,0,0,0.001,0.01,10 +S5M,0,0,0.001,0.01,10 +S5H,0,0,0.001,0.01,10 +C1L,0,0,0.001,0.01,10 +C1M,0,0,0.001,0.01,10 +C1H,0,0,0.001,0.01,10 +C2L,0,0,0.001,0.01,10 +C2M,0,0,0.001,0.01,10 +C2H,0,0,0.001,0.01,10 +C3L,0,0,0.001,0.01,10 +C3M,0,0,0.001,0.01,10 +C3H,0,0,0.001,0.01,10 +PC1,0,0,0.001,0.01,10 +PC2L,0,0,0.001,0.01,10 +PC2M,0,0,0.001,0.01,10 +PC2H,0,0,0.001,0.01,10 +RM1L,0,0,0.001,0.01,10 +RM1M,0,0,0.001,0.01,10 +RM2L,0,0,0.001,0.01,10 +RM2M,0,0,0.001,0.01,10 +RM2H,0,0,0.001,0.01,10 +URML,0,0.001,0.002,0.02,10 +URMM,0,0.001,0.002,0.02,10 +MH,0,0,0.001,0.01,5 diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters_InterruptionTimeMultipliers.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_InterruptionTimeMultipliers.csv new file mode 100644 index 000000000000..f10673d0d588 --- /dev/null +++ b/openquake/risklib/data/Hazus_Consequence_Parameters_InterruptionTimeMultipliers.csv @@ -0,0 +1,34 @@ +taxonomy,slight,moderate,extreme,complete +RES1,0.00,0.50,1.00,1.00 +RES2,0.00,0.50,1.00,1.00 +RES3A,0.00,0.50,1.00,1.00 +RES3B,0.00,0.50,1.00,1.00 +RES3C,0.00,0.50,1.00,1.00 +RES3D,0.00,0.50,1.00,1.00 +RES3E,0.00,0.50,1.00,1.00 +RES3F,0.00,0.50,1.00,1.00 +RES4,0.00,0.50,1.00,1.00 +RES5,0.00,0.50,1.00,1.00 +RES6,0.00,0.50,1.00,1.00 +COM1,0.10,0.10,0.30,0.40 +COM2,0.10,0.20,0.30,0.40 +COM3,0.10,0.20,0.30,0.40 +COM4,0.10,0.10,0.20,0.30 +COM5,0.10,0.05,0.03,0.03 +COM6,0.10,0.50,0.50,0.50 +COM7,0.10,0.50,0.50,0.50 +COM8,0.10,1.00,1.00,1.00 
+COM9,0.10,1.00,1.00,1.00 +COM10,0.10,1.00,1.00,1.00 +IND1,0.50,1.00,1.00,1.00 +IND2,0.10,0.20,0.30,0.40 +IND3,0.20,0.20,0.30,0.40 +IND4,0.20,0.20,0.30,0.40 +IND5,0.20,0.20,0.30,0.40 +IND6,0.10,0.20,0.30,0.40 +AGR1,0.00,0.05,0.10,0.20 +REL1,0.20,0.05,0.03,0.03 +GOV1,0.10,0.02,0.03,0.03 +GOV2,0.10,0.02,0.03,0.03 +EDU1,0.10,0.02,0.05,0.05 +EDU2,0.10,0.02,0.03,0.03 diff --git a/openquake/risklib/data/Hazus_Consequence_Parameters_SquareFootage.csv b/openquake/risklib/data/Hazus_Consequence_Parameters_SquareFootage.csv new file mode 100644 index 000000000000..c1923edc6cef --- /dev/null +++ b/openquake/risklib/data/Hazus_Consequence_Parameters_SquareFootage.csv @@ -0,0 +1,34 @@ +occupancy,area +RES1,1600 +RES2,1000 +RES3A,3000 +RES3B,3000 +RES3C,8000 +RES3D,12000 +RES3E,40000 +RES3F,60000 +RES4,50000 +RES5,25000 +RES6,25000 +COM1,8000 +COM2,30000 +COM3,5000 +COM4,20000 +COM5,4000 +COM6,55000 +COM7,7000 +COM8,5000 +COM9,12000 +COM10,100000 +IND1,50000 +IND2,30000 +IND3,30000 +IND4,30000 +IND5,30000 +IND6,30000 +AGR1,30000 +REL1,17000 +GOV1,18000 +GOV2,11000 +EDU1,45000 +EDU2,50000 diff --git a/openquake/risklib/scientific.py b/openquake/risklib/scientific.py index 55dfd9de30ee..eae7145805c7 100644 --- a/openquake/risklib/scientific.py +++ b/openquake/risklib/scientific.py @@ -40,8 +40,51 @@ U8 = numpy.uint8 TWO32 = 2 ** 32 -KNOWN_CONSEQUENCES = ['loss', 'loss_aep', 'loss_oep', 'losses', 'collapsed', - 'injured', 'fatalities', 'homeless', 'non_operational'] + + +def loss_agg_value_func(aval, xltype): + if xltype.endswith('_ins'): + xltype = xltype[:-4] + if '+' in xltype: # total loss type + return sum(aval[lt] for lt in xltype.split('+')) + return aval[xltype] + + +# NOTE: in order to add a new consequence, add a +# new item into the KNOWN_CONSEQUENCES dict, specifying the asset field to be +# used in multiplication and the function to be used for the aggregation. The +# asset field may depend from the loss_type or the time_event, so these values +# are passed as parameters to a lambda function. 
+# asset_field is used in consequence to compute: +# dmgdist @ coeffs * asset[asset_field] +# agg_value_func is used in get_agg_value to compute: +# aval = agg_values[agg_id] +# agg_value_func(aval, xltype, time_event) +KNOWN_CONSEQUENCES = { + 'homeless': { + 'asset_field': lambda loss_type, time_event: 'value-residents', + 'agg_value_func': lambda aval, xltype, time_event: aval['residents'] + } +} +for cons in ('loss', 'loss_aep', 'loss_oep', 'losses'): + KNOWN_CONSEQUENCES[cons] = { + 'asset_field': lambda loss_type, time_event: f'value-{loss_type}', + 'agg_value_func': lambda aval, xltype, time_event: loss_agg_value_func( + aval, xltype) + } +for cons in ('collapsed', 'non_operational'): + KNOWN_CONSEQUENCES[cons] = { + 'asset_field': lambda loss_type, time_event: 'value-number', + 'agg_value_func': lambda aval, xltype, time_event: aval['number'] + } +for cons in ('injured', 'fatalities'): + # NOTE: time_event default is 'avg' + KNOWN_CONSEQUENCES[cons] = { + 'asset_field': lambda loss_type, time_event: f'occupants_{time_event}', + 'agg_value_func': lambda aval, xltype, time_event: aval[ + f'occupants_{time_event}'] + } + LOSSTYPE = numpy.array('''\ business_interruption contents nonstructural structural @@ -1657,46 +1700,31 @@ def consequence(consequence, coeffs, asset, dmgdist, loss_type, time_event): :param asset: asset record :param dmgdist: an array of probabilies of shape (E, D - 1) :param loss_type: loss type string + :param time_event: time of day at which the event occurs (default avg) :returns: array of shape E """ if consequence not in KNOWN_CONSEQUENCES: raise NotImplementedError(consequence) - if consequence.startswith(('loss', 'losses')): - return dmgdist @ coeffs * asset['value-' + loss_type] - elif consequence in ['collapsed', 'non_operational']: - return dmgdist @ coeffs * asset['value-number'] - elif consequence in ['injured', 'fatalities']: - # NOTE: time_event default is 'avg' - return dmgdist @ coeffs * asset[f'occupants_{time_event}'] - elif consequence == 'homeless': - return dmgdist @ coeffs * asset['value-residents'] - else: - raise NotImplementedError(consequence) + asset_field = KNOWN_CONSEQUENCES[ + consequence]['asset_field'](loss_type, time_event) + return dmgdist @ coeffs * asset[asset_field] def get_agg_value(consequence, agg_values, agg_id, xltype, time_event): """ + :param consequence: kind of consequence + :param agg_values: array of aggregate exposure values (one record per aggregation ID) + :param agg_id: index of the aggregation key in agg_values + :param xltype: extended loss type string (possibly with a '_ins' suffix or '+'-joined total loss types) + :param time_event: time of day at which the event occurs (default avg) :returns: sum of the values corresponding to agg_id for the given consequence """ if consequence not in KNOWN_CONSEQUENCES: raise NotImplementedError(consequence) aval = agg_values[agg_id] - if consequence in ['collapsed', 'non_operational']: - return aval['number'] - elif consequence in ['injured', 'fatalities']: - # NOTE: time_event default is 'avg' - return aval[f'occupants_{time_event}'] - elif consequence == 'homeless': - return aval['residents'] - elif consequence.startswith(('loss', 'losses')): - if xltype.endswith('_ins'): - xltype = xltype[:-4] - if '+' in xltype: # total loss type - return sum(aval[lt] for lt in xltype.split('+')) - return aval[xltype] - else: - raise NotImplementedError(consequence) + return KNOWN_CONSEQUENCES[consequence]['agg_value_func']( + aval, xltype, time_event) # ########################### u64_to_eal ################################# #
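
As a usage illustration of the KNOWN_CONSEQUENCES registry added above, here is a minimal self-contained sketch of how a new consequence could be registered, following the same pattern used for 'injured'/'fatalities'; the consequence name 'evacuated' and all numbers are invented for illustration and are not part of this patch:

import numpy as np

KNOWN_CONSEQUENCES = {}  # stand-in for the dict defined in scientific.py
KNOWN_CONSEQUENCES['evacuated'] = {  # hypothetical consequence name
    # field of the asset record used in: dmgdist @ coeffs * asset[asset_field]
    'asset_field': lambda loss_type, time_event: f'occupants_{time_event}',
    # field of the aggregate exposure record returned by get_agg_value
    'agg_value_func': lambda aval, xltype, time_event:
        aval[f'occupants_{time_event}'],
}

# how consequence() combines the pieces for an event set of size E=1
dmgdist = np.array([[0.2, 0.1, 0.05, 0.01]])  # probabilities, shape (E, D - 1)
coeffs = np.array([0.0, 0.1, 0.3, 1.0])       # one coefficient per damage state
asset = {'occupants_night': 10.0}             # minimal stand-in for an asset record
field = KNOWN_CONSEQUENCES['evacuated']['asset_field'](None, 'night')
print(dmgdist @ coeffs * asset[field])        # -> [0.35]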
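
Similarly, a small self-contained sketch of the per-asset arithmetic performed by openquake/commands/consequences.py earlier in this patch: damage ratios are dotted with a Hazus-style table row, and the complete-damage ratio is split into collapse / no collapse before applying casualty rates. All values below are made up for illustration and are not taken from the shipped CSVs:

import numpy as np

# made-up mean damage ratios for one asset: slight, moderate, extensive, complete
asset_damage_ratios = np.array([0.30, 0.20, 0.10, 0.05])

# made-up Hazus-style parameters (not copied from the CSV files in this patch)
repair_time_row = np.array([5, 30, 120, 240])  # days per damage state
collapse_rate = 0.13                           # fraction of 'complete' that collapses
casualty_rate_sev1 = np.array([0.0005, 0.0025, 0.01, 0.05, 0.40])  # incl. collapse column

# expected repair time, as in np.dot(asset_damage_ratios, repair_time_df.loc[occ])
repair_time = asset_damage_ratios @ repair_time_row

# split 'complete' into no-collapse / collapse, as done for the dmg dict in the script
dmg = np.append(asset_damage_ratios[:3],
                [asset_damage_ratios[3] * (1 - collapse_rate),
                 asset_damage_ratios[3] * collapse_rate])
casualty_ratio = dmg @ casualty_rate_sev1      # severity 1 indoor casualty ratio
occupants_night = 12.0
print(round(repair_time, 1), round(casualty_ratio * occupants_night, 3))  # 31.5 0.077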
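
Finally, a usage sketch for the new command, assuming an OpenQuake installation with this patch applied and an existing scenario_damage or classical_damage calculation in the datastore (calc_id=-1 means the latest calculation):

# writes one consequences-rlz-XXX_<calc_id>.csv file per realization
# into the given output directory
from openquake.commands.consequences import calculate_consequences

calculate_consequences(calc_id=-1, output_dir='.')

This is the same entry point that main() wraps; the command-line wiring only adds the performance monitor around it.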