Merge pull request #131 from pnnl/develop
Develop
d3j331 committed Dec 5, 2023
2 parents 0d1534a + 9f67735 commit 4b31636
Showing 43 changed files with 25,415 additions and 238 deletions.
18,497 changes: 18,497 additions & 0 deletions data/feeders/RECSwIncomeLvl.csv

Large diffs are not rendered by default.
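
The feeder data itself is not rendered above. A minimal sketch for inspecting it locally, assuming pandas is installed and the repository root is the working directory (the column layout is not shown in this diff, so nothing is assumed about it):

import pandas as pd

recs_income = pd.read_csv("data/feeders/RECSwIncomeLvl.csv")
print(recs_income.shape)             # the diff summary reports 18,497 added lines
print(recs_income.columns.tolist())  # inspect the (unknown) column layout
print(recs_income.head())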

90 changes: 46 additions & 44 deletions examples/analysis/consensus/code/post_processing/plots.py
@@ -1,11 +1,13 @@
# usage 'python ../plots.py metrics_root'
# run it from inside the metrics_root folder
import sys
import os
import matplotlib as mpl;
import matplotlib.pyplot as plt;
import sys

import matplotlib.pyplot as plt
import numpy as np

import process_gld_dsot as pg
import numpy as np;

# import tesp_support.api.process_inv as gp
# import tesp_support.api.process_gld as gp

@@ -19,45 +21,46 @@
os.mkdir("Figures")
print("*****current working directory is *** " + os.getcwd())

[hrs, times, hse_keys, data_h, HVAC_LOAD_AVG_IDX, WH_AVG_IDX, TOTAL_LOAD_AVG_IDX, DEV_COOL_IDX, data_m, MTR_REAL_POWER_AVG, MTR_VOLT_MIN_IDX, MTR_VOLT_MAX_IDX, \
data_s, SUB_POWER_IDX, SUB_LOSSES_IDX] = pg.process_gld("Substation")
[hrs, times, hse_keys, data_h, HVAC_LOAD_AVG_IDX, WH_AVG_IDX, TOTAL_LOAD_AVG_IDX, DEV_COOL_IDX, data_m,
MTR_REAL_POWER_AVG, MTR_VOLT_MIN_IDX, MTR_VOLT_MAX_IDX,
data_s, SUB_POWER_IDX, SUB_LOSSES_IDX] = pg.process_gld("Substation")

#discarded_hours = 24*2 # discarded hours
discarded_hours = 24*1
discard_secs= discarded_hours*60*60 # first discard_secs should be discarded while plotting
# discarded_hours = 24*2 # discarded hours
discarded_hours = 24 * 1
discard_secs = discarded_hours * 60 * 60 # first discard_secs should be discarded while plotting
for l in times:
if l >= discard_secs:
hrs_start = times.index(l)
break

#hrs_start = discard_secs/60/(hrs[1]-hrs[0])
# hrs_start = discard_secs/60/(hrs[1]-hrs[0])
hrs_start = int(hrs_start)
hrs = hrs - discard_secs/3600
hrs = hrs - discard_secs / 3600
hrs = hrs[hrs_start:]
# display an aggregated plot
fig1, ax1 = plt.subplots(2, 1, sharex='col')

hvac_load=np.sum(data_h, axis=0)[:, HVAC_LOAD_AVG_IDX]
wh_load=np.sum(data_h, axis=0)[:, WH_AVG_IDX]
hvac_load = np.sum(data_h, axis=0)[:, HVAC_LOAD_AVG_IDX]
wh_load = np.sum(data_h, axis=0)[:, WH_AVG_IDX]
total_load = np.sum(data_h, axis=0)[:, TOTAL_LOAD_AVG_IDX]
mtr_load = np.sum(data_m, axis=0)[:, MTR_REAL_POWER_AVG]/1000
sub_load = data_s[0, :, SUB_POWER_IDX]/1000
sub_losses = data_s[0, :, SUB_LOSSES_IDX]/1000
mtr_load = np.sum(data_m, axis=0)[:, MTR_REAL_POWER_AVG] / 1000
sub_load = data_s[0, :, SUB_POWER_IDX] / 1000
sub_losses = data_s[0, :, SUB_LOSSES_IDX] / 1000
net_load = hvac_load + wh_load
# if lst_i[time_key]:
# inv_load = np.sum(data_i, axis=0)[:, INV_P_AVG_IDX]/1000
# inv_load_var = np.sum(data_i, axis=0)[:, INV_Q_AVG_IDX] / 1000
# net_load = hvac_load+wh_load+inv_load

# estimating % of devices in ON state at each time
hvac_on_per = np.count_nonzero(data_h[:, :, HVAC_LOAD_AVG_IDX], 0)/len(data_h[:, 0, HVAC_LOAD_AVG_IDX])*100
wh_on_per = np.count_nonzero(data_h[:, :, WH_AVG_IDX], 0)/len(data_h[:, 0, WH_AVG_IDX])*100
hvac_on_per = np.count_nonzero(data_h[:, :, HVAC_LOAD_AVG_IDX], 0) / len(data_h[:, 0, HVAC_LOAD_AVG_IDX]) * 100
wh_on_per = np.count_nonzero(data_h[:, :, WH_AVG_IDX], 0) / len(data_h[:, 0, WH_AVG_IDX]) * 100

ax1[0].plot(hrs, hvac_load[hrs_start:], label="hvac")
ax1[0].plot(hrs, wh_load[hrs_start:], label="waterheater")
ax1[0].plot(hrs, total_load[hrs_start:] - hvac_load[hrs_start:] - wh_load[hrs_start:], label="ZIP")
ax1[0].plot(hrs, total_load[hrs_start:], label="total")
# ax1[0,0].plot(hrs, mtr_load[hrs_start:], "k--", label="net meter",)
# ax1[0,0].plot(hrs, mtr_load[hrs_start:], "k--", label="net meter",)
# if lst_i[time_key]:
# ax1[0,0].plot(hrs, -inv_load[hrs_start:], label="inverter_real")
# ax1[0, 0].plot(hrs, -inv_load_var[hrs_start:], label="inverter_var")
@@ -71,50 +74,50 @@
ax1[1].plot(hrs, sub_losses[hrs_start:], label="Total Losses")
ax1[1].plot(hrs, sub_load[hrs_start:], label="Net Load")
ax1[1].set_ylabel("kW")
ax1[1].set_title("Substation Real Power at " )
ax1[1].set_title("Substation Real Power at ")
ax1[1].legend(loc='upper left')


new_dir = cur_dir + "/" + rootname + "_Base/Substation"
new_dir = cur_dir + "/" + rootname + "_Base/Substation"
os.chdir(new_dir)

[ hrs, times, hse_keys, data_h, HVAC_LOAD_AVG_IDX, WH_AVG_IDX, TOTAL_LOAD_AVG_IDX, DEV_COOL_IDX, data_m, MTR_REAL_POWER_AVG, MTR_VOLT_MIN_IDX, MTR_VOLT_MAX_IDX, \
data_s, SUB_POWER_IDX, SUB_LOSSES_IDX] = pg.process_gld("Substation")
[hrs, times, hse_keys, data_h, HVAC_LOAD_AVG_IDX, WH_AVG_IDX, TOTAL_LOAD_AVG_IDX, DEV_COOL_IDX, data_m,
MTR_REAL_POWER_AVG, MTR_VOLT_MIN_IDX, MTR_VOLT_MAX_IDX,
data_s, SUB_POWER_IDX, SUB_LOSSES_IDX] = pg.process_gld("Substation")

#discarded_hours = 24*2 # discarded hours
discarded_hours = 24*1
discard_secs= discarded_hours*60*60 # first discard_secs should be discarded while plotting
# discarded_hours = 24*2 # discarded hours
discarded_hours = 24 * 1
discard_secs = discarded_hours * 60 * 60 # first discard_secs should be discarded while plotting
for l in times:
if l >= discard_secs:
hrs_start = times.index(l)
break

#hrs_start = discard_secs/60/(hrs[1]-hrs[0])
# hrs_start = discard_secs/60/(hrs[1]-hrs[0])
hrs_start = int(hrs_start)
hrs = hrs - discard_secs/3600
hrs = hrs - discard_secs / 3600
hrs = hrs[hrs_start:]

hvac_load=np.sum(data_h, axis=0)[:, HVAC_LOAD_AVG_IDX]
wh_load=np.sum(data_h, axis=0)[:, WH_AVG_IDX]
hvac_load = np.sum(data_h, axis=0)[:, HVAC_LOAD_AVG_IDX]
wh_load = np.sum(data_h, axis=0)[:, WH_AVG_IDX]
total_load = np.sum(data_h, axis=0)[:, TOTAL_LOAD_AVG_IDX]
mtr_load = np.sum(data_m, axis=0)[:, MTR_REAL_POWER_AVG]/1000
sub_load = data_s[0, :, SUB_POWER_IDX]/1000
sub_losses = data_s[0, :, SUB_LOSSES_IDX]/1000
mtr_load = np.sum(data_m, axis=0)[:, MTR_REAL_POWER_AVG] / 1000
sub_load = data_s[0, :, SUB_POWER_IDX] / 1000
sub_losses = data_s[0, :, SUB_LOSSES_IDX] / 1000
net_load = hvac_load + wh_load
# if lst_i[time_key]:
# inv_load = np.sum(data_i, axis=0)[:, INV_P_AVG_IDX]/1000
# inv_load_var = np.sum(data_i, axis=0)[:, INV_Q_AVG_IDX] / 1000
# net_load = hvac_load+wh_load+inv_load

# estimating % of devices in ON state at each time
hvac_on_per = np.count_nonzero(data_h[:, :, HVAC_LOAD_AVG_IDX], 0)/len(data_h[:, 0, HVAC_LOAD_AVG_IDX])*100
wh_on_per = np.count_nonzero(data_h[:, :, WH_AVG_IDX], 0)/len(data_h[:, 0, WH_AVG_IDX])*100
hvac_on_per = np.count_nonzero(data_h[:, :, HVAC_LOAD_AVG_IDX], 0) / len(data_h[:, 0, HVAC_LOAD_AVG_IDX]) * 100
wh_on_per = np.count_nonzero(data_h[:, :, WH_AVG_IDX], 0) / len(data_h[:, 0, WH_AVG_IDX]) * 100

ax1[0].plot(hrs, hvac_load[hrs_start:], label="hvac_base")
ax1[0].plot(hrs, wh_load[hrs_start:], label="waterheater_base")
ax1[0].plot(hrs, total_load[hrs_start:] - hvac_load[hrs_start:] - wh_load[hrs_start:], label="ZIP_base")
ax1[0].plot(hrs, total_load[hrs_start:], label="total_base")
# ax1[0,0].plot(hrs, mtr_load[hrs_start:], "k--", label="net meter",)
# ax1[0,0].plot(hrs, mtr_load[hrs_start:], "k--", label="net meter",)
# if lst_i[time_key]:
# ax1[0,0].plot(hrs, -inv_load[hrs_start:], label="inverter_real")
# ax1[0, 0].plot(hrs, -inv_load_var[hrs_start:], label="inverter_var")
@@ -128,16 +131,15 @@
ax1[1].plot(hrs, sub_losses[hrs_start:], label="Total Losses_base")
ax1[1].plot(hrs, sub_load[hrs_start:], label="Net Load_base")
ax1[1].set_ylabel("kW")
ax1[1].set_title("Substation Real Power at " )
ax1[1].set_title("Substation Real Power at ")
ax1[1].legend(loc='upper left')


#fig1.savefig('Figures/aggregated.png')
# fig1.savefig('Figures/aggregated.png')
plt.show(block=True)
plt.pause(0.5)
plt.close()

#ph.process_houses(rootname)
#pi.process_inv(rootname)
#pv.process_voltages(rootname)
plt.show()
# ph.process_houses(rootname)
# pi.process_inv(rootname)
# pv.process_voltages(rootname)
plt.show()
2 changes: 2 additions & 0 deletions examples/analysis/dsot/code/8_system_case_config.json
@@ -296,11 +296,13 @@
"dataPath": "../data",
"dsoAgentFile": "default_case_config.json",
"dsoPopulationFile": "8-metadata-lean.json",
"dsoRECSPopulationFile": "8-metadata-recs.json",
"WeatherDataSourcePath": "../data/8-node data/DAT formatted files/",
"hvacSetPoint": "hvac_setpt.json",
"dsoQuadraticFile": "8_DSO_quadratic_curves.json",
"dsoCommBldgFile": "DSOT_commercial_metadata.json",
"dsoResBldgFile": "DSOT_residential_metadata.json",
"dsoRECSResBldgFile": "RECS_residential_metadata.json",
"dsoBattFile": "DSOT_battery_metadata.json",
"dsoEvModelFile": "DSOT_ev_model_metadata.json",
"dsoEvDrivingFile": "DSOT_ev_driving_metadata.csv",
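
The two new keys ("dsoRECSPopulationFile", "dsoRECSResBldgFile") mirror the existing "dsoPopulationFile" and "dsoResBldgFile" entries; prepare_case_dsot.py (further down in this diff) splices the string "RECS" into the key name to choose between them. A minimal sketch of that selection, with the key names taken from this config and the load pattern from the prepare_case_dsot.py hunk below (the config filename and data path here are illustrative):

import json
import os

data_path = "../data"   # "dataPath" in this config
recs = "RECS"           # "" selects the lean metadata, "RECS" the new RECS files

with open("8_system_case_config.json", encoding="utf-8") as f:
    sys_config = json.load(f)

# 'dsoPopulationFile' vs 'dsoRECSPopulationFile', as in prepare_case_dsot.py
with open(os.path.join(data_path, sys_config["dso" + recs + "PopulationFile"]), encoding="utf-8") as f:
    dso_config = json.load(f)
with open(os.path.join(data_path, sys_config["dso" + recs + "ResBldgFile"]), encoding="utf-8") as f:
    res_config = json.load(f)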
8 changes: 4 additions & 4 deletions examples/analysis/dsot/code/generate_case.py
@@ -4,7 +4,7 @@
"""

import json
import prepare_case_dsot_f as prep_case
import prepare_case_dsot as prep_case


def generate_case(case_name, port, pv=None, bt=None, fl=None, ev=None):
@@ -51,8 +51,8 @@ def generate_case(case_name, port, pv=None, bt=None, fl=None, ev=None):
# node = "200"

generate_case(node + "_system_case_config", 5570, pv=0, bt=0, fl=0, ev=0)
generate_case(node + "_system_case_config", 5570, pv=0, bt=1, fl=0, ev=0)
# generate_case(node + "_system_case_config", 5570, pv=0, bt=1, fl=0, ev=0)
# generate_case(node + "_system_case_config", 5570, pv=0, bt=0, fl=1, ev=0)
generate_case(node + "_hi_system_case_config", 5570, pv=1, bt=0, fl=0, ev=0)
generate_case(node + "_hi_system_case_config", 5570, pv=1, bt=1, fl=0, ev=1)
# generate_case(node + "_hi_system_case_config", 5570, pv=1, bt=0, fl=0, ev=0)
# generate_case(node + "_hi_system_case_config", 5570, pv=1, bt=1, fl=0, ev=1)
# generate_case(node + "_hi_system_case_config", 5570, pv=1, bt=0, fl=1, ev=1)
7 changes: 4 additions & 3 deletions examples/analysis/dsot/code/plots.py
@@ -1,5 +1,6 @@
# usage 'python ../plots.py metrics_root'
# run it from inside the metrics_root folder
# usage 'python ./plots.py metrics_root'
# run it from inside the metrics_root folder
# .json file format only
import sys
import os
import matplotlib as mpl
@@ -11,7 +12,7 @@
rootname = sys.argv[1]

cur_dir = os.getcwd()
new_dir = cur_dir + "\\" + rootname + "\\Substation_3"
new_dir = cur_dir + "/" + rootname + "/Substation_3"
os.chdir(new_dir)

if not os.path.exists("Figures"):
26 changes: 21 additions & 5 deletions examples/analysis/dsot/code/prepare_case_dsot.py
@@ -14,16 +14,29 @@
import numpy as np

from tesp_support.api.helpers import HelicsMsg
import tesp_support.original.commercial_feeder_glm as com_FG
import tesp_support.original.copperplate_feeder_glm as cp_FG

import tesp_support.dsot.helpers_dsot as helpers
import tesp_support.dsot.case_merge as cm
import tesp_support.dsot.glm_dictionary as gd
import tesp_support.dsot.residential_feeder_glm as res_FG

import prep_substation_dsot as prep


recs = ""
# recs_data = False
recs_data = True
if recs_data:
recs = "RECS"
sys.path.append('../')
import recs.commercial_feeder_glm as com_FG
import recs.copperplate_feeder_glm as cp_FG
import recs.residential_feeder_glm as res_FG
else:
import tesp_support.original.commercial_feeder_glm as com_FG
import tesp_support.original.copperplate_feeder_glm as cp_FG
import tesp_support.dsot.residential_feeder_glm as res_FG


# Simulation settings for the experimental case
def prepare_case(node, mastercase, pv=None, bt=None, fl=None, ev=None):

@@ -59,10 +72,10 @@ def prepare_case(node, mastercase, pv=None, bt=None, fl=None, ev=None):
with open(os.path.join(data_Path, sys_config['dsoAgentFile']), 'r', encoding='utf-8') as json_file:
case_config = json.load(json_file)
# loading building and DSO metadata
with open(os.path.join(data_Path, sys_config['dsoPopulationFile']), 'r', encoding='utf-8') as json_file:
with open(os.path.join(data_Path, sys_config['dso' + recs + 'PopulationFile']), 'r', encoding='utf-8') as json_file:
dso_config = json.load(json_file)
# loading residential metadata
with open(os.path.join(data_Path, sys_config['dsoResBldgFile']), 'r', encoding='utf-8') as json_file:
with open(os.path.join(data_Path, sys_config['dso' + recs + 'ResBldgFile']), 'r', encoding='utf-8') as json_file:
res_config = json.load(json_file)
# loading commercial building metadata
with open(os.path.join(data_Path, sys_config['dsoCommBldgFile']), 'r', encoding='utf-8') as json_file:
@@ -234,6 +247,9 @@ def prepare_case(node, mastercase, pv=None, bt=None, fl=None, ev=None):
sim['BulkpowerBus'] = dso_val['bus_number']
# case_config['BackboneFiles']['RandomSeed'] = dso_val['random_seed']
sim['DSO_type'] = dso_val['utility_type']
if recs_data:
sim['state'] = dso_val['state']
sim['income_level'] = dso_val['income_level']
sim['rooftop_pv_rating_MW'] = dso_val['rooftop_pv_rating_MW']
sim['scaling_factor'] = dso_val['scaling_factor']
sim['serverPort'] = 5150 + (int(bus) // 20)
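
The `if recs_data:` block above implies that every DSO entry in the RECS population file (8-metadata-recs.json) must carry "state" and "income_level" keys in addition to the fields the lean metadata already provides. A minimal sketch of one such entry and how it is consumed, with key names taken from the hunk above and all values purely illustrative (the actual type of "income_level" comes from the metadata file itself):

recs_data = True

# Illustrative entry shaped after the assignments in the hunk above.
dso_val = {
    "bus_number": 1,
    "utility_type": "Suburban",
    "state": "TX",              # new field, read only when recs_data is True
    "income_level": "Middle",   # new field, read only when recs_data is True
    "rooftop_pv_rating_MW": 25.0,
    "scaling_factor": 1.0,
}

sim = {}
sim["BulkpowerBus"] = dso_val["bus_number"]
sim["DSO_type"] = dso_val["utility_type"]
if recs_data:
    sim["state"] = dso_val["state"]
    sim["income_level"] = dso_val["income_level"]
sim["rooftop_pv_rating_MW"] = dso_val["rooftop_pv_rating_MW"]
sim["scaling_factor"] = dso_val["scaling_factor"]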